commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
70116d7181f48c16d614063df4de54dff172e8c6
|
Add internal note
|
conda_env/cli/main_export.py
|
conda_env/cli/main_export.py
|
from argparse import RawDescriptionHelpFormatter
from copy import copy
import os
import sys
import textwrap
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import config
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
p.add_argument(
'-n', '--name',
action='store',
help='name of environment (in %s)' % os.pathsep.join(config.envs_dirs),
default=None,
)
p.add_argument(
'-f', '--file',
default=None,
required=False
)
p.set_defaults(func=execute)
def execute(args, parser):
if not args.name:
# Note, this is a hack fofr get_prefix that assumes argparse results
# TODO Refactor common.get_prefix
name = os.environ.get('CONDA_DEFAULT_ENV', False)
if not name:
msg = "Unable to determine environment\n\n"
msg += textwrap.dedent("""
Please re-run this command with one of the following options:
* Provide an environment name via --name or -n
* Re-run this command inside an activated conda environment.""").lstrip()
# TODO Add json support
common.error_and_exit(msg, json=False)
args.name = name
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
if len(pip_pkgs) > 0:
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
data = {
'name': args.name,
'dependencies': dependencies,
}
if args.file is None:
fp = sys.stdout
else:
fp = open(args.file, 'wb')
yaml.dump(data, default_flow_style=False, stream=fp)
|
Python
| 0
|
@@ -903,16 +903,86 @@
cute)%0A%0A%0A
+# TODO Make this aware of channels that were used to install packages%0A
def exec
|
f66038d1599843913dbe88eb02fa80b79e0d6e57
|
add script for bitwise operation
|
codecademy/bitwise.py
|
codecademy/bitwise.py
|
Python
| 0
|
@@ -0,0 +1,1244 @@
+%0Aprint 5 %3E%3E 4 # Right Shift%0Aprint 5 %3C%3C 1 # Left Shift%0Aprint 8 & 5 # Bitwise AND%0Aprint 9 %7C 4 # Bitwise OR%0Aprint 12 %5E 42 # Bitwise XOR%0Aprint ~88 # Bitwise NOT%0A%0Aprint %22the base 2 number system%22%0Aprint 0b1, #1%0Aprint 0b10, #2%0Aprint 0b11, #3%0Aprint 0b100, #4%0Aprint 0b101, #5%0Aprint 0b110, #6%0Aprint 0b111 #7%0Aprint %22******%22%0Aprint 0b1 + 0b11%0Aprint 0b11 * 0b11%0A%0A# binary nubmer 1~12%0Aone = 0b1%0Atwo = 0b10%0Athree = 0b11%0Afour = 0b100%0Afive = 0b101%0Asix = 0b110%0Aseven = 0b111%0Aeight =0b1000%0Anine = 0b1001%0Aten = 0b1010%0Aeleven = 0b1011%0Atwelve = 0b1100%0A%0Aprint bin(1)%0A%0Aprint int(bin(5),2)%0A# Print out the decimal equivalent of the binary 11001001.%0Aprint int(%2211001001%22, 2)%0A%0A%0A# Slide to the Left! Slide to the Right!%0Ashift_right = 0b1100%0Ashift_left = 0b1%0A%0A# Your code here!%0Ashift_right %3E%3E= 2%0Ashift_left %3C%3C= 2%0Aprint bin(shift_right)%0Aprint bin(shift_left)%0A%0Aprint bin(0b1110 & 0b101)%0Aprint bin(0b1110 %7C 0b101)%0A%0A%0A%0Adef check_bit4(input):%0A%09mask = 0b1000%0A%09desired = input & mask%0A%09if desired %3E 0:%0A%09%09return %22on%22%0A%09else:%0A%09%09return %22off%22%0A%0Aa = 0b10111011%0Amask = 0b100%0Aprint bin(a %7C mask)%0A%0A%0Aa = 0b11101110%0Amask = 0b11111111%0Aprint bin(a %5E mask)%0A%0Aprint a, mask, a%5Emask%0A%0A%0Adef flip_bit(number, n):%0A%09mask = (0b1) %3C%3C (n-1)%0A%09result = number %5E mask%0A%09return bin(result)%0A%0A%0A
|
|
059ab529b05d0640e7099e307878db58d6f2ffc9
|
update board test
|
scripts/test-board.py
|
scripts/test-board.py
|
Python
| 0
|
@@ -0,0 +1,370 @@
+%22%22%22Test script for the game board.%0A%0AAuthor: Yuhuang Hu%0AEmail : duguyue100@gmail.com%0A%22%22%22%0A%0Afrom __future__ import print_function%0Afrom minesweeper.msgame import MSGame%0A%0A%0Agame = MSGame(10, 10, 20)%0A%0Agame.print_board()%0A%0Atry:%0A input = raw_input%0Aexcept NameError:%0A pass%0A%0Awhile game.game_status == 2:%0A # play move%0A move = input(%22Move: %22)%0A game.play_move_msg(move)%0A
|
|
65d2202bc686019ebdaf292693c79ace326ef798
|
Create MyoThalmic.py
|
service/MyoThalmic.py
|
service/MyoThalmic.py
|
Python
| 0
|
@@ -0,0 +1,198 @@
+%0Afrom com.thalmic.myo import Pose%0A%0Amyo = Runtime.start(%22python%22, %22Python%22)%0Amyo = Runtime.start(%22myo%22, %22MyoThalmic%22)%0A%0Amyo.connect()%0Amyo.addPoseListener(python)%0A%0AonPose(pose):%0A print(pose.getType())%0A
|
|
c1b27a617c9050799bb11f4c161f925f153da5bc
|
add test_gst_rtsp_server.py
|
test_gst_rtsp_server.py
|
test_gst_rtsp_server.py
|
Python
| 0.000002
|
@@ -0,0 +1,1286 @@
+#!/usr/bin/env python%0A# -*- coding:utf-8 vi:ts=4:noexpandtab%0A# Simple RTSP server. Run as-is or with a command-line to replace the default pipeline%0A%0Aimport sys%0Aimport gi%0A%0Agi.require_version('Gst', '1.0')%0Afrom gi.repository import Gst, GstRtspServer, GObject%0A%0Aloop = GObject.MainLoop()%0AGObject.threads_init()%0AGst.init(None)%0A%0Aclass MyFactory(GstRtspServer.RTSPMediaFactory):%0A%09def __init__(self):%0A%09%09GstRtspServer.RTSPMediaFactory.__init__(self)%0A%0A%09def do_create_element(self, url):%0A%09%09s_src = %22v4l2src ! video/x-raw,rate=30,width=320,height=240 ! videoconvert ! video/x-raw,format=I420%22%0A%09%09s_h264 = %22videoconvert ! vaapiencode_h264 bitrate=1000%22%0A%09%09s_src = %22videotestsrc ! video/x-raw,rate=30,width=320,height=240,format=I420%22%0A%09%09s_h264 = %22x264enc tune=zerolatency%22%0A%09%09pipeline_str = %22( %7Bs_src%7D ! queue max-size-buffers=1 name=q_enc ! %7Bs_h264%7D ! rtph264pay name=pay0 pt=96 )%22.format(**locals())%0A%09%09if len(sys.argv) %3E 1:%0A%09%09%09pipeline_str = %22 %22.join(sys.argv%5B1:%5D)%0A%09%09print(pipeline_str)%0A%09%09return Gst.parse_launch(pipeline_str)%0A%0Aclass GstServer():%0A%09def __init__(self):%0A%09%09self.server = GstRtspServer.RTSPServer()%0A%09%09f = MyFactory()%0A%09%09f.set_shared(True)%0A%09%09m = self.server.get_mount_points()%0A%09%09m.add_factory(%22/test%22, f)%0A%09%09self.server.attach(None)%0A%0Aif __name__ == '__main__':%0A%09s = GstServer()%0A%09loop.run()%0A%0A
|
|
3e9fc3e3b4b5b870578d2c642d88a6ef14b340dd
|
max path 1: python
|
max_path_1/python/max_path_1.py
|
max_path_1/python/max_path_1.py
|
Python
| 0.999998
|
@@ -0,0 +1,1070 @@
+triangle = %5B%0A %5B75%5D,%0A %5B95, 64%5D,%0A %5B17, 47, 82%5D,%0A %5B18, 35, 87, 10%5D,%0A %5B20, 4, 82, 47, 65%5D,%0A %5B19, 1, 23, 75, 3, 34%5D,%0A %5B88, 2, 77, 73, 7, 63, 67%5D,%0A %5B99, 65, 4, 28, 6, 16, 70, 92%5D,%0A %5B41, 41, 26, 56, 83, 40, 80, 70, 33%5D,%0A %5B41, 48, 72, 33, 47, 32, 37, 16, 94, 29%5D,%0A %5B53, 71, 44, 65, 25, 43, 91, 52, 97, 51, 14%5D,%0A %5B70, 11, 33, 28, 77, 73, 17, 78, 39, 68, 17, 57%5D,%0A %5B91, 71, 52, 38, 17, 14, 91, 43, 58, 50, 27, 29, 48%5D,%0A %5B63, 66, 4, 68, 89, 53, 67, 30, 73, 16, 69, 87, 40, 31%5D,%0A %5B04, 62, 98, 27, 23, 9, 70, 98, 73, 93, 38, 53, 60, 4, 23%5D%0A%5D%0A%0A# Initial brute-force (but recursive) method:%0A# 53 ms%0A# def recurse_level(arr, row, col):%0A# %09if row == len(arr)-1:%0A# %09%09return arr%5Brow%5D%5Bcol%5D%0A# %09return arr%5Brow%5D%5Bcol%5D+max(recurse_level(arr, row+1, col), recurse_level(arr, row+1, col+1))%0A#%0A# print recurse_level(triangle, 0, 0)%0A%0A# smarter method%0A# 33ms%0Afor row in xrange(len(triangle)-2,-1,-1):%0A%09for col in xrange(0,len(triangle%5Brow%5D)):%0A%09%09triangle%5Brow%5D%5Bcol%5D = max(triangle%5Brow%5D%5Bcol%5D+triangle%5Brow+1%5D%5Bcol%5D, triangle%5Brow%5D%5Bcol%5D+triangle%5Brow+1%5D%5Bcol+1%5D)%0A%0Aprint triangle%5B0%5D%5B0%5D
|
|
497f1c70d0ecedb904f5b71be494e01246d874f6
|
Add weight test
|
kansha/card_addons/weight/tests.py
|
kansha/card_addons/weight/tests.py
|
Python
| 0.000006
|
@@ -0,0 +1,767 @@
+# -*- coding:utf-8 -*-%0A#--%0A# Copyright (c) 2012-2014 Net-ng.%0A# All rights reserved.%0A#%0A# This software is licensed under the BSD License, as described in%0A# the file LICENSE.txt, which you should have received as part of%0A# this distribution.%0A#--%0A%0Afrom kansha.cardextension.tests import CardExtensionTestCase%0A%0Afrom .comp import CardWeightEditor%0A%0A%0Aclass CardWeightTest(CardExtensionTestCase):%0A def create_instance(self, card, action_log):%0A return CardWeightEditor(card, action_log)%0A%0A def test_copy(self):%0A self.extension.weight(u'25')%0A self.extension.commit()%0A self.assertEqual(self.extension.get_data().weight, u'25')%0A cpy = self.extension.copy(self.card_copy, %7B%7D)%0A self.assertEqual(self.extension.weight(), cpy.weight())%0A
|
|
6aed81e89e321f45ba2ff95bfb0c78504c0bf79e
|
add setup_database script (tests) using scripts/import_osm.sh (in progress)
|
tests/setup_database.py
|
tests/setup_database.py
|
Python
| 0
|
@@ -0,0 +1,826 @@
+#!/usr/bin/env python%0A%0Aimport argparse%0Aimport subprocess%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument(%22-l%22, %22--osm_url%22, required=True, help=%22OSM download URL (*.osm.bz2)%22, type=str)%0A parser.add_argument(%22-p%22, %22--user%22, required=True, help=%22PostgreSQL database password%22, type=str)%0A parser.add_argument(%22-u%22, %22--password%22, required=True, help=%22PostgreSQL database user%22, type=str,%0A choices=%5B%22postgres%22%5D)%0A args = parser.parse_args()%0A%0A print 'Importing OSM data ...'%0A if subprocess.call(%5B'scripts/import_osm.sh', args.osm_url, args.user, args.password%5D) == 0:%0A print '... Success!'%0A else:%0A print '... Failed.'%0A return 1%0A%0A print 'Creating iBis tables ...'%0A # TODO%0A print '... Success!'%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
783b04ad8da2b65d9a07a0bdd4f236273f9ad39d
|
Create test.py
|
ProjectMidway/test.py
|
ProjectMidway/test.py
|
Python
| 0.000005
|
@@ -0,0 +1 @@
+%0A
|
|
7383343f7fb77c74455a50490ad2886fcf36bbd5
|
Comment test for the moment
|
dlstats/fetchers/test_ecb.py
|
dlstats/fetchers/test_ecb.py
|
Python
| 0
|
@@ -0,0 +1,325 @@
+import unittest%0Aimport mongomock%0Aimport ulstats %0Afrom dlstats.fetchers._skeleton import (Skeleton, Category, Series, BulkSeries,%0A Dataset, Provider)%0Aimport datetime%0Afrom bson import ObjectId%0A%0A#class CategoriesTestCase(unittest.TestCase):%0A%0A#if __name__ == '__main__':%0A# unittest.main()%0A
|
|
e446ab24ba981b22bf84ae2e09a8ba62cf17528e
|
Create batch_download.py
|
batch_download.py
|
batch_download.py
|
Python
| 0.000001
|
@@ -0,0 +1,1126 @@
+import time #used to pause script%0Aimport os #library used to open magnet link%0Afrom selenium import webdriver #use selenium%0A%0A#global variables%0AdriverLocation = %22C:/Users/Kevin/Downloads/Browsers/chromedriver.exe%22%0Aurl = %22http://horriblesubs.info/shows/shigatsu-wa-kimi-no-uso/%22%0Aquality = %221080p%22%0Adownload_format = %22Magnet%22%0A%0Abrowser = webdriver.Chrome(driverLocation)%0Abrowser.get(url)%0A#time.sleep(1) # Let the user actually see something!%0A%0Alinks = browser.find_elements_by_link_text(quality)%0Alength = len(links)%0Aprint (str(length) + %22 episodes found%22)%0Acounter = 1%0Afor link in links:%0A print(%22Clicking '%7B%7D': %7B%7D/%7B%7D%22.format(quality, counter, length))%0A link.click()%0A counter+=1%0A time.sleep(0.1) #delay of animation%0A #break # temp%0A%0Aprint%0A%0Alinks = browser.find_elements_by_link_text(download_format)%0Acounter = 1%0Afor link in links:%0A print(%22Clicking '%7B%7D': %7B%7D/%7B%7D%22.format(download_format, counter, length))%0A os.startfile(link.get_attribute(%22href%22))%0A counter+=1%0A time.sleep(0.1) #not too fast...%0A #break # temporary, only do first iteration%0A%0A#time.sleep(5) # Let the user actually see something!%0Abrowser.quit()%0A
|
|
fa0886bdeab19cb326a3e751dff1c46fb7911228
|
Apply migration 1160 again
|
migrations/versions/1180_set_framework_datetimes_not_nullable_again.py
|
migrations/versions/1180_set_framework_datetimes_not_nullable_again.py
|
Python
| 0
|
@@ -0,0 +1,2724 @@
+%22%22%22Remove deprecated application_close_date field and set the remaining date fields to non-nullable.%0A%0ARevision ID: 1180%0ARevises: 1170%0ACreate Date: 2018-05-08 09:53:43.699711%0A%0A%22%22%22%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0Afrom sqlalchemy.sql import table, column, and_%0A%0A# revision identifiers, used by Alembic.%0Arevision = '1180'%0Adown_revision = '1170'%0A%0A%0Adef upgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.alter_column('frameworks', 'applications_close_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.alter_column('frameworks', 'clarifications_close_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.alter_column('frameworks', 'clarifications_publish_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.alter_column('frameworks', 'framework_expires_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.alter_column('frameworks', 'framework_live_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.alter_column('frameworks', 'intention_to_award_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=False)%0A op.drop_column('frameworks', 'application_close_date')%0A # ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A # ### commands auto generated by Alembic - please adjust! 
###%0A op.add_column('frameworks',%0A sa.Column('application_close_date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))%0A op.alter_column('frameworks', 'intention_to_award_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A op.alter_column('frameworks', 'framework_live_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A op.alter_column('frameworks', 'framework_expires_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A op.alter_column('frameworks', 'clarifications_publish_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A op.alter_column('frameworks', 'clarifications_close_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A op.alter_column('frameworks', 'applications_close_at_utc',%0A existing_type=postgresql.TIMESTAMP(),%0A nullable=True)%0A # ### end Alembic commands ###%0A%0A
|
|
9167643047c61bae50a7c73775631c7bfe434cc9
|
Add a new wrapper class for managing ansible static inventory.
|
spam/ansiInventory.py
|
spam/ansiInventory.py
|
Python
| 0
|
@@ -0,0 +1,936 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22%0AAnsibleInventory:%0A%0AINTRO:%0A%0AUSAGE:%0A%0A%22%22%22%0A%0Aimport os%0Aimport ansible.inventory%0A%0A%0Aclass AnsibleInventory(object):%0A '''%0A Ansible Inventory wrapper class.%0A '''%0A def __init__(self, inventory_filename):%0A '''%0A Initialize Inventory%0A '''%0A if not os.path.exists(inventory_filename):%0A print %22Provide a valid inventory filename%22%0A return%0A%0A self.inventory = ansible.inventory.InventoryParser(inventory_filename)%0A%0A def get_hosts(self, group=None):%0A '''%0A Get the hosts%0A '''%0A if not group:%0A return self.inventory.hosts.keys()%0A%0A groupobj = self.inventory.groups.get(group, None)%0A if not groupobj:%0A return None%0A%0A hostobjs = groupobj.get_hosts()%0A hostlist = %5B%5D%0A for host in hostobjs:%0A hostlist.append(host.name)%0A%0A return hostlist%0A%0A%0A%0A
|
|
761a0afb8576f8bcdf9c50e79f21e55bf0f2243c
|
Correct path to doxyxml (#182) and break long line
|
doc/build.py
|
doc/build.py
|
#!/usr/bin/env python
# Build the documentation.
from __future__ import print_function
import os, shutil, tempfile
from subprocess import check_call, CalledProcessError, Popen, PIPE
def pip_install(package, commit=None):
"Install package using pip."
if commit:
cmd = ['pip', 'show', package.split('/')[1]]
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if p.returncode != 0:
# Check if pip supports the show command.
if 'No command by the name pip show' not in stderr:
raise CalledProcessError(p.returncode, cmd)
elif stdout:
return # Already installed
package = 'git+git://github.com/{0}.git@{1}'.format(package, commit)
check_call(['pip', 'install', '-q', package])
def build_docs():
# Create virtualenv.
doc_dir = os.path.dirname(os.path.realpath(__file__))
virtualenv_dir = 'virtualenv'
check_call(['virtualenv', virtualenv_dir])
activate_this_file = os.path.join(virtualenv_dir, 'bin', 'activate_this.py')
execfile(activate_this_file, dict(__file__=activate_this_file))
# Install Sphinx and Breathe.
pip_install('sphinx==1.3.1')
pip_install('michaeljones/breathe', '511b0887293e7c6b12310bb61b3659068f48f0f4')
# Build docs.
cmd = ['doxygen', '-']
p = Popen(cmd, stdin=PIPE)
p.communicate(input=r'''
PROJECT_NAME = C++ Format
GENERATE_LATEX = NO
GENERATE_MAN = NO
GENERATE_RTF = NO
CASE_SENSE_NAMES = NO
INPUT = {0}/format.h
EXCLUDE_SYMBOLS = fmt::internal::*
QUIET = YES
JAVADOC_AUTOBRIEF = YES
AUTOLINK_SUPPORT = NO
GENERATE_HTML = NO
GENERATE_XML = YES
XML_OUTPUT = doxyxml
ALIASES = "rst=\verbatim embed:rst"
ALIASES += "endrst=\endverbatim"
PREDEFINED = _WIN32=1 \
FMT_USE_VARIADIC_TEMPLATES=1 \
FMT_USE_RVALUE_REFERENCES=1
EXCLUDE_SYMBOLS = fmt::internal::* StringValue write_str
'''.format(os.path.dirname(doc_dir)))
if p.returncode != 0:
raise CalledProcessError(p.returncode, cmd)
check_call(['sphinx-build', '-D', 'breathe_projects.format=doxyxml',
'-b', 'html', doc_dir, 'html'])
check_call(['lessc', '--clean-css', '--include-path=' + os.path.join(doc_dir, 'bootstrap'),
os.path.join(doc_dir, 'cppformat.less'), 'html/_static/cppformat.css'])
return 'html'
if __name__ == '__main__':
build_docs()
|
Python
| 0
|
@@ -1166,16 +1166,30 @@
reathe',
+%0A
'511b08
@@ -2201,16 +2201,30 @@
', '-D',
+%0A
'breath
@@ -2245,16 +2245,47 @@
mat=
+' + os.path.join(os.getcwd(), '
doxyxml'
,%0A
@@ -2280,16 +2280,17 @@
doxyxml'
+)
,%0A
@@ -2366,16 +2366,30 @@
an-css',
+%0A
'--incl
@@ -2491,16 +2491,30 @@
.less'),
+%0A
'html/_
|
672b6b3b3936b262e3cf9232693ddbf26a1629d4
|
Remove RoundRobin from _strategy_utils
|
axelrod/_strategy_utils.py
|
axelrod/_strategy_utils.py
|
"""Utilities used by various strategies"""
import functools
import collections
import itertools
from axelrod import RoundRobin, update_history
from axelrod import Actions
from axelrod.strategies.cycler import Cycler
C, D = Actions.C, Actions.D
def detect_cycle(history, min_size=1, offset=0):
"""Detects cycles in the sequence history.
Mainly used by hunter strategies.
Parameters
history: sequence of C and D
The sequence to look for cycles within
min_size: int, 1
The minimum length of the cycle
offset: int, 0
The amount of history to skip initially
"""
history_tail = history[-offset:]
for i in range(min_size, len(history_tail) // 2):
cycle = tuple(history_tail[:i])
for j, elem in enumerate(history_tail):
if elem != cycle[j % len(cycle)]:
break
if j == len(history_tail) - 1:
# We made it to the end, is the cycle itself a cycle?
# I.E. CCC is not ok as cycle if min_size is really 2
# Since this is the same as C
return cycle
return None
def limited_simulate_play(player_1, player_2, h1):
"""Here we want to replay player_1's history to player_2, allowing
player_2's strategy method to set any internal variables as needed. If you
need a more complete simulation, see `simulate_play` in player.py. This
function is specifically designed for the needs of MindReader."""
h2 = player_2.strategy(player_1)
update_history(player_1, h1)
update_history(player_2, h2)
def simulate_match(player_1, player_2, strategy, rounds=10):
"""Simulates a number of matches."""
for match in range(rounds):
limited_simulate_play(player_1, player_2, strategy)
def look_ahead(player_1, player_2, game, rounds=10):
"""Looks ahead for `rounds` and selects the next strategy appropriately."""
results = []
# Simulate plays for `rounds` rounds
strategies = [C, D]
for strategy in strategies:
# Instead of a deepcopy, create a new opponent and play out the history
opponent_ = player_2.clone()
player_ = Cycler(strategy) # Either cooperator or defector
for h1 in player_1.history:
limited_simulate_play(player_, opponent_, h1)
round_robin = RoundRobin(players=[player_, opponent_], game=game,
turns=rounds)
simulate_match(player_, opponent_, strategy, rounds)
results.append(round_robin._calculate_scores(player_, opponent_)[0])
return strategies[results.index(max(results))]
class Memoized(object):
"""Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated). From:
https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
"""
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self):
"""Return the function's docstring."""
return self.func.__doc__
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
@Memoized
def recursive_thue_morse(n):
"""The recursive definition of the Thue-Morse sequence. The first few terms
of the Thue-Morse sequence are:
0 1 1 0 1 0 0 1 1 0 0 1 0 1 1 0 . . ."""
if n == 0:
return 0
if n % 2 == 0:
return recursive_thue_morse(n / 2)
if n % 2 == 1:
return 1 - recursive_thue_morse((n - 1) / 2)
def thue_morse_generator(start=0):
"""A generator for the Thue-Morse sequence."""
for n in itertools.count(start):
yield recursive_thue_morse(n)
|
Python
| 0.00001
|
@@ -113,20 +113,8 @@
port
- RoundRobin,
upd
@@ -1746,24 +1746,289 @@
strategy)%0A%0A%0A
+def calculate_scores(p1, p2, game):%0A %22%22%22Calculates the score for two players based their history%22%22%22%0A s1, s2 = 0, 0%0A for pair in zip(p1.history, p2.history):%0A score = game.score(pair)%0A s1 += score%5B0%5D%0A s2 += score%5B1%5D%0A return s1, s2%0A%0A%0A
def look_ahe
@@ -2547,129 +2547,8 @@
1)%0A%0A
- round_robin = RoundRobin(players=%5Bplayer_, opponent_%5D, game=game,%0A turns=rounds)%0A
@@ -2631,21 +2631,8 @@
end(
-round_robin._
calc
@@ -2666,12 +2666,15 @@
ent_
-)%5B0%5D
+, game)
)%0A%0A
|
8090098d90141ebe6f340cb5917d385f280d2d86
|
version 1.9.6. also, reset seed_version in version.py
|
lib/version.py
|
lib/version.py
|
ELECTRUM_VERSION = "1.9.5" # version of the client package
PROTOCOL_VERSION = '0.6' # protocol version requested
SEED_VERSION = 6 # bump this every time the seed generation is modified
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
|
Python
| 0.000007
|
@@ -21,9 +21,9 @@
1.9.
-5
+6
%22 #
@@ -129,17 +129,17 @@
N =
-6
+4
|
77eecb7a809a7b4f56d70e6d7e09deb2c7e0188b
|
add template engine
|
template-engine/code/templite.py
|
template-engine/code/templite.py
|
Python
| 0
|
@@ -0,0 +1,1402 @@
+#!/usr/bin/env python%0A# coding: utf-8%0A%0A%0Aclass CodeBuilder(object):%0A INDENT_STEP = 4%0A%0A def __init__(self, indent=0):%0A self.code = %5B%5D%0A self.indent_level = indent%0A%0A def add_line(self, line):%0A self.code.extend(%5B%22 %22 * self.indent_level, line, %22%5Cn%22%5D)%0A%0A def indent(self):%0A self.indent_level += self.INDENT_STEP%0A%0A def dedent(self):%0A self.indent_level -= self.INDENT_STEP%0A%0A def add_section(self):%0A section = CodeBuilder(self.indent_level)%0A self.code.append(section)%0A return section%0A%0A def __str__(self):%0A return %22%22.join(str(c) for c in self.code)%0A%0A def get_globals(self):%0A assert self.indent_level == 0%0A python_source = str(self)%0A global_namespace = %7B%7D%0A exec(python_source, global_namespace)%0A return global_namespace%0A%0A%0Aclass Templite(object):%0A%0A def __init__(self, text, *contexts):%0A self.context = %7B%7D%0A for context in contexts:%0A self.context.update(context)%0A%0A self.all_vars = set()%0A self.loop_vars = set()%0A%0A code = Codebuilder()%0A code.add_line(%22def render_function(context, do_dots):%22)%0A code.indent()%0A vars_code = code.add_section()%0A code.add_line(%22result = %5B%5D%22)%0A code.add_line(%22append_result = result.append%22)%0A code.add_line(%22extend_result = result.extend%22)%0A code.add_line(%22to_str = str%22)%0A%0A%0A%0A
|
|
c8f504c52f9e981b3974f4be1581da890021473a
|
add new collector for cassandra cfstats
|
src/collectors/mmcassandra/mmcassandra.py
|
src/collectors/mmcassandra/mmcassandra.py
|
Python
| 0
|
@@ -0,0 +1,2548 @@
+import subprocess, socket, math%0A%0Aimport diamond.collector%0A%0A%0Adef parse_line(line):%0A metric_name, rhs = line.strip().split(':', 1)%0A rhs = rhs.strip()%0A if ' ' in rhs:%0A str_value, units = rhs.split(' ', 1)%0A if units not in ('ms', 'ms.'):%0A raise ValueError(%22Cannot parse %22 + repr(line))%0A else:%0A str_value = rhs%0A%0A try:%0A value = float(str_value)%0A except:%0A value = str_value%0A return metric_name, value%0A%0A%0Aclass Keyspace(object):%0A def __init__(self, name, stats, tables):%0A self.name = name%0A self.stats = stats%0A self.tables = tables%0A%0Aclass Table(object):%0A def __init__(self, name, stats):%0A self.name = name%0A self.stats = stats%0A%0Adef cfstats():%0A output = subprocess.check_output(%5B'nodetool', 'cfstats'%5D)%0A lines = %5Bline for line in output.splitlines()%0A if line and (line != '----------------')%5D%0A%0A # cfstats output is structured in a very specific way: all lines are%0A # key: value pairs prefixed by tabs. everything indented belongs to the %0A%0A keyspaces = %5B%5D%0A for line in lines:%0A tab_count = len(line) - len(line.lstrip('%5Ct'))%0A if tab_count == 0:%0A key, value = parse_line(line)%0A assert key == 'Keyspace'%0A keyspaces.append(Keyspace(value, %5B%5D, %5B%5D))%0A elif tab_count == 1:%0A key, value = parse_line(line)%0A if not math.isnan(value):%0A keyspaces%5B-1%5D.stats.append(parse_line(line))%0A elif tab_count == 2:%0A key, value = parse_line(line)%0A if key == 'Table':%0A keyspaces%5B-1%5D.tables.append(Table(value, %5B%5D))%0A else:%0A if not math.isnan(value):%0A keyspaces%5B-1%5D.tables%5B-1%5D.stats.append((key, value))%0A else:%0A raise ValueError%0A%0A return keyspaces%0A%0Abad_keyspaces = ('system', 'system_traces')%0A%0Aclass ColumnFamilyStatsCollector(diamond.collector.Collector):%0A def collect(self):%0A for keyspace in cfstats():%0A if keyspace.name not in bad_keyspaces:%0A for (key, value) in keyspace.stats:%0A name = 'cassandra.cfstats.%7B%7D.%7B%7D'.format(%0A keyspace.name, 
key)%0A self.publish(name, value)%0A for table in keyspace.tables:%0A for (key, value) in table.stats:%0A name = 'cassandra.cfstats.%7B%7D.%7B%7D.%7B%7D'.format(%0A keyspace.name, table.name, key)%0A self.publish(name, value)%0A%0A%0A%0A
|
|
ebd15d9bcf5a46417af7f3d46769716c4d12b793
|
rename pre_push hook
|
pre_push.py
|
pre_push.py
|
Python
| 0.000001
|
@@ -0,0 +1,2722 @@
+#!/usr/bin/env python%0Aimport atexit%0Aimport glob%0Aimport os%0Aimport re%0Aimport subprocess%0Aimport sys%0A%0Astable_branch_re = re.compile(r'master%7Cstable%7Cprod%7Cproduction')%0A%0Adef chain_cmds(cmds, stdin=None):%0A for cmd in cmds:%0A p = subprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE)%0A stdin = p.stdout%0A return p.stdout.read().strip().decode()%0A%0Adef get_current_branch():%0A branch_cmd = %22git rev-parse HEAD %7C git branch -a --contains %7C grep remotes %7C sed s/.*remotes.origin.//%22%0A return os.environ.get(%0A %22GIT_BRANCH%22,%0A chain_cmds(%0A %5Bpipe_cmd.split(%22 %22) for pipe_cmd in branch_cmd.split(%22 %7C %22)%5D%0A )%0A )%0A%0Adef set_mode_prefixes():%0A cov_34_mode = ''%0A current_branch = get_current_branch()%0A if not stable_branch_re.search(current_branch):%0A prefix = %22* On branch %7B%7D which is likely not production-bound, so executing low-threshold checks...%22.format(current_branch)%0A cov_34_mode = %22_light%22%0A else:%0A prefix = %22* Push to production branch detected, executing full-scale checks...%22%0A print(prefix + %22%5Cn* Running pre-flight checks, please hold...%22)%0A return cov_34_mode%0A%0Adef run_checks(GIT_ROOT):%0A test_dir = glob.glob(GIT_ROOT + %22/test*%22)%0A assert test_dir and os.path.isdir(test_dir%5B0%5D), %5C%0A %22Package's test directory not found%22%0A COV_SUMMARY = os.path.join(GIT_ROOT, %22.cov_temp/coverage-summary%22)%0A LINT_OUT = os.path.join(GIT_ROOT, %22.lint_out%22)%0A os.chdir(GIT_ROOT)%0A os.system(%22bash %7B%7D/run_checks_in_docker.sh%22.format(GIT_ROOT))%0A out = %22%22%0A with open(COV_SUMMARY) as cov_summary, open(LINT_OUT) as lint:%0A # on account of insignificantly low no of lines of .lint_out,%0A # let's just read the whole thing at once%0A if os.path.getsize(LINT_OUT):%0A # file not empty, errors/warnings present, interrupt and print%0A out += %22%5Cn* Lint returned some errors/warnings: %5Cn%7B%7D%5Cn%5Cn%22.format(lint.read().strip())%0A total_covs = 
cov_summary.read()%0A if total_covs:%0A out += %22* Istanbul complained about too low coverage rates: %5Cn%7B%7D%22.format(total_covs)%0A%0A if out:%0A print(out)%0A sys.exit(1)%0A print(%22* All checked out good%22)%0A%0Aif __name__ == %22__main__%22:%0A cov_34_mode = set_mode_prefixes()%0A HOOKS_PATH = os.path.dirname(os.path.abspath(__file__))%0A GIT_ROOT = os.path.join(HOOKS_PATH, %22../..%22)%0A COV_34_CONFIG = os.path.join(GIT_ROOT, %22.istanbul%22, %22.istanbul-config.yml%22 + cov_34_mode)%0A os.system(%22cp %7B%7D ../.istanbul-config.yml%22.format(COV_34_CONFIG))%0A @atexit.register%0A def restore_default_34_config():%0A os.system(%22cp %7B%7D ../.istanbul-config.yml%22.format(%0A COV_34_CONFIG.replace(cov_34_mode, %22%22)))%0A run_checks(GIT_ROOT)%0A
|
|
a123b42eb3aed078aea26109056cf786aec2664a
|
add link_flair.py for interacting with link flair on submissions
|
bin/link_flair.py
|
bin/link_flair.py
|
Python
| 0
|
@@ -0,0 +1,819 @@
+import argparse%0Aimport praw%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser(description='Get or set link flair')%0A parser.add_argument('action', choices=%5B'get', 'set'%5D, help='get or set')%0A parser.add_argument('id', help='id of the submission')%0A parser.add_argument('--text', help='link flair text to set')%0A parser.add_argument('--class', help='link flair CSS class to set')%0A args = vars(parser.parse_args())%0A%0A reddit = praw.Reddit('moderation')%0A submission = reddit.submission(args%5B'id'%5D)%0A%0A if args%5B'action'%5D == 'get':%0A print('Flair text: %7B0%7D'.format(submission.link_flair_text))%0A print('Flair class: %7B0%7D'.format(submission.link_flair_css_class))%0A elif args%5B'action'%5D == 'set':%0A submission.mod.flair(args%5B'text'%5D, args%5B'class'%5D)%0A print('Link flair set')%0A%0Amain()%0A
|
|
f9d1c81055ee4479b2c69229cf72f27fb33cdc48
|
Remove unused imports. Add missing imports.
|
blues/application/tasks.py
|
blues/application/tasks.py
|
import os
from fabric.context_managers import settings
from fabric.decorators import task
from fabric.state import env
from fabric.utils import indent, abort
from blues.application.deploy import maybe_install_requirements
from refabric.utils import info
from refabric.contrib import blueprints
from .. import git
from .. import slack
blueprint = blueprints.get('blues.app')
__all__ = []
def get_providers(*args, **kw):
from .providers import get_providers as real
return real(*args, **kw)
@task
def setup():
"""
Install project user, structure, env, source, dependencies and providers
"""
from .deploy import install_project, install_virtualenv, \
install_requirements, install_providers
from .project import requirements_txt, use_virtualenv
install_project()
if use_virtualenv():
install_virtualenv()
install_requirements(requirements_txt())
install_providers()
configure_providers()
@task
def configure():
"""
Deploy and configure providers
"""
code_changed = deploy(auto_reload=False)
configure_providers(force_reload=code_changed)
@task
def deploy(auto_reload=True, force=False):
"""
Reset source to configured branch and install requirements, if needed
:param bool auto_reload: Reload application providers if source has changed
:param bool force: Force install of requirements
:return bool: Source code has changed?
"""
from .deploy import update_source, install_requirements
from .project import git_repository_path, requirements_txt, use_virtualenv
# Reset git repo
previous_commit, current_commit = update_source()
code_changed = current_commit is not None and \
previous_commit != current_commit
if code_changed or force:
# Install python dependencies
if use_virtualenv():
maybe_install_requirements(previous_commit, current_commit, force)
# Reload providers
if auto_reload:
reload()
return code_changed
@task
def install_requirements():
from .deploy import install_requirements
if use_virtualenv():
install_requirements()
else:
abort('Cannot install requirements without virtualenv')
@task
def deployed():
"""
Show deployed and last origin commit
"""
from .project import sudo_project, git_repository_path
with sudo_project():
repository_path = git_repository_path()
git.fetch(repository_path)
head_commit, head_message = git.log(repository_path)[0]
origin_commit, origin_message = git.log(repository_path,
commit='origin')[0]
info('Deployed commit: {} - {}', head_commit[:7], head_message)
if head_commit == origin_commit:
info(indent('(up-to-date with origin)'))
else:
info('Pending release: {} - {}', origin_commit[:7], origin_message)
return head_commit, origin_commit
@task
def start():
"""
Start all application providers on current host
"""
providers = get_providers(env.host_string)
for provider in set(providers.values()):
provider.start()
@task
def stop():
"""
Stop all application providers on current host
"""
providers = get_providers(env.host_string)
for provider in set(providers.values()):
provider.stop()
@task
def reload():
"""
Reload all application providers on current host
"""
providers = get_providers(env.host_string)
for provider in set(providers.values()):
provider.reload()
@task
def configure_providers(force_reload=False):
"""
Render, upload and reload web & worker config
:param bool force_reload: Force reload of providers, even if not updated
:return dict: Application providers for current host
"""
from .project import sudo_project
with sudo_project():
providers = get_providers(env.host_string)
if 'web' in providers:
providers['web'].configure_web()
if 'worker' in providers:
providers['worker'].configure_worker()
for provider in set(providers.values()):
if provider.updates or force_reload:
provider.reload()
return providers
@task
def generate_nginx_conf(role='www'):
"""
Genereate nginx site config for web daemon
:param str role: Name of role (directory) to generate config to
"""
name = blueprint.get('project')
socket = blueprint.get('web.socket', default='0.0.0.0:3030')
host, _, port = socket.partition(':')
if port:
if len(env.hosts) > 1:
# Multiple hosts -> Bind upstream to each host:port
sockets = ['{}:{}'.format(host, port) for host in env.hosts]
else:
# Single host -> Bind upstream to unique configured socket
sockets = [socket]
else:
sockets = ['unix:{}'.format(socket)]
context = {
'name': name,
'sockets': sockets,
'domain': blueprint.get('web.domain', default='_'),
'ssl': blueprint.get('web.ssl', False),
'ip_hash': blueprint.get('web.ip_hash', False)
}
template = blueprint.get('web.nginx_conf')
if template is None:
template = 'nginx/site.conf'
else:
template = 'nginx/{}.conf'.format(template)
web_provider = blueprint.get('web.provider')
if web_provider and web_provider == 'uwsgi':
template = 'nginx/uwsgi_site.conf'
with settings(template_dirs=['templates']):
conf = blueprint.render_template(template, context)
conf_dir = os.path.join(
os.path.dirname(env['real_fabfile']),
'templates',
role,
'nginx',
'sites-available')
conf_path = os.path.join(conf_dir, '{}.conf'.format(name))
if not os.path.exists(conf_dir):
os.makedirs(conf_dir)
with open(conf_path, 'w+') as f:
f.write(conf)
def notify_deploy(role=None):
from .project import project_name, git_repository_path
variables = {
'deployer': git.get_local_commiter(),
'project': project_name(),
'state': env.get('state', 'unknown'),
'role': role,
'commit': git.get_commit(repository_path=git_repository_path(), short=True),
'user': env['user'],
'host': env['host_string'],
}
if role:
msg = ('`{deployer}` deployed `{project}::{state}:{role}` '
'at `{commit}` to `{user}@{host}`').format(**variables)
else:
msg = ('`{deployer}` deployed `{project}::{state}` '
'at `{commit}` to `{user}@{host}`').format(**variables)
slack.notify(msg)
|
Python
| 0
|
@@ -1490,30 +1490,8 @@
urce
-, install_requirements
%0A
@@ -1515,47 +1515,8 @@
port
- git_repository_path, requirements_txt,
use
@@ -2063,16 +2063,56 @@
irements
+%0A from .project import use_virtualenv
%0A%0A if
|
495e9680ae7c1b9c1071c9f840df7881f5d4934b
|
add a Spider to KFC#15
|
locations/spiders/kfc.py
|
locations/spiders/kfc.py
|
Python
| 0.000004
|
@@ -0,0 +1,1629 @@
+import json%0Aimport re%0Aimport scrapy%0Afrom locations.items import GeojsonPointItem%0A%0Aclass KFCSpider(scrapy.Spider):%0A name = %22kfc%22%0A allowed_domains = %5B%22www.kfc.com%22%5D%0A%0A def start_requests(self):%0A url = 'https://services.kfc.com/services/query/locations'%0A%0A headers = %7B%0A 'Accept-Language': 'en-US,en;q=0.9',%0A 'Origin': 'https://www.kfc.com',%0A 'Accept-Encoding': 'gzip, deflate, br',%0A 'Accept': 'application/json, text/javascript, */*; q=0.01',%0A 'Referer': 'https://www.kfc.com/store-locator?query=90210',%0A 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',%0A 'X-Requested-With': 'XMLHttpRequest',%0A %7D%0A form_data = %7B%0A 'address': '90210',%0A 'distance': '100'%0A %7D%0A%0A yield scrapy.http.FormRequest(%0A url=url, method='POST', formdata=form_data,%0A headers=headers, callback=self.parse%0A )%0A%0A def parse(self, response):%0A data = json.loads(response.body_as_unicode())%0A stores = data%5B'results'%5D%0A print(stores)%0A for store in stores:%0A properties = %7B%0A 'ref': store%5B'entityID'%5D,%0A 'name': store%5B'storeNumber'%5D,%0A 'addr_full': store%5B'addressLine'%5D,%0A 'city': store%5B'city'%5D,%0A 'state': store%5B'state'%5D,%0A 'postcode': store%5B'postalCode'%5D,%0A 'lat': store%5B'latitude'%5D,%0A 'lon': store%5B'longitude'%5D,%0A 'phone': store%5B'businessPhone'%5D%0A %7D%0A%0A yield GeojsonPointItem(**properties)%0A%0A
|
|
a67a4e15ce25e9e9a795534b4e629d6680fb491b
|
Implement player choosing a random pawn to move
|
ludo/playermoverandom.py
|
ludo/playermoverandom.py
|
Python
| 0.000002
|
@@ -0,0 +1,368 @@
+# Player%0A%0Afrom playerbase import PlayerBase, Players%0Afrom random import randint%0A%0A%0Aclass PlayerMoveRandom(PlayerBase):%0A%0A def get_desc(self):%0A %22%22%22%22Return description string%22%22%22%22%22%0A return %22Chooses a random pawn to move%22%0A%0A def _choose_move_impl(self, moves):%0A if not moves:%0A return None%0A%0A return moves%5Brandint(0, len(moves)-1)%5D%0A
|
|
c7e7430d76337ef5cfd6779d9a32c2c9d948eb86
|
Add guess phred encoding script
|
carbon/guess-encoding.py
|
carbon/guess-encoding.py
|
Python
| 0.000005
|
@@ -0,0 +1,2046 @@
+%22%22%22%0A awk 'NR %25 4 == 0' your.fastq %7C python %25prog %5Boptions%5D%0A%0Aguess the encoding of a stream of qual lines.%0A%22%22%22%0Aimport sys%0Aimport optparse%0A%0ARANGES = %7B%0A 'Sanger': (33, 93),%0A 'Solexa': (59, 104),%0A 'Illumina-1.3': (64, 104),%0A 'Illumina-1.5': (67, 104)%0A%7D%0A%0A%0Adef get_qual_range(qual_str):%0A %22%22%22%0A %3E%3E%3E get_qual_range(%22DLXYXXRXWYYTPMLUUQWTXTRSXSWMDMTRNDNSMJFJFFRMV%22)%0A (68, 89)%0A %22%22%22%0A%0A vals = %5Bord(c) for c in qual_str%5D%0A return min(vals), max(vals)%0A%0Adef get_encodings_in_range(rmin, rmax, ranges=RANGES):%0A valid_encodings = %5B%5D%0A for encoding, (emin, emax) in ranges.items():%0A if rmin %3E= emin and rmax %3C= emax:%0A valid_encodings.append(encoding)%0A return valid_encodings%0A%0Adef main():%0A p = optparse.OptionParser(__doc__)%0A p.add_option(%22-n%22, dest=%22n%22, help=%22number of qual lines to test default:-1%22%0A %22 means test until end of file or until it it possible to %22%0A %22 determine a single file-type%22,%0A type='int', default=-1)%0A%0A opts, args = p.parse_args()%0A print %3E%3Esys.stderr, %22# reading qualities from stdin%22%0A gmin, gmax = 99, 0%0A valid = %5B%5D%0A for i, line in enumerate(sys.stdin):%0A lmin, lmax = get_qual_range(line.rstrip())%0A if lmin %3C gmin or lmax %3E gmax:%0A gmin, gmax = min(lmin, gmin), max(lmax, gmax)%0A valid = get_encodings_in_range(gmin, gmax)%0A if len(valid) == 0:%0A print %3E%3Esys.stderr, %22no encodings for range: %25s%22 %25 str((gmin, gmax))%0A sys.exit()%0A if len(valid) == 1 and opts.n == -1:%0A print %22%5Ct%22.join(valid) + %22%5Ct%22 + str((gmin, gmax))%0A sys.exit()%0A%0A if opts.n %3E 0 and i %3E opts.n:%0A print %22%5Ct%22.join(valid) + %22%5Ct%22 + str((gmin, gmax))%0A sys.exit()%0A%0A print %22%5Ct%22.join(valid) + %22%5Ct%22 + str((gmin, gmax))%0A%0A%0Aif __name__ == %22__main__%22:%0A import doctest%0A if doctest.testmod(optionflags=doctest.ELLIPSIS %7C%5C%0A doctest.NORMALIZE_WHITESPACE).failed == 
0:%0A main()%0A
|
|
06092ce552c78de4efdc5845d94146fd5cf6fd38
|
add plot tool
|
plot_csv.py
|
plot_csv.py
|
Python
| 0.000001
|
@@ -0,0 +1,1429 @@
+import pandas as pd%0Aimport numpy as np%0Aimport plotly.plotly as py%0Aimport plotly.graph_objs as go%0Aimport argparse%0A%0Aclean_text = lambda s: %22%22.join(%5Bc for c in s if c.isalpha() or c.isdigit() or c==' '%5D).rstrip()%0A%0Adef make_hbar_plot(options_table, symbol, parameter):%0A data = %5B%0A go.Bar(%0A name=otype,%0A x=options_table%5B'%7B%7D_%7B%7D'.format(otype, parameter)%5D,%0A y=options_table%5B'Strike'%5D,%0A orientation='h',%0A marker=%7B%0A %22color%22: color,%0A %7D,%0A )%0A for otype, color in %5B(%22call%22, %22green%22), (%22put%22, %22red%22)%5D%0A %5D%0A%0A layout = go.Layout(%0A title=%22%7B%7D - %7B%7D%22.format(symbol, parameter),%0A barmode='stack'%0A )%0A fig = go.Figure(data=data, layout=layout)%0A return py.plot(fig, filename=clean_text(%22%7B%7D_%7B%7D%22.format(symbol, parameter)))%0A%0Aif __name__ == %22__main__%22:%0A typical_params = %5B%22Ask%22, %22Bid%22, %22Change%22, %22Last%22, %22Open Int.%22, %22Symbol%22, %22Vol%22%5D%0A parser = argparse.ArgumentParser(description=%22Plots a parameter from an options CSV%22)%0A parser.add_argument(%22--csv%22, default=%22spx.csv%22, help=%22CSV file to pull parameter from%22)%0A parser.add_argument(%22--param%22, default=%22Open Int.%22,%0A help=%22Parameter to pull and plot. Typical params are %7B%7D %22.format(typical_params))%0A%0A args = parser.parse_args()%0A options_table = pd.DataFrame.from_csv(args.csv)%0A url = make_hbar_plot(options_table, args.csv.split(%22.%22)%5B0%5D, args.param)%0A
|
|
dbe71d02a95e65b644a1ac811712a31059975457
|
test update
|
tests/api/v1/test_jobs_update.py
|
tests/api/v1/test_jobs_update.py
|
Python
| 0.000002
|
@@ -0,0 +1,1750 @@
+# -*- coding: utf-8 -*-%0A#%0A# Copyright (C) 2015-2016 Red Hat, Inc%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0A%0Adef test_update_jobs(admin, remoteci_context, job_user_id, topic_user_id):%0A # test update schedule latest components%0A data = %7B%0A 'name': 'pname',%0A 'type': 'type_1',%0A 'url': 'http://example.com/',%0A 'topic_id': topic_user_id,%0A 'export_control': True,%0A 'state': 'active'%7D%0A c1 = admin.post('/api/v1/components', data=data).data%5B'component'%5D%5B'id'%5D%0A data.update(%7B'type': 'type_2', 'name': 'pname1'%7D)%0A c2 = admin.post('/api/v1/components', data=data).data%5B'component'%5D%5B'id'%5D%0A data.update(%7B'type': 'type_3', 'name': 'pname2'%7D)%0A c3 = admin.post('/api/v1/components', data=data).data%5B'component'%5D%5B'id'%5D%0A latest_components = %7Bc1, c2, c3%7D%0A%0A r = remoteci_context.post('/api/v1/jobs/%25s/update' %25 job_user_id)%0A assert r.status_code == 201%0A update_job = r.data%5B'job'%5D%0A%0A assert update_job%5B'update_previous_job_id'%5D == job_user_id%0A assert update_job%5B'topic_id'%5D == topic_user_id%0A%0A update_cmpts = admin.get('/api/v1/jobs/%25s/components' %25 update_job%5B'id'%5D)%0A update_cmpts = %7Bcmpt%5B'id'%5D for cmpt in update_cmpts.data%5B'components'%5D%7D%0A assert latest_components == update_cmpts%0A
|
|
fdada5e48a13ef5b1c55710a584d281d36a32375
|
Add stub for testing `generic_decorators`.
|
tests/test_generic_decorators.py
|
tests/test_generic_decorators.py
|
Python
| 0
|
@@ -0,0 +1,99 @@
+__author__ = %22John Kirkham %3Ckirkhamj@janelia.hhmi.org%3E%22%0A__date__ = %22$Mar 25, 2015 13:30:52 EDT$%22%0A%0A%0A
|
|
61e0c6e325a91564250a937c0b1769992f65a7f5
|
Add initial unit tests for swarm module
|
tests/unit/modules/test_swarm.py
|
tests/unit/modules/test_swarm.py
|
Python
| 0
|
@@ -0,0 +1,1481 @@
+# -*- coding: utf-8 -*-%0A%0A# Import Python libs%0Afrom __future__ import absolute_import, print_function, unicode_literals%0A%0A# Import Salt Libs%0Aimport salt.modules.swarm%0A%0A# Import Salt Testing Libs%0Afrom tests.support.mixins import LoaderModuleMockMixin%0Afrom tests.support.mock import patch%0Afrom tests.support.unit import TestCase%0A%0A%0Aclass SwarmTestCase(TestCase, LoaderModuleMockMixin):%0A %22%22%22%0A Test cases for salt.modules.swarm%0A %22%22%22%0A%0A def setup_loader_modules(self):%0A return %7Bsalt.modules.swarm: %7B%7D%7D%0A%0A def test___virtual___valid_docker_module(self):%0A %22%22%22%0A Test that __virtual__ requires a proper loaded docker library%0A %22%22%22%0A%0A class ValidDockerModule(object):%0A class APIClient:%0A pass%0A%0A def from_env(self):%0A pass%0A%0A with patch(%0A %22salt.modules.swarm.HAS_DOCKER%22,%0A salt.modules.swarm._is_docker_module(ValidDockerModule()),%0A ):%0A self.assertEqual(%0A salt.modules.swarm.__virtual__(), salt.modules.swarm.__virtualname__%0A )%0A%0A def test___virtual___not_valid_docker_module(self):%0A class NotValidDockerModule(object):%0A pass%0A%0A with patch(%0A %22salt.modules.swarm.HAS_DOCKER%22,%0A salt.modules.swarm._is_docker_module(NotValidDockerModule()),%0A ):%0A ret = salt.modules.swarm.__virtual__()%0A self.assertEqual(len(ret), 2)%0A self.assertFalse(ret%5B0%5D)%0A
|
|
a88959202e66d47f032797c2c5790461fe458392
|
add tests boilerplates
|
api/v1/tests/test_api_tokens.py
|
api/v1/tests/test_api_tokens.py
|
Python
| 0.000001
|
@@ -0,0 +1,429 @@
+import unittest%0Aimport json%0A%0Aclass TestAuthentication(unittest.TestCase):%0A def setUp(self):%0A pass%0A%0A def tearDown(self):%0A pass%0A%0A def test_validates_user_inputs(self):%0A self.assertTrue(True)%0A%0A def test_user_can_register(self):%0A self.assertTrue(True)%0A%0A def test_no_ducplicated_usernames(self):%0A self.assertTrue(True)%0A%0A def test_user_can_login(self):%0A self.assertTrue(True)
|
|
16850052ced6975ab99c73c2c15497a3f91ccab9
|
Add reader back in again.. will use for blender
|
edm/reader.py
|
edm/reader.py
|
Python
| 0
|
@@ -0,0 +1,2222 @@
+#!/usr/bin/env python3%0A%0Aimport struct%0Afrom collections import namedtuple%0A%0Afrom .typereader import get_type_reader%0A%0Aimport logging%0Alogger = logging.getLogger(__name__)%0A%0Aclass Reader(object):%0A def __init__(self, filename):%0A self.filename = filename%0A self.stream = open(filename, %22rb%22)%0A%0A def tell(self):%0A return self.stream.tell()%0A%0A def seek(self, offset, from_what=0):%0A self.stream.seek(offset, from_what)%0A%0A def read_constant(self, data):%0A filedata = self.stream.read(len(data))%0A assert data == filedata, %22Fixed byte data mismatch%22%0A%0A def read(self, length):%0A return self.stream.read(length)%0A %0A def read_uchar(self):%0A return struct.unpack(%22B%22, self.stream.read(1))%5B0%5D%0A%0A def read_ushort(self):%0A return struct.unpack(%22%3CH%22, self.stream.read(2))%5B0%5D%0A%0A def read_uint(self):%0A %22%22%22Read an unsigned integer from the data%22%22%22%0A return struct.unpack(%22%3CI%22, self.stream.read(4))%5B0%5D%0A%0A def read_float(self):%0A return struct.unpack(%22%3Cf%22, self.stream.read(4))%5B0%5D%0A%0A def read_format(self, format):%0A %22%22%22Read a struct format from the data%22%22%22%0A return struct.unpack(format, self.stream.read(struct.calcsize(format)))%0A%0A def read_string(self):%0A %22%22%22Read a length-prefixed string from the file%22%22%22%0A prepos = self.stream.tell()%0A length = self.read_uint()%0A try:%0A return self.stream.read(length).decode(%22UTF-8%22)%0A except UnicodeDecodeError:%0A raise RuntimeError(%22Could not decode string with length %7B%7D at position %7B%7D%22.format(length, prepos))%0A%0A def read_list(self, reader):%0A %22%22%22Reads a length-prefixed list of something%22%22%22%0A length = self.read_uint()%0A entries = %5B%5D%0A for index in range(length):%0A entries.append(reader(self))%0A return entries%0A%0A%0A def read_single_type(self, source=None):%0A %22%22%22Reads a single instance of a named type%22%22%22%0A assert source is self or source is None%0A typeName 
= self.read_string()%0A reader = get_type_reader(typeName)%0A return reader(self)%0A%0A%0A # def read_typed_list(self):%0A # return read_list()%0A # length = self.read_uint()%0A # entries = %5B%5D%0A # logger.debug(%22Reading typed list of length %7B%7D%22.format(length))%0A # for index in range(length):%0A # entries.append(self.read_single_type())%0A # return entries%0A%0A #
|
|
8022d7361affddde110a289bc683201ea70af5fe
|
add weight conversion script
|
examples/yolo/darknet2npz.py
|
examples/yolo/darknet2npz.py
|
Python
| 0.000001
|
@@ -0,0 +1,1500 @@
+import argparse%0Aimport numpy as np%0A%0Aimport chainer%0Afrom chainer import serializers%0A%0Afrom chainercv.links import Conv2DBNActiv%0Afrom chainercv.links import YOLOv3%0A%0A%0Adef load(file, link):%0A if isinstance(link, Conv2DBNActiv):%0A for param in (%0A link.bn.beta.array,%0A link.bn.gamma.array,%0A link.bn.avg_mean,%0A link.bn.avg_var,%0A link.conv.W.array):%0A param%5B:%5D = np.fromfile(file, dtype=np.float32, count=param.size) %5C%0A .reshape(param.shape)%0A elif isinstance(link, chainer.ChainList):%0A for l in link:%0A load(file, l)%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('--n_fg_class', type=int, default=80)%0A parser.add_argument('darknetmodel')%0A parser.add_argument('output')%0A args = parser.parse_args()%0A%0A model = YOLOv3(args.n_fg_class)%0A with chainer.using_config('train', False):%0A model(np.empty((1, 3, model.insize, model.insize), dtype=np.float32))%0A%0A with open(args.darknetmodel, mode='rb') as f:%0A major = np.fromfile(f, dtype=np.int32, count=1)%0A minor = np.fromfile(f, dtype=np.int32, count=1)%0A np.fromfile(f, dtype=np.int32, count=1) # revision%0A assert(major * 10 + minor %3E= 2 and major %3C 1000 and minor %3C 1000)%0A np.fromfile(f, dtype=np.int64, count=1) # seen%0A%0A load(f, model.extractor)%0A%0A serializers.save_npz(args.output, model)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
7bd4ecf4f0f16ed58f253ca16045c3dd86f0a28c
|
Test script.
|
runtests.py
|
runtests.py
|
Python
| 0
|
@@ -0,0 +1,1022 @@
+# -*- coding: utf-8 -*-%0A%0Aimport os%0A%0Afrom django.conf import settings%0A%0A%0Adef make_absolute_path(path):%0A return os.path.join(os.path.realpath(os.path.dirname(__file__)), path)%0A%0A%0Aif not settings.configured:%0A settings.configure(%0A DATABASES = %7B%0A 'default': %7B%0A 'ENGINE': 'django.db.backends.sqlite3',%0A 'NAME': ':memory:',%0A %7D%0A %7D,%0A SITE_ID = 1,%0A INSTALLED_APPS = (%0A 'django.contrib.auth',%0A 'django.contrib.contenttypes',%0A 'django.contrib.sessions',%0A 'django.contrib.sites',%0A 'django.contrib.messages',%0A 'django_nose',%0A 'briefcase',%0A ),%0A TEMPLATE_DIRS = (%0A make_absolute_path('example_project/templates'),%0A ),%0A ROOT_URLCONF = 'example_project.urls',%0A TEST_RUNNER = 'django_nose.NoseTestSuiteRunner',%0A NOSE_ARGS = %5B'--stop'%5D,%0A )%0A%0A%0Afrom django.core.management import call_command%0A%0Acall_command('test', 'briefcase')%0A
|
|
a4d5e88973a25464be26488d17ecc663cce776d7
|
Add map example with data generators
|
altair/examples/world_map.py
|
altair/examples/world_map.py
|
Python
| 0
|
@@ -0,0 +1,722 @@
+%22%22%22%0AWorld Map%0A---------%0A%0AThis example shows how to create a world map using data generators for%0Adifferent background layers.%0A%22%22%22%0A# category: maps%0A%0Aimport altair as alt%0Afrom vega_datasets import data%0A%0A# Data generators for the background%0Asphere = alt.sphere()%0Agraticule = alt.graticule()%0A%0A# Source of land data%0Asource = alt.topo_feature(data.world_110m.url, 'countries')%0A%0A# Layering and configuring the components %0Aalt.layer(%0A alt.Chart(sphere).mark_geoshape(fill='lightblue'), %0A alt.Chart(graticule).mark_geoshape(stroke='white', strokeWidth=0.5), %0A alt.Chart(source).mark_geoshape(fill='ForestGreen', stroke='black')%0A).project(%0A 'naturalEarth1'%0A).properties(width=600, height=400).configure_view(stroke=None)%0A
|
|
8cbe2878f5fdca899ec71bc08e7d2de4a3c3caf2
|
add python solution to "project euler - problem3"
|
problem3.py
|
problem3.py
|
Python
| 0
|
@@ -0,0 +1,178 @@
+number = 600851475143%0A%0Afor divisor in xrange(2,number):%0A%09if (number %25 divisor == 0):%0A%09%09print divisor, %22 is a divisor%22%0A%09%09number = number / divisor%0A%09%09print %22new number is%22, number%0A
|
|
f1976ef533d98ac6e423312435bb25692831bfd9
|
Create bumper.py
|
cmp3103m-code-fragments/scripts/bumper.py
|
cmp3103m-code-fragments/scripts/bumper.py
|
Python
| 0.000004
|
@@ -0,0 +1,860 @@
+import rospy%0Afrom geometry_msgs.msg import Twist%0Afrom kobuki_msgs.msg import BumperEvent%0A%0A%0Aclass Chatter:%0A%0A def __init__(self):%0A rospy.init_node('chatter')%0A self.publisher = rospy.Publisher('/mobile_base/commands/velocity', Twist, queue_size=1)%0A self.scan_sub = rospy.Subscriber('/mobile_base/events/bumper', BumperEvent, self.bumper_cb)%0A self.obstacles = False%0A%0A def bumper_cb(self, laser_msg):%0A if laser_msg.bumper != 1:%0A return%0A if laser_msg.state == 1:%0A self.obstacles = True%0A else: %0A self.obstacles = False%0A%0A def run(self):%0A while not rospy.is_shutdown():%0A t = Twist()%0A if self.obstacles:%0A t.angular.z = 1.0%0A else:%0A t.linear.x = 0.4%0A self.publisher.publish(t)%0A%0Ac = Chatter()%0Ac.run()%0A
|
|
d571af56293912042846047c88e4a7b2c2f40df9
|
add archive command
|
alexBot/cogs/memework.py
|
alexBot/cogs/memework.py
|
Python
| 0
|
@@ -0,0 +1,2045 @@
+# -*- coding: utf-8 -*-%0Afrom ..tools import Cog%0Afrom discord.ext import commands%0Aimport discord%0Afrom datetime import datetime%0A%0A%0Aclass Memework(Cog):%0A def __init__(self, *args, **kwargs):%0A super().__init__(*args, **kwargs)%0A%0A self.archive_cat = self.bot.get_channel(355886867285147648)%0A self.rowboat_log = self.bot.get_channel(303658324652589057)%0A self.dog_log = self.bot.get_channel(336159533124812800)%0A%0A @commands.command()%0A @commands.bot_has_role(%22Rowboat%22)%0A @commands.has_permissions(manage_channels=True)%0A async def archive(self, ctx: commands.Context, channel: discord.TextChannel=None):%0A if channel is None:%0A channel = ctx.channel%0A try:%0A assert ctx.guild.id == 295341979800436736%0A except AssertionError:%0A await ctx.send(%22this only works in the memework guild.%22%0A %22pls tell Alex from Alaska to unload this.%22)%0A try:%0A assert isinstance(channel, discord.TextChannel)%0A except AssertionError:%0A await ctx.send(%22you idiot i don't know what that is%22)%0A return%0A%0A await channel.edit(category=self.archive_cat,%0A sync_permissions=True,%0A name=f%22archived-%7Bchannel.name%7D%22,%0A reason=f%22archived by %7Bctx.author.name%7D%22)%0A await channel.send(f%22this channel was archived by %7Bctx.author%7D at %7Bdatetime.utcnow().strftime('%25H:%25M')%7D UTC.%22)%0A await ctx.send(f%22archived %7Bchannel.mention%7D%22)%0A%0A await self.dog_log.send(f%22%60%5B%7Bdatetime.utcnow().strftime('%25H:%25m')%7D%5D%60%22%0A f%22%5CU0001f6e0 %7Bctx.author%7D (%60%7Bctx.author.id%7D%60) Archived %22%0A f%22%7Bchannel%7D (%60%7Bchannel.id%7D%60)%22)%0A%0A await self.rowboat_log.send(f%22%60%5B%7Bdatetime.utcnow().strftime('%25H:%25m:%25S')%7D%5D%60%22%0A f%22%5CU0001f6e0 %7Bctx.author%7D (%60%7Bctx.author.id%7D%60) Archived %22%0A f%22**%7Bchannel%7D**%22)%0A%0A%0Adef setup(bot):%0A bot.add_cog(Memework(bot))%0A
|
|
d3166947023283ae6aed9737703c852552cf17f8
|
Update app/extensions/allows/allows.py
|
app/extensions/allows/allows.py
|
app/extensions/allows/allows.py
|
Python
| 0
|
@@ -0,0 +1,2007 @@
+from flask import current_app%0Afrom flask import request%0Afrom functools import wraps%0Afrom werkzeug import LocalProxy%0Afrom werkzeug.exceptions import Forbidden%0A%0A%0Aclass Allows(object):%0A%0A def __init__(self, app=None, identity_loader=None,%0A throws=Forbidden, on_fail=None):%0A self._identity_loader = identity_loader%0A self.throws = throws%0A%0A self.on_fail = _make_callable(on_fail)%0A%0A if app:%0A self.init_app(app)%0A%0A def init_app(self, app):%0A if not hasattr(app, 'extensions'):%0A app.extensions = %7B%7D%0A app.extensions%5B'allows'%5D = self%0A%0A def requires(self, *requirements, **opts):%0A%0A def raiser():%0A raise opts.get('throws', self.throws)%0A%0A def fail(*args, **kwargs):%0A f = _make_callable(opts.get('on_fail', self.on_fail))%0A res = f(*args, **kwargs)%0A%0A if res is not None:%0A return res%0A%0A raiser()%0A%0A def decorator(f):%0A @wraps(f)%0A def allower(*args, **kwargs):%0A if self.fulfill(requirements):%0A return f(*args, **kwargs)%0A else:%0A return fail(*args, **kwargs)%0A return allower%0A return decorator%0A%0A def identity_loader(self, f):%0A %22Provides an identity loader for the instance%22%0A self._identity_loader = f%0A return f%0A%0A def fulfill(self, requirements, identity=None):%0A %22Runs each requirement until one is not fulfilled%22%0A identity = identity or self._identity_loader()%0A return all(r(identity, request) for r in requirements)%0A%0A%0Adef __get_allows():%0A %22Internal helper%22%0A try:%0A return current_app.extensions%5B'allows'%5D%0A except (AttributeError, KeyError):%0A raise RuntimeError(%22Not configured.%22)%0A%0A%0Adef _make_callable(func_or_value):%0A if not callable(func_or_value):%0A return lambda *a, **k: func_or_value%0A return func_or_value%0A%0A%0A_allows = LocalProxy(__get_allows, name=%22allows%22)%0A
|
|
37dc854c8af69c679f91163355b2a4314d66820b
|
Add a marker interface
|
usingnamespace/api/interfaces.py
|
usingnamespace/api/interfaces.py
|
Python
| 0
|
@@ -0,0 +1,95 @@
+from zope.interface import Interface%0A%0Aclass ISerializer(Interface):%0A %22%22%22Marker Interface%22%22%22%0A
|
|
b4333af5737b1376452eb0490f4175a1554ba212
|
Fix #116
|
configure-aspen.py
|
configure-aspen.py
|
import os
import gittip
import gittip.wireup
import gittip.authentication
import gittip.csrf
gittip.wireup.canonical()
gittip.wireup.db()
gittip.wireup.billing()
website.github_client_id = os.environ['GITHUB_CLIENT_ID'].decode('ASCII')
website.github_client_secret = os.environ['GITHUB_CLIENT_SECRET'].decode('ASCII')
website.github_callback = os.environ['GITHUB_CALLBACK'].decode('ASCII')
website.hooks.inbound_early.register(gittip.canonize)
website.hooks.inbound_early.register(gittip.csrf.inbound)
website.hooks.inbound_early.register(gittip.authentication.inbound)
website.hooks.outbound_late.register(gittip.authentication.outbound)
website.hooks.outbound_late.register(gittip.csrf.outbound)
def add_stuff(request):
request.context['__version__'] = gittip.__version__
request.context['username'] = None
website.hooks.inbound_early.register(add_stuff)
|
Python
| 0.000001
|
@@ -88,16 +88,51 @@
ip.csrf%0A
+from gittip.networks import github%0A
%0A%0Agittip
@@ -858,16 +858,55 @@
= None
+%0A request.context%5B'github'%5D = github
%0A%0Awebsit
|
f42d2264917f109b8cee1d641a475934a456aa61
|
Add a BaseFrontend
|
mopidy/frontends/base.py
|
mopidy/frontends/base.py
|
Python
| 0.000017
|
@@ -0,0 +1,736 @@
+class BaseFrontend(object):%0A %22%22%22%0A Base class for frontends.%0A%0A :param core_queue: queue for messaging the core%0A :type core_queue: :class:%60multiprocessing.Queue%60%0A :param backend: the backend%0A :type backend: :class:%60mopidy.backends.base.BaseBackend%60%0A %22%22%22%0A%0A def __init__(self, core_queue, backend):%0A self.core_queue = core_queue%0A self.backend = backend%0A%0A def start(self):%0A %22%22%22Start the frontend.%22%22%22%0A pass%0A%0A def destroy(self):%0A %22%22%22Destroy the frontend.%22%22%22%0A pass%0A%0A def process_message(self, message):%0A %22%22%22%0A Process messages for the frontend.%0A%0A :param message: the message%0A :type message: dict%0A %22%22%22%0A raise NotImplementedError%0A
|
|
6b834e6a3353b62079fa1f2bb32425b9479d12ae
|
Fix loading of local settings when local settings is not present
|
mopidy/utils/settings.py
|
mopidy/utils/settings.py
|
# Absolute import needed to import ~/.mopidy/settings.py and not ourselves
from __future__ import absolute_import
from copy import copy
import logging
import os
import sys
from mopidy import SettingsError
from mopidy.utils import indent
logger = logging.getLogger('mopidy.utils.settings')
class SettingsProxy(object):
def __init__(self, default_settings_module):
self.default_settings = self._get_settings_dict_from_module(
default_settings_module)
self.local_settings = self._get_local_settings()
self.raw_settings = copy(self.default_settings)
self.raw_settings.update(self.local_settings)
def _get_local_settings(self):
dotdir = os.path.expanduser(u'~/.mopidy/')
settings_file = os.path.join(dotdir, u'settings.py')
if os.path.isfile(settings_file):
sys.path.insert(0, dotdir)
import settings as local_settings_module
return self._get_settings_dict_from_module(local_settings_module)
def _get_settings_dict_from_module(self, module):
settings = filter(lambda (key, value): self._is_setting(key),
module.__dict__.iteritems())
return dict(settings)
def _is_setting(self, name):
return name.isupper()
def __getattr__(self, attr):
if not self._is_setting(attr):
return
if attr not in self.raw_settings:
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = self.raw_settings[attr]
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
def validate(self):
if self.get_errors():
logger.error(u'Settings validation errors: %s',
indent(self.get_errors_as_string()))
raise SettingsError(u'Settings validation failed.')
def get_errors(self):
return validate_settings(self.default_settings, self.local_settings)
def get_errors_as_string(self):
lines = []
for (setting, error) in self.get_errors().iteritems():
lines.append(u'%s: %s' % (setting, error))
return '\n'.join(lines)
def validate_settings(defaults, settings):
"""
Checks the settings for both errors like misspellings and against a set of
rules for renamed settings, etc.
Returns of setting names with associated errors.
:param defaults: Mopidy's default settings
:type defaults: dict
:param settings: the user's local settings
:type settings: dict
:rtype: dict
"""
errors = {}
changed = {
'SERVER_HOSTNAME': 'MPD_SERVER_HOSTNAME',
'SERVER_PORT': 'MPD_SERVER_PORT',
'SPOTIFY_LIB_APPKEY': None,
}
for setting, value in settings.iteritems():
if setting in changed:
if changed[setting] is None:
errors[setting] = u'Deprecated setting. It may be removed.'
else:
errors[setting] = u'Deprecated setting. Use %s.' % (
changed[setting],)
continue
if setting == 'BACKENDS':
if 'mopidy.backends.despotify.DespotifyBackend' in value:
errors[setting] = (u'Deprecated setting value. ' +
'"mopidy.backends.despotify.DespotifyBackend" is no ' +
'longer available.')
continue
if setting not in defaults:
errors[setting] = u'Unknown setting. Is it misspelled?'
continue
return errors
def list_settings_optparse_callback(*args):
"""
Prints a list of all settings.
Called by optparse when Mopidy is run with the :option:`--list-settings`
option.
"""
from mopidy import settings
errors = settings.get_errors()
lines = []
for (key, value) in sorted(settings.raw_settings.iteritems()):
default_value = settings.default_settings.get(key)
if key.endswith('PASSWORD'):
value = u'********'
lines.append(u'%s:' % key)
lines.append(u' Value: %s' % repr(value))
if value != default_value and default_value is not None:
lines.append(u' Default: %s' % repr(default_value))
if errors.get(key) is not None:
lines.append(u' Error: %s' % errors[key])
print u'Settings: %s' % indent('\n'.join(lines), places=2)
sys.exit(0)
|
Python
| 0
|
@@ -795,16 +795,20 @@
if
+not
os.path.
@@ -830,24 +830,42 @@
_file):%0A
+ return %7B%7D%0A
sys.
@@ -887,20 +887,16 @@
dotdir)%0A
-
|
5bba475443495bb61071ddf9f3ce54c98d1c41d5
|
allow runtests.py to run only certain tests
|
mpmath/tests/runtests.py
|
mpmath/tests/runtests.py
|
#!/usr/bin/env python
"""
python runtests.py -py
Use py.test to run tests (more useful for debugging)
python runtests.py -psyco
Enable psyco to make tests run about 50% faster
python runtests.py -profile
Generate profile stats (this is much slower)
python runtests.py -nogmpy
Run tests without using GMPY even if it exists
python runtests.py -strict
Enforce extra tests in normalize()
python runtests.py -local
Insert '../..' at the beginning of sys.path to use local mpmath
"""
import sys, os
if "-psyco" in sys.argv:
sys.argv.remove('-psyco')
import psyco
psyco.full()
profile = False
if "-profile" in sys.argv:
sys.argv.remove('-profile')
profile = True
if "-nogmpy" in sys.argv:
sys.argv.remove('-nogmpy')
os.environ['MPMATH_NOGMPY'] = 'Y'
if "-strict" in sys.argv:
sys.argv.remove('-strict')
os.environ['MPMATH_STRICT'] = 'Y'
if "-local" in sys.argv:
sys.argv.remove('-local')
sys.path.insert(0, '../..')
def testit():
if "-py" in sys.argv:
sys.argv.remove('-py')
import py
py.test.cmdline.main()
else:
import glob
import os.path
from time import clock
modules = []
for f in glob.glob("test*.py"):
name = os.path.splitext(os.path.basename(f))[0]
module = __import__(name)
priority = module.__dict__.get('priority', 100)
if priority == 666:
modules = [[priority, name, module]]
break
modules.append([priority, name, module])
modules.sort()
tstart = clock()
for priority, name, module in modules:
print name
for f in sorted(module.__dict__.keys()):
if f.startswith('test_'):
print " ", f[5:].ljust(25),
t1 = clock()
module.__dict__[f]()
t2 = clock()
print "ok", " ", ("%.7f" % (t2-t1)), "s"
tend = clock()
print
print "finished tests in", ("%.2f" % (tend-tstart)), "seconds"
if profile:
import cProfile
cProfile.run("testit()", sort=2)
else:
testit()
|
Python
| 0.000004
|
@@ -504,24 +504,156 @@
l mpmath%0D%0A%0D%0A
+Additional arguments are used to filter the tests to run. Only files that have%0D%0Aone of the arguments in their name are executed.%0D%0A%0D%0A
%22%22%22%0D%0A%0D%0Aimpor
@@ -1387,16 +1387,45 @@
s = %5B%5D%0D%0A
+ args = sys.argv%5B1:%5D%0D%0A
@@ -1518,16 +1518,261 @@
f))%5B0%5D%0D%0A
+ if args:%0D%0A ok = False%0D%0A for arg in args:%0D%0A if arg in name:%0D%0A ok = True%0D%0A break%0D%0A if not ok:%0D%0A continue%0D%0A
|
2d6ecb3b5b67539c6ad0f211d7b059ac44df2731
|
Make gallery of examples for various bending angles up to 90 degrees
|
python/bending_examples.py
|
python/bending_examples.py
|
Python
| 0
|
@@ -0,0 +1,1242 @@
+# Make a gallery of images showing the RGZ consensus double sources, sorted by bending angle.%0A%0Afrom astropy.io import ascii%0A%0Apath = '/Users/willettk/Astronomy/Research/GalaxyZoo'%0Adata = ascii.read('%7B:%7D/rgz-analysis/csv/static_catalog3.csv'.format(path),delimiter=' ')%0A%0Aimport bending_angles as ba%0Aimport numpy as np%0A%0Apathdict = ba.make_pathdict()%0A%0Adef bending_examples():%0A for a in np.linspace(0,80,9):%0A bdata = data%5B(data%5B'bending_angle'%5D %3E= a) & (data%5B'bending_angle'%5D %3C a+10.)%5D%0A count,errcount = 0,0%0A if len(bdata) %3E 0:%0A for b in bdata:%0A zid = b%5B'zooniverse_id'%5D%0A try:%0A if b%5B'angle_type'%5D == 'multipeaked_singles':%0A angle_type = 'mps'%0A else:%0A angle_type = 'radio'%0A ba.plot_one_double(zid,pathdict,save_fig=True,anglepath='%7B0:.0f%7D_%7B1:.0f%7D/'.format(a,a+10),dbltype=angle_type)%0A count += 1%0A except ValueError as inst:%0A print %22ValueError,%22,inst.args,zid%0A errcount += 1%0A print '%7B:d%7D galaxies with bending angle, %7B:d%7D with errors for angles between %7B:.0f%7D and %7B:.0f%7D'.format(count,errcount,a,a+10)%0A
|
|
93ba6ff584d84255f46ef11fb44e9ae863cd7aed
|
add demo about reversing arguments
|
python/src/reverse_args.py
|
python/src/reverse_args.py
|
Python
| 0
|
@@ -0,0 +1,1478 @@
+# Copyright (c) 2014 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0A#%0A#!/usr/bin/env python%0A# -*- encoding: utf-8 -*-%0A%0A%0Aimport sys%0A%0A%0Aif __name__ == '__main__':%0A print ' '.join(reversed(sys.argv%5B1:%5D))%0A
|
|
058eeefdef485595e48dcd0250bcf9dc9664287a
|
Add Launchpad (fixes #17)
|
services/launchpad.py
|
services/launchpad.py
|
Python
| 0
|
@@ -0,0 +1,2988 @@
+from oauthlib.oauth1.rfc5849 import SIGNATURE_PLAINTEXT, SIGNATURE_TYPE_BODY, SIGNATURE_TYPE_AUTH_HEADER%0Aimport requests%0A%0Aimport foauth.providers%0A%0Aclass Launchpad(foauth.providers.OAuth1):%0A # General info about the provider%0A provider_url = 'https://launchpad.net/'%0A docs_url = 'https://launchpad.net/+apidoc/1.0.html'%0A category = 'Code'%0A%0A # URLs to interact with the API%0A request_token_url = 'https://launchpad.net/+request-token'%0A authorize_url = 'https://launchpad.net/+authorize-token'%0A access_token_url = 'https://launchpad.net/+access-token'%0A api_domains = %5B'api.launchpad.net', 'api.staging.launchpad.net'%5D%0A%0A signature_method = SIGNATURE_PLAINTEXT%0A returns_token = False%0A signature_type = SIGNATURE_TYPE_AUTH_HEADER%0A%0A available_permissions = %5B%0A (None, 'read non-privade data'),%0A ('WRITE_PUBLIC', 'change non-private data'),%0A ('READ_PRIVATE', 'read anything, including private data'),%0A ('WRITE_PRIVATE', 'change anything, including private data'),%0A %5D%0A permissions_widget = 'radio'%0A%0A def __init__(self, *args, **kwargs):%0A super(Launchpad, self).__init__(*args, **kwargs)%0A self.client_secret = '' # Must be empty to satisfy Launchpad%0A%0A def get_authorize_params(self, redirect_uri, scopes):%0A params = super(Launchpad, self).get_authorize_params(redirect_uri, scopes)%0A params%5B'allow_permission'%5D = scopes%5B0%5D or 'READ_PUBLIC'%0A return params%0A%0A def get_request_token_response(self, redirect_uri, scopes):%0A # Launchpad expects the signature in the body, but we don't have%0A # additional parameters, so oauthlib doesn't help us here.%0A return requests.post(self.get_request_token_url(),%0A data=%7B'oauth_consumer_key': self.client_id,%0A 'oauth_signature_method': 'PLAINTEXT',%0A 'oauth_signature': '&'%7D)%0A%0A def get_access_token_response(self, token, secret, verifier=None):%0A # Launchpad expects the signature in the body, but we don't have%0A # additional parameters, so oauthlib doesn't help us 
here.%0A req = requests.Request(url=self.authorize_url,%0A data=%7B'oauth_consumer_key': self.client_id,%0A 'oauth_token': token,%0A 'oauth_signature_method': 'PLAINTEXT',%0A 'oauth_signature': '&%25s' %25 secret%7D)%0A req = req.prepare()%0A return requests.post(self.get_access_token_url(),%0A data=%7B'oauth_consumer_key': self.client_id,%0A 'oauth_token': token,%0A 'oauth_signature_method': 'PLAINTEXT',%0A 'oauth_signature': '&%25s' %25 secret%7D)%0A%0A def get_user_id(self, key):%0A r = super(Launchpad, self).api(key, self.api_domains%5B0%5D, '/1.0/people/+me')%0A return r.json()%5Bu'name'%5D%0A
|
|
98232fea3f8847aad69ed6af0b37a5624c24488b
|
Add experiment script to run a dataset through all graph clustering method
|
experiment.py
|
experiment.py
|
Python
| 0.000001
|
@@ -0,0 +1,2004 @@
+import fnmatch%0Aimport os%0Afrom pygraphc.preprocess.PreprocessLog import PreprocessLog%0Afrom pygraphc.preprocess.CreateGraph import CreateGraph%0Afrom pygraphc.clustering.MajorClust import MajorClust, ImprovedMajorClust%0A%0A%0Adef get_dataset(dataset, dataset_path, file_extension):%0A # get all log files under dataset directory%0A matches = %5B%5D%0A for root, dirnames, filenames in os.walk(dataset_path):%0A for filename in fnmatch.filter(filenames, file_extension):%0A matches.append(os.path.join(root, filename))%0A%0A # get file identifier, log file, labeled log file, result per cluster, result per line, and anomaly report%0A files = %7B%7D%0A result_path = './result/'%0A for match in matches:%0A identifier = match.split(dataset)%0A index = dataset + identifier%5B1%5D%0A files%5Bindex%5D = %7B'log_path': match, 'labeled_path': str(match) + '.labeled',%0A 'result_percluster': result_path + index + '.percluster',%0A 'result_perline': result_path + index + '.perline',%0A 'anomaly_report': result_path + index + '.anomaly'%7D%0A%0A return files%0A%0A%0Adef main(dataset):%0A # get dataset files%0A files = %7B%7D%0A if dataset == 'Hofstede2014':%0A files = get_dataset(dataset, '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014', '*.anon')%0A elif dataset == 'SecRepo':%0A files = get_dataset(dataset, '/home/hudan/Git/labeled-authlog/dataset/SecRepo', '*.log')%0A%0A # main process%0A for file_identifier, properties in files.iteritems():%0A # preprocess log file%0A preprocess = PreprocessLog(properties%5B'log_path'%5D)%0A preprocess.do_preprocess()%0A events_unique = preprocess.events_unique%0A%0A # create graph%0A g = CreateGraph(events_unique)%0A g.do_create()%0A graph = g.g%0A%0A # run MajorClust method%0A mc = MajorClust(graph)%0A clusters = mc.get_majorclust(graph)%0A print clusters%0A%0Aif __name__ == '__main__':%0A data = 'Hofstede2014'%0A main(data)%0A
|
|
aee93802503a19b873299774e351ef883e81356b
|
Add functions.py to provide comment functions
|
functions.py
|
functions.py
|
Python
| 0
|
@@ -0,0 +1,413 @@
+#-*- coding: utf-8 -*-%0A%0Adef getClientIP( req ):%0A%09'''%0A%09Get the client ip address%0A%09'''%0A%09xForwardedFor=req.META.get('HTTP_X_FORWARDED_FOR')%0A%09if xForwardedFor:%0A%09%09ip=xForwardedFor.split(',')%5B0%5D%0A%09else:%0A%09%09ip=req.META.get('REMOTE_ADDR')%0A%09return ip%0A%0Adef getBool( val, trueOpts=%5B'YES', 'Y', '1', 'TRUE', 'T'%5D ):%0A%09'''%0A%09Retrieve the boolean value from string%0A%09'''%0A%09if val:%0A%09%09return str(val).upper() in trueOpts%0A%09return False%0A
|
|
f3a43d11f79766c43be6de359762bd37cde49b38
|
Complete 3 pointer method
|
lc0611_valid_triangle_number.py
|
lc0611_valid_triangle_number.py
|
Python
| 0.000036
|
@@ -0,0 +1,1450 @@
+%22%22%22Leetcode 611. Valid Triangle Number%0AMedium%0A%0AURL: https://leetcode.com/problems/valid-triangle-number/%0A%0AGiven an array consists of non-negative integers, %0Ayour task is to count the number of triplets chosen from the array that can%0Amake triangles if we take them as side lengths of a triangle.%0A%0AExample 1:%0AInput: %5B2,2,3,4%5D%0AOutput: 3%0AExplanation:%0AValid combinations are: %0A2,3,4 (using the first 2)%0A2,3,4 (using the second 2)%0A2,2,3%0A%0ANote:%0A- The length of the given array won't exceed 1000.%0A- The integers in the given array are in the range of %5B0, 1000%5D.%0A%22%22%22%0A%0Aclass SolutionThreePointers(object):%0A def triangleNumber(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A # Apply three pointer method.%0A n = len(nums)%0A%0A # First sort nums in increasing order.%0A nums.sort()%0A%0A n_triplets = 0%0A%0A for i in range(n - 1, 1, -1):%0A # For each i, it suffices to apply two pointer method on the left of i.%0A # Since if num l + r %3E i, triangle number condition is satisfied.%0A l, r = 0, i - 1%0A while l %3C r:%0A if nums%5Bl%5D + nums%5Br%5D %3E nums%5Bi%5D:%0A n_triplets += r - l%0A r -= 1%0A else:%0A l += 1%0A%0A return n_triplets%0A%0A%0Adef main():%0A import time%0A%0A nums = %5B2,2,3,4%5D%0A print SolutionThreePointers().triangleNumber(nums)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
a68198514528773d2368e203491121b5c34f956d
|
Add tests of compile_ptx and compile_ptx_for_current_device
|
numba/cuda/tests/cudapy/test_compiler.py
|
numba/cuda/tests/cudapy/test_compiler.py
|
Python
| 0.000001
|
@@ -0,0 +1,2866 @@
+from math import sqrt%0Afrom numba import cuda, float32, void%0Afrom numba.cuda import compile_ptx, compile_ptx_for_current_device%0A%0Afrom numba.cuda.testing import skip_on_cudasim, unittest%0A%0A%0A@skip_on_cudasim('Compilation unsupported in the simulator')%0Aclass TestCompileToPTX(unittest.TestCase):%0A def test_global_kernel(self):%0A def f(r, x, y):%0A i = cuda.grid(1)%0A if i %3C len(r):%0A r%5Bi%5D = x%5Bi%5D + y%5Bi%5D%0A%0A args = (float32%5B:%5D, float32%5B:%5D, float32%5B:%5D)%0A ptx, resty = compile_ptx(f, args)%0A%0A # Kernels should not have a func_retval parameter%0A self.assertNotIn('func_retval', ptx)%0A # .visible .func is used to denote a device function%0A self.assertNotIn('.visible .func', ptx)%0A # .visible .entry would denote the presence of a global function%0A self.assertIn('.visible .entry', ptx)%0A # Return type for kernels should always be void%0A self.assertEqual(resty, void)%0A%0A def test_device_function(self):%0A def add(x, y):%0A return x + y%0A%0A args = (float32, float32)%0A ptx, resty = compile_ptx(add, args, device=True)%0A%0A # Device functions take a func_retval parameter for storing the%0A # returned value in by reference%0A self.assertIn('func_retval', ptx)%0A # .visible .func is used to denote a device function%0A self.assertIn('.visible .func', ptx)%0A # .visible .entry would denote the presence of a global function%0A self.assertNotIn('.visible .entry', ptx)%0A # Inferred return type as expected?%0A self.assertEqual(resty, float32)%0A%0A def test_fastmath(self):%0A def f(x, y, z, d):%0A return sqrt((x * y + z) / d)%0A%0A args = (float32, float32, float32, float32)%0A ptx, resty = compile_ptx(f, args, device=True)%0A%0A # Without fastmath, fma contraction is enabled by default, but ftz and%0A # approximate div / sqrt is not.%0A self.assertIn('fma.rn.f32', ptx)%0A self.assertIn('div.rn.f32', ptx)%0A self.assertIn('sqrt.rn.f32', ptx)%0A%0A ptx, resty = compile_ptx(f, args, device=True, fastmath=True)%0A%0A # With 
fastmath, ftz and approximate div / sqrt are enabled%0A self.assertIn('fma.rn.ftz.f32', ptx)%0A # %22full%22 refers to a full-range approximate divide%0A self.assertIn('div.full.ftz.f32', ptx)%0A self.assertIn('sqrt.approx.ftz.f32', ptx)%0A%0A def test_compile_ptx_for_current_device(self):%0A def add(x, y):%0A return x + y%0A%0A args = (float32, float32)%0A ptx, resty = compile_ptx_for_current_device(add, args, device=True)%0A%0A # Check we target the current device's compute capabilitay%0A cc = cuda.get_current_device().compute_capability%0A target = f'.target sm_%7Bcc%5B0%5D%7D%7Bcc%5B1%5D%7D'%0A self.assertIn(target, ptx)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
1c0230f7d0add83c36daadcf88bda72500e49015
|
add new package (#22648)
|
var/spack/repos/builtin/packages/rnaquast/package.py
|
var/spack/repos/builtin/packages/rnaquast/package.py
|
Python
| 0
|
@@ -0,0 +1,1419 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Aimport os%0Afrom spack import *%0A%0A%0Aclass Rnaquast(Package):%0A %22%22%22Quality assessment of de novo transcriptome assemblies from RNA-Seq data%0A%0A rnaQUAST is a tool for evaluating RNA-Seq assemblies using reference genome%0A and gene database. In addition, rnaQUAST is also capable of estimating gene%0A database coverage by raw reads and de novo quality assessment%0A using third-party software.%22%22%22%0A%0A homepage = %22https://github.com/ablab/rnaquast%22%0A url = %22https://github.com/ablab/rnaquast/archive/refs/tags/v2.2.0.tar.gz%22%0A%0A maintainers = %5B'dorton21'%5D%0A%0A version('2.2.0', sha256='117dff9d9c382ba74b7b0ff24bc7b95b9ca6aa701ebf8afd22943aa54e382334')%0A%0A depends_on('python@2.5:', type=('build', 'run'))%0A depends_on('py-matplotlib', type=('build', 'run'))%0A depends_on('py-joblib', type=('build', 'run'))%0A depends_on('py-gffutils', type=('build', 'run'))%0A depends_on('gmap-gsnap', type=('build', 'run'))%0A depends_on('blast-plus', type=('build', 'run'))%0A%0A def install(self, spec, prefix):%0A install_tree('.', prefix.bin)%0A os.rename('%25s/rnaQUAST.py' %25 prefix.bin, '%25s/rnaQUAST' %25 prefix.bin)%0A%0A def setup_run_environment(self, env):%0A env.prepend_path('PATH', prefix.bin)%0A
|
|
3c26db43490640e0a98f5c22bb65eecd2c1c5ba9
|
Create sudoku_formatter.py
|
sudoku_formatter.py
|
sudoku_formatter.py
|
Python
| 0.999775
|
@@ -0,0 +1,2265 @@
+instructions = %22%22%22INSTRUCTIONS:%0ACopy a sudoku puzzle from http://www.menneske.no/sudoku/ to a text file, %0Aand provide this file as the first command line argument and the file for %0Aoutput as the second command line argument%0A%0ARemember that box height and width must be added manually (before/after %0Aformatting)%22%22%22%0A%0Aimport sys%0A#step #1: fjerne enkle mellomrom bak alle tall%0A#step #2: bytte ut tall %3E 9 med bokstaver%0A#step #3: erstatte doble mellomrom med .%0A#step #4: erstatte enkle mellomrom med .%0A#step #5: fjerne tabulering%0A%0Aif len(sys.argv) == 2:%0A if sys.argv%5B1%5D == 'help':%0A print instructions%0A sys.exit(1)%0A%0A#parse command line arguments%0Atry:%0A in_filename = sys.argv%5B1%5D%0A out_filename = sys.argv%5B2%5D%0Aexcept IndexError:%0A print %22ERROR: Insufficient number of command line arguments.%22%0A print %22Correct usage:%22%0A print %22python %25s %3Cinput_file%3E %3Coutput_file%3E%5Cn%22 %25 sys.argv%5B0%5D%0A print %22For help:%22%0A print %22python %25s help%22 %25 sys.argv%5B0%5D%0A sys.exit(1)%0A%0A#read from file%0Atry:%0A infile = open(in_filename, 'r')%0A data = infile.read()%0A infile.close()%0Aexcept IOError:%0A print %22ERROR: Unable to read from %25s%22 %25 in_filename%0A sys.exit(1)%0A%0A%0A#step 1%0Adata = data.replace('1 ', '1')%0Adata = data.replace('2 ', '2')%0Adata = data.replace('3 ', '3')%0Adata = data.replace('4 ', '4')%0Adata = data.replace('5 ', '5')%0Adata = data.replace('6 ', '6')%0Adata = data.replace('7 ', '7')%0Adata = data.replace('8 ', '8')%0Adata = data.replace('9 ', '9')%0Adata = data.replace('0 ', '0')%0A%0A#step 2%0Adic = %7B '10':'A', '11':'B', '12':'C', '13':'D', '14':'E', '15':'F', %0A '16':'G', '17':'H', '18':'I', '19':'J', '20':'K', '21':'L', %0A '22':'M', '23':'N', '24':'O', '25':'P', '26':'Q', '27':'R', '28':'S', %0A '29':'T', '30':'U', '31':'V', '32':'W', '33':'X', '34':'Y', '35':'Z'%7D%0A%0Anumbers = range(10,36)%0Afor number in numbers: %0A data = 
data.replace(str(number), dic%5Bstr(number)%5D)%0A%0A#step 3%0Adata = data.replace(' ', '.')%0A%0A#step 4%0Adata = data.replace(' ', '.')%0A%0A#step 5%0Adata = data.replace('%5Ct', '')%0A%0A#write to file%0Atry:%0A outfile = open(out_filename, 'w')%0A outfile.write(data)%0A outfile.close()%0A print %22The sudoku puzzle was successfully formatted. %22%0Aexcept IOError:%0A print %22ERROR: Unable to write to %25s%22 %25 out_filename%0A sys.exit(1)%0A
|
|
710fe77b1d03de83cfefa3173b17d2adf5f220fe
|
Add systemd watchdog
|
systemd_watchdog.py
|
systemd_watchdog.py
|
Python
| 0.000003
|
@@ -0,0 +1,673 @@
+# RES Service Package%0A# Copyright %C2%A9 2015 InvestGroup, LLC%0A%0A%0Aimport asyncio%0Aimport logging%0A%0A%0Adef install_watchdog(interval, loop=None):%0A assert isinstance(interval, (int, float)) and interval %3E 0%0A logger = logging.getLogger(%22systemd-watchdog%22)%0A try:%0A from systemd.daemon import notify as sd_notify%0A except ImportError:%0A logger.warning(%22Failed to import systemd =%3E watchdog is disabled%22)%0A return%0A if loop is None:%0A loop = asyncio.get_event_loop()%0A%0A def notify():%0A sd_notify(%22WATCHDOG=1%22)%0A loop.call_later(interval, notify)%0A%0A notify()%0A logger.info(%22Installed watchdog notification once per %25s sec%22 %25 interval)%0A
|
|
78ef287355d670877ed2dd82386ee5ce51944fcb
|
switch logging to DEBUG
|
crate_project/settings/production/base.py
|
crate_project/settings/production/base.py
|
from ..base import *
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"filters": {
"require_debug_false": {
"()": "django.utils.log.RequireDebugFalse",
},
},
"formatters": {
"simple": {
"format": "%(levelname)s %(message)s"
},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "simple"
},
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
"class": "django.utils.log.AdminEmailHandler",
},
"sentry": {
"level": "ERROR",
"class": "raven.contrib.django.handlers.SentryHandler",
},
},
"loggers": {
"": {
"handlers": ["console", "sentry"],
"propagate": True,
"level": "INFO",
},
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": True,
},
"sentry.errors": {
"level": "DEBUG",
"handlers": ["console"],
"propagate": False,
},
}
}
SITE_ID = 3
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
SERVER_EMAIL = "server@crate.io"
DEFAULT_FROM_EMAIL = "support@crate.io"
CONTACT_EMAIL = "support@crate.io"
# MIDDLEWARE_CLASSES += ["privatebeta.middleware.PrivateBetaMiddleware"]
PACKAGE_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
PACKAGE_FILE_STORAGE_OPTIONS = {
"bucket": "crate-production",
"custom_domain": "packages.crate-cdn.com",
}
DEFAULT_FILE_STORAGE = "storages.backends.s3boto.S3BotoStorage"
# STATICFILES_STORAGE = "storages.backends.s3boto.S3BotoStorage"
AWS_STORAGE_BUCKET_NAME = "crate-media-production"
AWS_S3_CUSTOM_DOMAIN = "media.crate-cdn.com"
# PRIVATE_BETA_ALLOWED_URLS = [
# "/account/login/",
# "/account/signup/",
# "/account/confirm_email/",
# ]
# PRIVATE_BETA_ALLOWED_HOSTS = [
# "simple.crate.io",
# ]
INTERCOM_APP_ID = "79qt2qu3"
SIMPLE_API_URL = "http://simple.crate.io/"
# Security
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 600 # @@@ Increase This
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = True
SESSION_COOKIE_HTTPONLY = True
|
Python
| 0.000003
|
@@ -919,12 +919,13 @@
%22: %22
-INFO
+DEBUG
%22,%0A
|
5f0feca62055dafd9931bf68abe3286bda9f41e0
|
Create __init__.py
|
frappe/desk/doctype/communication_reconciliation/__init__.py
|
frappe/desk/doctype/communication_reconciliation/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,426 @@
+import frappe%0A@frappe.whitelist(allow_guest=True)%0Adef relink(self,name,reference_doctype,reference_name):%0A%09dt = reference_doctype%0A%09dn = reference_name%0A%0A%09if dt==%22%22 or dt==None or dn == %22%22 or dn == None:%0A%09%09return # is blank maybe try flash missing required%0A%09frappe.db.sql(%22%22%22update %60tabCommunication%60%0A%09%09set reference_doctype = %25s ,reference_name = %25s ,status = %22Linked%22%0A%09%09where name = %25s %22%22%22,(dt,dn,name))%0A%0A%09return self.fetch()%0A
|
|
c07609fa140c54572832f4f8f4787d0991495e60
|
check in some migrations
|
planetstack/core/migrations/0002_omf_friendly_default_false.py
|
planetstack/core/migrations/0002_omf_friendly_default_false.py
|
Python
| 0
|
@@ -0,0 +1,396 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('core', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='slice',%0A name='omf_friendly',%0A field=models.BooleanField(default=False),%0A ),%0A %5D%0A
|
|
45c20be4027ccb7aa6a1c3e643ca164413bd353f
|
add management command for commenting on outstanding apps about tos changes.
|
TWLight/applications/management/commands/notify_applicants_tou_changes.py
|
TWLight/applications/management/commands/notify_applicants_tou_changes.py
|
Python
| 0
|
@@ -0,0 +1,1553 @@
+import logging%0Afrom django.conf import settings%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.utils.translation import ugettext as _%0Afrom TWLight.applications.models import Application%0Afrom TWLight.resources.models import Partner%0Afrom django_comments.models import Comment%0A%0Alogger = logging.getLogger(__name__)%0A%0A%0Aclass Command(BaseCommand):%0A def handle(self, *args, **options):%0A # Get apps with a status of PENDING or QUESTION for partners with a status of AVAILABLE or WAITLIST%0A # where the editor has not agreed to the terms of use.%0A pending_apps = (%0A Application.objects.filter(%0A status__in=%5BApplication.PENDING, Application.QUESTION%5D,%0A partner__status__in=%5BPartner.AVAILABLE, Partner.WAITLIST%5D,%0A editor__isnull=False,%0A agreement_with_terms_of_use=False%0A )%0A .exclude(editor__user__groups__name=%22restricted%22)%0A .order_by(%22status%22, %22partner%22, %22date_created%22)%0A )%0A%0A # Loop through the apps and add a comment.%0A for app in pending_apps:%0A comment = Comment(%0A content_object=app,%0A site_id=settings.SITE_ID,%0A # Translators: This comment is added to pending applications when our terms of use change.%0A comment=_(%22Our terms of use have changed. %22%0A %22Your applications will not be processed until you log in agree to our updated terms.%22)%0A )%0A comment.save()%0A
|
|
378e89fd74cfd383c3e1b21fc342194728f66bb7
|
Add DogStatsd metrics backend. (#5230)
|
src/sentry/metrics/dogstatsd.py
|
src/sentry/metrics/dogstatsd.py
|
Python
| 0
|
@@ -0,0 +1,1401 @@
+from __future__ import absolute_import%0A%0A__all__ = %5B'DogStatsdMetricsBackend'%5D%0A%0Afrom datadog import initialize, statsd%0A%0Afrom .base import MetricsBackend%0A%0A%0Aclass DogStatsdMetricsBackend(MetricsBackend):%0A def __init__(self, prefix=None, **kwargs):%0A # TODO(dcramer): it'd be nice if the initialize call wasn't a global%0A self.tags = kwargs.pop('tags', None)%0A initialize(**kwargs)%0A super(DogStatsdMetricsBackend, self).__init__(prefix=prefix)%0A%0A def incr(self, key, instance=None, tags=None, amount=1, sample_rate=1):%0A if tags is None:%0A tags = %7B%7D%0A if self.tags:%0A tags.update(self.tags)%0A if instance:%0A tags%5B'instance'%5D = instance%0A if tags:%0A tags = %5B'%7B%7D:%7B%7D'.format(*i) for i in tags.items()%5D%0A statsd.increment(%0A self._get_key(key),%0A amount,%0A sample_rate=sample_rate,%0A tags=tags,%0A )%0A%0A def timing(self, key, value, instance=None, tags=None, sample_rate=1):%0A if tags is None:%0A tags = %7B%7D%0A if self.tags:%0A tags.update(self.tags)%0A if instance:%0A tags%5B'instance'%5D = instance%0A if tags:%0A tags = %5B'%7B%7D:%7B%7D'.format(*i) for i in tags.items()%5D%0A statsd.timing(%0A self._get_key(key),%0A value,%0A sample_rate=sample_rate,%0A tags=tags,%0A )%0A
|
|
56f9ea1ba0026bc21eeb904afaf25606a6186125
|
Test per veure que no permetem capçaleres multivaluades
|
test/test_regles.py
|
test/test_regles.py
|
Python
| 0
|
@@ -0,0 +1,1534 @@
+import unittest%0D%0Aimport mock%0D%0Aimport settings%0D%0Afrom soa.tiquets import GestioTiquets%0D%0Afrom soa.identitat import GestioIdentitat%0D%0Afrom filtres.nou import FiltreNou%0D%0Afrom mailticket import MailTicket%0D%0Afrom testhelper import llegir_mail%0D%0A%0D%0A%0D%0Aclass TestRegles(unittest.TestCase):%0D%0A%0D%0A def setUp(self):%0D%0A self.tickets = mock.create_autospec(GestioTiquets)%0D%0A self.identitat = mock.create_autospec(GestioIdentitat)%0D%0A settings.init()%0D%0A%0D%0A def test_regla_amb_cc_comprova_primer_valor (self):%0D%0A settings.set(%22valors_defecte%22, %0D%0A %5B%0D%0A %7B%22order%22:%5B%22Cc%22%5D,%0D%0A %22match%22:%22mail.qualsevol2@mail.com%22,%0D%0A %22defaults%22:%7B%22equipResolutor%22:%22666%22%7D%0D%0A %7D%0D%0A %5D%0D%0A )%0D%0A msg = llegir_mail(%22cc.txt%22)%0D%0A f = FiltreNou(msg, self.tickets, self.identitat)%0D%0A defaults= f.obtenir_parametres_addicionals()%0D%0A self.assertEqual(defaults%5B%22equipResolutor%22%5D, %22666%22)%0D%0A%0D%0A def test_regla_amb_cc_comprova_segon_valor (self):%0D%0A settings.set(%22valors_defecte%22, %0D%0A %5B%0D%0A %7B%22order%22:%5B%22Cc%22%5D,%0D%0A %22match%22:%22mail.concret2@mail.com%22,%0D%0A %22defaults%22:%7B%22equipResolutor%22:%22666%22%7D%0D%0A %7D%0D%0A %5D%0D%0A )%0D%0A msg = llegir_mail(%22cc.txt%22)%0D%0A f = FiltreNou(msg, self.tickets, self.identitat)%0D%0A defaults= f.obtenir_parametres_addicionals()%0D%0A self.assertEqual(defaults%5B%22equipResolutor%22%5D, %22666%22)%0D%0A%0D%0Aif __name__ == '__main__':%0D%0A unittest.main()%0D%0A
|
|
c6a9f1df850dca2aced2dd34aa5cde14445956b3
|
use descriptive var name.
|
restclients/models/grad.py
|
restclients/models/grad.py
|
from django.db import models
def get_datetime_str(dt):
if dt is None:
return None
return dt.strftime("%Y-%m-%d %H:%M:%S")
class GradTerm(models.Model):
SPRING = 'spring'
SUMMER = 'summer'
AUTUMN = 'autumn'
WINTER = 'winter'
QUARTERNAME_CHOICES = (
(SPRING, 'Spring'),
(SUMMER, 'Summer'),
(AUTUMN, 'Autumn'),
(WINTER, 'Winter'),
)
quarter = models.CharField(max_length=6,
choices=QUARTERNAME_CHOICES)
year = models.PositiveSmallIntegerField()
def __init__(self):
self.terms = []
def json_data(self):
return {"year": self.year,
"quarter": self.quarter,
}
class GradDegree(models.Model):
req_type = models.CharField(max_length=100)
submit_date = models.DateTimeField()
degree_title = models.CharField(max_length=255, null=True)
status = models.CharField(max_length=64, null=True)
exam_place = models.CharField(max_length=255, null=True)
exam_date = models.DateTimeField(null=True)
target_award_year = models.PositiveSmallIntegerField()
target_award_quarter = models.CharField(
max_length=6, choices=GradTerm.QUARTERNAME_CHOICES)
def json_data(self):
return {
"req_type": self.req_type,
"degree_title": self.degree_title,
"exam_place": self.exam_place,
"exam_date": get_datetime_str(self.exam_date),
"status": self.status,
"submit_date": get_datetime_str(self.submit_date),
"target_award_year": self.target_award_year,
"target_award_quarter": self.target_award_quarter,
}
class GradCommitteeMember(models.Model):
first_name = models.CharField(max_length=96)
last_name = models.CharField(max_length=96)
member_type = models.CharField(max_length=64)
dept = models.CharField(max_length=128, null=True)
email = models.CharField(max_length=255, null=True)
status = models.CharField(max_length=64)
def json_data(self):
return {
"first_name": self.first_name,
"last_name": self.last_name,
"member_type": self.member_type,
"dept": self.dept,
"email": self.email,
"status": self.status,
}
class GradCommittee(models.Model):
committee_type = models.CharField(max_length=64)
dept = models.CharField(max_length=255, null=True)
degree_title = models.CharField(max_length=255, null=True)
degree_type = models.CharField(max_length=255)
major_full_name = models.CharField(max_length=255)
status = models.CharField(max_length=64, null=True)
start_date = models.DateTimeField()
end_date = models.DateTimeField()
def __init__(self):
self.members = [] # GradCommitteeMember
def json_data(self):
data = {
"committee_type": self.committee_type,
"dept": self.dept,
"degree_title": self.degree_title,
"degree_type": self.degree_type,
"major_full_name": self.major_full_name,
"status": self.status,
"start_date": get_datetime_str(self.start_date),
"end_date": get_datetime_str(self.end_date),
"members": [],
}
for member in self.members:
data["members"].append(member.json_data())
return data
class GradLeave(models.Model):
reason = models.CharField(max_length=100,
db_index=True)
submit_date = models.DateTimeField()
status = models.CharField(max_length=50,
blank=True)
def __init__(self):
self.terms = []
def json_data(self):
data = {
'reason': self.reason,
'submit_date': self.submit_date,
'status': self.status,
'terms': [],
}
for term in self.terms:
data["terms"].append(term.json_data())
return data
class GradPetition(models.Model):
description = models.CharField(max_length=100,
db_index=True)
submit_date = models.DateTimeField()
dept_recommend = models.CharField(max_length=50)
gradschool_decision = models.CharField(max_length=50,
null=True,
blank=True)
def json_data(self):
data = {
'description': self.description,
'submit_date': get_datetime_str(self.submit_date),
'dept_recommend': self.dept_recommend,
'gradschool_decision': self.gradschool_decision,
}
return data
|
Python
| 0.000002
|
@@ -50,21 +50,41 @@
tr(d
-t):%0A if dt
+atetime_obj):%0A if datetime_obj
is
@@ -121,17 +121,27 @@
return d
-t
+atetime_obj
.strftim
@@ -161,11 +161,8 @@
H:%25M
-:%25S
%22)%0A%0A
|
ae7f22b5fc606a8415e286ffabd43d3fbb71977c
|
Add Euler angle conversion tests.
|
tests/test_euler.py
|
tests/test_euler.py
|
Python
| 0
|
@@ -0,0 +1,812 @@
+import unittest%0A%0Afrom QGL import *%0Afrom QGL.Euler import *%0Afrom QGL.Cliffords import C1%0Aimport QGL.config%0Atry:%0A from helpers import setup_test_lib%0Aexcept:%0A from .helpers import setup_test_lib%0A%0Aclass EulerDecompositions(unittest.TestCase):%0A%0A%09N_test = 1000%0A%0A%09def setUp(self):%0A%09%09pass%0A%09%09#setup_test_lib()%0A%09%09#self.q1 = QubitFactory('q1')%0A%0A%09def test_zyz_decomp(self):%0A%09%09for j in range(self.N_test):%0A%09%09%09Uh = haar_unitary(2)%0A%09%09%09Ux = zyz_unitary(*zyz_angles(Uh))%0A%09%09%09assert is_close(Uh, Ux)%0A%0A%09def test_xyx_decomp(self):%0A%09%09for j in range(self.N_test):%0A%09%09%09Uh = haar_unitary(2)%0A%09%09%09Ux = xyx_unitary(*xyx_angles(Uh))%0A%09%09%09assert is_close(Uh, Ux)%0A%0A%09def test_xyx_cliffords(self):%0A%09%09for j in range(24):%0A%09%09%09Uxyx = xyx_unitary(*xyx_angles(C1%5Bj%5D))%0A%09%09%09assert is_close(Uxyx, C1%5Bj%5D), f%22%7Bj%7D%22%0A%0Aif __name__ == %22__main__%22:%0A%09unittest.main()%0A
|
|
329c1d9cd515414fe754385ee302197b93eadd20
|
implement 15 (15) ツイッターのユーザー名(例えば@xxxxxxx)を,そのユーザーのページへのリンク(<a href="https://twitter.com/#!/xxxxxxx">@xxxxxxx</a>で囲まれたHTML断片)に置換せよ.
|
set02/15.py
|
set02/15.py
|
Python
| 0
|
@@ -0,0 +1,440 @@
+# -*- coding: utf-8 -*-%0A# (15) %E3%83%84%E3%82%A4%E3%83%83%E3%82%BF%E3%83%BC%E3%81%AE%E3%83%A6%E3%83%BC%E3%82%B6%E3%83%BC%E5%90%8D%EF%BC%88%E4%BE%8B%E3%81%88%E3%81%B0@xxxxxxx%EF%BC%89%E3%82%92%EF%BC%8C%E3%81%9D%E3%81%AE%E3%83%A6%E3%83%BC%E3%82%B6%E3%83%BC%E3%81%AE%E3%83%9A%E3%83%BC%E3%82%B8%E3%81%B8%E3%81%AE%E3%83%AA%E3%83%B3%E3%82%AF%EF%BC%88%3Ca href=%22https://twitter.com/#!/xxxxxxx%22%3E@xxxxxxx%3C/a%3E%E3%81%A7%E5%9B%B2%E3%81%BE%E3%82%8C%E3%81%9FHTML%E6%96%AD%E7%89%87%EF%BC%89%E3%81%AB%E7%BD%AE%E6%8F%9B%E3%81%9B%E3%82%88%EF%BC%8E%0A%0Aimport sys%0Aimport csv%0Aimport re%0A%0Are_name = re.compile(u'@(%5Ba-zA-Z0-9_%5D+)')%0A%0Afor row in csv.reader(sys.stdin):%0A tweet = row%5B5%5D.decode('utf-8')%0A replaced = re_name.sub(ur'%3Ca href=%22https://twitter.com/#!/%5C1%22%3E@%5C1%3C/a%3E', tweet)%0A if replaced != tweet:%0A print replaced.encode('utf-8')%0A%0A
|
|
89ade49695c8961f23879050dda8aa684603c04b
|
Fix serializer
|
CTFd/utils/exports/serializers.py
|
CTFd/utils/exports/serializers.py
|
import json
import six
from collections import OrderedDict
from CTFd.utils.exports.encoders import JSONEncoder
class JSONSerializer(object):
def __init__(self, query, fileobj):
self.query = query
self.fileobj = fileobj
self.buckets = []
def serialize(self):
for row in self.query:
self.write(None, row)
self.close()
def write(self, path, result):
self.buckets.append([result])
def wrap(self, result):
result = OrderedDict([("count", len(result)), ("results", result)])
result["meta"] = {}
return result
def close(self):
for result in self.buckets:
result = self.wrap(result)
# Certain databases (MariaDB) store JSON as LONGTEXT.
# Before emitting a file we should standardize to valid JSON (i.e. a dict)
# See Issue #973
for i, r in enumerate(result["results"]):
data = r.get("requirements")
if data:
try:
if isinstance(data, six.string_types):
result["results"][i]["requirements"] = json.loads(data)
except ValueError:
pass
data = json.dumps(result, cls=JSONEncoder, indent=2)
self.fileobj.write(data.encode("utf-8"))
|
Python
| 0.000039
|
@@ -39,16 +39,29 @@
s import
+ defaultdict,
Ordered
@@ -65,16 +65,16 @@
redDict%0A
-
%0Afrom CT
@@ -275,10 +275,25 @@
s =
-%5B%5D
+defaultdict(list)
%0A%0A
@@ -461,16 +461,22 @@
kets
+%5Bpath%5D
.append(
%5Bres
@@ -475,16 +475,14 @@
end(
-%5B
result
-%5D
)%0A%0A
@@ -667,16 +667,22 @@
for
+ path,
result
@@ -696,16 +696,24 @@
.buckets
+.items()
:%0A
|
933d364981f2b05cbca3325ee92f0696da5de44e
|
Create settings.py
|
settings.py
|
settings.py
|
Python
| 0.000001
|
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-%0A
|
|
7b6b1426015a83b96395f0c7c112dc53d373647f
|
Add init file for remediation module.
|
fairness_indicators/remediation/__init__.py
|
fairness_indicators/remediation/__init__.py
|
Python
| 0
|
@@ -0,0 +1,596 @@
+# Copyright 2019 Google LLC. All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A
|
|
205362c2f068ca22fe40cb6399b071849727ee55
|
Test cases for style attribute parsing
|
tests/test_parse.py
|
tests/test_parse.py
|
Python
| 0
|
@@ -0,0 +1,2512 @@
+%0Aimport pytest%0A%0Afrom rinoh.dimension import DimensionBase, PT, PICA, INCH, MM, CM, PERCENT%0Afrom rinoh.style import OptionSet, Bool, Integer%0A%0A%0Adef test_optionset_from_string():%0A ONE = 'one'%0A TWO = 'two'%0A THREE = 'three'%0A%0A class TestSet1(OptionSet):%0A values = ONE, TWO, THREE%0A%0A assert TestSet1.from_string('one') == ONE%0A assert TestSet1.from_string('TWO') == TWO%0A assert TestSet1.from_string('tHRee') == THREE%0A with pytest.raises(ValueError):%0A TestSet1.from_string('four')%0A with pytest.raises(ValueError):%0A TestSet1.from_string('none')%0A%0A%0A class TestSet2(OptionSet):%0A values = None, TWO%0A%0A assert TestSet2.from_string('none') == None%0A assert TestSet2.from_string('nONe') == None%0A assert TestSet2.from_string('two') == TWO%0A with pytest.raises(ValueError):%0A TestSet2.from_string('one')%0A with pytest.raises(ValueError):%0A TestSet2.from_string('False')%0A%0A%0Adef test_bool_from_string():%0A assert Bool.from_string('true') == True%0A assert Bool.from_string('false') == False%0A assert Bool.from_string('TRUE') == True%0A assert Bool.from_string('FALSE') == False%0A assert Bool.from_string('True') == True%0A assert Bool.from_string('FaLSE') == False%0A with pytest.raises(ValueError):%0A Bool.from_string('1')%0A with pytest.raises(ValueError):%0A Bool.from_string('0')%0A with pytest.raises(ValueError):%0A Bool.from_string('T')%0A with pytest.raises(ValueError):%0A Bool.from_string('f')%0A%0A%0Adef test_integer_from_string():%0A assert Integer.from_string('1') == 1%0A assert Integer.from_string('001') == 1%0A assert Integer.from_string('873654354') == 873654354%0A assert Integer.from_string('-9') == -9%0A with pytest.raises(ValueError):%0A assert Integer.from_string('1e5')%0A with pytest.raises(ValueError):%0A assert Integer.from_string('0.5')%0A%0A%0Adef test_dimensionbase_from_string():%0A assert DimensionBase.from_string('0') == 0%0A assert DimensionBase.from_string('1pt') == 1*PT%0A assert DimensionBase.from_string('10 
pt') == 10*PT%0A assert DimensionBase.from_string('25pc') == 25*PICA%0A assert DimensionBase.from_string('1.5 in') == 1.5*INCH%0A assert DimensionBase.from_string('99999mm') == 99999*MM%0A assert DimensionBase.from_string('-2.1 cm') == -2.1*CM%0A assert DimensionBase.from_string('21%25') == 21.00*PERCENT%0A assert DimensionBase.from_string('-16.12%25') == -16.12*PERCENT%0A with pytest.raises(ValueError):%0A assert DimensionBase.from_string('20inch')%0A
|
|
9118f73f02723bcf62c82934f3ea36328777da25
|
add tests to make sure other queryset methods work
|
django_object_actions/tests/test_utils.py
|
django_object_actions/tests/test_utils.py
|
from django.db.models.query import QuerySet
from django.test import TestCase
from example_project.polls.models import Poll
from ..utils import (
BaseDjangoObjectActions,
QuerySetIsh,
takes_instance_or_queryset,
)
class BaseDjangoObjectActionsTest(TestCase):
def setUp(self):
self.instance = BaseDjangoObjectActions()
def test_get_object_actions_gets_attribute(self):
mock_objectactions = [] # set to something mutable
mock_request = 'request'
mock_context = 'context'
mock_kwargs = {}
self.instance.objectactions = mock_objectactions
returned_value = self.instance.get_object_actions(
mock_request, mock_context, **mock_kwargs
)
# assert that `mock_objectactions` was returned
self.assertEqual(id(mock_objectactions), id(returned_value))
# WISHLIST assert get_object_actions was called with right args
def test_get_djoa_button_attrs_returns_defaults(self):
mock_tool = type('mock_tool', (object, ), {})
attrs, __ = self.instance.get_djoa_button_attrs(mock_tool)
self.assertEqual(attrs['class'], '')
self.assertEqual(attrs['title'], '')
def test_get_djoa_button_attrs_disallows_href(self):
mock_tool = type('mock_tool', (object, ), {
'attrs': {'href': 'hreeeeef'},
})
attrs, __ = self.instance.get_djoa_button_attrs(mock_tool)
self.assertNotIn('href', attrs)
def test_get_djoa_button_attrs_disallows_title(self):
mock_tool = type('mock_tool', (object, ), {
'attrs': {'title': 'i wanna be a title'},
'short_description': 'real title',
})
attrs, __ = self.instance.get_djoa_button_attrs(mock_tool)
self.assertEqual(attrs['title'], 'real title')
def test_get_djoa_button_attrs_gets_set(self):
mock_tool = type('mock_tool', (object, ), {
'attrs': {'class': 'class'},
'short_description': 'description',
})
attrs, __ = self.instance.get_djoa_button_attrs(mock_tool)
self.assertEqual(attrs['class'], 'class')
self.assertEqual(attrs['title'], 'description')
def test_get_djoa_button_attrs_custom_attrs_get_partitioned(self):
mock_tool = type('mock_tool', (object, ), {
'attrs': {'nonstandard': 'wombat'},
})
attrs, custom = self.instance.get_djoa_button_attrs(mock_tool)
self.assertEqual(custom['nonstandard'], 'wombat')
class QuerySetIshTest(TestCase):
def setUp(self):
# as defined in initial_data fixture
# WISHLIST don't depend on fixture
self.obj = Poll.objects.get(pk=1)
def test_can_turn_object_into_queryset(self):
qs = QuerySetIsh(self.obj)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs.get(), self.obj)
self.assertEqual(qs.order_by('foo').get(), self.obj)
self.assertEqual(qs.all().get(), self.obj)
self.assertEqual(qs.filter().get(), self.obj)
self.assertEqual(qs.latest('bar'), self.obj)
class DecoratorTest(TestCase):
def setUp(self):
# as defined in initial_data fixture
# WISHLIST don't depend on fixture
self.obj = Poll.objects.get(pk=1)
self.queryset = Poll.objects.all()
def test_trivial(self):
# setup
def myfunc(foo, bar, queryset):
return queryset
# make sure my test function outputs the third arg
self.assertEqual(myfunc(None, None, 'foo'), 'foo')
# or the `queryset` kwarg
self.assertEqual(myfunc(None, None, queryset='bar'), 'bar')
def test_decorated(self):
# setup
@takes_instance_or_queryset
def myfunc(foo, bar, queryset):
return queryset
# passing in an instance yields a queryset (using positional args)
queryset = myfunc(None, None, self.obj)
self.assertIsInstance(queryset, QuerySet)
# the resulting queryset only has one item and it's self.obj
self.assertEqual(queryset.get(), self.obj)
# passing in a queryset yields the same queryset
queryset = myfunc(None, None, self.queryset)
self.assertIsInstance(queryset, QuerySet)
self.assertEqual(queryset, self.queryset)
# passing in an instance yields a queryset (using keyword args)
queryset = myfunc(None, None, queryset=self.obj)
self.assertIsInstance(queryset, QuerySet)
# the resulting queryset only has one item and it's self.obj
self.assertEqual(queryset.get(), self.obj)
|
Python
| 0
|
@@ -3074,16 +3074,385 @@
f.obj)%0A%0A
+ def test_queryset_supports_delete(self):%0A qs = QuerySetIsh(self.obj)%0A qs.delete()%0A with self.assertRaises(Poll.DoesNotExist):%0A Poll.objects.get(pk=1)%0A%0A def test_queryset_supports_update(self):%0A qs = QuerySetIsh(self.obj)%0A qs.update(question='mooo')%0A self.assertEqual(Poll.objects.get(pk=1).question, 'mooo')%0A%0A
%0Aclass D
|
9112bfe1a4f9253b78c6145f74b12c712cee136b
|
Create spacewar.py
|
spacewar.py
|
spacewar.py
|
Python
| 0.002719
|
@@ -0,0 +1,215 @@
+%22%22%22%0Aspacewar.py%0AAuthor: %3Cyour name here%3E%0ACredit: %3Clist sources used, if any%3E%0A%0AAssignment:%0AWrite and submit a program that implements the sandbox platformer game:%0Ahttps://github.com/HHS-IntroProgramming/Spacewar%0A%22%22%22%0A
|
|
c8989ff640eb9fb1e9fddad4ac303ed417b54c79
|
Adding grumpy.py
|
axelrod/strategies/grumpy.py
|
axelrod/strategies/grumpy.py
|
Python
| 0.999739
|
@@ -0,0 +1,1545 @@
+from axelrod import Player%0A%0Aclass Grumpy(Player):%0A %22%22%22%0A A player that defects after a ceratin level of grumpiness. Grumpiness increases when the opponent defects and decreases when the opponent co-operates.%0A %22%22%22%0A%0A def __init__(self, starting_state = 'Nice', grumpy_threshold = 10, nice_threshold = -10):%0A %22%22%22%0A Player starts of nice be default with set thresholds%0A %22%22%22%0A%0A self.history = %5B%5D%0A self.score = 0%0A self.state = starting_state%0A self.grumpy_threshold = grumpy_threshold%0A self.nice_threshold = nice_threshold%0A%0A def strategy(self, opponent):%0A %22%22%22%0A A player that gets grumpier the more the opposition defects, and nicer the more they cooperate.%0A Starts off Nice, but becomes grumpy once the grumpiness threshold is hit.%0A Won't become nice once that grumpy threshold is hit, but must reach a much lower threshold before it becomes nice again.%0A %22%22%22%0A%0A self.grumpiness = sum(play=='D' for play in opponent.history) - sum(play=='C' for play in opponent.history) %0A%0A if self.state == 'Nice':%0A if self.grumpiness %3E self.grumpy_threshold:%0A self.state = 'Grumpy'%0A return 'D'%0A return 'C'%0A%0A if self.state == 'Grumpy':%0A if self.grumpiness %3C self.nice_threshold:%0A self.state = 'Nice'%0A return 'C'%0A return 'D'%0A%0A def __repr__(self):%0A %22%22%22%0A The string method for the strategy:%0A %22%22%22%0A%0A return 'Grumpy'%0A
|
|
ed793b470e55a0fe1c7a0a4b0cc3d8a2774e4628
|
verify that the files are actually images
|
scripts/verifyImageType.py
|
scripts/verifyImageType.py
|
Python
| 0
|
@@ -0,0 +1,1304 @@
+from resnet50 import ResNet50%0Afrom keras.preprocessing import image%0Afrom imagenet_utils import preprocess_input, decode_predictions%0Aimport numpy as np%0Aimport json%0Aimport time%0Aimport sys%0Afrom multiprocessing import Pool%0Aimport functools%0A%0Adef readTasking(filename):%0A%0A tasking = open(filename,'r')%0A data = list()%0A fileProblems = list()%0A%0A for task in tasking:%0A task = task.strip()%0A line = json.loads(task)%0A data.append(line)%0A tasking.close()%0A return data%0A%0Adef procLine2(l, r):%0A img_path = '%7B0%7D/%7B1%7D'.format(r,l%5B'filename'%5D)%0A try:%0A img = image.load_img(img_path, target_size=(224, 224))%0A return (1,l%5B'filename'%5D)%0A except:%0A return (0,l%5B'filename'%5D)%0A%0Adef writeTasking(filename, tasking, bad):%0A outFile = open(filename,'w')%0A badFiles = set()%0A for item in bad:%0A if item%5B0%5D == 0:%0A badFiles.add(item%5B1%5D)%0A for task in tasking:%0A if task%5B'filename'%5D not in badFiles:%0A outFile.write(json.dumps(task)+'%5Cn')%0A%0A outFile.close()%0A %0A %0Adef main():%0A procLine = functools.partial(procLine2, r=sys.argv%5B2%5D )%0A p = Pool()%0A tasking = readTasking(sys.argv%5B1%5D)%0A files = p.map(procLine,tasking)%0A writeTasking(sys.argv%5B1%5D+'.new',tasking,files)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A%0A
|
|
12c950c09839ad887d76de9a062e36015534c054
|
Create activate-devices.py
|
cron/activate-devices.py
|
cron/activate-devices.py
|
Python
| 0.000001
|
@@ -0,0 +1 @@
+%0A
|
|
4628adc38789f52e8e2ef0cdf600b9fbed7b30ab
|
Test events (really event __repr__)
|
test/test_events.py
|
test/test_events.py
|
Python
| 0.000003
|
@@ -0,0 +1,2085 @@
+import pytest%0A%0Afrom h11 import Request%0A%0Afrom wsproto.events import (%0A ConnectionClosed,%0A ConnectionEstablished,%0A ConnectionRequested,%0A)%0Afrom wsproto.frame_protocol import CloseReason%0A%0A%0Adef test_connection_requested_repr_no_subprotocol():%0A method = b'GET'%0A target = b'/foo'%0A headers = %7B%0A b'host': b'localhost',%0A b'sec-websocket-version': b'13',%0A %7D%0A http_version = b'1.1'%0A%0A req = Request(method=method, target=target, headers=list(headers.items()),%0A http_version=http_version)%0A%0A event = ConnectionRequested(%5B%5D, req)%0A r = repr(event)%0A%0A assert 'ConnectionRequested' in r%0A assert target.decode('ascii') in r%0A%0A%0Adef test_connection_requested_repr_with_subprotocol():%0A method = b'GET'%0A target = b'/foo'%0A headers = %7B%0A b'host': b'localhost',%0A b'sec-websocket-version': b'13',%0A b'sec-websocket-protocol': b'fnord',%0A %7D%0A http_version = b'1.1'%0A%0A req = Request(method=method, target=target, headers=list(headers.items()),%0A http_version=http_version)%0A%0A event = ConnectionRequested(%5B%5D, req)%0A r = repr(event)%0A%0A assert 'ConnectionRequested' in r%0A assert target.decode('ascii') in r%0A assert headers%5Bb'sec-websocket-protocol'%5D.decode('ascii') in r%0A%0A%0A@pytest.mark.parametrize('subprotocol,extensions', %5B%0A ('sproto', None),%0A (None, %5B'fake'%5D),%0A ('sprout', %5B'pretend'%5D),%0A%5D)%0Adef test_connection_established_repr(subprotocol, extensions):%0A event = ConnectionEstablished(subprotocol, extensions)%0A r = repr(event)%0A%0A if subprotocol:%0A assert subprotocol in r%0A if extensions:%0A for extension in extensions:%0A assert extension in r%0A%0A%0A@pytest.mark.parametrize('code,reason', %5B%0A (CloseReason.NORMAL_CLOSURE, None),%0A (CloseReason.NORMAL_CLOSURE, 'because i felt like it'),%0A (CloseReason.INVALID_FRAME_PAYLOAD_DATA, 'GOOD GOD WHAT DID YOU DO'),%0A%5D)%0Adef test_connection_closed_repr(code, reason):%0A event = ConnectionClosed(code, reason)%0A r = 
repr(event)%0A%0A assert repr(code) in r%0A if reason:%0A assert reason in r%0A
|
|
e6df90237fa3a42f40cf39aa0df7675de6dd9c9f
|
859. Buddy Strings
|
LeetCode/BuddyStrings.py
|
LeetCode/BuddyStrings.py
|
Python
| 0.999776
|
@@ -0,0 +1,1038 @@
+class Solution:%0A def buddyStrings(self, A: str, B: str) -%3E bool:%0A if len(A) != len(B):%0A return False%0A mismatches = %5B-1, -1%5D%0A for i in range(len(A)):%0A if A%5Bi%5D != B%5Bi%5D:%0A if mismatches%5B0%5D %3C 0:%0A # first mismatch%0A mismatches%5B0%5D = i%0A elif mismatches%5B1%5D %3C 0:%0A # 1 previous mismatch%0A mismatches%5B1%5D = i%0A else:%0A # 2 previous mismatches - can't have 3%0A return False%0A if mismatches%5B0%5D %3C 0:%0A # no mismatches found, so they're buddy strings iff there are at least 2 identical letters%0A return len(set(A)) %3C len(A)%0A elif mismatches%5B1%5D %3C 0:%0A # 1 mistmatch found, no switch possible to fix it%0A return False%0A else:%0A # exactly 2 mismatches - they're buddy strings iff it's a swap%0A return A%5Bmismatches%5B0%5D%5D == B%5Bmismatches%5B1%5D%5D and A%5Bmismatches%5B1%5D%5D == B%5Bmismatches%5B0%5D%5D%0A
|
|
60075ecdc73097c39895193a593688cb3cf103dd
|
add glucose example
|
examples/plotting/file/glucose.py
|
examples/plotting/file/glucose.py
|
Python
| 0.00231
|
@@ -0,0 +1,563 @@
+%0Afrom bokeh.sampledata import glucose%0Afrom bokeh.plotting import *%0A%0Aday = glucose.data.ix%5B'2010-10-06'%5D%0Ahighs = day%5Bday%5B'glucose'%5D %3E 180%5D%0Alows = day%5Bday%5B'glucose'%5D %3C 80%5D%0A%0A%0Aoutput_file(%22glucose.html%22, title=%22glucose.py example%22)%0A%0Ahold()%0A%0Aline(day.index.astype('int')/1000000, day%5B'glucose'%5D, color='grey', tools=%22pan,zoom,resize%22)%0Ascatter(highs.index.astype('int')/1000000, highs%5B'glucose'%5D, color='red', radius=4, legend=%22high%22)%0Ascatter(lows.index.astype('int')/1000000, lows%5B'glucose'%5D, color='blue', radius=4, legend=%22low%22)%0A%0A#figure()%0A%0A# open a browser%0Ashow()%0A%0A
|
|
6f0fdb4d7b1202c7ab07d01cf34954ee725df6fe
|
add basic-calculator
|
vol5/basic-calculator/basic-calculator.py
|
vol5/basic-calculator/basic-calculator.py
|
Python
| 0.998627
|
@@ -0,0 +1,1965 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# @Author: Zeyuan Shang%0A# @Date: 2015-11-18 17:21:42%0A# @Last Modified by: Zeyuan Shang%0A# @Last Modified time: 2015-11-18 17:21:51%0Aclass Solution:%0A operators = %5B'+', '-', '*', '/'%5D%0A %0A def getPriority(self, operator):%0A return %7B%0A '+' : 1,%0A '-' : 1,%0A '*' : 2,%0A '/' : 2,%0A %7D.get(operator, 0)%0A %0A def toRPN(self, s):%0A tokens, stack = %5B%5D, %5B%5D%0A number = ''%0A for c in s:%0A if c.isdigit():%0A number += c%0A else:%0A if number:%0A tokens.append(number)%0A number = ''%0A if c in self.operators:%0A while len(stack) and self.getPriority(stack%5B-1%5D) %3E= self.getPriority(c):%0A tokens.append(stack.pop())%0A stack.append(c)%0A elif c == '(':%0A stack.append(c)%0A elif c == ')':%0A while len(stack) and stack%5B-1%5D != '(':%0A tokens.append(stack.pop())%0A stack.pop()%0A if number:%0A tokens.append(number)%0A while len(stack):%0A tokens.append(stack.pop())%0A return tokens%0A %0A def calcValue(self, x, y, operator):%0A return %7B%0A '+': lambda x, y: x + y,%0A '-': lambda x, y: x - y,%0A '*': lambda x, y: x * y,%0A '/': lambda x, y: int(float(x) / y),%0A %7D%5Boperator%5D(x, y)%0A %0A def evalRPN(self, tokens):%0A operands = %5B%5D%0A for token in tokens:%0A if token in self.operators:%0A y, x = operands.pop(), operands.pop()%0A operands.append(self.calcValue(x, y, token))%0A else:%0A operands.append(int(token))%0A return operands%5B0%5D%0A %0A def calculate(self, s):%0A tokens = self.toRPN(s)%0A return self.evalRPN(tokens)
|
|
54c1028157447233014890419ab869a8c1ca0c3a
|
Add linux installer
|
installation/server-linux.py
|
installation/server-linux.py
|
Python
| 0.000001
|
@@ -0,0 +1,1189 @@
+#!/usr/bin/env python%0A#-*- encoding:utf-8 -*-%0A%0Aimport os%0Aimport sys%0Aimport urllib2%0Aimport subprocess%0A%0AinstallationPath = os.path.expanduser('~') + '/.config/ydlss'%0AserverLocalLocation = installationPath + '/youtube-dl-simple-server'%0AserverWebLocation = 'https://github.com/r4mos/youtube-dl-simple-server/raw/master/bin/server/linux/youtube-dl-simple-server'%0A%0Atry:%0A print 'Cheeking installation folder'%0A if not os.path.isdir(installationPath):%0A os.makedirs(installationPath)%0A%0A print 'Downloading server'%0A latest = urllib2.urlopen(serverWebLocation)%0A output = open(serverLocalLocation, 'wb')%0A output.write(latest.read())%0A output.close()%0A%0A print 'Changing permissions'%0A subprocess.check_output(%5B'chmod', 'a+x', serverLocalLocation%5D)%0A%0A print 'Adding to .profile file to autostart'%0A profile = open (os.path.expanduser('~') + '/.profile', 'a')%0A profile.write('sleep 5 && ' + serverLocalLocation + ' &%5Cn')%0A profile.close()%0A%0A print '%5CnCompleted installation but server is stopped'%0A print 'Reboot your computer or start the server manually:'%0A print serverLocalLocation + ' --verbose'%0Aexcept:%0A print 'Fail. An error occurred'%0A sys.exit(1)
|
|
46d6b7451bb8e295751d771228782cfbbcb8ecc7
|
Add dask_client_from_ipclient function
|
dask/distributed/ipython_utils.py
|
dask/distributed/ipython_utils.py
|
Python
| 0
|
@@ -0,0 +1,434 @@
+from .scheduler import Scheduler%0Afrom .worker import Worker%0Afrom .client import Client%0A%0A%0Adef dask_client_from_ipclient(client):%0A %22%22%22%0A Construct a scheduler from an ipython client.%0A %22%22%22%0A zmq_context = client._context%0A scheduler = Scheduler(context=zmq_context)%0A workers = %5BWorker(scheduler.address_to_workers) for i in range(len(client))%5D%0A dask_client = Client(scheduler.address_to_clients)%0A return dask_client%0A
|
|
93e19ab50567e045daf0e35d856033303be70192
|
Implement first test cases
|
test_esp8266uart.py
|
test_esp8266uart.py
|
Python
| 0.006485
|
@@ -0,0 +1,1442 @@
+import esp8266uart%0A%0Aesp = esp8266uart.ESP8266(1, 115200)%0A%0Aprint('Testing generic methods')%0Aprint('=======================')%0A%0Aprint('AT startup...')%0Aif esp.test():%0A print('Success!')%0Aelse:%0A print('Failed!')%0A%0A#print('Soft-Reset...')%0A#if esp.reset():%0A# print('Success!')%0A#else:%0A# print('Failed!')%0A%0Aprint('Another AT startup...')%0Aif esp.test():%0A print('Success!')%0Aelse:%0A print('Failed!')%0A%0Aprint()%0A%0Aprint('Testing WIFI methods')%0Aprint('====================')%0A%0Awifi_mode = 1%0Aprint(%22Testing get_mode/set_mode of value '%25s'(%25i)...%22 %25 (esp8266uart.WIFI_MODES%5Bwifi_mode%5D, wifi_mode))%0Aesp.set_mode(wifi_mode)%0Aif esp.get_mode() == wifi_mode:%0A print('Success!')%0Aelse:%0A print('Failed!')%0A %0Aprint('Disconnecting from WLAN...')%0Aif esp.disconnect():%0A print('Success!')%0Aelse:%0A print('Failed!')%0A%0Aprint('Disconnecting from WLAN again...')%0Aif esp.disconnect():%0A print('Success!')%0Aelse:%0A print('Failed!')%0A%0Aprint('Checking if not connected WLAN...')%0Aif esp.get_accesspoint() == None:%0A print('Success!')%0Aelse:%0A print('Failed!')%0A%0Aprint('Scanning for WLANs...')%0Awlans = esp.list_all_accesspoints()%0Afor wlan in wlans:%0A print(wlan)%0A print(%22Scanning for WLAN '%25s'...%22 %25 (wlan%5B'ssid'%5D))%0A for wlan2 in esp.list_accesspoints(wlan%5B'ssid'%5D):%0A print(wlan2)%0A %0Aprint('Setting access point mode...')%0Aif esp.set_mode(esp8266uart.WIFI_MODES%5B'Access Point + Station'%5D):%0A print('Failed!')%0Aelse:%0A print('Success!')%0A%0A
|
|
8ec7492658bab8d6b0fba5d6b49a58b6408f5fa2
|
Add framework for graphing temporal time
|
src/graph_run_algo.py
|
src/graph_run_algo.py
|
Python
| 0.000001
|
@@ -0,0 +1,2416 @@
+import dateutil.parser%0Aimport time%0Aimport matplotlib.pyplot as plt%0Afrom operator import add%0Afrom datetime import datetime%0Afrom gensim import corpora, models, similarities%0A%0A%0Aminute = 60%0Ahour = 3600%0Acdict = %22%25scorpus.dict%22%0Accorpus = %22%25scorpus.mm%22%0Acorpus_model = %22%25s.%25s%22%0A%0A%0Adef gen_graph(figure, x, y, subtitle, xlabel, ylabel, scale):%0A figure.suptitle(subtitle, fontsize=14, fontweight='bold')%0A ax = figure.add_subplot(111)%0A figure.subplots_adjust(top=0.85)%0A ax.set_xlabel(xlabel)%0A ax.set_ylabel(ylabel)%0A ax.set_yscale(scale)%0A ax.plot(x, y, linestyle='--', marker='o')%0A%0A%0Adef load_data(sizes, algorithm):%0A dictionaries = %5Bcorpora.Dictionary.load(cdict %25 size) for size in sizes%5D%0A corpi = %5Bcorpora.MmCorpus(ccorpus %25 size) for size in sizes%5D%0A corpus_models = %5Bmodels.ldamodel.LdaModel.load(corpus_model %25 (size, algorithm)) for size in sizes%5D%0A dict_sizes = %5Blen(x) for x in dictionaries%5D%0A return dict_sizes, corpus_models, corpi, dictionaries%0A%0A%0Adef read_logfile(path):%0A train_times = list()%0A build_times = list()%0A with open(path) as log_file:%0A for i, line in enumerate(log_file):%0A bt_time = line.strip().split(%22%5Ct%22)%5B-1%5D%0A hours, mins, seconds = bt_time.split(%22:%22)%0A total = (hour * int(hours)) + (minute * int(mins)) + float(seconds)%0A if (i %25 2) == 0:%0A train_times.append(total)%0A else:%0A build_times.append(total)%0A total_times = map(add, train_times, build_times)%0A return total_times, train_times, build_times%0A%0A%0Adef main():%0A corp_dict_size = plt.figure()%0A corp_time = plt.figure()%0A corp_build = plt.figure()%0A total_time = plt.figure()%0A total_times, train_times, build_times = read_logfile(%22runtimes.log%22)%0A sizes = %5B1000, 5000, 10000, 15000, 20000, 25000, 30000%5D%0A dict_sizes, corpus_models, corpi, dictionaries = load_data(sizes, %22lda%22)%0A%0A gen_graph(corp_dict_size, sizes, dict_sizes, 'Corpus size and dictionary 
features', %22corpus size%22, %22dictionary size%22, 'log')%0A gen_graph(corp_time, sizes, train_times, 'Corpus size and train time', %22corpus size%22, %22training time%22, 'log')%0A gen_graph(corp_build, sizes, build_times, 'Corpus size and build time', %22corpus size%22, %22build time%22, 'log')%0A gen_graph(total_time, sizes, total_times, 'Corpus size and total time', %22corpus size%22, %22total time%22, 'log')%0A%0A plt.show()%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
b80fbf3f841a950cbcf7751fee7173910fbd19fa
|
Fix ipynb to actuall render the output
|
nikola/plugins/compile/ipynb.py
|
nikola/plugins/compile/ipynb.py
|
# -*- coding: utf-8 -*-
# Copyright © 2013-2016 Damián Avila, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Implementation of compile_html based on nbconvert."""
from __future__ import unicode_literals, print_function
import io
import os
import sys
try:
from nbconvert.exporters import HTMLExporter
import nbformat
current_nbformat = nbformat.current_nbformat
from jupyter_client import kernelspec
from traitlets.config import Config
flag = True
ipy_modern = True
except ImportError:
try:
import IPython
from IPython.nbconvert.exporters import HTMLExporter
if IPython.version_info[0] >= 3: # API changed with 3.0.0
from IPython import nbformat
current_nbformat = nbformat.current_nbformat
from IPython.kernel import kernelspec
ipy_modern = True
else:
import IPython.nbformat.current as nbformat
current_nbformat = 'json'
kernelspec = None
ipy_modern = False
from IPython.config import Config
flag = True
except ImportError:
flag = None
ipy_modern = None
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, req_missing, get_logger, STDERR_HANDLER
from nikola.shortcodes import apply_shortcodes
class CompileIPynb(PageCompiler):
    """Compile Jupyter/IPython notebooks (.ipynb) into HTML."""

    name = "ipynb"
    friendly_name = "Jupyter/IPython Notebook"
    demote_headers = True
    default_kernel = 'python2' if sys.version_info[0] == 2 else 'python3'

    def set_site(self, site):
        """Set Nikola site and attach a logger for this compiler."""
        self.logger = get_logger('compile_ipynb', STDERR_HANDLER)
        super(CompileIPynb, self).set_site(site)

    def compile_html_string(self, source, is_two_file=True):
        """Export the notebook at *source* and return it as an HTML string."""
        if flag is None:
            req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
        HTMLExporter.default_template = 'basic'
        c = Config(self.site.config['IPYNB_CONFIG'])
        exportHtml = HTMLExporter(config=c)
        with io.open(source, "r", encoding="utf8") as in_file:
            nb_json = nbformat.read(in_file, current_nbformat)
        (body, resources) = exportHtml.from_notebook_node(nb_json)
        return body

    def compile_html(self, source, dest, is_two_file=True):
        """Compile *source* into HTML and save it as *dest*."""
        makedirs(os.path.dirname(dest))
        with io.open(dest, "w+", encoding="utf8") as out_file:
            output = self.compile_html_string(source, is_two_file)
            output = apply_shortcodes(output, self.site.shortcode_registry, self.site, source)
            # BUG FIX: write() was previously called without arguments,
            # which raises TypeError and leaves the destination file empty.
            out_file.write(output)

    def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang=None):
        """Read metadata directly from the ipynb file.

        As an ipynb file supports arbitrary JSON metadata, the metadata
        used by Nikola is assumed to live in the 'nikola' subfield.
        """
        if flag is None:
            req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
        source = post.source_path
        with io.open(source, "r", encoding="utf8") as in_file:
            nb_json = nbformat.read(in_file, current_nbformat)
        # Metadata might not exist in two-file posts or in hand-crafted
        # .ipynb files.
        return nb_json.get('metadata', {}).get('nikola', {})

    def create_post(self, path, **kw):
        """Create a new post as a notebook file at *path*."""
        if flag is None:
            req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
        content = kw.pop('content', None)
        onefile = kw.pop('onefile', False)
        kernel = kw.pop('ipython_kernel', None)
        # is_page is not needed to create the file.
        kw.pop('is_page', False)
        metadata = {}
        metadata.update(self.default_metadata)
        metadata.update(kw)
        makedirs(os.path.dirname(path))
        if content.startswith("{"):
            # Imported .ipynb file, guaranteed to start with "{" because it's JSON.
            nb = nbformat.reads(content, current_nbformat)
        else:
            if ipy_modern:
                nb = nbformat.v4.new_notebook()
                nb["cells"] = [nbformat.v4.new_markdown_cell(content)]
            else:
                nb = nbformat.new_notebook()
                nb["worksheets"] = [nbformat.new_worksheet(cells=[nbformat.new_text_cell('markdown', [content])])]
        if kernelspec is not None:
            if kernel is None:
                kernel = self.default_kernel
                self.logger.notice('No kernel specified, assuming "{0}".'.format(kernel))
            # Build a name -> kernelspec mapping; 'argv' is dropped because
            # it is machine-specific and should not be stored in the post.
            IPYNB_KERNELS = {}
            ksm = kernelspec.KernelSpecManager()
            for k in ksm.find_kernel_specs():
                IPYNB_KERNELS[k] = ksm.get_kernel_spec(k).to_dict()
                IPYNB_KERNELS[k]['name'] = k
                del IPYNB_KERNELS[k]['argv']
            if kernel not in IPYNB_KERNELS:
                # Typo fix in user-facing message: "mispelled" -> "misspelled".
                self.logger.error('Unknown kernel "{0}". Maybe you misspelled it?'.format(kernel))
                self.logger.info("Available kernels: {0}".format(", ".join(sorted(IPYNB_KERNELS))))
                raise Exception('Unknown kernel "{0}"'.format(kernel))
            nb["metadata"]["kernelspec"] = IPYNB_KERNELS[kernel]
        else:
            # Older IPython versions don't need kernelspecs.
            pass
        if onefile:
            nb["metadata"]["nikola"] = metadata
        with io.open(path, "w+", encoding="utf8") as fd:
            if ipy_modern:
                nbformat.write(nb, fd, 4)
            else:
                nbformat.write(nb, fd, 'ipynb')
|
Python
| 0
|
@@ -3784,16 +3784,22 @@
e.write(
+output
)%0A%0A d
|
87fe0b7ff745e4057e4b60a1a9a75fe57581b2d5
|
add exp4 script
|
num_exper/exp4.py
|
num_exper/exp4.py
|
Python
| 0
|
@@ -0,0 +1,877 @@
+import os%0Aimport numpy as np%0A%0Anp = 4%0Afmin = 1.0%0Afmax = 9.0%0Adf = 0.5%0A%0Afor i in np.arange(fmin+df, fmax, df): %0A %0A %0A str1 = 'python3 elast_wedge.py --ndims=2 --dx=100.0 --dy=100.0 --dz=100.0 --df=0.1 --degree=1 --damping=0.5 --maxit=300 %5C%0A --tol=1e-8 --dg_pp=0 --tau_re=-100 -tau_im=-0.7 --block=True %5C%0A --plots=False --plot_resnrm=True --solver_flag=0 --nprocs=8'%0A %0A %0A w_tick3 = np.logspace(np.log10(i), np.log10(fmax), num=np, endpoint=True)%0A int1 = ' --freq=%5B1,'+str(i)+'%5D'%0A int2 = ' --freq=%5B'+str(i)+','+str(w_tick3%5B1%5D)+'%5D'%0A int3 = ' --freq=%5B'+str(w_tick3%5B1%5D)+','+str(w_tick3%5B2%5D)+'%5D'%0A int4 = ' --freq=%5B'+str(str(w_tick3%5B2%5D))+',9%5D'%0A %0A os.system(str1+int1)%0A os.system(str1+int2)%0A os.system(str1+int3)%0A os.system(str1+int4)
|
|
2fa721b24891fcd8170d87328334c05faec9cb9a
|
add hamming char dist function
|
cryptography/hamming_distance/python/hamming_char_dist.py
|
cryptography/hamming_distance/python/hamming_char_dist.py
|
Python
| 0.000038
|
@@ -0,0 +1,319 @@
+%22%22%22%0ADetermine the hamming distance, at the character level, between two equal length strings a and b.%0A%22%22%22%0Adef hamming_char_dist(a,b):%0A if len(a) != len(b):%0A raise ValueError(%22a and b are unequal lengths%22)%0A dist = 0%0A for i in range(len(a)):%0A if(a%5Bi%5D != b%5Bi%5D):%0A dist += 1%0A return dist
|
|
c679b614b638123d846940718bb3fd27ed3078ce
|
Add test for fftpack.
|
numpy/fft/tests/test_fftpack.py
|
numpy/fft/tests/test_fftpack.py
|
Python
| 0.000027
|
@@ -0,0 +1,267 @@
+import sys%0Afrom numpy.testing import *%0Aset_package_path()%0Afrom numpy.fft import *%0Arestore_path()%0A%0Aclass test_fftshift(NumpyTestCase):%0A def check_fft_n(self):%0A self.failUnlessRaises(ValueError,fft,%5B1,2,3%5D,0)%0A%0Aif __name__ == %22__main__%22:%0A NumpyTest().run()%0A
|
|
9436af51e8b823fa83358b40f94dcd07970aea6b
|
test test
|
src/test_deque.py
|
src/test_deque.py
|
Python
| 0.000037
|
@@ -0,0 +1,1418 @@
+%22%22%22Test functions for deque module.%22%22%22%0Aimport pytest%0A%0A%0Adef test_queue_is_instance_of_queue_object():%0A %22%22%22Test stack is instance of Stack().%22%22%22%0A from deque import Deque%0A d = Deque()%0A assert isinstance(d, Deque)%0A%0A%0Adef test_queue_is_instance_of_doubly_linked_list():%0A %22%22%22Test stack inherits from DoublyLinkedList.%22%22%22%0A from deque import Deque%0A from doubly_linked_list import DoublyLinkedList%0A d = Deque()%0A assert isinstance(d._doubly_linked_list, DoublyLinkedList)%0A%0A%0Adef test_enqueue_adds_a_value(deque_fixture):%0A %22%22%22Test the enqueue method adds value.%22%22%22%0A deque_fixture.append(2)%0A assert deque_fixture._doubly_linked_list.tail.data == 2%0A%0A%0A# def test_append_adds_multiple_values_and_checks_front(deque_fixture):%0A# %22%22%22Test the append method adds value.%22%22%22%0A# deque_fixture.append(2)%0A# deque_fixture.append(3)%0A# assert deque_fixture._doubly_linked_list.tail.data == 2%0A%0A%0A# def test_dequeue_removes_first_node_added(deque_fixture):%0A# %22%22%22Test that node dequeued is first node added.%22%22%22%0A# deque_fixture.append(2)%0A# remove = deque_fixture.popleft()%0A# assert remove == 2%0A%0A%0A# def test_popleft_removes_first_node_added_even_with_multiple_nodes(deque_fixture):%0A# %22%22%22Test that node popleftd is first node added even with multiple nodes.%22%22%22%0A# deque_fixture.append(2)%0A# deque_fixture.append(3)%0A# remove = deque_fixture.popleft()%0A# assert remove == 2
|
|
7a3a7a8d5a397c886086cc87fff3f058921d06d2
|
add IntegerListField for keeping major selections
|
application/fields.py
|
application/fields.py
|
Python
| 0
|
@@ -0,0 +1,734 @@
+from django.db import models%0A%0Aclass IntegerListField(models.Field):%0A %22%22%22%0A IntegerListField keeps a list of int as a comma-separated string.%0A%0A %3E%3E%3E g = IntegerListField()%0A %3E%3E%3E g.get_db_prep_value(%5B1,2,-1,20,30,40,-100%5D)%0A '1,2,-1,20,30,40,-100'%0A%0A %3E%3E%3E g.to_python('1,2,-10,3,4,-100,7')%0A %5B1,2,-10,3,4,-100,7%5D%0A %22%22%22%0A __metaclass__ = models.SubfieldBase%0A%0A def db_type(self):%0A return 'text'%0A%0A def to_python(self, value):%0A if isinstance(value, list):%0A return value%0A%0A if value==None or value=='':%0A return %5B%5D%0A else:%0A return %5B int(r) for r in value.split(',') %5D%0A%0A def get_db_prep_value(self, value):%0A return ','.join(%5Bstr(r) for r in value%5D)%0A%0A
|
|
1ee05e6a5a80fadf2c0d01c3d85668171edf2caa
|
fix conf.py to use correct version of mpmath
|
doc/source/conf.py
|
doc/source/conf.py
|
# -*- coding: utf-8 -*-
#
# mpmath documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 13 00:14:30 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys
# If your extensions are in another directory, add it here.
#sys.path.append('some/directory')
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath']
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix of source filenames.
source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'mpmath'
copyright = '2008, Fredrik Johansson'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
# NOTE(review): this imports whichever mpmath is first on sys.path, which
# may be an installed copy rather than the in-tree sources -- confirm the
# docs build against the intended version.
import mpmath
version = mpmath.__version__
# The full version, including alpha/beta/rc tags.
release = mpmath.__version__
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'mpmathdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
#latex_documents = []
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Interpret untagged `backticks` as math so docstrings can use $-free math.
default_role = 'math'
# Render pngmath formulas slightly larger and with gamma correction.
pngmath_dvipng_args = ['-gamma 1.5', '-D 110']
|
Python
| 0
|
@@ -579,17 +579,16 @@
here.%0D%0A
-#
sys.path
@@ -592,30 +592,24 @@
ath.
-append('some/directory
+insert(0, '../..
')%0D%0A
@@ -1147,17 +1147,17 @@
t = '200
-8
+9
, Fredri
|
4cb6b3a3e1b74dd83812469c472accfc22e4d699
|
Update consecutive-numbers-sum.py
|
Python/consecutive-numbers-sum.py
|
Python/consecutive-numbers-sum.py
|
# Time: O(sqrt(n))
# Space: O(1)
# Given a positive integer N,
# how many ways can we write it as a sum of
# consecutive positive integers?
#
# Example 1:
#
# Input: 5
# Output: 2
# Explanation: 5 = 5 = 2 + 3
# Example 2:
#
# Input: 9
# Output: 3
# Explanation: 9 = 9 = 4 + 5 = 2 + 3 + 4
# Example 3:
#
# Input: 15
# Output: 4
# Explanation: 15 = 15 = 8 + 7 = 4 + 5 + 6 = 1 + 2 + 3 + 4 + 5
# Note: 1 <= N <= 10 ^ 9.
class Solution(object):
    def consecutiveNumbersSum(self, N):
        """Return how many ways N can be written as a sum of consecutive
        positive integers.

        :type N: int
        :rtype: int

        Writing N = 2^k * M with M odd, each valid run length corresponds
        to exactly one odd divisor of M, so the answer is the number of odd
        divisors of N: the product of (exponent + 1) over odd prime factors.
        """
        result = 1
        # Strip all factors of two; they contribute no odd divisors.
        # BUG FIX: use floor division -- true division (/=) turns N into a
        # float under Python 3, losing exactness for large N.
        while N % 2 == 0:
            N //= 2
        i = 3
        while i * i <= N:
            count = 0
            while N % i == 0:
                N //= i
                count += 1
            result *= count + 1
            i += 2
        if N > 1:
            # Whatever remains is a single odd prime factor (exponent 1).
            result *= 2
        return result
|
Python
| 0.003216
|
@@ -593,16 +593,32 @@
2%5Ek * M
+, where M is odd
%0A
|
3dc8fde56be2438dae03e5d9d310fa2d19cd1ce2
|
Add multi-result testing
|
tests/test_multi.py
|
tests/test_multi.py
|
Python
| 0.000002
|
@@ -0,0 +1,1550 @@
+# Copyright 2011 OpenStack LLC.%0A# All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0Afrom dtest import *%0Afrom dtest.util import *%0A%0A%0A@repeat(2)%0Adef test_multi():%0A # Set up a list to record executions%0A recorded = %5B%5D%0A%0A # Now, define an inner function%0A def inner(*args, **kwargs):%0A # Place the arguments into the recorded list%0A recorded.append((args, kwargs))%0A%0A # Now, yield the inner function once...%0A yield ('inner1', inner, (1,), dict(kw=1))%0A%0A # Yield it again%0A yield ('inner2', inner, (2,), dict(kw=2))%0A%0A # Now, check if recorded has what we expect%0A assert_equal(len(recorded), 4)%0A assert_tuple_equal(recorded%5B0%5D%5B0%5D, (1,))%0A assert_dict_equal(recorded%5B0%5D%5B1%5D, dict(kw=1))%0A assert_tuple_equal(recorded%5B1%5D%5B0%5D, (1,))%0A assert_dict_equal(recorded%5B1%5D%5B1%5D, dict(kw=1))%0A assert_tuple_equal(recorded%5B2%5D%5B0%5D, (2,))%0A assert_dict_equal(recorded%5B2%5D%5B1%5D, dict(kw=2))%0A assert_tuple_equal(recorded%5B3%5D%5B0%5D, (2,))%0A assert_dict_equal(recorded%5B3%5D%5B1%5D, dict(kw=2))%0A
|
|
362c35a1753c908fa7496c0e050b9325420e405e
|
add missing migrations
|
corehq/apps/linked_domain/migrations/0012_auto_20200929_0809.py
|
corehq/apps/linked_domain/migrations/0012_auto_20200929_0809.py
|
Python
| 0.000012
|
@@ -0,0 +1,828 @@
+# Generated by Django 2.2.16 on 2020-09-29 08:09%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('linked_domain', '0011_auto_20200728_2316'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='domainlinkhistory',%0A name='model',%0A field=models.CharField(choices=%5B('app', 'Application'), ('custom_user_data', 'Custom User Data Fields'), ('custom_product_data', 'Custom Product Data Fields'), ('custom_location_data', 'Custom Location Data Fields'), ('roles', 'User Roles'), ('toggles', 'Feature Flags and Previews'), ('fixture', 'Lookup Table'), ('case_search_data', 'Case Search Settings'), ('report', 'Report'), ('data_dictionary', 'Data Dictionary'), ('keyword', 'Keyword')%5D, max_length=128),%0A ),%0A %5D%0A
|
|
fcdbd563a2823f4950ccf2fd164637096d55987f
|
Add HavocInc
|
dosagelib/plugins/h.py
|
dosagelib/plugins/h.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from re import compile, escape
from ..scraper import _BasicScraper, _ParserScraper
from ..util import tagre
from ..helpers import bounceStarter, indirectStarter
from .common import _ComicControlScraper, _WordPressScraper, _WPNaviIn
class HagarTheHorrible(_BasicScraper):
    # Strips are hosted on hagardunor.net; index pages are numbered galleries.
    url = 'http://www.hagarthehorrible.net/'
    stripUrl = 'http://www.hagardunor.net/comicstrips_us.php?serietype=9&colortype=1&serieno=%s'
    firstStripUrl = stripUrl % '1'
    # A gallery page can embed several strip images.
    multipleImagesPerStrip = True
    imageSearch = compile(tagre("img", "src", r'(stripus\d+/(?:Hagar_The_Horrible_?|h)\d+[^ >]+)', quote=""))
    prevUrl = r'(comicstrips_us\.php\?serietype\=9\&colortype\=1\&serieno\=\d+)'
    prevSearch = compile(tagre("a", "href", prevUrl, after="Previous"))
    help = 'Index format: number'
    def starter(self):
        """Return last gallery link."""
        url = 'http://www.hagardunor.net/comics.php'
        data = self.getPage(url)
        pattern = compile(tagre("a", "href", self.prevUrl))
        # Iterate every gallery link and keep only the last match.
        # NOTE(review): if the page yields no matches, starturl is never
        # bound and the return raises NameError -- confirm the overview
        # page always contains at least one gallery link.
        for starturl in self.fetchUrls(url, data, pattern):
            pass
        return starturl
# "Hiatus", navigation missing
class _HappyJar(_WordPressScraper):
    # Comic is on hiatus and site navigation is missing; the leading
    # underscore in the class name presumably keeps it out of the active
    # scraper list -- TODO confirm the dosage naming convention.
    url = 'http://www.happyjar.com/'
class HarkAVagrant(_BasicScraper):
    # Pages are addressed by a numeric id query parameter.
    url = 'http://www.harkavagrant.com/'
    rurl = escape(url)
    starter = bounceStarter
    stripUrl = url + 'index.php?id=%s'
    firstStripUrl = stripUrl % '1'
    imageSearch = compile(tagre("img", "src", r'(%s[^"]+)' % rurl,
                                after='BORDER'))
    prevSearch = compile(tagre("a", "href", r'(%sindex\.php\?id=\d+)' % rurl) +
                         tagre("img", "src", "buttonprevious.png"))
    nextSearch = compile(tagre("a", "href", r'(%sindex\.php\?id=\d+)' % rurl) +
                         tagre("img", "src", "buttonnext.png"))
    help = 'Index format: number'
    def namer(self, image_url, page_url):
        """Name images as "<page id>-<original file name>"."""
        filename = image_url.rsplit('/', 1)[1]
        num = page_url.rsplit('=', 1)[1]
        return '%s-%s' % (num, filename)
class Hipsters(_WordPressScraper):
    # Standard WordPress comic; the base class handles navigation.
    url = 'http://www.hipsters-comic.com/'
    firstStripUrl = 'http://www.hipsters-comic.com/comic/hip01/'
class HijinksEnsue(_WPNaviIn):
    # The newest strip is reached indirectly via the "Latest HijiNKS ENSUE"
    # link on the front page.
    url = 'http://hijinksensue.com/'
    latestSearch = '//a[text()="Latest HijiNKS ENSUE"]'
    firstStripUrl = 'http://hijinksensue.com/comic/who-is-your-daddy-and-what-does-he-do/'
    starter = indirectStarter
class HijinksEnsueClassic(_WPNaviIn):
    # Completed archive; endOfLife flags that no new strips are expected.
    url = 'http://hijinksensue.com/comic/open-your-eyes/'
    firstStripUrl = 'http://hijinksensue.com/comic/a-soul-as-black-as-eyeliner/'
    endOfLife = True
class HijinksEnsueConvention(_WPNaviIn):
    # Completed convention-sketch series; endOfLife marks it finished.
    url = 'http://hijinksensue.com/comic/emerald-city-comicon-2015-fancy-sketches-part-4/'
    firstStripUrl = 'http://hijinksensue.com/comic/whatever-dad-im-outta-here/'
    endOfLife = True
class HijinksEnsuePhoto(_WPNaviIn):
    # Completed photo-comic series; endOfLife marks it finished.
    url = 'http://hijinksensue.com/comic/emerald-city-comicon-2015-fancy-photo-comic-part-2/'
    firstStripUrl = 'http://hijinksensue.com/comic/san-diego-comic-con-fancy-picto-comic-pt-1/'
    endOfLife = True
class Housepets(_WordPressScraper):
    # WordPress comic; strips live under /comic/<slug>/.
    url = 'http://www.housepetscomic.com/'
    stripUrl = url + 'comic/%s/'
    # NOTE(review): firstStripUrl is a date-style path and does not match
    # the comic/%s pattern above -- confirm this is intentional.
    firstStripUrl = '2008/06/02/when-boredom-strikes'
class HowToBeAWerewolf(_ComicControlScraper):
    """Scraper for the "How To Be A Werewolf" comic (ComicControl site)."""
    url = 'http://howtobeawerewolf.com/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % 'coming-february-3rd'

    def namer(self, imageUrl, pageUrl):
        """Drop the numeric date prefix from downloaded image names."""
        name = imageUrl.rsplit('/', 1)[-1]
        # Names without a leading digit carry no date prefix; keep them.
        if not name[0].isdigit():
            return name
        # Otherwise keep only the part after the first dash.
        return name.split('-', 1)[1]
|
Python
| 0.999232
|
@@ -2257,16 +2257,180 @@
name)%0A%0A%0A
+class HavocInc(_WordPressScraper):%0A url = 'http://www.radiocomix.com/havoc-inc/'%0A stripUrl = url + 'comic/%25s/'%0A firstStripUrl = stripUrl %25 'havoc-cover'%0A%0A%0A
class Hi
|
b43aef2a3ebd54a72791ae635dca4d7544c0ad23
|
Add Flask test server
|
test_server.py
|
test_server.py
|
Python
| 0.000001
|
@@ -0,0 +1,166 @@
+from flask import Flask%0Aapp = Flask(__name__, static_folder=%22.%22, static_url_path=%22%22)%0A%0Aif __name__ == %22__main__%22:%0A app.run(debug=True, host=%22127.0.0.1%22, port=8765)%0A
|
|
3352cb3f38db4f68decf6de60528ad6ff07ce613
|
Create mssql_import.py
|
database/mssql_import.py
|
database/mssql_import.py
|
Python
| 0.000002
|
@@ -0,0 +1,1134 @@
+# -*-coding:cp949-*-%0A# vim: set et:ts=4:sw=4%0Aimport pyodbc%0A%0Adef import_table(file, table):%0A connection = pyodbc.connect(%0A r'DRIVER=%7BSQL Server%7D;'%0A r'SERVER=127.0.0.1%5Cinstance;'%0A r'DATABASE=database;'%0A r'UID=id;'%0A r'PWD=passwd')%0A%0A with open (file, 'r') as f:%0A lines = f.readlines()%0A cursor = connection.cursor()%0A for line in lines%5B1:%5D: # 1st line skip%0A line = line.replace('%5Cn', '')%0A query = 'insert into ' + table + ' values (%7B0%7D)'.decode('cp949')%0A query = query.format(line.decode('cp949'))%0A #print(query)%0A cursor.execute(query)%0A cursor.commit()%0A %0A print %22%25d %EA%B1%B4 %EC%99%84%EB%A3%8C%22 %25 (len(lines)-1,)%0A%0Aimport sys%0Aimport os.path%0A%0Aif len(sys.argv) %3C 2 :%0A print u'%ED%8C%8C%EC%9D%BC%EB%AA%85%EC%9D%84 %EC%9E%85%EB%A0%A5%ED%95%98%EC%84%B8%EC%9A%94'.encode('cp949')%0A print u'%ED%8C%8C%EC%9D%BC%EB%AA%85%EC%97%90 %EC%A0%81%ED%9E%8C %ED%85%8C%EC%9D%B4%EB%B8%94%EB%A1%9C %EB%8D%B0%EC%9D%B4%ED%84%B0%EB%A5%BC %EC%9E%85%EB%A0%A5%ED%95%A9%EB%8B%88%EB%8B%A4'.encode('cp949')%0A print u'%EC%82%AC%EC%9A%A9%EB%B2%95 :%5Cn%5Ctimport.exe db_name..tablename.csv'.encode('cp949')%0A sys.exit(0)%0A%0Afile = sys.argv%5B1%5D%0Atbl = os.path.splitext(os.path.basename(file))%5B0%5D%0A# print file # %22.%5Cdb_name..tablename.csv%22%0A# print tbl # %22db_name..tablename%22%0Aimport_table(file, tbl.decode('cp949'))%0A
|
|
d67b685340cf2db7cd31b50a4484c29625b8fea5
|
Remove pixel test fail expectation
|
content/test/gpu/gpu_tests/pixel_expectations.py
|
content/test/gpu/gpu_tests/pixel_expectations.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
  def SetExpectations(self):
    # Registers GPU pixel tests expected to fail, optionally scoped to
    # OS / GPU conditions (see the condition list in the header comment).
    # Sample Usage:
    # self.Fail('Pixel.Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)

    self.Fail('Pixel.Canvas2DRedBox', bug=384551)
    self.Fail('Pixel.CSS3DBlueBox', bug=384551)
    self.Fail('Pixel.WebGLGreenTriangle', bug=384551)

    # pass kept so the method stays valid if the expectations above are
    # ever removed.
    pass
|
Python
| 0.000003
|
@@ -859,161 +859,8 @@
3)%0A%0A
- self.Fail('Pixel.Canvas2DRedBox', bug=384551)%0A self.Fail('Pixel.CSS3DBlueBox', bug=384551)%0A self.Fail('Pixel.WebGLGreenTriangle', bug=384551)%0A%0A
|
b0116fb3b52e9c9f0cc156bc49f1400261ed879c
|
ADD mechanism table
|
packages/syft/src/syft/core/node/common/node_table/mechanism.py
|
packages/syft/src/syft/core/node/common/node_table/mechanism.py
|
Python
| 0.000001
|
@@ -0,0 +1,726 @@
+# stdlib%0Afrom typing import Any%0A%0A# third party%0Afrom sqlalchemy import Column%0Afrom sqlalchemy import LargeBinary%0Afrom sqlalchemy import String%0Afrom sqlalchemy import Integer%0A%0A# syft absolute%0Afrom syft import deserialize%0Afrom syft import serialize%0A%0A# relative%0Afrom . import Base%0A%0A%0Aclass Mechanism(Base):%0A __tablename__ = %22mechanism%22%0A%0A id = Column(Integer(), primary_key=True, autoincrement=True)%0A mechanism_bin = Column(LargeBinary(3072), default=None)%0A%0A @property%0A def obj(self) -%3E Any:%0A return deserialize(self.mechanism_bin, from_bytes=True) # TODO: techdebt fix%0A%0A @obj.setter%0A def obj(self, value: Any) -%3E None:%0A self.mechanism_bin = serialize(value, to_bytes=True) # TODO: techdebt fix%0A
|
|
05e90660ab0072a50dd17a2afa1e08c93cf694e8
|
Create reverse_k.py
|
python/linked_list/reverse_k.py
|
python/linked_list/reverse_k.py
|
Python
| 0.011909
|
@@ -0,0 +1,846 @@
+public ListNode reverseKGroup(ListNode head, int k) %7B%0A ListNode curr = head;%0A int count = 0;%0A while (curr != null && count != k) %7B // find the k+1 node%0A curr = curr.next;%0A count++;%0A %7D%0A if (count == k) %7B // if k+1 node is found%0A curr = reverseKGroup(curr, k); // reverse list with k+1 node as head%0A // head - head-pointer to direct part, %0A // curr - head-pointer to reversed part;%0A while (count-- %3E 0) %7B // reverse current k-group: %0A ListNode tmp = head.next; // tmp - next head in direct part%0A head.next = curr; // preappending %22direct%22 head to the reversed list %0A curr = head; // move head of reversed part to a new node%0A head = tmp; // move %22direct%22 head to the next node in direct part%0A %7D%0A head = curr;%0A %7D%0A return head;%0A%7D%0A
|
|
5f67d1fe99817dc19e9b670532e3667f8fa9479e
|
rewrite IMAGE_PATH, too
|
python/qidoc/doxygen_project.py
|
python/qidoc/doxygen_project.py
|
import os
import qisys.command
import qidoc.doxygen
import qidoc.project
class DoxygenProject(qidoc.project.DocProject):
    """A doc project using doxygen."""
    def __init__(self, doc_worktree, project, name,
                 depends=None, dest=None):
        # doc_type must be set before the base class initializes so the
        # worktree can classify this project.
        self.doc_type = "doxygen"
        super(DoxygenProject, self).__init__(doc_worktree, project, name,
                                             depends=depends, dest=dest)

    @property
    def in_doxyfile(self):
        """Source Doxyfile written by the project author."""
        return os.path.join(self.path, "Doxyfile")

    @property
    def out_doxyfile(self):
        """Generated Doxyfile consumed by build()."""
        return os.path.join(self.build_dir, "Doxyfile")

    @property
    def tagfile(self):
        """Tag file other doxygen projects can reference via TAGFILES."""
        return os.path.join(self.build_dir, self.name + ".tag")

    def configure(self, **kwargs):
        """ Create a correct Doxyfile in self.build_dir.

        * Force OUTPUT_DIRECTORY
        * Rewrite INPUT, EXAMPLE_PATH and IMAGE_PATH
        * Add @INCLUDE_PATH and @INCLUDE statements if we
          have a template

        """
        version = kwargs.get("version")
        rel_paths = kwargs.get("rel_paths", False)
        in_conf = qidoc.doxygen.read_doxyfile(self.in_doxyfile)
        out_conf = in_conf.copy()
        out_conf["OUTPUT_DIRECTORY"] = self.build_dir
        out_conf["GENERATE_XML"] = "YES"  # required by qiapidoc and doxylink
        out_conf["GENERATE_HTML"] = "YES"
        out_conf["GENERATE_LATEX"] = "NO"
        out_conf["PROJECT_NAME"] = in_conf.get("PROJECT_NAME", self.name)
        out_conf["WARNINGS"] = "YES"
        out_conf["QUIET"] = "YES"
        out_conf["GENERATE_TAGFILE"] = self.tagfile
        doxydeps = list()
        # No need to recurse the dependencies here, doxygen does it for us.
        for dep_name in self.depends:
            doc_project = self.doc_worktree.get_doc_project(dep_name, raises=False)
            if doc_project and doc_project.doc_type == "doxygen":
                doxydeps.append(doc_project)
        if doxydeps:
            out_conf["TAGFILES"] = ""
            for doxydep in doxydeps:
                if rel_paths:
                    dep_path = os.path.relpath(doxydep.dest, self.dest)
                else:
                    dep_path = doxydep.html_dir
                out_conf["TAGFILES"] += doxydep.tagfile + "=" + dep_path + " "
        if version:
            out_conf["PROJECT_NUMBER"] = version
        # BUG FIX: IMAGE_PATH was previously copied verbatim, so relative
        # image paths broke once doxygen ran from the build directory.
        # Rewrite it to absolute paths, exactly like INPUT and EXAMPLE_PATH.
        for path_key in ["INPUT", "EXAMPLE_PATH", "IMAGE_PATH"]:
            in_value = in_conf.get(path_key)
            if not in_value:
                continue
            out_value = self.make_rel_paths(in_value)
            out_conf[path_key] = out_value
        qidoc.doxygen.write_doxyfile(out_conf, self.out_doxyfile)

    def build(self, **kwargs):
        """ Run doxygen from the build directory """
        cmd = ["doxygen", self.out_doxyfile]
        qisys.command.call(cmd, cwd=self.build_dir)

    def install(self, destdir):
        """Copy the generated HTML tree into destdir."""
        # NOTE(review): this uses qisys.sh but only qisys.command is
        # imported at the top of the file -- confirm qisys.sh is brought
        # into scope by the qisys package.
        qisys.sh.install(self.html_dir, destdir)

    def make_rel_paths(self, value):
        """ Transform a relative path to the source into an
        absolute path (usable from a build directory)

        """
        res = list()
        for path in value.split():
            full_path = os.path.join(self.path, path)
            res.append(full_path)
        return " ".join(res)
|
Python
| 0.000026
|
@@ -876,20 +876,17 @@
te INPUT
- and
+,
EXAMPLE
@@ -890,16 +890,31 @@
PLE_PATH
+ and IMAGE_PATH
%0A
@@ -2405,16 +2405,30 @@
LE_PATH%22
+, %22IMAGE_PATH%22
%5D:%0A
|
85c452782722c3f58db305b5af5bd3286b0450b0
|
Fix some docstrings
|
axelrod/tournament.py
|
axelrod/tournament.py
|
"""
Recreate Axelrod's tournament
"""
import inspect
import itertools
class Axelrod:
    """
    A class for an iterated prisoner's dilemma.

    Take a list of players (see the Player class):

    >>> P1 = Defector()
    >>> P2 = Cooperator()
    >>> axelrod = Axelrod(P1, P2)
    >>> axelrod.round_robin(turns=10)
    >>> for player in sorted(axelrod.players, key=lambda x: x.score):
    ...     print player, player.score
    Defector 0
    Cooperator 50
    """
    def __init__(self, *args):
        """
        Initiate a tournament of players
        """
        self.players = list(args)
        # Match results keyed by (class of p1, class of p2); only
        # populated for pairs of deterministic strategies.
        self.deterministic_cache = {}
    def round_robin(self, turns=200):
        """
        Plays a round robin where each match lasts turns.

        We can cache scores for pairs of deterministic strategies, since the outcome
        will always be the same. There are many possible keys to cache by, but perhaps
        the most versatile is a tuple with the classes of both players.
        """
        for p1, p2 in itertools.combinations(self.players, 2):
            cl1 = p1.__class__
            cl2 = p2.__class__
            key = (cl1, cl2)
            # Replay the match if either player is stochastic or the
            # deterministic result has not been cached yet.
            if p1.stochastic or p2.stochastic or key not in self.deterministic_cache:
                turn = 0
                p1.reset()
                p2.reset()
                while turn < turns:
                    turn += 1
                    p1.play(p2)
                scores = self.calculate_scores(p1, p2)
                # Only a deterministic pairing produces a reusable result.
                if not (p1.stochastic or p2.stochastic):
                    self.deterministic_cache[key] = scores
            else:
                scores = self.deterministic_cache[key]
            p1.score += scores[0]
            p2.score += scores[1]
    def tournament(self, turns=200, repetitions=10):
        """
        Runs repetitions of the round robin (this is mainly to handle stochastic strategies).

        Returns a dictionary containing the scores for every repetition.
        """
        dic = {player:[] for player in self.players}
        for repetition in range(repetitions):
            self.round_robin(turns=turns)
            for player in self.players:
                dic[player].append(player.score)  # Record score
                player.score = 0  # Reset score for the next repetition
        return dic
    def calculate_scores(self, p1, p2):
        """
        Calculates the score for two players based their history and on following:

        - C vs C both get 2
        - D vs D both get 4
        - C vs D => C gets 5 and D gets 0

        NOTE(review): these payoffs look like costs (defecting against a
        cooperator scores 0, and the class doctest ranks Defector first at
        0) -- confirm lower totals are better.
        """
        # Payoff table indexed by the (p1 move, p2 move) pair.
        scores = {
            ('C', 'C'): (2, 2),
            ('D', 'D'): (4, 4),
            ('C', 'D'): (5, 0),
            ('D', 'C'): (0, 5),
        }
        s1, s2 = 0, 0
        for pair in zip(p1.history, p2.history):
            score = scores[pair]
            s1 += score[0]
            s2 += score[1]
        return s1, s2
return s1, s2
class Player(object):
    """Abstract base class for a tournament player.

    Subclasses implement strategy(); this class tracks history and score.
    """

    name = "Player"

    def __init__(self):
        """Start with an empty history and a score of zero."""
        self.history = []
        self.score = 0
        # A strategy counts as stochastic when the word "random" appears
        # anywhere in its class source.
        marker = "random"
        self.stochastic = marker in inspect.getsource(self.__class__)

    def play(self, opponent):
        """Exchange one move with opponent, recording both choices."""
        own_move = self.strategy(opponent)
        their_move = opponent.strategy(self)
        self.history.append(own_move)
        opponent.history.append(their_move)

    def reset(self):
        """Clear the move history.

        Strategies that add extra attributes should override this and
        reset those attributes as well, not just the history.
        """
        self.history = []

    def strategy(self, opponent):
        """Placeholder strategy; subclasses override this."""
        return None

    def __repr__(self):
        """The string representation is the strategy's display name."""
        return self.name
|
Python
| 0.998602
|
@@ -1,12 +1,11 @@
%22%22%22
-%0A
Recreate
@@ -25,19 +25,17 @@
urnament
-%0A%0A%0A
+.
%22%22%22%0A%0Aimp
@@ -87,21 +87,16 @@
%0A %22%22%22
-%0A
A class
@@ -526,33 +526,24 @@
%0A %22%22%22
-%0A
Initiate a t
@@ -562,25 +562,17 @@
players
-%0A
+.
%22%22%22%0A
@@ -689,25 +689,16 @@
%22%22%22
-%0A
Plays a
@@ -1790,25 +1790,16 @@
%22%22%22
-%0A
Runs rep
@@ -2327,25 +2327,16 @@
%22%22%22
-%0A
Calculat
@@ -2890,17 +2890,8 @@
%22An
-abstract
clas
@@ -2904,16 +2904,111 @@
a player
+ in the tournament.%0A%0A This is an abstract base class, not intended to be used directly.%0A
%22%22%22%0A%0A
@@ -3060,25 +3060,16 @@
%22%22%22
-%0A
Initiate
@@ -3107,29 +3107,17 @@
for
-every
+a
player
-%0A
+.
%22%22%22%0A
@@ -3279,25 +3279,16 @@
%22%22%22
-%0A
This pit
@@ -3323,139 +3323,8 @@
ther
-: note that this will raise%0A an error if no strategy method is defined (which are defined through%0A class inheritance)
.%0A
@@ -3500,25 +3500,16 @@
%22%22%22
-%0A
Resets h
@@ -3516,16 +3516,17 @@
istory.%0A
+%0A
@@ -3603,16 +3603,24 @@
hould be
+%0A
re-writ
@@ -3690,16 +3690,24 @@
but also
+%0A
rest al
@@ -3831,16 +3831,26 @@
ceholder
+ strategy.
%22%22%22%0A
|
f18d70ce9c9e86ca184da939f9ffb193b32d981d
|
add 135
|
vol3/135.py
|
vol3/135.py
|
Python
| 0.999991
|
@@ -0,0 +1,425 @@
+if __name__ == %22__main__%22:%0A L = 10 ** 6%0A sol = %5B0%5D * (L + 1)%0A for u in xrange(1, L + 1):%0A for v in xrange(1, L + 1):%0A if u * v %3E L:%0A break%0A if 3 * v %3C= u:%0A continue%0A if (u + v) %25 4 == 0 and (3 * v - u) %25 4 == 0:%0A sol%5Bu * v%5D += 1%0A ans = 0%0A for i in range(1, L + 1):%0A if sol%5Bi%5D == 10:%0A ans += 1%0A print ans%0A
|
|
64b2435b77044c1a258433f8794d9bd0a431b61a
|
Add accept header
|
travis-ping.py
|
travis-ping.py
|
#Based on https://github.com/FiloSottile/travis-cron
import urllib2
import json
import sys
def api_call(url, token=None, data=None):
print url
if data:
data = json.dumps(data)
req = urllib2.Request(url, data)
if data:
req.add_header('Content-Type', 'application/json; charset=UTF-8')
if token:
req.add_header('Authorization', 'token ' + token)
p = urllib2.urlopen(req)
return json.loads(p.read())
def travis_ping(travis_token, repository):
last_build_id = api_call('https://api.travis-ci.org/repos/{}/builds'.format(repository))[0]['id']
print "Got build ID", last_build_id
return api_call('https://api.travis-ci.org/builds/{}/restart'.format(last_build_id), travis_token, { 'build_id': last_build_id })['result']
def main():
#print sys.argv[1][0]
#print sys.argv[2][0]
travis_ping(sys.argv[1], sys.argv[2])
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -382,16 +382,86 @@
token)%0A
+ req.add_header(%22Accept%22 , 'application/vnd.travis-ci.2+json')%0A
p =
|
b7d219a5afbf349385dbe9b2712f34f5e756e3e6
|
add flask app
|
zalando/app.py
|
zalando/app.py
|
Python
| 0.000003
|
@@ -0,0 +1,196 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Afrom flask import Flask%0Aapp = Flask(__name__)%0A%0A@app.route('/')%0Adef hello_world():%0A return 'Hello World!'%0A%0Aif __name__ == '__main__':%0A app.run()
|
|
90b92a1977c32dd660533567c0d5034b93d5c9c7
|
Add smarts to cope with slug clashes with other places with the same names.
|
pombola/core/management/commands/core_create_places_from_mapit_entries.py
|
pombola/core/management/commands/core_create_places_from_mapit_entries.py
|
# This script will copy areas from mapit to core.places, including creating the
# place kind if required.
# import re
# import sys
from django.core.management.base import LabelCommand
from mapit.models import Type
from pombola.core.models import Place, PlaceKind
from django.template.defaultfilters import slugify
class Command(LabelCommand):
help = 'Copy mapit.areas to core.places'
args = '<mapit.type.code>'
def handle_label(self, mapit_type_code, **options):
# load the mapit type
mapit_type = Type.objects.get(code=mapit_type_code)
# if needed create the core placetype
placekind, created = PlaceKind.objects.get_or_create(
name=mapit_type.description,
defaults={
'slug': slugify(mapit_type.description)
}
)
# create all the places as needed for all mapit areas of that type
for area in mapit_type.areas.all():
print area.name
place, created = Place.objects.get_or_create(
name=area.name,
kind=placekind,
defaults={
'slug': slugify(area.name),
}
)
place.mapit_area = area
place.save()
|
Python
| 0
|
@@ -938,16 +938,17 @@
.all():%0A
+%0A
@@ -955,23 +955,509 @@
-print area.name
+# There may be a slug clash as several areas have the same name but%0A # are different placekinds. Create the slug and then check to see%0A # if the slug is already in use for a placekind other than ours. If%0A # it is append the placekind to the slug.%0A slug = slugify(area.name)%0A%0A if Place.objects.filter(slug=slug).exclude(kind=placekind).exists():%0A slug = slug + '-' + placekind.slug%0A%0A print %22'%25s' (%25s)%22 %25 (area.name, slug)
%0A
@@ -1634,30 +1634,16 @@
g': slug
-ify(area.name)
,%0A
@@ -1672,21 +1672,9 @@
)%0A
-
%0A
+
@@ -1730,20 +1730,8 @@
.save()%0A
-
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.