| commit (string, 40 chars) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars) |
|---|---|---|---|---|---|---|---|
e238d7cfe1a4eace62ba6a9d199813f317c34c6a
|
Create AnalyticalDistributions.py
|
effective_quadratures/AnalyticalDistributions.py
|
effective_quadratures/AnalyticalDistributions.py
|
Python
| 0
|
@@ -0,0 +1,724 @@
+#!/usr/bin/env python
import numpy as np
from scipy.special import erf
"""

    Analytical definitions for some sample PDFs. Functions in this file are
    called by PolyParams when constructing "custom" orthogonal polynomials,
    which require Stieltejes procedure for computing the recurrence coefficients.

    Pranay Seshadri
    ps583@cam.ac.uk

    Copyright (c) 2016 by Pranay Seshadri
"""
def Gaussian(N, mu, sigma):
    x = np.linspace(-15*sigma, 15*sigma, N) # x scaled by the standard deviation!
    w = 1.0/( np.sqrt(2 * sigma**2 * np.pi) * np.exp(-(x - mu)**2 * 1.0/(2 * sigma**2) )
    w = w/np.sum(w) # normalize!
    return x, w

def truncatedGaussian(N, mu, sigma, a, b):


def GaussianPDF():

def GaussianCDF():
|
|
0263eb5a96f610b5a2e77d11ad26e892d78c9eda
|
add 191
|
vol4/191.py
|
vol4/191.py
|
Python
| 0.999998
|
@@ -0,0 +1,216 @@
+if __name__ == "__main__":
    lst = [1, 3, 0, 2, 1, 0, 0, 1]
    while lst[0] < 30:
        n, t, a, b, c, d, e, f = lst
        lst = [n + 1, 2 * t + b - a, c, 2 * b - a + d, t - (a + c), e, f, t]
        print lst[1]
|
|
84d3e73fd7ff79e36268fce4b470af8fa3617f0c
|
Add couple of routes for the admin blueprint
|
app/admin/routes.py
|
app/admin/routes.py
|
Python
| 0
|
@@ -0,0 +1,944 @@
+from flask import render_template, redirect, url_for, flash, request
from flask.ext.login import login_required, current_user

from . import admin
from .forms import ProfileForm

from .. import db
from ..models import User

@admin.route('/')
@login_required
def index():
    return render_template('admin/user.html', user=current_user)

@admin.route('/edit_user', methods=['GET', 'POST'])
@login_required
def edit_user():
    form = ProfileForm()

    if form.validate_on_submit():
        current_user.name = form.name.data
        current_user.location = form.location.data
        current_user.bio = form.bio.data

        db.session.add(current_user._get_current_object())
        db.session.commit()

        flash("Síðan hefur verið uppfærð")

        return redirect(url_for('admin.index'))

    form.name.data = current_user.name
    form.location.data = current_user.location
    form.bio.data = current_user.bio

    return render_template('admin/edit_user.html', form=form)
|
|
12bb21ca19a36465241c85b4f69838294c817630
|
Update to version 3.4.2
|
lib/ansiblelint/version.py
|
lib/ansiblelint/version.py
|
__version__ = '3.4.1'
|
Python
| 0
|
@@ -16,7 +16,7 @@
3.4.
-1
+2
'\n
|
4e4d4365a0ef1a20d181f1015152acb116226e3d
|
Add device class for low battery (#10829)
|
homeassistant/components/binary_sensor/__init__.py
|
homeassistant/components/binary_sensor/__init__.py
|
"""
Component to interface with binary sensors.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/binary_sensor/
"""
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.entity import Entity
from homeassistant.const import (STATE_ON, STATE_OFF)
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
DOMAIN = 'binary_sensor'
SCAN_INTERVAL = timedelta(seconds=30)
ENTITY_ID_FORMAT = DOMAIN + '.{}'
DEVICE_CLASSES = [
'cold', # On means cold (or too cold)
'connectivity', # On means connection present, Off = no connection
'gas', # CO, CO2, etc.
'heat', # On means hot (or too hot)
'light', # Lightness threshold
'moisture', # Specifically a wetness sensor
'motion', # Motion sensor
'moving', # On means moving, Off means stopped
'occupancy', # On means occupied, Off means not occupied
'opening', # Door, window, etc.
'plug', # On means plugged in, Off means unplugged
'power', # Power, over-current, etc
'presence', # On means home, Off means away
'safety', # Generic on=unsafe, off=safe
'smoke', # Smoke detector
'sound', # On means sound detected, Off means no sound
'vibration', # On means vibration detected, Off means no vibration
]
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.In(DEVICE_CLASSES))
@asyncio.coroutine
def async_setup(hass, config):
"""Track states and offer events for binary sensors."""
component = EntityComponent(
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
yield from component.async_setup(config)
return True
# pylint: disable=no-self-use
class BinarySensorDevice(Entity):
"""Represent a binary sensor."""
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return None
@property
def state(self):
"""Return the state of the binary sensor."""
return STATE_ON if self.is_on else STATE_OFF
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return None
|
Python
| 0
|
@@ -625,16 +625,70 @@
SES = [\n
+ 'battery', # On means low, Off means normal\n
'col
|
4d2b0fdb4ee3289c7a2ba435f77f70c978cdd166
|
add script to fetch recursively [bug] infinite loop
|
fetch_recursively.py
|
fetch_recursively.py
|
Python
| 0
|
@@ -0,0 +1,1865 @@
+#!/usr/bin/env python
from sys import argv
import random 
import urllib




def fetch(src_url):
	## first fetch the source HTML page
	num = 100000 * random.random()
	filename = str(num)
	urllib.urlretrieve(src_url, filename=filename)
	txt = open(filename, "r")
	for line in txt:
	    index = line.find(".pdf")
		index_ppt = line.find(".ppt")
	    if index != -1 or index_ppt != -1:
	        lists = line.split("\"")
	        rets = [list for list in lists if list.find(".pdf") != -1 or list.find(".ppt") != -1]
	        for ret in rets:
				# for relative path, combine it with previous path
	            if line.find("http:") == -1:
	                fetch_url = url + str(ret)
	                name = str(ret).split("/")[-1]
					if fetch_url.find("<") == -1 or fetch_url.find(">") == -1:
						print fetch_url + " ---> " + name
						urllib.urlretrieve(fetch_url, filename=name)
				# for absolute path, just use it
	            else:
	                name = str(ret).split("/")[-1]
					# fix bugs: ret may not contain "http", just with "ppt/pdf"(line has but ret not)	
					if ret.find("http:") != -1:
						print ret + " ---> " + name
	        	urllib.urlretrieve(ret, filename=name)
		else:
			if line.find("http:") != -1: # there is an url
				lists = line.split("\"")
				rets = [list for list in lists if list.find(".htm") != -1 or list.find(".html") != -1]
				for ret in rets:
					if ret.find("http:") != -1:
						print "Now there is a new url, fetch it:%s"%ret
						fetch(ret)


### This needs to make sure again and again!
url="http://cs224d.stanford.edu/"
print "NOTE: add url webpage and make sure the fetch url is correct, especially the base url!"

src_url = argv[1]
print "source webpage: " + src_url
fetch(src_url)
|
|
2ea9f36c8eaf796011d93c361e663b01ba259842
|
Call new doc sync in repair command
|
src/sentry/runner/commands/repair.py
|
src/sentry/runner/commands/repair.py
|
"""
sentry.runner.commands.repair
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import click
from sentry.runner.decorators import configuration
@click.command()
@configuration
def repair():
"Attempt to repair any invalid data."
click.echo('Forcing documentation sync')
from sentry.tasks.sync_docs import sync_docs
sync_docs()
from sentry.models import Activity, Project, ProjectKey
click.echo('Creating missing project keys')
queryset = Project.objects.filter(key_set__isnull=True)
for project in queryset:
try:
ProjectKey.objects.get_or_create(
project=project,
)
except ProjectKey.MultipleObjectsReturned:
pass
from django.db import connection
click.echo("Correcting Group.num_comments counter")
cursor = connection.cursor()
cursor.execute("""
UPDATE sentry_groupedmessage SET num_comments = (
SELECT COUNT(*) from sentry_activity
WHERE type = %s and group_id = sentry_groupedmessage.id
)
""", [Activity.NOTE])
|
Python
| 0
|
@@ -234,16 +234,26 @@
nction\n\n
+import os\n
import c
@@ -464,19 +464,25 @@
try.
-tasks.sync_
+utils.integration
docs
@@ -502,23 +502,150 @@
docs
-\n    sync_docs(
+, DOC_FOLDER\n    if os.access(DOC_FOLDER, os.W_OK):\n        sync_docs()\n    else:\n        click.echo(' - skipping (path cannot be written to)'
)\n\n
|
eec339635bee64d3e50a29167a639a93bc40a3a3
|
Create set selections for debconf
|
genes/debconf/set.py
|
genes/debconf/set.py
|
Python
| 0
|
@@ -0,0 +1,597 @@
+import os
from subprocess import call
from functools import partial

#TODO: stop using sudo or ensure it exists
#TODOE: specify user to run as
#TODO: utilize functools partial to handle some of the above functionality
class Config:
    SET_SELECTIONS = ['sudo', '-E', 'debconf-set-selections']
    ENV = os.environ.copy()
    ENV['DEBIAN_FRONTEND'] = "noninteractive"
    ENV_CALL = partial(call, env=ENV)

def set_selections(*selections):
    if selections:
        Config.ENV_CALL(['echo'] + list(selections) + ['|'] + Config.SET_SELECTIONS)
    else:
        #FIXME: add error
        pass
|
|
7a4857682567b5a23f940e05189e02d797599d51
|
Add tests/test_00_info.py that simply reports the icat version number and package directory to the terminal.
|
tests/test_00_info.py
|
tests/test_00_info.py
|
Python
| 0.000006
|
@@ -0,0 +1,1091 @@
+"""Report version info about python-icat being tested.
"""

from __future__ import print_function
import pytest
import os.path
import icat

class Reporter(object):
    """Cumulate messages and report them later using a terminalreporter.
    """

    def __init__(self, terminalreporter):
        super(Reporter, self).__init__()
        self.terminal = terminalreporter
        self.msgs = []

    def addmsg(self, m):
        self.msgs.append(m)

    def flush(self):
        for m in self.msgs:
            self.terminal.write_line("  " + m)
        self.msgs = []

@pytest.fixture()
def terminal(pytestconfig):
    return pytestconfig.pluginmanager.getplugin('terminalreporter')

@pytest.fixture()
def diag(request, terminal):
    rep = Reporter(terminal)
    request.addfinalizer(rep.flush)
    return rep

def test_info(diag):
    assert icat.__version__
    assert icat.__revision__
    diag.addmsg("Version: python-icat %s (%s)" 
                % (icat.__version__, icat.__revision__))
    diag.addmsg("Path:    %s" 
                % os.path.dirname(os.path.abspath(icat.__file__)))
|
|
9afbdbb0fa7f77269b08680e9290fa2628d88caf
|
add problem 069
|
problem_069.py
|
problem_069.py
|
Python
| 0.000088
|
@@ -0,0 +1,1141 @@
+#!/usr/bin/env python
#-*-coding:utf-8-*-

'''
Euler's Totient function, φ(n) [sometimes called the phi function],
is used to determine the number of positive numbers less than or
equal to n which are relatively prime to n.
For example, as 1, 2, 4, 5, 7, and 8, are all less than nine and
relatively prime to nine, φ(9)=6.
The number 1 is considered to be relatively prime to every positive number,
so φ(1)=1.

Interestingly, φ(87109)=79180, and it can be seen that 87109 is a permutation of 79180.

Find the value of n, 1 < n < 107, for which φ(n) is a permutation of n and
the ratio n/φ(n) produces a minimum.
'''

import math
import timeit


def is_prime(n):
    for i in range(2, int(math.sqrt(n))+1):
        if n % i == 0:
            return False
    return True


def next_prime(n):
    while True:
        n += 1
        if is_prime(n):
            return n


def calc(n):
    prime = 1
    phi = 1
    while phi*prime <= n:
        phi *= prime
        prime = next_prime(prime)
    return phi


if __name__ == '__main__':
    print calc(1000000)
    print timeit.Timer('problem_069.calc(1000000)', 'import problem_069').timeit(1)
|
|
24dc16add818d98594c5a3c463fe718aea3f2923
|
version bump for 0.25.14.6.
|
oneflow/__init__.py
|
oneflow/__init__.py
|
VERSION = '0.25.14.5'
|
Python
| 0
|
@@ -13,12 +13,12 @@
0.25.14.
-5
+6
'\n\n
|
292fd8b37f9f0b28176ceb3c41f3b2f85b227049
|
bump version in __init__.py
|
oneflow/__init__.py
|
oneflow/__init__.py
|
VERSION = '0.20.11.14'
|
Python
| 0.000023
|
@@ -18,7 +18,7 @@
11.1
-4
+5
'\n
|
793e4f3f56851db954b3e656520165b6a2351ea7
|
Correct the placement of call-convention specifier.
|
glad/lang/c/debug.py
|
glad/lang/c/debug.py
|
from glad.lang.c.generator import CGenerator
DEFAULT_DEBUG_IMPL = '''
{return_def}
_pre_call_callback("{name}", {args_callback});
{return_assign} glad_{name}({args});
_post_call_callback("{name}", {args_callback});
{return_return}
'''
DEBUG_HEADER = '''
#define GLAD_DEBUG
typedef void (* GLADcallback)(const char *name, void *funcptr, int len_args, ...);
GLAPI void glad_set_pre_callback(GLADcallback cb);
GLAPI void glad_set_post_callback(GLADcallback cb);
'''
DEBUG_CODE = '''
static GLADcallback _pre_call_callback = _pre_call_callback_default;
void glad_set_pre_callback(GLADcallback cb) {
_pre_call_callback = cb;
}
static GLADcallback _post_call_callback = _post_call_callback_default;
void glad_set_post_callback(GLADcallback cb) {
_post_call_callback = cb;
}
'''
DEFAULT_CALLBACK = '''
void _pre_call_callback_default(const char *name, void *funcptr, int len_args, ...) {}
void _post_call_callback_default(const char *name, void *funcptr, int len_args, ...) {}
'''
DEFAULT_CALLBACK_GL = '''
void _pre_call_callback_default(const char *name, void *funcptr, int len_args, ...) {}
void _post_call_callback_default(const char *name, void *funcptr, int len_args, ...) {
GLenum error_code;
error_code = glad_glGetError();
if (error_code != GL_NO_ERROR) {
fprintf(stderr, "ERROR %d in %s\\n", error_code, name);
}
}
'''
class CDebugGenerator(CGenerator):
def write_code_head(self, f):
CGenerator.write_code_head(self, f)
if self.spec.NAME == 'gl':
f.write(DEFAULT_CALLBACK_GL)
else:
f.write(DEFAULT_CALLBACK)
f.write(DEBUG_CODE)
def write_api_header(self, f):
CGenerator.write_api_header(self, f)
f.write(DEBUG_HEADER)
def write_function_prototype(self, fobj, func):
fobj.write('typedef {} (APIENTRYP PFN{}PROC)({});\n'.format(
func.proto.ret.to_c(), func.proto.name.upper(),
', '.join('{} {}'.format(param.type.to_c(), param.name) for param in func.params)
))
fobj.write('GLAPI PFN{}PROC glad_{};\n'.format(
func.proto.name.upper(), func.proto.name
))
fobj.write('GLAPI PFN{}PROC glad_debug_{};\n'.format(
func.proto.name.upper(), func.proto.name
))
fobj.write('#define {0} glad_debug_{0}\n'.format(func.proto.name))
def write_function(self, fobj, func):
fobj.write('APIENTRY PFN{}PROC glad_{};\n'.format(
func.proto.name.upper(), func.proto.name
))
# write the default debug function
args_def = ', '.join(
'{type} arg{i}'.format(type=param.type.to_c(), i=i)
for i, param in enumerate(func.params)
)
fobj.write('APIENTRY {} glad_debug_impl_{}({}) {{'.format(
func.proto.ret.to_c(), func.proto.name, args_def
))
args = ', '.join('arg{}'.format(i) for i, _ in enumerate(func.params))
args_callback = ', '.join(filter(
None, ['(void*){}'.format(func.proto.name), str(len(func.params)), args]
))
return_def = ''
return_assign = ''
return_return = ''
# lower because of win API having VOID
if not func.proto.ret.to_c().lower() == 'void':
return_def = '\n {} ret;'.format(func.proto.ret.to_c())
return_assign = 'ret = '
return_return = 'return ret;'
fobj.write('\n'.join(filter(None, DEFAULT_DEBUG_IMPL.format(
name=func.proto.name, args=args, args_callback=args_callback,
return_def=return_def, return_assign=return_assign,
return_return=return_return
).splitlines())))
fobj.write('\n}\n')
fobj.write('PFN{0}PROC glad_debug_{1} = glad_debug_impl_{1};\n'.format(
func.proto.name.upper(), func.proto.name
))
|
Python
| 0
|
@@ -2432,25 +2432,16 @@
.write('
-APIENTRY
PFN{}PRO
@@ -2745,24 +2745,27 @@
.write('
+{}
APIENTRY
{} glad
@@ -2756,19 +2756,16 @@
APIENTRY
- {}
 glad_de
|
9df7b7e49c5f7ec9d8962cc28f1b19f18dda114c
|
Add custom exceptions for api errors
|
overwatch/errors.py
|
overwatch/errors.py
|
Python
| 0
|
@@ -0,0 +1,476 @@
+class InvalidFilter(Exception):
    """
    Raise when 'filter' key word argument is not recognized
    """
    pass


class InvalidHero(Exception):
    """
    Raise when 'hero' key word argument is not recognized
    """
    pass


class InvalidCombination(Exception):
    """
    Raise when 'filter' and 'hero' key word arguments
    are an invalid combination.
    """
    pass


class NotFound(Exception):
    """
    Raise when stats could not be found
    """
    pass
|
|
0d3343300d62afc37ba7a3bc1ec6e81bb8f8c648
|
add parent.py to be forked
|
worker/namespace/parent.py
|
worker/namespace/parent.py
|
Python
| 0.000002
|
@@ -0,0 +1,1164 @@
+#TODO make sure listening on the pipe blocks correctly, better error handling

import os, sys, ns, time
from subprocess import check_output

sys.path.append('/handler') # assume submitted .py file is /handler/lambda_func

def handler(args, path):
    import lambda_func
    try:
        ret = lambda_func(args)
    except:
        ret = json.dumps{'error': 'handler execution failed with args: %s' % args}

    with open(path) as pipe:
        pipe.write(ret)

def listen(path):
    args = ""
    with open(path) as pipe:
        while True:
            data = pipe.read()
            if len(data) == 0:
                break
            args += data

    return args

def main(pid, inpath, outpath):
    # parent never exits
    while True:
        args = listen(inpath)

        r = forkenter(pid)
        if r == 0:
            break # grandchild escapes
        elif r < 0:
            sys.exit(0) # child dies quietly

        handler(args, outpath)

if __name__ == '__main__':
    if len(sys.argv) < 3:
        print('Usage: test.py <ns_pid> <input_pipe> <output_pipe>')
        sys.exit(1)
    else:
        main(sys.argv[1], sys.argv[2] sys.argv[3])
|
|
1b7cce5b8dd274b904466dc6deeac312238fc857
|
add test_comp.py, add test function stubs
|
wradlib/tests/test_comp.py
|
wradlib/tests/test_comp.py
|
Python
| 0
|
@@ -0,0 +1,761 @@
+#!/usr/bin/env python
# -------------------------------------------------------------------------------
# Name: test_comp.py
# Purpose: testing file for the wradlib.comp module
#
# Authors: wradlib developers
#
# Created: 26.02.2016
# Copyright: (c) wradlib developers
# Licence: The MIT License
# -------------------------------------------------------------------------------

import unittest

import wradlib.vpr as vpr
import wradlib.georef as georef
import numpy as np

class ComposeTest(unittest.TestCase):
    def test_extract_circle(self):
        pass

    def test_togrid(self):
        pass

    def compose_ko(self):
        pass

    def compose_weighted(self):
        pass


if __name__ == '__main__':
    unittest.main()
|
|
80089ad2856b60383692a8dd5abe8520f61a0895
|
Create write_OME-XML_from_file.py
|
write_OME-XML_from_file.py
|
write_OME-XML_from_file.py
|
Python
| 0
|
@@ -0,0 +1,2503 @@
+# -*- coding: utf-8 -*-
"""
@author: Sebi

File: write_OME-XML_from_file.py
Date: 17.12.2015
Version. 1.0
"""

import bfimage as bf
from lxml import etree as etl


def create_omexml(testdata, method=1, writeczi_metadata=True):

    # creates readable xml files from image data files. Default method should be = 1.
    if method == 1:
        # method 1
        for i in range(0, len(testdata)):

            # Change File name and write XML file to same folder
            xmlfile1 = testdata[i] + '_MetaData1.xml'

            try:
                # get the actual OME-XML
                omexml = bf.createOMEXML(testdata[i])
                # create root and tree from XML string and write "pretty" to disk
                root = etl.fromstring(omexml)
                tree = etl.ElementTree(root)
                tree.write(xmlfile1, pretty_print=True, encoding='utf-8', method='xml')
                print 'Created OME-XML file for testdata: ', testdata[i]
            except:
                print 'Creating OME-XML failed for testdata: ', testdata[i]

    if method == 2:
        # method 2
        for i in range(0, len(testdata)):

            # Change File name and write XML file to same folder
            xmlfile2 = testdata[i] + '_MetaData2.xml'

            try:
                # get the actual OME-XML
                md, omexml = bf.get_metadata_store(testdata[i])
                # create root and tree from XML string and write "pretty" to disk
                root = etl.fromstring(omexml)
                tree = etl.ElementTree(root)
                tree.write(xmlfile2, pretty_print=True, encoding='utf-8', method='xml')
                print 'Created OME-XML file for testdata: ', testdata[i]
            except:
                print 'Creating OME-XML failed for testdata: ', testdata[i]

    if writeczi_metadata:

        # this writes the special CZI xml metadata to disk, when a CZI file was found.
        for i in range(0, len(testdata)):

            if testdata[i][-4:] == '.czi':
                try:
                    bf.czt.writexml_czi(testdata[i])
                except:
                    print 'Could not write special CZI metadata for: ', testdata[i]


# INSERT THE FILES INSIDE THE LIST BELOW

testfiles = [r'c:\Users\Testuser\OME-TIFF_Metadatatest\test1.czi',
             r'c:\Users\Testuser\Documents\Testdata_Zeiss\OME-TIFF_Metadatatest\Tile=4_T=3_CH=2_Z=3.czi_Fiji_Export_allTiles.ome.tiff']

create_omexml(testfiles, method=1, writeczi_metadata=True)
|
|
5f5f48f2f6f8c82d97858230219f67229d3165a4
|
patch to add match_ids to history
|
patch-rating-history2.py
|
patch-rating-history2.py
|
Python
| 0
|
@@ -0,0 +1,1126 @@
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#

from datetime import datetime
from dump_qlstats_data import connect_to_database

GAMETYPES_AVAILABLE = ["ad", "ctf", "tdm"]

def main(args):

    try:
        db = connect_to_database()

    except Exception as e:
        print("error: " + str(e))
        return 1

    for gametype in GAMETYPES_AVAILABLE:
        print( gametype )
        options = { gametype + ".history.timestamp": { "$ne": None }, gametype + ".history.match_id": None }
        for player in db.players.find(options):
            print( player["_id"] )
            history_result = []
            for history_item in player[gametype]["history"]:
                match = db.matches.find_one( { "gametype": gametype, "timestamp": history_item["timestamp"] } )
                print( match["_id"] )
                history_item['match_id'] = match["_id"]
                history_result.append( history_item )
            result = { "timestamp": match['timestamp'], "rating": player[gametype]['rating'] }
            db.players.update( { "_id": player['_id'] }, { "$set": { gametype + ".history": history_result } } )

    return 0

if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))
|
|
38375800cee8c02051d7d8212ccc5fc843a109f1
|
Create client.py
|
client.py
|
client.py
|
Python
| 0.000001
|
@@ -0,0 +1,312 @@
+import socket

def Main():
	host = '192.168.43.130'
	port = 5000

	s = socket.socket()
	s.connect((host, port))

	message = raw_input('-->')
	while message != 'q':
		s.send(message)
		data = s.recv(1024)
		print "server: " + str(data)
		message = raw_input('-->')

	s.close()

if __name__ == '__main__':
	Main()
|
|
901d430e3f6705af372974b6e1b42e36884ba47f
|
Add mplimporthook.py
|
jupyter/mplimporthook.py
|
jupyter/mplimporthook.py
|
Python
| 0.000001
|
@@ -0,0 +1,1594 @@
+"""Startup script for IPython kernel.

Installs an import hook to configure the matplotlib backend on the fly.

Originally from @minrk at
https://github.com/minrk/profile_default/blob/master/startup/mplimporthook.py
Repurposed for docker-stacks to address repeat bugs like
https://github.com/jupyter/docker-stacks/issues/235.
"""
import sys
from IPython import get_ipython

class MatplotlibFinder(object):
    """Import hook that notices when matplotlib.pyplot or pylab is imported
    and tries to configure the matplotlib backend appropriately for the
    environment.
    """
    _called = False

    def find_module(self, fullname, path=None):
        if self._called:
            # already handled
            return

        if fullname not in ('pylab', 'matplotlib.pyplot'):
            # not matplotlib
            return

        # don't call me again
        self._called = True

        try:
            # remove myself from the import hooks
            sys.meta_path = [loader for loader in sys.meta_path if loader is not self]
        except ValueError:
            pass

        ip = get_ipython()
        if ip is None:
            # not in an interactive environment
            return

        if ip.pylab_gui_select:
            # backend already selected
            return

        if hasattr(ip, 'kernel'):
            # default to inline in kernel environments
            ip.enable_matplotlib('inline')
        else:
            print('enabling matplotlib')
            ip.enable_matplotlib()

# install the finder immediately
sys.meta_path.insert(0, MatplotlibFinder())
|
|
baaeee7b003030ded5336c7da9e01c04beea46f3
|
add swscale dependency
|
autoconf/swscale.py
|
autoconf/swscale.py
|
Python
| 0
|
@@ -0,0 +1,114 @@
+from _external import *

swscale = LibWithHeaderChecker(
				'swscale',
				'libswscale/swscale.h',
				'c',
			)
|
|
626ce0dbd2450812e0cbac12293133e12bae0daf
|
Add parsing module
|
autosort/parsing.py
|
autosort/parsing.py
|
Python
| 0.000001
|
@@ -0,0 +1,3192 @@
+import ast
import textwrap
import tokenize
from collections import namedtuple
from tokenize import COMMENT, DEDENT, ENDMARKER, INDENT, NEWLINE, STRING, NAME


class Name(namedtuple('Name', 'name asname')):
    CAMEL, SNAKE, CONST = range(3)

    @property
    def kind(self):
        name = self.name.split('.')[-1]
        if name.isupper():
            return self.CONST
        if name[0].isupper():
            return self.CAMEL
        return self.SNAKE

    def key(self):
        return self.kind, str(self)

    def __str__(self):
        if self.asname:
            return '{0} as {1}'.format(self.name, self.asname)
        return self.name


class Import(namedtuple('Import', 'kind module names noqa start end')):

    def merge(self, other):
        names = sorted(set(self.names + other.names), key=Name.key)
        noqa = self.noqa or other.noqa
        return Import(self.kind, self.module, names, noqa, -1, -1)


Block = namedtuple('Block', 'imports indent start')


def parse_imports(lines):
    it = iter(lines)
    tokens = (_TokenInfo(*token) for token in
              tokenize.generate_tokens(lambda: next(it)))

    parser = _ImportParser(tokens, lines)
    return parser.parse_block('', 0)


class _TokenInfo(namedtuple('TokenInfo', 'type string start end line')):
    @property
    def name(self):
        return self.type == NAME and self.string

    @property
    def starts_block(self):
        return self.type == INDENT

    @property
    def ends_block(self):
        return self.type in (DEDENT, ENDMARKER)


class _ImportParser(namedtuple('_ImportParser', 'tokens lines')):
    def parse_block(self, indent, start):
        imports = []
        token = next(self.tokens)

        # Push imports beneath docstring
        if token.type == STRING:
            start = token.end[0] + 1
            token = next(self.tokens)

        while not token.ends_block:
            if token.starts_block:
                self.parse_block(token.string, token.start[0] - 1)
            elif token.name in ('from', 'import'):
                imports += self.parse_imports(token)
            token = next(self.tokens)

        if imports:
            # wrong
            yield Block(imports, indent, start)

    def parse_imports(self, token):
        first = token
        comments = []
        while token.type != NEWLINE:
            if token.type == COMMENT:
                comments.append(token.string)
            token = next(self.tokens)

        start, end = first.start[0] - 1, token.end[0]
        source = '\n'.join(self.lines[start:end])
        nodes = ast.parse(textwrap.dedent(source)).body
        # TODO: error on multiple nodes
        return self._make_imports(first.name, nodes[0], comments, start, end)

    @staticmethod
    def _make_imports(kind, node, comments, start, end):
        noqa = any(c.startswith('# noqa') for c in comments)
        names = sorted([Name(n.name, n.asname)
                        for n in node.names], key=Name.key)
        if kind == 'from':
            modules = [Name(node.module, None)]
        else:
            modules, names = names, []

        return [Import(kind, m, names, noqa, start, end) for m in modules]
|
|
059dae76827ac016524925d94c10e5ed0a83f2c2
|
Create PreFilterExample.py
|
home/Alessandruino/PreFilterExample.py
|
home/Alessandruino/PreFilterExample.py
|
Python
| 0
|
@@ -0,0 +1,435 @@
+from org.myrobotlab.opencv import OpenCVFilterAffine

affine = OpenCVFilterAffine("affine")
affine.setAngle(180.0)

leftPort= "/dev/cu.wchusbserial1450" 
i01 = Runtime.start("i01","InMoov")
headTracking = i01.startHeadTracking(leftPort)
eyesTracking = i01.startEyesTracking(leftPort,10,12)
i01.headTracking.addPreFilter(affine)
i01.eyesTracking.addPreFilter(affine)
sleep(1)
i01.headTracking.faceDetect()
i01.eyesTracking.faceDetect()
|
|
c24fb3b6b41b2f06cee79fd21b090403f3a67457
|
Add TwrSearch class
|
twitter/twr_search.py
|
twitter/twr_search.py
|
Python
| 0
|
@@ -0,0 +1,2993 @@
+#!/usr/bin/env python
#
# Copyright (c) 2013 Martin Abente Lahaye. - tch@sugarlabs.org

#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:

#The above copyright notice and this permission notice shall be included in
#all copies or substantial portions of the Software.

#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
#THE SOFTWARE.

import json

from gi.repository import GObject

import twr_error
from twr_object import TwrObject


class TwrSearch(GObject.GObject):

    TWEETS_URL = 'https://api.twitter.com/1.1/search/tweets.json'

    __gsignals__ = {
        'tweets-downloaded': (GObject.SignalFlags.RUN_FIRST,
                              None, ([object])),
        'tweets-downloaded-failed': (GObject.SignalFlags.RUN_FIRST,
                                     None, ([str]))}

    def tweets(self, q, count=None, since_id=None, max_id=None):
        params = [('q', (q))]

        if count is not None:
            params += [('count', (count))]
        if since_id is not None:
            params += [('since_id', (since_id))]
        if max_id is not None:
            params += [('max_id', (max_id))]

        GObject.idle_add(self._get,
                         self.TWEETS_URL,
                         params,
                         self.__completed_cb,
                         self.__failed_cb,
                         'tweets-downloaded',
                         'tweets-downloaded-failed')

    def _get(self, url, params, completed_cb, failed_cb,
             completed_data, failed_data):

        object = TwrObject()
        object.connect('transfer-completed', completed_cb, completed_data)
        object.connect('transfer-failed', failed_cb, failed_data)
        object.request('GET', url, params)

    def __completed_cb(self, object, data, signal):
        try:
            info = json.loads(data)

            if isinstance(info, dict) and ('errors' in info.keys()):
                raise twr_error.TwrSearchError(str(info['errors']))

            self.emit(signal, info)
        except Exception, e:
            print 'TwrSearch.__completed_cb crashed with %s' % str(e)

    def __failed_cb(self, object, message, signal):
        self.emit(signal, message)
|
|
49edeac697b6c4457f2f55ff4086c9dbacedaa71
|
add try implementation
|
pymonet/try.py
|
pymonet/try.py
|
Python
| 0
|
@@ -0,0 +1,992 @@
+class Try:

    def __init__(self, value, is_success):
        self.value = value
        self.is_success = is_success

    def __eq__(self, other):
        return self.value == other.value and self.is_success == other.is_success

    @classmethod
    def of(cls, fn, *args):
        try:
            return cls(fn(*args), True)
        except Exception as e:
            return cls(e, True)

    def map(self, mapper):
        if self.is_success:
            return Try(
                mapper(self.value),
                True
            )
        return Try(self.value, False)

    def fold(self, mapper):
        if self.is_success:
            return Try.of(mapper, self.value)
        return self.value

    def on_success(self, success_callback):
        if self.is_success:
            return success_callback(self.value)

    def on_fail(self, fail_callback):
        if not self.is_success:
            return fail_callback(self.value)

    def filter(self, filterer):
        pass
|
|
f972606727fa302383acde4ee562a016fe216bd2
|
Version 0.2.3a
|
hornet/trampoline.py
|
hornet/trampoline.py
|
Python
| 0.000001
|
@@ -0,0 +1,551 @@
+import functools


def trampoline(bouncing, *args, **kwargs):
    while bouncing:
        result = bouncing(*args, **kwargs)
        results, bouncing, args, kwargs = result
        yield from results


zero = iter(())


def unit(item):
    yield item


def land(*args, **kwargs):
    return zero, None, args, kwargs


def throw(function, thrown, *args, **kwargs):
    return unit(thrown), function, args, kwargs


def bounce(function, *args, **kwargs):
    return zero, function, args, kwargs


def bouncy(f):
    return functools.partial(bounce, f)
|
|
3f35e9b3913bb99cf7b299c36528eefa878337f4
|
Add method to determine output name
|
mopidy/outputs/__init__.py
|
mopidy/outputs/__init__.py
|
import pygst
pygst.require('0.10')
import gst
import logging
logger = logging.getLogger('mopidy.outputs')
class BaseOutput(object):
"""Base class for providing support for multiple pluggable outputs."""
def get_bin(self):
"""
Build output bin that will attached to pipeline.
"""
description = 'queue ! %s' % self.describe_bin()
logger.debug('Creating new output: %s', description)
output = gst.parse_bin_from_description(description, True)
output.set_name(self.__class__.__name__)
self.modify_bin(output)
return output
def modify_bin(self, output):
"""
Modifies bin before it is installed if needed.
Overriding this method allows for outputs to modify the constructed bin
before it is installed. This can for instance be a good place to call
`set_properties` on elements that need to be configured.
:param output: gst.Bin to modify in some way.
:type output: :class:`gst.Bin`
"""
pass
def describe_bin(self):
"""
Return text string describing bin in gst-launch format.
For simple cases this can just be a plain sink such as `autoaudiosink`
or it can be a chain `element1 ! element2 ! sink`. See `man
gst-launch0.10` for details on format.
*MUST be implemented by subclass.*
"""
raise NotImplementedError
def set_properties(self, element, properties):
"""
Helper to allow for simple setting of properties on elements.
Will call `set_property` on the element for each key that has a value
that is not None.
:param element: gst.Element to set properties on.
:type element: :class:`gst.Element`
:param properties: Dictionary of properties to set on element.
:type properties: dict
"""
for key, value in properties.items():
if value is not None:
element.set_property(key, value)
|
Python
| 0.001316
|
@@ -527,26 +527,18 @@
elf.
-__class__._
+get
_name
-__
+()
)\n
@@ -591,16 +591,234 @@
output\n\n
+    def get_name(self):\n        """\n        Return name of output in gstreamer context.\n\n        Defaults to class name, can be overriden by sub classes if required.\n        """\n        return self.__class__.__name__\n\n
    def
|
ea6458a88079b939188d0e5bf86eedeb62247609
|
add src/cs_utime.py
|
src/cs_utime.py
|
src/cs_utime.py
|
Python
| 0
|
@@ -0,0 +1,653 @@
+#!/usr/local/bin/python
# -*- coding: utf-8 -*-
'''cs_utime
os.stat(fn).st_ctime # 978307200.0
tt=time.strptime('20010101T090000', '%Y%m%dT%H%M%S') # (2001,1,1,9,0,0,0,1,-1)
t=time.mktime(tt) # 978307200.0
os.utime(fn, (t, t)) # (atime, mtime)
'''

import sys, os, stat
import time

FSENC = 'cp932'
UPATH = u'/tmp/tmp'
FILES = [
    (u'f0.tsv', '20010101T090000'),
    (u'f1.tsv', '20010101T090000'),
    (u'f2.tsv', '20010101T090000')]

def set_ts(fn, ts):
    t = time.mktime(time.strptime(ts, '%Y%m%dT%H%M%S'))
    os.utime(fn, (t, t))

def main():
    for fn, ts in FILES: set_ts((u'%s/%s' % (UPATH, fn)).encode(FSENC), ts)

if __name__ == '__main__':
    main()
|
|
a2a268ea8b9b2876011453476baedaa4bca01559
|
Create ball.py
|
python/ball.py
|
python/ball.py
|
Python
| 0.000011
|
@@ -0,0 +1,425 @@
+#!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
import math

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)

numRows = 16
height = ledMatrix.height
width = ledMatrix.width

try:
	print "Press Ctrl + C to stop executing"
	while True:
		nextFrame = ledMatrix.CreateFrameCanvas()
		ledMatrix.SwapOnVSync(nextFrame)
except KeyboardInterrupt:
	print "Exiting\n"
	sys.exit(0)
|
|
5eb0dce7f2c287203ace05b6989785b7e4fdac75
|
add script to track xmin in the database
|
nagios/check_pgsql_xmin.py
|
nagios/check_pgsql_xmin.py
|
Python
| 0
|
@@ -0,0 +1,1019 @@
+"""
Return the maximum xmin in the database 
"""
import os
import sys
import stat
import psycopg2
IEM = psycopg2.connect(database='iem', host='iemdb', user='nobody')
icursor = IEM.cursor()

def check():
    icursor.execute("""
    SELECT datname, age(datfrozenxid) FROM pg_database
    ORDER by age DESC LIMIT 1
    """)
    row = icursor.fetchone()

    return row

if __name__ == '__main__':
    dbname, count = check()
    if count < 200000000:
        print 'OK - %s %s |count=%s;200000000;215000000;220000000' % (count, 
            dbname, count)
        sys.exit(0)
    elif count < 215000000:
        print 'WARNING - %s %s |count=%s;200000000;215000000;220000000' % (count, 
            dbname, count)
        sys.exit(1)
    else:
        print 'CRITICAL - %s %s |count=%s;200000000;215000000;220000000' % (count, 
            dbname, count)
        sys.exit(2)
|
|
c373bbb351de421881d9f0e2f8a16d541bb21347
|
add test-receive-file-ipv6.py
|
tests/twisted/avahi/test-receive-file-ipv6.py
|
tests/twisted/avahi/test-receive-file-ipv6.py
|
Python
| 0.000002
|
@@ -0,0 +1,2055 @@
+import avahi
import urllib
import BaseHTTPServer
import SocketServer
import socket

from saluttest import exec_test
from file_transfer_helper import ReceiveFileTest

from avahitest import AvahiListener
from xmppstream import connect_to_stream6

from twisted.words.xish import domish

class TestReceiveFileIPv6(ReceiveFileTest):
    def _resolve_salut_presence(self):
        AvahiListener(self.q).listen_for_service("_presence._tcp")
        e = self.q.expect('service-added', name = self.self_handle_name,
            protocol = avahi.PROTO_INET6)
        service = e.service
        service.resolve()

        e = self.q.expect('service-resolved', service = service)
        return str(e.pt), e.port

    def connect_to_salut(self):
        host, port = self._resolve_salut_presence()

        self.outbound = connect_to_stream6(self.q, self.contact_name,
            self.self_handle_name, host, port)

        e = self.q.expect('connection-result')
        assert e.succeeded, e.reason
        self.q.expect('stream-opened', connection = self.outbound)

    def send_ft_offer_iq(self):
        iq = domish.Element((None, 'iq'))
        iq['to'] = self.self_handle_name
        iq['from'] = self.contact_name
        iq['type'] = 'set'
        iq['id'] = 'gibber-file-transfer-0'
        query = iq.addElement(('jabber:iq:oob', 'query'))
        url = 'http://[::1]:%u/gibber-file-transfer-0/%s' % \
            (self.httpd.server_port, urllib.quote(self.file.name))
        url_node = query.addElement('url', content=url)
        url_node['type'] = 'file'
        url_node['size'] = str(self.file.size)
        url_node['mimeType'] = self.file.content_type
        query.addElement('desc', content=self.file.description)
        self.outbound.send(iq)

    def _get_http_server_class(self):
        class HTTPServer6(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
            address_family = getattr(socket, 'AF_INET6', None)

        return HTTPServer6

if __name__ == '__main__':
    test = TestReceiveFileIPv6()
    exec_test(test.test)
|
|
f296eb4a87a1130cb72e00099f7e9441425548ec
|
add device handler
|
ncclient/devices/huawei.py
|
ncclient/devices/huawei.py
|
Python
| 0.000001
|
@@ -0,0 +1,1598 @@
+"""
Handler for Cisco Nexus device specific information.

Note that for proper import, the classname has to be:

    "<Devicename>DeviceHandler"

...where <Devicename> is something like "Default", "Huawei", etc.

All device-specific handlers derive from the DefaultDeviceHandler, which implements the
generic information needed for interaction with a Netconf server.

"""

from ncclient.xml_ import BASE_NS_1_0

from .default import DefaultDeviceHandler

class HuaweiDeviceHandler(DefaultDeviceHandler):
    """
    Huawei handler for device specific information.

    In the device_params dictionary, which is passed to __init__, you can specify
    the parameter "ssh_subsystem_name". That allows you to configure the preferred
    SSH subsystem name that should be tried on your Nexus switch. If connecting with
    that name fails, or you didn't specify that name, the other known subsystem names
    will be tried. However, if you specify it then this name will be tried first.

    """
    _EXEMPT_ERRORS = []

    def __init__(self, device_params):
        super(HuaweiDeviceHandler, self).__init__(device_params)

    def get_capabilities(self):
        # Just need to replace a single value in the default capabilities
        c = super(HuaweiDeviceHandler, self).get_capabilities()
        return c

    def get_xml_base_namespace_dict(self):
        return { "xmlns":BASE_NS_1_0 }

    def get_xml_extra_prefix_kwargs(self):
        d = {
            # "xmlns":"http://www.huawei.com/netconf/vrp"
        }
        d.update(self.get_xml_base_namespace_dict())
        return d
|
|
401fbbd7440f16c462ca31150d9873da5f052356
|
create app.py
|
reddit2telegram/channels/r_freegamefindings/app.py
|
reddit2telegram/channels/r_freegamefindings/app.py
|
Python
| 0.000004
|
@@ -0,0 +1,155 @@
+#encoding:utf-8

subreddit = 'freegamefindings'
t_channel = '@r_freegamefindings'


def send_post(submission, r2t):
    return r2t.send_simple(submission)
|
|
412828bea81f5aad917188881c1e7e4d6ce52400
|
Add tests for the management views
|
usingnamespace/tests/test_views_management.py
|
usingnamespace/tests/test_views_management.py
|
Python
| 0
|
@@ -0,0 +1,1911 @@
+import unittest
from pyramid import testing

class ManagementViewsTest(unittest.TestCase):
    def setUp(self):
        self.config = testing.setUp()

    def tearDown(self):
        testing.tearDown()

    def makeOne(self, context, request):
        from usingnamespace.views.management import Management

        return Management(context, request)

    def makeWithInfo(self):
        self.request = testing.DummyRequest()
        self.context = testing.DummyResource()
        self.request.context = self.context

        return self.makeOne(self.context, self.request)

    def test_verify_context_request(self):
        view_class = self.makeOne("1", "2")

        self.assertEqual(view_class.context, "1")
        self.assertEqual(view_class.request, "2")

    def test_management_home(self):
        view_class = self.makeWithInfo()

        self.assertEqual(view_class.home(), {})

class ManagementNotAuthorizedViewsTest(unittest.TestCase):
    def setUp(self):
        self.config = testing.setUp()
        self.request = None
        self.context = None

    def tearDown(self):
        testing.tearDown()

    def makeOne(self, context, request):
        from usingnamespace.views.management import ManagementNotAuthorized
        return ManagementNotAuthorized(context, request)

    def makeWithInfo(self):
        self.request = testing.DummyRequest()
        self.context = testing.DummyResource()
        self.request.context = self.context

        return self.makeOne(self.context, self.request)

    def test_view_forbidden(self):
        from pyramid.httpexceptions import HTTPForbidden

        view_class = self.makeWithInfo()
        self.assertRaises(HTTPForbidden, view_class.management_not_authed)

    def test_view_not_found(self):
        view_class = self.makeWithInfo()

        view_class.management_not_found()

        self.assertEqual(self.request.response.status_int, 404)
|
|
ef53650aa06c1121e50e30634d8ca0ba989ecccf
|
Change self.basket → self.instance.basket in the LineForm
|
oscar/apps/basket/forms.py
|
oscar/apps/basket/forms.py
|
from django import forms
from django.conf import settings
from django.db.models import get_model
from django.utils.translation import gettext_lazy as _
basketline_model = get_model('basket', 'line')
basket_model = get_model('basket', 'basket')
Product = get_model('catalogue', 'product')
class BasketLineForm(forms.ModelForm):
save_for_later = forms.BooleanField(initial=False, required=False)
def clean_quantity(self):
qty = self.cleaned_data['quantity']
basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD
if basket_threshold:
total_basket_quantity = self.basket.num_items
max_allowed = basket_threshold - total_basket_quantity
if qty > max_allowed:
raise forms.ValidationError(
_("Due to technical limitations we are not able to ship"
" more than %(threshold)d items in one order. Your basket"
" currently has %(basket)d items.") % {
'threshold': basket_threshold,
'basket': total_basket_quantity,
})
return qty
class Meta:
model = basketline_model
exclude = ('basket', 'product', 'line_reference', )
class SavedLineForm(forms.ModelForm):
move_to_basket = forms.BooleanField(initial=False, required=False)
class Meta:
model = basketline_model
exclude = ('basket', 'product', 'line_reference', 'quantity', )
class BasketVoucherForm(forms.Form):
code = forms.CharField(max_length=128)
def __init__(self, *args, **kwargs):
return super(BasketVoucherForm, self).__init__(*args,**kwargs)
class ProductSelectionForm(forms.Form):
product_id = forms.IntegerField(min_value=1)
def clean_product_id(self):
id = self.cleaned_data['product_id']
try:
return Product.objects.get(pk=id)
except Product.DoesNotExist:
raise forms.ValidationError(_("This product is not available for purchase"))
class AddToBasketForm(forms.Form):
product_id = forms.IntegerField(widget=forms.HiddenInput(), min_value=1)
quantity = forms.IntegerField(initial=1, min_value=1)
def __init__(self, basket, instance, *args, **kwargs):
super(AddToBasketForm, self).__init__(*args, **kwargs)
self.basket = basket
self.instance = instance
if instance:
if instance.is_group:
self._create_group_product_fields(instance)
else:
self._create_product_fields(instance)
def clean_product_id(self):
id = self.cleaned_data['product_id']
product = Product.objects.get(id=id)
if not product.has_stockrecord or not product.stockrecord.is_available_to_buy:
raise forms.ValidationError(_("This product is not available for purchase"))
return id
def clean_quantity(self):
qty = self.cleaned_data['quantity']
basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD
if basket_threshold:
total_basket_quantity = self.basket.num_items
max_allowed = basket_threshold - total_basket_quantity
if qty > max_allowed:
raise forms.ValidationError(
_("Due to technical limitations we are not able to ship"
" more than %(threshold)d items in one order. Your basket"
" currently has %(basket)d items.") % {
'threshold': basket_threshold,
'basket': total_basket_quantity,
})
return qty
def _create_group_product_fields(self, item):
"""
Adds the fields for a "group"-type product (eg, a parent product with a
list of variants.
"""
choices = []
for variant in item.variants.all():
if variant.has_stockrecord:
summary = u"%s (%s) - %.2f" % (variant.get_title(), variant.attribute_summary(),
variant.stockrecord.price_incl_tax)
choices.append((variant.id, summary))
self.fields['product_id'] = forms.ChoiceField(choices=tuple(choices))
def _create_product_fields(self, item):
u"""Add the product option fields."""
for option in item.options:
self._add_option_field(item, option)
def _add_option_field(self, item, option):
u"""
Creates the appropriate form field for the product option.
This is designed to be overridden so that specific widgets can be used for
certain types of options.
"""
self.fields[option.code] = forms.CharField()
|
Python
| 0.000001
|
@@ -603,32 +603,41 @@
quantity = self.
+instance.
basket.num_items
|
50d84e5b134b69cebcb4935da24a3cc702e1feef
|
Add coverage for resolve_ref_for_build
|
tests/zeus/tasks/test_resolve_ref.py
|
tests/zeus/tasks/test_resolve_ref.py
|
Python
| 0.000001
|
@@ -0,0 +1,397 @@
+from zeus import factories
from zeus.tasks import resolve_ref_for_build


def test_resolve_ref_for_build(mocker, db_session, default_revision):
    build = factories.BuildFactory.create(
        repository=default_revision.repository, ref=default_revision.sha
    )

    assert build.revision_sha is None

    resolve_ref_for_build(build.id)

    assert build.revision_sha == default_revision.sha
|
|
fbb8bffd5e1cb633cc59eb5b1e61fef2067e836a
|
add empty unit test for viewimage module
|
supvisors/tests/test_viewimage.py
|
supvisors/tests/test_viewimage.py
|
Python
| 0
|
@@ -0,0 +1,1756 @@
+#!/usr/bin/python
#-*- coding: utf-8 -*-

# ======================================================================
# Copyright 2016 Julien LE CLEACH
# 
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# 
#     http://www.apache.org/licenses/LICENSE-2.0
# 
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================

import sys
import unittest


class ViewImageTest(unittest.TestCase):
    """ Test case for the viewimage module. """

    def test_stats_images(self):
        """ Test the values set at construction. """
        from supvisors.viewimage import address_image_contents, process_image_contents
        self.assertIsNotNone(address_image_contents)
        self.assertIsNotNone(process_image_contents)

    def test_address_image_view(self):
        """ Test the values set at construction. """
        from supvisors.viewimage import AddressImageView
        view = AddressImageView()
        self.assertIsNotNone(view)

    def test_process_image_view(self):
        """ Test the values set at construction. """
        from supvisors.viewimage import ProcessImageView
        view = ProcessImageView()
        self.assertIsNotNone(view)


def test_suite():
    return unittest.findTestCases(sys.modules[__name__])

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
|
|
0f395ca18526ea4c6675bd772cc9af88a6baf006
|
Create __init__.py
|
channels/r_gonewild30plus/__init__.py
|
channels/r_gonewild30plus/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+
|
|
10fe17255335e16aac9f828764050bd2c0874175
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/79deb43ff6fe999bc7767f19ebdc20814b1dfe80.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "0ca0913cd468b657b76a001b74ffa6e91e4eed03"
TFRT_SHA256 = "a85d109a5ca7daee97115903784bdc5430ae1421090f74c45080fc56f0e04351"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0
|
@@ -228,133 +228,133 @@
= "
-0ca0913cd468b657b76a001b74ffa6e91e4eed03"\n    TFRT_SHA256 = "a85d109a5ca7daee97115903784bdc5430ae1421090f74c45080fc56f0e04351
+79deb43ff6fe999bc7767f19ebdc20814b1dfe80"\n    TFRT_SHA256 = "3b9a217602bd20258595ebe997a204f675aad2006ce3d9cdeb2b431f8564e28d
"\n\n
|
430b5daebbd5385551203c2a0cf23bb355a2c027
|
Add a script that uses the Flickr API.
|
doc/source/scripts/06-cat-of-cats.py
|
doc/source/scripts/06-cat-of-cats.py
|
Python
| 0
|
@@ -0,0 +1,614 @@
+import os
import photomosaic as pm
import photomosaic.flickr
import matplotlib.pyplot as plt


# For these published examples we use os.environ to keep our API key private.
# Just set your own Flickr API key here.
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']

# Get a pool of cat photos from Flickr.
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', 'cats/', 1000)
pool = pm.make_pool('cats/*.jpg')
pm.export_pool(pool, 'cats/pool.json') # save color analysis for future reuse

# Build mosaic.
mosaic = pm.basic_mosaic(img, pool, (30, 30), depth=4)
plt.plot(mosaic)
plt.show()
|
|
3aad37e287a6d0fdb037393d4fcc30817c9dece5
|
Create statanalyisi.py
|
statanalyisi.py
|
statanalyisi.py
|
Python
| 0.000008
|
@@ -0,0 +1,1831 @@
+import string #This line defines string so as to be able to get rid of punctuation.
import re

name = raw_input("Enter file:")
if len(name) < 1 : name = "manqnohyphen.txt"
print "Much Ado about Nothing statistics"
handle = open(name)

text = handle.read()
print ''
print 'There are', len(text), 'characters in the text.' #prints the number of characters in the text
lexis = text.split()
print ''
print 'There are', len(lexis), 'words in the text.'

handle = open(name)

counts = dict()
for line in handle:
	line = line.rstrip()
	line = line.translate(None, string.punctuation) #This line gets rid of punctuation.
	words = line.split()
	for word in words:
		wrd = word.lower()
		counts[wrd] = counts.get(wrd,0) + 1
#print counts



lst = list()
for lexicon,occurrence in counts.items():
	lst.append((occurrence, lexicon))
	
print 'The least frequently used words are:'
lst.sort()
for occurrence, lexicon in lst[:]:
	if occurrence == 1:
		print lexicon, occurrence

print 'The most frequently used words are:'	
lst.sort(reverse=True)

for occurrence, lexicon in lst[:30]:
	print lexicon, occurrence


handle = open("manqnohyphen.txt")

print "Compound words divided with a hyphen:"
lineno = 0
compoundno = 0
for line in handle:
	line = line.rstrip()
	lineno = lineno + 1
	cp = re.findall("\S+-\S+", line)
	if len(cp) > 0 :
		compoundno = compoundno + 1
		print cp
print "There are", lineno, "lines in the play."	
print "The number of lines in which compounds divided with hyphen appear are", compoundno, "."
relfre = float(compoundno) / lineno
print "The relative frequency of the lines in which there are hyphenated compounds is:", relfre, "."

handle = open("manqnohyphen.txt")
text = handle.read()
lexis = text.split()
print ''
print 'There are', len(lexis), 'words in the text.'
relfrewords = 56.0 / len(lexis)
print relfrewords
|
|
65ac6b1281ff9cbd17bd0d8dd2f9c188d71215f9
|
Fix log capture on py3
|
test/service.py
|
test/service.py
|
import logging
import re
import select
import subprocess
import threading
import time
__all__ = [
'ExternalService',
'SpawnedService',
]
class ExternalService(object):
def __init__(self, host, port):
logging.info("Using already running service at %s:%d", host, port)
self.host = host
self.port = port
def open(self):
pass
def close(self):
pass
class SpawnedService(threading.Thread):
def __init__(self, args=None, env=None):
threading.Thread.__init__(self)
if args is None:
raise TypeError("args parameter is required")
self.args = args
self.env = env
self.captured_stdout = []
self.captured_stderr = []
self.should_die = threading.Event()
def run(self):
self.run_with_handles()
def run_with_handles(self):
self.child = subprocess.Popen(
self.args,
env=self.env,
bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
alive = True
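        # Multiplex the child's stdout and stderr with select() so that
        # neither pipe can block, and bail out once the child exits.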
while True:
(rds, _, _) = select.select([self.child.stdout, self.child.stderr], [], [], 1)
if self.child.stdout in rds:
line = self.child.stdout.readline()
self.captured_stdout.append(line)
if self.child.stderr in rds:
line = self.child.stderr.readline()
self.captured_stderr.append(line)
if self.should_die.is_set():
self.child.terminate()
alive = False
poll_results = self.child.poll()
if poll_results is not None:
if not alive:
break
else:
self.dump_logs()
raise RuntimeError("Subprocess has died. Aborting. (args=%s)" % ' '.join(str(x) for x in self.args))
def dump_logs(self):
logging.critical('stderr')
for line in self.captured_stderr:
logging.critical(line.rstrip())
logging.critical('stdout')
for line in self.captured_stdout:
logging.critical(line.rstrip())
def wait_for(self, pattern, timeout=30):
t1 = time.time()
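        # Poll the captured output until the pattern appears or the timeout expires.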
while True:
t2 = time.time()
if t2 - t1 >= timeout:
try:
self.child.kill()
except:
logging.exception("Received exception when killing child process")
self.dump_logs()
raise RuntimeError("Waiting for %r timed out after %d seconds" % (pattern, timeout))
if re.search(pattern, '\n'.join(self.captured_stdout), re.IGNORECASE) is not None:
logging.info("Found pattern %r in %d seconds via stdout", pattern, (t2 - t1))
return
if re.search(pattern, '\n'.join(self.captured_stderr), re.IGNORECASE) is not None:
logging.info("Found pattern %r in %d seconds via stderr", pattern, (t2 - t1))
return
time.sleep(0.1)
def start(self):
threading.Thread.start(self)
def stop(self):
self.should_die.set()
self.join()
|
Python
| 0
|
@@ -1309,32 +1309,48 @@
dout.append(line
+.decode('utf-8')
)%0A%0A i
|
b66b02be95e7b0c36a9ced53b07d91298190ca4a
|
Add tests for mpi4py.dl module
|
test/test_dl.py
|
test/test_dl.py
|
Python
| 0
|
@@ -0,0 +1,1574 @@
+from mpi4py import dl%0Aimport mpiunittest as unittest%0Aimport sys%0Aimport os%0A%0Aclass TestDL(unittest.TestCase):%0A%0A def testDL1(self):%0A if sys.platform == 'darwin':%0A libm = 'libm.dylib'%0A else:%0A libm = 'libm.so'%0A%0A handle = dl.dlopen(libm, dl.RTLD_LOCAL%7Cdl.RTLD_LAZY)%0A self.assertTrue(handle != 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A symbol = dl.dlsym(handle, 'sqrt')%0A self.assertTrue(symbol != 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A symbol = dl.dlsym(handle, 'xxxxx')%0A self.assertTrue(symbol == 0)%0A self.assertTrue(dl.dlerror() is not None)%0A%0A ierr = dl.dlclose(handle)%0A self.assertTrue(ierr == 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A def testDL2(self):%0A handle = dl.dlopen(None, dl.RTLD_GLOBAL%7Cdl.RTLD_NOW)%0A self.assertTrue(handle != 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A symbol = dl.dlsym(handle, 'malloc')%0A self.assertTrue(symbol != 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A symbol = dl.dlsym(handle, '!@#$%25%5E&*()')%0A self.assertTrue(symbol == 0)%0A self.assertTrue(dl.dlerror() is not None)%0A%0A ierr = dl.dlclose(handle)%0A self.assertTrue(ierr == 0)%0A self.assertTrue(dl.dlerror() is None)%0A%0A def testDL3(self):%0A handle = dl.dlopen('xxxxx', dl.RTLD_LOCAL%7Cdl.RTLD_LAZY)%0A self.assertTrue(handle == 0)%0A self.assertTrue(dl.dlerror() is not None)%0A%0A%0Aif os.name != 'posix':%0A del TestDL%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
ffabca96535a89bd7f3ef640d738fa2f7dc0a0af
|
fix for last commit
|
billy/importers/committees.py
|
billy/importers/committees.py
|
#!/usr/bin/env python
import os
import glob
import json
import datetime
import logging
from billy.core import db
from billy.core import settings
from billy.importers.names import get_legislator_id
from billy.importers.utils import prepare_obj, update, insert_with_id
import pymongo
logger = logging.getLogger('billy')
def ensure_indexes():
db.committees.ensure_index([('_all_ids', pymongo.ASCENDING)])
db.committees.ensure_index([(settings.LEVEL_FIELD, pymongo.ASCENDING),
('committee', pymongo.ASCENDING),
('subcommittee', pymongo.ASCENDING)])
def import_committees_from_legislators(current_term, abbr):
""" create committees from legislators that have committee roles """
# for all current legislators
for legislator in db.legislators.find({'roles': {'$elemMatch': {
'term': current_term, settings.LEVEL_FIELD: abbr}}}):
# for all committee roles
for role in legislator['roles']:
if (role['type'] == 'committee member' and
'committee_id' not in role):
spec = {settings.LEVEL_FIELD: abbr,
'chamber': role['chamber'],
'committee': role['committee']}
if 'subcommittee' in role:
spec['subcommittee'] = role['subcommittee']
committee = db.committees.find_one(spec)
if not committee:
committee = spec
committee['_type'] = 'committee'
# copy LEVEL_FIELD from legislator to committee
committee[settings.LEVEL_FIELD] = \
legislator[settings.LEVEL_FIELD]
committee['members'] = []
committee['sources'] = []
if 'subcommittee' not in committee:
committee['subcommittee'] = None
insert_with_id(committee)
# clear sources before we reimport (in case someone has left)
committee['sources'] = []
for member in committee['members']:
if member['leg_id'] == legislator['leg_id']:
break
else:
committee['members'].append(
{'name': legislator['full_name'],
'leg_id': legislator['leg_id'],
'role': role.get('position') or 'member'})
for source in legislator['sources']:
if source not in committee['sources']:
committee['sources'].append(source)
db.committees.save(committee, safe=True)
role['committee_id'] = committee['_id']
db.legislators.save(legislator, safe=True)
def import_committee(data, current_session, current_term):
abbr = data[settings.LEVEL_FIELD]
spec = {settings.LEVEL_FIELD: abbr,
'chamber': data['chamber'],
'committee': data['committee']}
if 'subcommittee' in data:
spec['subcommittee'] = data['subcommittee']
# insert/update the actual committee object
committee = db.committees.find_one(spec)
committee_return_status = None
if not committee:
insert_with_id(data)
committee = data
committee_return_status = "insert"
else:
update(committee, data, db.committees)
committee_return_status = "update"
# deal with the members, add roles
for member in committee['members']:
if not member['name']:
continue
leg_id = get_legislator_id(abbr, current_session, data['chamber'],
member['name'])
if not leg_id:
logger.debug("No matches for %s" % member['name'].encode('ascii',
'ignore'))
member['leg_id'] = None
continue
legislator = db.legislators.find_one({'_all_ids': leg_id})
if not legislator:
logger.warning('No legislator with ID %s' % leg_id)
member['leg_id'] = None
continue
member['leg_id'] = legislator['_id']
for role in legislator['roles']:
if (role['type'] == 'committee member' and
role['term'] == current_term and
role.get('committee_id') == committee['_id']):
# if the position hadn't been copied over before, copy it now
if role.get('position') != member['role']:
role['position'] = member['role']
db.legislators.save(legislator, safe=True)
break
else:
new_role = {'type': 'committee member',
'committee': committee['committee'],
'term': current_term,
'chamber': committee['chamber'],
'committee_id': committee['_id'],
'position': member['role']}
# copy over all necessary fields from committee
new_role[settings.LEVEL_FIELD] = committee[settings.LEVEL_FIELD]
if 'subcommittee' in committee:
new_role['subcommittee'] = committee['subcommittee']
legislator['roles'].append(new_role)
legislator['updated_at'] = datetime.datetime.utcnow()
db.legislators.save(legislator, safe=True)
db.committees.save(committee, safe=True)
return committee_return_status
def import_committees(abbr, data_dir):
data_dir = os.path.join(data_dir, abbr)
pattern = os.path.join(data_dir, 'committees', '*.json')
counts = {
"update": 0,
"insert": 0,
"total": 0
}
meta = db.metadata.find_one({'_id': abbr})
current_term = meta['terms'][-1]['name']
current_session = meta['terms'][-1]['sessions'][-1]
paths = glob.glob(pattern)
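    # Clear existing membership lists before reimporting, so members who
    # have left don't linger on the committees.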
for committee in db.committees.find({settings.LEVEL_FIELD: abbr}):
committee['members'] = []
db.committees.save(committee, safe=True)
# import committees from legislator roles, no standalone committees scraped
if not paths:
import_committees_from_legislators(current_term, abbr)
for path in paths:
with open(path) as f:
data = prepare_obj(json.load(f))
counts["total"] += 1
ret = import_committee(data, current_session, current_term)
counts[ret] += 1
logger.info('imported %s committee files' % len(paths))
link_parents(abbr)
ensure_indexes()
return counts
def link_parents(abbr):
for comm in db.committees.find({settings.LEVEL_FIELD: abbr}):
sub = comm.get('subcommittee')
if not sub:
comm['parent_id'] = None
else:
parent = db.committees.find_one({settings.LEVEL_FIELD: abbr,
'chamber': comm['chamber'],
'committee': comm['committee']})
if not parent:
logger.warning("Failed finding parent for: %s" % sub)
comm['parent_id'] = None
else:
comm['parent_id'] = parent['_id']
db.committees.save(comm, safe=True)
|
Python
| 0
|
@@ -751,16 +751,189 @@
es %22%22%22%0A%0A
+ # first, clear committee sources (ones that aren't updated won't be saved)%0A for com in db.committees.find(%7Bsettings.LEVEL_FIELD: abbr%7D):%0A com%5B'sources'%5D = %5B%5D%0A%0A
# fo
@@ -2161,129 +2161,8 @@
e)%0A%0A
- # clear sources before we reimport (in case someone has left)%0A committee%5B'sources'%5D = %5B%5D%0A%0A
|
cf3cae6493a369173244e05d190cceae41b9abbd
|
Add some coverage for olog callback.
|
bluesky/tests/test_olog_cb.py
|
bluesky/tests/test_olog_cb.py
|
Python
| 0
|
@@ -0,0 +1,1447 @@
+from bluesky import Msg%0Afrom bluesky.callbacks.olog import logbook_cb_factory%0A%0Atext = %5B%5D%0A%0A%0Adef f(**kwargs):%0A text.append(kwargs%5B'text'%5D)%0A%0A%0Adef test_default_template(fresh_RE):%0A text.clear()%0A fresh_RE.subscribe('start', logbook_cb_factory(f))%0A fresh_RE(%5BMsg('open_run', plan_args=%7B%7D), Msg('close_run')%5D)%0A assert len(text%5B0%5D) %3E 0%0A%0A%0Adef test_trivial_template(fresh_RE):%0A text.clear()%0A fresh_RE.subscribe('start', logbook_cb_factory(f, desc_template='hello'))%0A fresh_RE(%5BMsg('open_run', plan_args=%7B%7D), Msg('close_run')%5D)%0A assert text%5B0%5D == 'hello'%0A%0A # smoke test the long_template%0A fresh_RE.subscribe('start', logbook_cb_factory(f, long_template='hello'))%0A fresh_RE(%5BMsg('open_run', plan_args=%7B%7D), Msg('close_run')%5D)%0A%0Adef test_template_dispatch(fresh_RE):%0A disp = %7B'a': 'A', 'b': 'B'%7D%0A text.clear()%0A fresh_RE.subscribe('start', logbook_cb_factory(f, desc_dispatch=disp))%0A fresh_RE(%5BMsg('open_run', plan_name='a', plan_args=%7B%7D),%0A Msg('close_run')%5D)%0A fresh_RE(%5BMsg('open_run', plan_name='b', plan_args=%7B%7D),%0A Msg('close_run')%5D)%0A assert text%5B0%5D == 'A'%0A assert text%5B1%5D == 'B'%0A%0A # smoke test the long_dispatch%0A fresh_RE.subscribe('start', logbook_cb_factory(f, long_dispatch=disp))%0A fresh_RE(%5BMsg('open_run', plan_name='a', plan_args=%7B%7D),%0A Msg('close_run')%5D)%0A fresh_RE(%5BMsg('open_run', plan_name='b', plan_args=%7B%7D),%0A Msg('close_run')%5D)%0A
|
|
57cfba649e3d7a441e7c10a25448ccb8413b964e
|
Put PageAdmin back
|
mangaki/mangaki/admin.py
|
mangaki/mangaki/admin.py
|
# coding=utf8
from mangaki.models import Anime, Track, OST, Artist, Rating, Page, Suggestion
from django.forms import Textarea
from django.db import models
from django.contrib import admin, messages
class AnimeAdmin(admin.ModelAdmin):
search_fields = ('id', 'title')
list_display = ('id', 'title', 'nsfw')
list_filter = ('nsfw',)
class TrackAdmin(admin.ModelAdmin):
pass
class OSTAdmin(admin.ModelAdmin):
pass
class ArtistAdmin(admin.ModelAdmin):
pass
class RatingAdmin(admin.ModelAdmin):
pass
class PageAdmin(admin.ModelAdmin):
pass
class SuggestionAdmin(admin.ModelAdmin):
list_display = ('work', 'problem', 'date', 'user', 'is_checked')
list_filter = ('problem',)
admin.site.register(Anime, AnimeAdmin)
admin.site.register(Track, TrackAdmin)
admin.site.register(OST, OSTAdmin)
admin.site.register(Artist, ArtistAdmin)
admin.site.register(Rating, RatingAdmin)
admin.site.register(Suggestion, SuggestionAdmin)
|
Python
| 0
|
@@ -899,24 +899,61 @@
atingAdmin)%0A
+admin.site.register(Page, PageAdmin)%0A
admin.site.r
|
e3d92ce2cd17a967ac19aecad2998c4094f2ae11
|
Add script to draw NACA foil
|
run.py
|
run.py
|
Python
| 0
|
@@ -0,0 +1,1228 @@
+#!/usr/bin/env python%0A%22%22%22%0AThis script generates and draws a NACA foil profile, as groundwork for a%0Aforce and velocity vector diagram for a cross-flow turbine.%0A%22%22%22%0A%0Aimport gizeh as gz%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0A%0A%0Adef gen_naca_points(naca=%220020%22, c=100, npoints=100):%0A    %22%22%22Generate points for a NACA foil.%22%22%22%0A    x = np.linspace(0, 1, npoints)*c%0A    t = float(naca%5B2:%5D)/100.0%0A    y = 5.0*t*c*(0.2969*np.sqrt(x/c) - 0.1260*(x/c) - 0.3516*(x/c)**2 %5C%0A        + 0.2843*(x/c)**3 - 0.1015*(x/c)**4)%0A    y = np.append(y, -y%5B::-1%5D)%0A    x = np.append(x, x%5B::-1%5D)%0A    points = %5B(x0, y0) for x0, y0 in zip(x, y)%5D%0A    return points%0A%0A%0Adef test_gen_naca_points():%0A    points = gen_naca_points()%0A    x = %5B%5D%0A    y = %5B%5D%0A    for p in points:%0A        x.append(p%5B0%5D)%0A        y.append(p%5B1%5D)%0A    fig, ax = plt.subplots()%0A    ax.plot(x, y, %22o%22)%0A    ax.set_aspect(1)%0A    plt.show()%0A%0A%0Adef draw_foil(naca=%220020%22, c=100):%0A    %22%22%22Draw a NACA foil as a gizeh polyline.%22%22%22%0A    points = gen_naca_points(naca, c)%0A    line = gz.polyline(points, close_path=False, stroke_width=2, xy=(300, 300))%0A    return line%0A%0A%0Adef main():%0A    canvas = gz.Surface(width=700, height=700)%0A    foil = draw_foil()%0A    foil.draw(canvas)%0A    canvas.write_to_png(%22cft-vectors.png%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A    main()%0A
|
|
4b8c5dd8ebc4261bcdb5e9f92e7936eba68fd5ad
|
Add BNB prediction script
|
bernoulli_nb/bnb_predict.py
|
bernoulli_nb/bnb_predict.py
|
Python
| 0.000001
|
@@ -0,0 +1,1640 @@
+import sys%0A%0Afrom sklearn.naive_bayes import BernoulliNB as BNB%0Aimport matplotlib.pyplot as plt%0Aimport numpy as np%0A%0Adef read_variants(flname):%0A%09fl = open(flname)%0A%09markers = %5B%5D%0A%09individuals = %5B%5D%0A%09population_ids = %5B%5D%0A%09population = -1%0A%09for ln in fl:%0A%09%09if %22Marker%22 in ln:%0A%09%09%09if len(individuals) == 0:%0A%09%09%09%09continue%0A%0A%09%09%09marker = dict()%0A%09%09%09marker%5B%22individuals%22%5D = np.array(individuals)%0A%09%09%09marker%5B%22population_labels%22%5D = np.array(population_ids)%0A%09%09%09markers.append(marker)%0A%09%09%09population = -1%0A%09%09%09population_ids = %5B%5D%0A%09%09%09individuals = %5B%5D%0A%09%09elif %22Population%22 in ln:%0A%09%09%09population += 1%0A%09%09else:%0A%09%09%09individual = map(float, ln.strip().split())%0A%09%09%09individuals.append(individual)%0A%09%09%09population_ids.append(population)%0A%0A%09if len(individuals) != 0:%0A%09%09marker = dict()%0A%09%09marker%5B%22individuals%22%5D = np.array(individuals)%0A%09%09marker%5B%22population_labels%22%5D = np.array(population_ids)%0A%09%09markers.append(marker)%0A%09fl.close()%0A%09return markers%0A%0Adef predict_scores(markers, threshold=0.05):%0A%09scores = %5B%5D%0A%09for i, marker in enumerate(markers):%0A%09%09try:%0A%09%09%09bnb = BNB()%0A%09%09%09bnb.fit(marker%5B%22individuals%22%5D, marker%5B%22population_labels%22%5D)%0A%09%09%09# Keep (score, marker index) pairs so both branches append the same shape.%0A%09%09%09scores.append((bnb.score(marker%5B%22individuals%22%5D, marker%5B%22population_labels%22%5D), i))%0A%09%09except Exception:%0A%09%09%09scores.append((0.0, i))%0A%09scores.sort()%0A%09scores.reverse()%0A%0A%09cutoff_idx = int(threshold * len(scores))%0A%0A%09return scores%5B:cutoff_idx%5D%0A%0Adef write_scores(scores, flname):%0A%09fl = open(flname, %22w%22)%0A%09for score, loci in scores:%0A%09%09fl.write(%22%25s %25s%5Cn%22 %25 (loci, score))%0A%09fl.close()%0A%0Aif __name__ == %22__main__%22:%0A%09variants_fl = sys.argv%5B1%5D%0A%09scores_flname = sys.argv%5B2%5D%0A%0A%09variants = read_variants(variants_fl)%0A%0A%09scores = predict_scores(variants)%0A%09write_scores(scores, scores_flname)%0A%0A
|
|
2ccbed0e4d867652554dff208d7c1b7bdd1710f9
|
add bench/test_curry
|
bench/test_curry.py
|
bench/test_curry.py
|
Python
| 0.000014
|
@@ -0,0 +1,152 @@
+from toolz.curried import get%0A%0Apairs = %5B(1, 2) for i in range(100000)%5D%0A%0Adef test_get_curried():%0A first = get(0)%0A for p in pairs:%0A first(p)%0A
|
|
f864faea8725659c001783de3a9451f9b693beb4
|
support exporting all data for SMS Gateway Fee Criteria report
|
corehq/apps/smsbillables/interface.py
|
corehq/apps/smsbillables/interface.py
|
from django.db.models.aggregates import Count
from corehq.apps.accounting.filters import DateCreatedFilter
from corehq.apps.reports.datatables import (
DataTablesColumn,
DataTablesHeader,
)
from corehq.apps.reports.generic import GenericTabularReport
from corehq.apps.sms.models import (
INCOMING,
OUTGOING,
)
from corehq.apps.smsbillables.dispatcher import SMSAdminInterfaceDispatcher
from corehq.apps.smsbillables.filters import (
CountryCodeFilter,
DateSentFilter,
DirectionFilter,
DomainFilter,
GatewayTypeFilter,
ShowBillablesFilter,
SpecificGateway,
)
from corehq.apps.smsbillables.models import (
SmsBillable,
SmsGatewayFee,
SmsGatewayFeeCriteria,
)
class SMSBillablesInterface(GenericTabularReport):
base_template = "accounting/report_filter_actions.html"
section_name = "Accounting"
dispatcher = SMSAdminInterfaceDispatcher
name = "SMS Billables"
description = "List of all SMS Billables"
slug = "sms_billables"
ajax_pagination = True
exportable = True
exportable_all = True
fields = [
'corehq.apps.smsbillables.interface.DateSentFilter',
'corehq.apps.accounting.interface.DateCreatedFilter',
'corehq.apps.smsbillables.interface.ShowBillablesFilter',
'corehq.apps.smsbillables.interface.DomainFilter',
]
@property
def headers(self):
return DataTablesHeader(
DataTablesColumn("Date of Message"),
DataTablesColumn("Project Space"),
DataTablesColumn("Direction"),
DataTablesColumn("Gateway Fee", sortable=False),
DataTablesColumn("Usage Fee", sortable=False),
DataTablesColumn("Message Log ID", sortable=False),
DataTablesColumn("Phone Number"),
DataTablesColumn("Is Valid?", sortable=False),
DataTablesColumn("Date Created"),
)
@property
def sort_field(self):
sort_fields = [
'date_sent',
'domain',
'direction',
'phone_number',
'date_created',
]
sort_index = int(self.request.GET.get('iSortCol_0', 2))
sort_index = 1 if sort_index == 0 else sort_index - 1
field = sort_fields[sort_index]
sort_descending = self.request.GET.get('sSortDir_0', 'asc') == 'desc'
return field if not sort_descending else '-{0}'.format(field)
@property
def shared_pagination_GET_params(self):
return DateSentFilter.shared_pagination_GET_params(self.request) + \
DateCreatedFilter.shared_pagination_GET_params(self.request) + [
{
'name': DateCreatedFilter.optional_filter_slug(),
'value': DateCreatedFilter.optional_filter_string_value(self.request)
},
{
'name': ShowBillablesFilter.slug,
                    'value': ShowBillablesFilter.get_value(self.request, self.domain)
                },
{
'name': DomainFilter.slug,
'value': DomainFilter.get_value(self.request, self.domain)
},
]
@property
def total_records(self):
query = self.sms_billables
return query.aggregate(Count('id'))['id__count']
@property
def rows(self):
query = self.sms_billables
query = query.order_by(self.sort_field)
sms_billables = query[self.pagination.start:(self.pagination.start + self.pagination.count)]
return [
[
sms_billable.date_sent,
sms_billable.domain,
("Incoming"
if sms_billable.direction == INCOMING
else ("Outgoing"
if sms_billable.direction == OUTGOING
else "")),
sms_billable.gateway_charge,
(sms_billable.usage_fee.amount
if sms_billable.usage_fee is not None else ""),
sms_billable.log_id,
sms_billable.phone_number,
sms_billable.is_valid,
sms_billable.date_created,
]
for sms_billable in sms_billables
]
@property
def sms_billables(self):
selected_billables = SmsBillable.objects.filter(
date_sent__gte=DateSentFilter.get_start_date(self.request),
date_sent__lte=DateSentFilter.get_end_date(self.request),
)
if DateCreatedFilter.use_filter(self.request):
selected_billables = selected_billables.filter(
date_created__gte=DateCreatedFilter.get_start_date(
self.request),
date_created__lte=DateCreatedFilter.get_end_date(self.request),
)
show_billables = ShowBillablesFilter.get_value(
self.request, self.domain)
if show_billables:
selected_billables = selected_billables.filter(
is_valid=(show_billables == ShowBillablesFilter.VALID),
)
domain = DomainFilter.get_value(self.request, self.domain)
if domain:
selected_billables = selected_billables.filter(
domain=domain,
)
return selected_billables
class SMSGatewayFeeCriteriaInterface(GenericTabularReport):
base_template = "accounting/report_filter_actions.html"
section_name = "Accounting"
dispatcher = SMSAdminInterfaceDispatcher
name = "SMS Gateway Fee Criteria"
description = "List of all SMS Gateway Fee Criteria"
slug = "sms_gateway_fee_criteria"
exportable = True
exportable_all = True
fields = [
'corehq.apps.smsbillables.interface.GatewayTypeFilter',
'corehq.apps.smsbillables.interface.SpecificGateway',
'corehq.apps.smsbillables.interface.DirectionFilter',
'corehq.apps.smsbillables.interface.CountryCodeFilter',
]
@property
def headers(self):
return DataTablesHeader(
DataTablesColumn("Gateway Type"),
DataTablesColumn("Specific Gateway"),
DataTablesColumn("Direction"),
DataTablesColumn("Country Code"),
DataTablesColumn("Fee (Amount, Currency)")
)
@property
def rows(self):
rows = []
for criteria in self.sms_gateway_fee_criteria:
gateway_fee = SmsGatewayFee.get_by_criteria_obj(criteria)
rows.append([
criteria.backend_api_id,
(criteria.backend_instance
if criteria.backend_instance is not None else "Any"),
criteria.direction,
(criteria.country_code
if criteria.country_code is not None else "Any"),
"%(amount)s %(currency)s" % {
'amount': str(gateway_fee.amount),
'currency': gateway_fee.currency.code,
},
])
return rows
@property
def sms_gateway_fee_criteria(self):
selected_criteria = SmsGatewayFeeCriteria.objects.filter()
gateway_type = GatewayTypeFilter.get_value(self.request, self.domain)
if gateway_type:
selected_criteria = selected_criteria.filter(
backend_api_id=gateway_type,
)
specific_gateway = SpecificGateway.get_value(self.request, self.domain)
if specific_gateway:
selected_criteria = selected_criteria.filter(
backend_instance=specific_gateway,
)
direction = DirectionFilter.get_value(self.request, self.domain)
if direction:
selected_criteria = selected_criteria.filter(
direction=direction,
)
country_code = CountryCodeFilter.get_value(self.request, self.domain)
if country_code:
selected_criteria = selected_criteria.filter(
country_code=int(country_code),
)
return selected_criteria
|
Python
| 0
|
@@ -6245,32 +6245,100 @@
y)%22)%0A )%0A%0A
+ @property%0A def get_all_rows(self):%0A return self.rows%0A%0A
@property%0A
|
3017396176491dae5bab3effda82a59e64a3591f
|
Add button_example
|
examples/pygobject/button_example.py
|
examples/pygobject/button_example.py
|
Python
| 0.000002
|
@@ -0,0 +1,1504 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A# Copyright 2011 Sebastian P%C3%B6lsterl%0A#%0A# Permission is granted to copy, distribute and/or modify this document%0A# under the terms of the GNU Free Documentation License, Version 1.3%0A# or any later version published by the Free Software Foundation;%0A# with no Invariant Sections, no Front-Cover Texts, and no Back-Cover Texts.%0A%0Aimport sys%0Asys.path.insert(0, '../..')%0Aimport pgi%0Apgi.install_as_gi()%0A%0Afrom gi.repository import Gtk%0A%0Aclass ButtonWindow(Gtk.Window):%0A%0A def __init__(self):%0A Gtk.Window.__init__(self, title=%22Button Demo%22)%0A self.set_border_width(10)%0A%0A hbox = Gtk.Box(spacing=6)%0A self.add(hbox)%0A%0A button = Gtk.Button(%22Click Me%22)%0A button.connect(%22clicked%22, self.on_click_me_clicked)%0A hbox.pack_start(button, True, True, 0)%0A%0A button = Gtk.Button(stock=Gtk.STOCK_OPEN)%0A button.connect(%22clicked%22, self.on_open_clicked)%0A hbox.pack_start(button, True, True, 0)%0A%0A button = Gtk.Button(%22_Close%22, use_underline=True)%0A button.connect(%22clicked%22, self.on_close_clicked)%0A hbox.pack_start(button, True, True, 0)%0A%0A def on_click_me_clicked(self, button):%0A print %22%5C%22Click me%5C%22 button was clicked%22%0A%0A def on_open_clicked(self, button):%0A print %22%5C%22Open%5C%22 button was clicked%22%0A%0A def on_close_clicked(self, button):%0A print %22Closing application%22%0A Gtk.main_quit()%0A%0Awin = ButtonWindow()%0Awin.connect(%22delete-event%22, Gtk.main_quit)%0Awin.show_all()%0AGtk.main()%0A
|
|
c7a543237e7d2d40f234e19341be7a835c1ba3ef
|
Remove old page admin code for forcing order/slug to be set.
|
mezzanine/pages/admin.py
|
mezzanine/pages/admin.py
|
from copy import deepcopy
from django.contrib import admin
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import NoReverseMatch
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from mezzanine.pages.models import Page, RichTextPage, Link
from mezzanine.core.admin import DisplayableAdmin
from mezzanine.utils.urls import admin_url
page_fieldsets = deepcopy(DisplayableAdmin.fieldsets)
page_fieldsets[0][1]["fields"] += ("in_menus", "login_required",)
class PageAdmin(DisplayableAdmin):
"""
Admin class for the ``Page`` model and all subclasses of
``Page``. Handles redirections between admin interfaces for the
``Page`` model and its subclasses.
"""
fieldsets = page_fieldsets
def __init__(self, *args, **kwargs):
"""
For ``Page`` subclasses that are registered with an Admin class
that doesn't implement fieldsets, add any extra model fields
to this instance's fieldsets. This mimics Django's behaviour of
adding all model fields when no fieldsets are defined on the
Admin class.
"""
super(PageAdmin, self).__init__(*args, **kwargs)
# Test that the fieldsets don't differ from PageAdmin's.
if self.model is not Page and self.fieldsets == PageAdmin.fieldsets:
# Make a copy so that we aren't modifying other Admin
# classes' fieldsets.
self.fieldsets = deepcopy(self.fieldsets)
# Insert each field between the publishing fields and nav
# fields. Do so in reverse order to retain the order of
# the model's fields.
for field in reversed(self.model._meta.fields):
if field not in Page._meta.fields and field.name != "page_ptr":
self.fieldsets[0][1]["fields"].insert(3, field.name)
def in_menu(self):
"""
Hide subclasses from the admin menu.
"""
return self.model is Page
def _check_permission(self, request, page, permission):
"""
Runs the custom permission check and raises an
exception if False.
"""
if not getattr(page, "can_" + permission)(request):
raise PermissionDenied
def add_view(self, request, extra_context=None, **kwargs):
"""
For the ``Page`` model, redirect to the add view for the
``RichText`` model.
"""
if self.model is Page:
try:
add_url = admin_url(RichTextPage, "add")
return HttpResponseRedirect(add_url)
except NoReverseMatch:
pass
return super(PageAdmin, self).add_view(request, **kwargs)
def change_view(self, request, object_id, extra_context=None):
"""
For the ``Page`` model, check ``page.get_content_model()``
for a subclass and redirect to its admin change view.
Also enforce custom change permissions for the page instance.
"""
page = get_object_or_404(Page, pk=object_id)
content_model = page.get_content_model()
self._check_permission(request, content_model, "change")
if self.model is Page:
if content_model is not None:
change_url = admin_url(content_model.__class__, "change",
content_model.id)
return HttpResponseRedirect(change_url)
extra_context = extra_context or {}
extra_context["hide_delete_link"] = not page.can_delete(request)
extra_context["hide_slug_field"] = page.overridden()
return super(PageAdmin, self).change_view(request, object_id,
extra_context=extra_context)
def delete_view(self, request, object_id, extra_context=None):
"""
Enforce custom delete permissions for the page instance.
"""
page = get_object_or_404(Page, pk=object_id)
content_model = page.get_content_model()
self._check_permission(request, content_model, "delete")
return super(PageAdmin, self).delete_view(request, object_id,
extra_context)
def changelist_view(self, request, extra_context=None):
"""
Redirect to the ``Page`` changelist view for ``Page``
subclasses.
"""
if self.model is not Page:
return HttpResponseRedirect(admin_url(Page, "changelist"))
return super(PageAdmin, self).changelist_view(request, extra_context)
def save_model(self, request, obj, form, change):
"""
Set the ID of the parent page if passed in via querystring.
"""
# Force parent to be saved to trigger handling of ordering and slugs.
parent = request.GET.get("parent")
if parent is not None and not change:
obj.parent_id = parent
obj._order = None
obj.slug = None
obj.save()
super(PageAdmin, self).save_model(request, obj, form, change)
def _maintain_parent(self, request, response):
"""
Maintain the parent ID in the querystring for response_add and
response_change.
"""
location = response._headers.get("location")
parent = request.GET.get("parent")
if parent and location and "?" not in location[1]:
url = "%s?parent=%s" % (location[1], parent)
return HttpResponseRedirect(url)
return response
def response_add(self, request, obj):
"""
Enforce page permissions and maintain the parent ID in the
querystring.
"""
response = super(PageAdmin, self).response_add(request, obj)
return self._maintain_parent(request, response)
def response_change(self, request, obj):
"""
Enforce page permissions and maintain the parent ID in the
querystring.
"""
response = super(PageAdmin, self).response_change(request, obj)
return self._maintain_parent(request, response)
# Drop the meta data fields, and move slug towards the top.
link_fieldsets = deepcopy(page_fieldsets[:1])
link_fieldsets[0][1]["fields"] = link_fieldsets[0][1]["fields"][:-1]
link_fieldsets[0][1]["fields"].insert(1, "slug")
class LinkAdmin(PageAdmin):
fieldsets = link_fieldsets
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Make slug mandatory.
"""
if db_field.name == "slug":
kwargs["required"] = True
return super(LinkAdmin, self).formfield_for_dbfield(db_field, **kwargs)
admin.site.register(Page, PageAdmin)
admin.site.register(RichTextPage, PageAdmin)
admin.site.register(Link, LinkAdmin)
|
Python
| 0
|
@@ -4945,66 +4945,8 @@
ent%0A
- obj._order = None%0A obj.slug = None%0A
|
efc7097c0248394716144f552522daa1d44b74ce
|
add test python script
|
twython_test.py
|
twython_test.py
|
Python
| 0.000002
|
@@ -0,0 +1,367 @@
+import datetime%0Aimport sys%0Afrom twython import Twython%0A%0Afrom gettokens import tokens%0A%0Atweet = datetime.datetime.utcnow().strftime('%25Y-%25m-%25d %25H:%25M:%25S') %0A%0Aapi = Twython(tokens%5B'api_key'%5D,%0A tokens%5B'api_secret'%5D,%0A tokens%5B'access_token'%5D,%0A tokens%5B'access_token_secret'%5D)%0A%0Aapi.update_status(status=tweet)%0A%0A#print(%22Tweeted: %22 + tweet)%0A
|
|
bb5457ab736b5f94b9efb9772da16c5ebf97fa06
|
Test for interpolation functions
|
tests/theanolm/probfunctions_test.py
|
tests/theanolm/probfunctions_test.py
|
Python
| 0
|
@@ -0,0 +1,1753 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport unittest%0Aimport math%0Afrom theanolm.probfunctions import *%0A%0Aclass TestProbFunctions(unittest.TestCase):%0A def setUp(self):%0A pass%0A%0A def tearDown(self):%0A pass%0A%0A def test_interpolate_linear(self):%0A self.assertAlmostEqual(%0A interpolate_linear(math.log(0.2), math.log(0.3), 0.25),%0A math.log(0.25 * 0.2 + 0.75 * 0.3))%0A self.assertAlmostEqual(%0A interpolate_linear(float('-inf'), math.log(0.3), 0.01),%0A math.log(0.3 * 0.99))%0A self.assertEqual(%0A interpolate_linear(float('-inf'), -10.0, 0.0),%0A -10.0)%0A self.assertAlmostEqual(%0A interpolate_linear(math.log(0.3), float('-inf'), 0.99),%0A math.log(0.3 * 0.99))%0A self.assertEqual(%0A interpolate_linear(-10.0, float('-inf'), 1.0),%0A -10.0)%0A self.assertAlmostEqual(%0A interpolate_linear(-1001, -1002, 0.25),%0A -1001.64263, # ln(0.25 * exp(-1001) + 0.75 * exp(-1002))%0A places=4)%0A%0A def test_interpolate_loglinear(self):%0A self.assertEqual(%0A interpolate_loglinear(-1001.0, -1002.0, 0.25, 0.75),%0A -1001.75)%0A self.assertEqual(%0A interpolate_loglinear(float('-inf'), -1002.0, 0.25, 0.75),%0A float('-inf'))%0A self.assertEqual(%0A interpolate_loglinear(float('-inf'), -1002.0, 0.0, 1.0),%0A -1002.0)%0A self.assertEqual(%0A interpolate_loglinear(-1001.0, float('-inf'), 0.25, 0.75),%0A float('-inf'))%0A self.assertEqual(%0A interpolate_loglinear(-1001.0, float('-inf'), 1.0, 0.0),%0A -1001.0)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
b8e27997000c448d191121ef0f8b08ebca877ed0
|
Add GNU Prolog package.
|
var/spack/repos/builtin/packages/gnu-prolog/package.py
|
var/spack/repos/builtin/packages/gnu-prolog/package.py
|
Python
| 0
|
@@ -0,0 +1,1748 @@
+##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0Aclass GnuProlog(Package):%0A %22%22%22A free Prolog compiler with constraint solving over finite domains.%22%22%22%0A homepage = %22http://www.gprolog.org/%22%0A url = %22http://www.gprolog.org/gprolog-1.4.4.tar.gz%22%0A%0A version('1.4.4', '37009da471e5217ff637ad1c516448c8')%0A%0A parallel = False%0A%0A def install(self, spec, prefix):%0A with working_dir('src'):%0A configure('--with-install-dir=%25s' %25 prefix,%0A '--without-links-dir')%0A make()%0A make('install')%0A
|
|
d57a1049b45614abfe393f80328a07d78c98b5b2
|
Add system tray support
|
main.py
|
main.py
|
Python
| 0
|
@@ -0,0 +1,1328 @@
+#!/usr/bin/env python%0A# -*-coding: utf8 -*-%0A%0Aimport wx%0Aimport webbrowser%0A%0ATRAY_TOOLTIP = 'System Tray Demo'%0ATRAY_ICON = 'icon/network.png'%0A%0A%0Adef create_menu_item(menu, label, func):%0A    item = wx.MenuItem(menu, -1, label)%0A    menu.Bind(wx.EVT_MENU, func, id=item.GetId())%0A    menu.AppendItem(item)%0A    return item%0A%0A%0Aclass TaskBarIcon(wx.TaskBarIcon):%0A    def __init__(self):%0A        super(TaskBarIcon, self).__init__()%0A        self.set_icon(TRAY_ICON)%0A        self.Bind(wx.EVT_TASKBAR_LEFT_DCLICK, self.on_double_click)%0A%0A    def CreatePopupMenu(self):%0A        menu = wx.Menu()%0A        create_menu_item(menu, 'Open', self.on_double_click)%0A        create_menu_item(menu, 'Settings', self.on_settings)%0A        menu.AppendSeparator()%0A        create_menu_item(menu, 'Exit', self.on_exit)%0A        return menu%0A%0A    def set_icon(self, path):%0A        icon = wx.IconFromBitmap(wx.Bitmap(path))%0A        self.SetIcon(icon, TRAY_TOOLTIP)%0A%0A    def on_double_click(self, event):%0A        print 'Tray icon was double-clicked.'%0A        webbrowser.open('http://www.google.com', new=0, autoraise=True)%0A%0A    def on_settings(self, event):%0A        print 'Settings window.'%0A%0A    def on_exit(self, event):%0A        wx.CallAfter(self.Destroy)%0A%0A%0Adef main():%0A    app = wx.App(False)%0A    TaskBarIcon()%0A    app.MainLoop()%0A%0A%0Aif __name__ == '__main__':%0A    main()
|
|
d6120537ec982f50d08fa188e91c68c023809db3
|
Send ERROR when the user disconnects
|
txircd/modules/rfc/response_error.py
|
txircd/modules/rfc/response_error.py
|
Python
| 0.000001
|
@@ -0,0 +1,525 @@
+from twisted.plugin import IPlugin%0Afrom txircd.module_interface import IModuleData, ModuleData%0Afrom zope.interface import implements%0A%0Aclass ErrorResponse(ModuleData):%0A implements(IPlugin, IModuleData)%0A %0A name = %22errorResponse%22%0A core = True%0A %0A def actions(self):%0A return %5B(%22quit%22, 10, self.sendError)%5D%0A %0A def sendError(self, user, reason):%0A user.sendMessage(%22ERROR%22, %22:Closing Link: %7B%7D@%7B%7D %5B%7B%7D%5D%22.format(user.ident, user.host, reason), to=None, prefix=None)%0A%0AerrorResponse = ErrorResponse()
|
|
12dda7a7473c094b4483789630e178fd60b0eba4
|
add routes.py
|
web/transit/routes.py
|
web/transit/routes.py
|
Python
| 0.000002
|
@@ -0,0 +1,385 @@
+import transit%0Aimport transit.views.basic%0A%0Afrom transit import app%0A%0Aroutes = (%0A    ('/api/trains/', transit.views.basic.get_all_trains),%0A    ('/api/trains/line/%3Cline%3E/', transit.views.basic.get_trains_on_line),%0A    ('/api/trains/station/%3Cstation%3E/', transit.views.basic.get_trains_at_station)%0A)%0A%0Afor route, func in routes:%0A    app.add_url_rule(rule=route, endpoint=func.__name__, view_func=func)%0A
|
|
d940abd618a9da4494a9337134f5be1600316601
|
Fix an emit_series debug message
|
flexget/plugins/input/emit_series.py
|
flexget/plugins/input/emit_series.py
|
from __future__ import unicode_literals, division, absolute_import
import logging
from sqlalchemy import desc, and_
from flexget import plugin
from flexget.event import event
from flexget.entry import Entry
log = logging.getLogger('emit_series')
try:
from flexget.plugins.filter.series import SeriesTask, Series, Episode, Release, get_latest_release
except ImportError as e:
log.error(e.message)
raise plugin.DependencyError(issued_by='emit_series', missing='series')
class EmitSeries(object):
"""
Emit next episode number from all series configured in this task.
Supports only 'ep' and 'sequence' mode series.
"""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'from_start': {'type': 'boolean', 'default': False},
'backfill': {'type': 'boolean', 'default': False}
},
'additionalProperties': False
}
]
}
def ep_identifiers(self, season, episode):
return ['S%02dE%02d' % (season, episode),
'%dx%02d' % (season, episode)]
def sequence_identifiers(self, episode):
return ['%d' % episode,
'%02d' % episode,
'%03d' % episode]
def search_entry(self, series, season, episode, task, rerun=True):
if series.identified_by == 'ep':
search_strings = ['%s %s' % (series.name, id) for id in self.ep_identifiers(season, episode)]
series_id = 'S%02dE%02d' % (season, episode)
else:
search_strings = ['%s %s' % (series.name, id) for id in self.sequence_identifiers(episode)]
series_id = episode
entry = Entry(title=search_strings[0], url='',
search_strings=search_strings,
series_name=series.name,
series_season=season,
series_episode=episode,
series_id=series_id,
series_id_type=series.identified_by)
if rerun:
entry.on_complete(self.on_search_complete, task=task, identified_by=series.identified_by)
return entry
def on_task_input(self, task, config):
if not config:
return
if isinstance(config, bool):
config = {}
if not task.is_rerun:
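            # On a fresh run (not a rerun), reset the per-series flags that
            # track whether to try the next season.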
self.try_next_season = {}
entries = []
for seriestask in task.session.query(SeriesTask).filter(SeriesTask.name == task.name).all():
series = seriestask.series
if not series:
# TODO: How can this happen?
log.debug('Found SeriesTask item without series specified. Cleaning up.')
task.session.delete(seriestask)
continue
if series.identified_by not in ['ep', 'sequence']:
log.verbose('Can only emit ep or sequence based series. `%s` is identified_by %s' %
(series.name, series.identified_by or 'auto'))
continue
low_season = 0 if series.identified_by == 'ep' else -1
latest_season = get_latest_release(series)
if latest_season:
latest_season = latest_season.season
else:
latest_season = low_season + 1
if self.try_next_season.get(series.name):
entries.append(self.search_entry(series, latest_season + 1, 1, task))
else:
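                # Walk seasons backwards from the newest known one,
                # looking for episodes that are still missing.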
for season in xrange(latest_season, low_season, -1):
log.debug('Adding episodes for %d' % latest_season)
check_downloaded = not config.get('backfill')
latest = get_latest_release(series, season=season, downloaded=check_downloaded)
if series.begin and (not latest or latest < series.begin):
entries.append(self.search_entry(series, series.begin.season, series.begin.number, task))
elif latest:
start_at_ep = 1
episodes_this_season = (task.session.query(Episode).
filter(Episode.series_id == series.id).
filter(Episode.season == season))
if series.identified_by == 'sequence':
# Don't look for missing too far back with sequence shows
start_at_ep = max(latest.number - 10, 1)
episodes_this_season = episodes_this_season.filter(Episode.number >= start_at_ep)
latest_ep_this_season = episodes_this_season.order_by(desc(Episode.number)).first()
downloaded_this_season = (episodes_this_season.join(Episode.releases).
filter(Release.downloaded == True).all())
# Calculate the episodes we still need to get from this season
if series.begin and series.begin.season == season:
start_at_ep = max(start_at_ep, series.begin.number)
eps_to_get = range(start_at_ep, latest_ep_this_season.number + 1)
for ep in downloaded_this_season:
try:
eps_to_get.remove(ep.number)
except ValueError:
pass
entries.extend(self.search_entry(series, season, x, task, rerun=False) for x in eps_to_get)
# If we have already downloaded the latest known episode, try the next episode
if latest_ep_this_season.releases:
entries.append(self.search_entry(series, season, latest_ep_this_season.number + 1, task))
else:
if config.get('from_start') or config.get('backfill'):
entries.append(self.search_entry(series, season, 1, task))
else:
log.verbose('Series `%s` has no history. Set begin option, or use CLI `series begin` '
'subcommand to set first episode to emit' % series.name)
break
if not config.get('backfill'):
break
return entries
def on_search_complete(self, entry, task=None, identified_by=None, **kwargs):
series = task.session.query(Series).filter(Series.name == entry['series_name']).first()
latest = get_latest_release(series)
episode = (task.session.query(Episode).join(Episode.series).
filter(Series.name == entry['series_name']).
filter(Episode.season == entry['series_season']).
filter(Episode.number == entry['series_episode']).
first())
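        # A hit for this episode (accepted now, or releases already known):
        # clear the season-advance flag and rerun to emit the next episode.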
if entry.accepted or (episode and len(episode.releases) > 0):
self.try_next_season.pop(entry['series_name'], None)
task.rerun()
elif latest and latest.season == entry['series_season']:
if identified_by != 'ep':
# Do not try next season if this is not an 'ep' show
return
if entry['series_name'] not in self.try_next_season:
self.try_next_season[entry['series_name']] = True
task.rerun()
else:
# Don't try a second time
self.try_next_season[entry['series_name']] = False
@event('plugin.register')
def register_plugin():
plugin.register(EmitSeries, 'emit_series', api_ver=2)
|
Python
| 0.000064
|
@@ -3664,21 +3664,21 @@
for
-%25d' %25 latest_
+season %25d' %25
seas
|
2a590a11f92e03d923d66cfc6e8fe5837feb0f20
|
Add a snippet: 'matplotlib/gaussian_convolution'.
|
matplotlib/gaussian_convolution/gaussian_convolution_1d.py
|
matplotlib/gaussian_convolution/gaussian_convolution_1d.py
|
Python
| 0.999972
|
@@ -0,0 +1,2439 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22Gaussian convolution 1D%22%22%22%0A%0A# Copyright (c) 2012 J%C3%A9r%C3%A9mie DECOCK (http://www.jdhp.org)%0A%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0A# THE SOFTWARE.%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0A%0ASIGMA = 0.5%0A%0Adef d_square(x, xarray):%0A    %22%22%22Computes the squared euclidean distance between x and xarray values.%22%22%22%0A%0A    #d = np.sum(np.power(x - xarray, 2), 1) # if x is a vector%0A    d = np.power(x - xarray, 2)%0A    return d%0A%0A%0Adef estimate(x, x_known, y_known, sigma):%0A    %22%22%22Estimates the value y of x, knowing x_known and y_known sets.%22%22%22%0A%0A    d = d_square(x, x_known)%0A    e = np.exp( -1. / pow(sigma, 2) * d )%0A%0A    term1 = np.sum(e * y_known)%0A    term2 = np.sum(e) # to normalize %22term1%22%0A%0A    y_hat = term1 / term2%0A%0A    #return y_hat%0A    return y_hat, term1, term2 # also returns term1 and term2 so they can be inspected%0A%0A%0Adef main():%0A    %22%22%22Main function%22%22%22%0A%0A    # Known points%0A    #x_known = np.array(%5B-3., -2., -1., 0., 1., 2.%5D) # earlier trial set, superseded below%0A    x_known = np.array(%5B-3., -2., -1.1, -1., 0., 1., 2.%5D)%0A    y_known = np.array(%5B-1., -2., 1.2, 1., 4., 3., 2.%5D)%0A%0A    # Points to approximate%0A    x_test = np.arange(-5., 5., 0.05).tolist()%0A    y_test = np.array(%5Bestimate(x, x_known, y_known, SIGMA) for x in x_test%5D)%0A%0A    # Plot (y_test holds y_hat, term1 and term2, so three curves are drawn)%0A    plt.plot(x_known, y_known, 'r*')%0A    plt.plot(x_test, y_test)%0A    plt.xlabel('$x$')%0A    plt.ylabel('$%5Chat%7By%7D$')%0A    plt.show()%0A%0Aif __name__ == '__main__':%0A    main()%0A
|
|
fac031300c81c31f5d6022a65d3637fd4e62fa91
|
add blink 3 script
|
raspberry-pi/led-blink3.py
|
raspberry-pi/led-blink3.py
|
Python
| 0
|
@@ -0,0 +1,773 @@
+import RPi.GPIO as GPIO%0Aimport time%0A%0Adef blink(ledPin, onTime, offTime):%0A GPIO.output(ledPin, GPIO.HIGH)%0A time.sleep(onTime)%0A GPIO.output(ledPin, GPIO.LOW)%0A time.sleep(offTime)%0A return%0A %0Adef blinkThree(redLedPin, blueLedPin, greenLedPin):%0A blink(redLedPin, 1, 0)%0A blink(blueLedPin, 1, 0)%0A blink(greenLedPin, 1, 0)%0A %0A# tell GPIO to use the Pi's board numbering for pins%0AGPIO.setmode(GPIO.BOARD)%0A%0AredLedPin = 22%0AblueLedPin = 14%0AgreenLedPin = 12%0A%0A# set data direction of LED pin to output%0AGPIO.setup(redLedPin, GPIO.OUT)%0AGPIO.setup(blueLedPin, GPIO.OUT)%0AGPIO.setup(greenLedPin, GPIO.OUT)%0A%0A# loop until keyboard interrupt%0A%0Atry:%0A while True:%0A blinkThree(redLedPin, blueLedPin, greenLedPin)%0Aexcept KeyboardInterrupt:%0A pass%0A%0AGPIO.cleanup()
|
|
ac44a041e3e7808305b025e1087f48b7d4a9234a
|
Add script to delete Bit.ly raw results from S3
|
tools/bitly/delete_bitly_blobs.py
|
tools/bitly/delete_bitly_blobs.py
|
Python
| 0
|
@@ -0,0 +1,1751 @@
+#!/usr/bin/env python3%0A%0Aimport argparse%0Aimport boto3%0Aimport os%0Afrom typing import List%0A%0Afrom mediawords.util.log import create_logger%0A%0Al = create_logger(__name__)%0A%0A%0Adef delete_bitly_blobs(story_ids: List%5Bint%5D):%0A session = boto3.Session(profile_name='mediacloud')%0A s3 = session.resource('s3')%0A bucket = s3.Bucket('mediacloud-bitly-processing-results')%0A%0A chunk_size = 999 # up to 1000 objects to be deleted at once%0A story_ids_chunks = %5Bstory_ids%5Bx:x + chunk_size%5D for x in range(0, len(story_ids), chunk_size)%5D%0A%0A l.info('Deleting %25d Bit.ly blobs, split into %25d chunks...' %25 (len(story_ids), len(story_ids_chunks)))%0A%0A for chunk in story_ids_chunks:%0A objects_to_delete = %5B%5D%0A%0A for stories_id in chunk:%0A objects_to_delete.append(%7B'Key': 'json_blobs/%25d' %25 stories_id%7D)%0A%0A bucket.delete_objects(%0A Delete=%7B%0A 'Objects': objects_to_delete,%0A %7D%0A )%0A%0A l.info('Done deleting %25d Bit.ly blobs.' %25 len(story_ids))%0A%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser(description='Delete Bit.ly raw results from S3.',%0A formatter_class=argparse.ArgumentDefaultsHelpFormatter)%0A parser.add_argument('-i', '--input_file', type=str, required=True, help='Input file with Bit.ly story IDs.')%0A%0A args = parser.parse_args()%0A%0A if not os.path.isfile(args.input_file):%0A raise Exception('Input file %22%25s%22 does not exist.' %25 args.input_file)%0A%0A bitly_story_ids = %5B%5D%0A with open(args.input_file, 'r') as fh:%0A for line in fh:%0A line = line.rstrip(%22%5Cn%22)%0A if line:%0A line = int(line)%0A bitly_story_ids.append(line)%0A%0A delete_bitly_blobs(story_ids=bitly_story_ids)%0A
|
|
37c0257fcc5e65b67fabfd17c2bf884ad8fe03e1
|
Add migration to reset signatures
|
recipe-server/normandy/recipes/migrations/0038_remove_invalid_signatures.py
|
recipe-server/normandy/recipes/migrations/0038_remove_invalid_signatures.py
|
Python
| 0
|
@@ -0,0 +1,1202 @@
+%22%22%22%0ARemoves signatures, so they can be easily recreated during deployment.%0A%0AThis migration is intended to be used between %22eras%22 of signatures. As%0Athe serialization format of recipes changes, the signatures need to%0Aalso change. This could be handled automatically, but it is easier to%0Adeploy if we just remove everything in a migration, and allow the%0Anormal processes to regenerate the signatures.%0A%22%22%22%0A%0A# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.11 on 2017-01-27 00:03%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Adef remove_signatures(apps, schema_editor):%0A    Recipe = apps.get_model('recipes', 'Recipe')%0A    Signature = apps.get_model('recipes', 'Signature')%0A%0A    for recipe in Recipe.objects.exclude(signature=None):%0A        sig = recipe.signature%0A        recipe.signature = None%0A        recipe.save()%0A        sig.delete()%0A%0A    for sig in Signature.objects.all():%0A        sig.delete()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A    dependencies = %5B%0A        ('recipes', '0037_auto_20170113_0627'),%0A    %5D%0A%0A    operations = %5B%0A        # This functions as both a forward and a reverse migration%0A        migrations.RunPython(remove_signatures, remove_signatures),%0A    %5D%0A
|
|
f33ca28e7465a0b35d2419dd9016196f63a114d8
|
Add demo.py
|
demo.py
|
demo.py
|
Python
| 0.000002
|
@@ -0,0 +1,995 @@
+#!/usr/bin/python3%0A# -*- encoding: utf-8 -*-%0A%0Afrom indexes import *%0Aimport global_data%0A%0A%0Adef main(stock='000001', date=global_data.NEWEST_TRADE_DATE, p_MA=5, p_MACD=(12,26,9),%0A p_RSI=6, p_KDJ=(9,3), p_MTM=(12,6)):%0A %22%22%22%0A Example%0A date: str, '2017-08-18'%0A p_MA: int, 5%0A p_MACD: tuple, (12,26,9)%0A p_RSI: int, 6%0A p_KDJ: tuple, (9,3)%0A p_MTM: tuple, (12,6)%0A %22%22%22%0A%0A rsi = RSI(stock)%0A ma = MA(stock)%0A macd = MACD(stock)%0A mtm = MTM(stock)%0A kdj = KDJ(stock)%0A%0A global_data.add_data(stock) # download data to database%0A%0A print(stock, date)%0A print('MA%25s' %25 str(p_MA), ma.get_ma(date, p_MA))%0A print('MACD%25s' %25 str(p_MACD), macd.get_macd(date, *p_MACD))%0A print('RSI%25s' %25 str(p_RSI), rsi.get_rsi(date, p_RSI))%0A print('KDJ%25s' %25 str(p_KDJ), kdj.get_kdj(date, *p_KDJ))%0A print('MTM%25s' %25 str(p_MTM), mtm.get_mtm(date, *p_MTM))%0A%0A global_data.save_database(global_data.DB_FILE)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
c19c1838802ef8b4429df605e085176aef3bb45f
|
Create 04_peery_beams.py
|
examples/01-advanced/04_peery_beams.py
|
examples/01-advanced/04_peery_beams.py
|
Python
| 0
|
@@ -0,0 +1,1788 @@
+r%22%22%22%0A.. _ref_ex_peery_beams:%0A%0ASymmetric and Unsymmetric Beams in Complex Bending%0A--------------------------------------------------%0A%0ACalculate section properties of two different beams%0Agiven in examples from 'Aircraft Structures,' by Peery.%0AThese cases have known results, and the output from%0ASectionProperties can be compared for accuracy. These%0Aexamples represent a more rigorous 'proof' against a%0A'real' problem. Only results that have values in the%0Areference material are tested here.%0A%0ABibTeX Entry for reference:%0A%0A    @Book%7BPeery,%0A        title = %7BAircraft Structures%7D,%0A        author = %7BDavid J. Peery%7D,%0A        organization = %7BPennsylvania State University%7D,%0A        publisher = %7BMcGraw-Hill Book Company%7D,%0A        year = %7B1950%7D,%0A        edition = %7BFirst%7D,%0A        ISBN = %7B978-0486485805%7D%0A    %7D%0A%0A%22%22%22%0A%0A# sphinx_gallery_thumbnail_number = 1%0A%0Afrom sectionproperties.pre.library import nastran_sections%0Afrom sectionproperties.analysis.section import Section%0A%0A# %25%25%0A# Example 1 in Sec. 6.2 (Symmetric Bending)%0A# This is a symmetric I-section with no lateral supports,%0A# undergoing pure unidirectional cantilever bending.%0A# Note that units here are **inches**, to match the text.%0A#%0A# We'll use a very coarse mesh here, to show a conservative%0A# comparison for accuracy. Theoretically, with more%0A# discretization, we would capture the real results more accurately.%0Ageometry = nastran_sections.nastran_i(6, 3, 3, 1, 1, 1)%0Ageometry = geometry.shift_section(x_offset=0, y_offset=-3)%0Ageometry = geometry.create_mesh(mesh_sizes=%5B0.25%5D)%0Asection = Section(geometry)%0Asection.plot_mesh()%0A%0A# %25%25%0A# Perform a geometric analysis on the section, and plot properties.%0A# We don't need warping or plastic analysis for these simple checks.%0Asection.calculate_geometric_properties()%0Asection.plot_centroids()%0A%0A
|
|
c29bf2b9a87fdeb58d78a3ef3219292742371314
|
test script for BEL2 stmt checks
|
bin/test_bel2_validation.py
|
bin/test_bel2_validation.py
|
Python
| 0
|
@@ -0,0 +1,1630 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22%0AUsage: program.py%0A%0AValidates example BEL 2.0 statements against a locally running BEL API.%0A%22%22%22%0A%0A%0Aimport requests%0Aimport json%0A%0Abase_url = %22http://localhost:9292%22%0A%0Afiles = %5B'bel2.0-example-statements.bel', 'bel2_document_examples.bel'%5D%0A%0A%0Adef send_request(bel):%0A    # Issue #134%0A    # GET http://localhost:9292/api/expressions/rxn(reactants(a(CHEBI:superoxide)),products(a(CHEBI:%2522hydrogen%2520peroxide%2522),%2520a(CHEBI:%2520%2522oxygen%2522))/validation%0A%0A    try:%0A        response = requests.get(%0A            url=f%22%7Bbase_url%7D/api/expressions/%7Bbel%7D/validation%22,%0A        )%0A        try:%0A            r = response.json()%0A        except Exception:%0A            r = None%0A%0A        # print(f%22Status %7Bresponse.status_code%7D Response: %7Br%7D%22)%0A        return (response.status_code, r)%0A%0A    except requests.exceptions.RequestException as e:%0A        # 'response' is unbound when the request itself fails, so report the exception.%0A        print(f%22Request failed for %7Bbel%7D: %7Be%7D%22)%0A        return (None, None)%0A%0A%0Adef run_examples():%0A    results = %5B%5D%0A    cnt = error_cnt = success_cnt = 0%0A    for fn in files:%0A        with open(fn, 'r') as f:%0A            for bel in f:%0A                cnt += 1%0A                bel = bel.strip()%0A%0A                print(f%22Running bel: %7Bbel%7D%22)%0A%0A                (status, msg) = send_request(bel)%0A                if status != 200:%0A                    error_cnt += 1%0A                    results.append((status, bel, msg))%0A                else:%0A                    success_cnt += 1%0A%0A    print(f%22Total: %7Bcnt%7D Success: %7Bsuccess_cnt%7D Errors: %7Berror_cnt%7D%22)%0A    with open('test_results.json', 'w') as f:%0A        json.dump(results, f, indent=4)%0A%0A%0Adef main():%0A    run_examples()%0A%0A%0Aif __name__ == '__main__':%0A    main()%0A%0A
|
|
cec17c0aaf794cdd108713642e0662c7eac7a020
|
Create cesarEncription.py
|
assigment1/cesarEncription.py
|
assigment1/cesarEncription.py
|
Python
| 0.000005
|
@@ -0,0 +1,2398 @@
+%0A# checkInput: returns False if the input text%0A# contains anything other than letters.%0A%0Adef checkInput(userInput):%0A%09return userInput.isalpha()%0A%0A# checkNumber: returns False if the input text%0A# is not made up of digits.%0A%0Adef checkNumber(userInput):%0A%09return userInput.isdigit()%0A%0Adef readMessage():%0A%09valid = False;%0A%09message = '';%0A%09while(not valid):%0A%09%09# Iterate until the input message contains letters only (no digits or symbols)%0A%09%09message = raw_input('Introduce your message: ');%0A%09%09valid = checkInput(message);%0A%0A%09return message;%0A%0A# Now, let the user input the desired shift number;%0A# iterates until the input number is correct.%0A%0Adef readNumber():%0A%09valid = False;%0A%09number = -1;%0A%09while(not valid):%0A%09%09number = raw_input('Introduce your shift Number: ');%0A%09%09valid = checkNumber(number);%0A%09%09if (not valid):%0A%09%09%09print 'Come on! this is not a number :P';%0A%0A%09return number;%0A%0A# Returns a dict with a message and a number.%0A%0Adef userInput():%0A%09message = readMessage();%0A%09number = readNumber();%0A%0A%09# Compose a dict and return it to the caller%0A%09userInput = %7B'message': message, 'number': number%7D;%0A%09return userInput;%0A%0Adef simpleEncription(message, shiftNumber):%0A%0A%09if (type(shiftNumber) is not int):%0A%09%09shiftNumber = int(shiftNumber); # Int casting of shiftNumber when it arrives as a string.%0A%0A%09encriptedMsg = list();%0A%09shiftNumber = shiftNumber %25 26; # The English alphabet has 26 letters; reduce larger shifts with the modulus.%0A%0A%09# Shift each character within its own case, wrapping around the end of%0A%09# the alphabet with modular arithmetic.%0A%0A%09for char in message:%0A%0A%09%09if (char.isupper()):%0A%09%09%09base = ord('A');%0A%09%09else:%0A%09%09%09base = ord('a');%0A%0A%09%09encriptedChar = chr((ord(char) - base + shiftNumber) %25 26 + base);%0A%09%09encriptedMsg.append(encriptedChar);%0A%0A%09return encriptedMsg;%0A%0A%0AuserInput = userInput(); # Returns a valid message and shift number from the user in a dict.%0Amessage = userInput%5B'message'%5D; # Local variable for the message input by the user%0Anumber = userInput%5B'number'%5D; # Local variable for the number input by the user%0A%0AencriptedMsgList = simpleEncription(message, number);%0A%0Aprint encriptedMsgList%0A%0AoutputMsg = '';%0Afor char in encriptedMsgList:%0A%09outputMsg += char;%0A%0Aprint outputMsg%0A
|
|
fc35e902e4a41176e1860b5a42fb5ce51c3042f7
|
Add scaleioutil
|
scaleiopy/scaleioutil.py
|
scaleiopy/scaleioutil.py
|
Python
| 0
|
@@ -0,0 +1,1499 @@
+import logging%0A%0A%0Aclass ScaleIOLogger: %0A instance = None%0A%0A @classmethod%0A def get(cls):%0A if cls.instance is None:%0A cls.instance = cls()%0A return cls.instance%0A%0A def __init__(self):%0A # How to use:%0A # loggerInstance = ScaleIOLogger.get()%0A # logger = loggerInstance.getLogger('DEBUG')%0A logging.basicConfig(format='%25(asctime)s: %25(levelname)s %25(module)s:%25(funcName)s %7C %25(message)s',level=self._get_log_level(debugLevel))%0A self.logger = logging.getLogger(__name__)%0A self.logger.debug(%22Logger initialized!%22)%0A %0A def getLogger(self, loglevel):%0A return _get_log_level(loglevel)%0A %0A @staticmethod%0A def _get_log_level(level):%0A %22%22%22%0A small static method to get logging level%0A :param str level: string of the level e.g. %22INFO%22%0A :returns logging.%3CLEVEL%3E: appropriate debug level%0A %22%22%22%0A # default to DEBUG%0A if level is None or level == %22DEBUG%22:%0A return logging.DEBUG%0A%0A level = level.upper()%0A # Make debugging configurable%0A if level == %22INFO%22:%0A return logging.INFO%0A elif level == %22WARNING%22:%0A return logging.WARNING%0A elif level == %22CRITICAL%22:%0A return logging.CRITICAL%0A elif level == %22ERROR%22:%0A return logging.ERROR%0A elif level == %22FATAL%22:%0A return logging.FATAL%0A else:%0A raise Exception(%22UnknownLogLevelException: enter a valid log level%22)%0A
|
|
156d635653e1ec93fbaff7ee7c872a5f6035f9a8
|
Add new-command.py to standard_commands
|
extra/standard_commands/new-command.py
|
extra/standard_commands/new-command.py
|
Python
| 0.000056
|
@@ -0,0 +1,1680 @@
+%22%22%22Finds a directory or file inside the current project.%22%22%22%0Afrom . import DodoCommand%0Afrom dodo_commands.framework.config import CommandPath%0Aimport os%0Aimport sys%0A%0Ascript_src = %22%22%22# noqa%0Afrom dodo_commands.default_commands.standard_commands import DodoCommand%0A%0A%0Aclass Command(DodoCommand): # noqa%0A help = %22%22%0A decorators = %5B%5D%0A%0A def add_arguments_imp(self, parser): # noqa%0A parser.add_argument('foo')%0A parser.add_argument(%0A '--bar',%0A required=True,%0A help=''%0A )%0A%0A def handle_imp(self, foo, bar, **kwargs): # noqa%0A pass%0A%22%22%22%0A%0Aclass Command(DodoCommand): # noqa%0A help = %22Creates a new Dodo command.%22%0A%0A def add_arguments_imp(self, parser): # noqa%0A %22%22%22%0A Entry point for subclassed commands to add custom arguments.%0A %22%22%22%0A parser.add_argument('name')%0A parser.add_argument(%0A '--next-to',%0A required=True,%0A help='Create the new command at the location of this command'%0A )%0A%0A def handle_imp(self, name, next_to, **kwargs): # noqa%0A dest_path = None%0A command_path = CommandPath(self.get_config(%22/ROOT/project_dir%22))%0A for item in command_path.items:%0A script_path = os.path.join(%0A item.full_path, next_to + %22.py%22%0A )%0A if os.path.exists(script_path):%0A dest_path = os.path.join(%0A item.full_path, name + %22.py%22%0A )%0A%0A if not dest_path:%0A sys.stderr.write(%22Script not found: %25s%5Cn%22 %25 next_to)%0A return%0A%0A with open(dest_path, %22w%22) as f:%0A f.write(script_src)%0A%0A print(dest_path)%0A
|
|
421bd355cb3d471ac61d608c9e39cc821b06089f
|
Create analyzefiles.py
|
bin/interpret/analyzefiles.py
|
bin/interpret/analyzefiles.py
|
Python
| 0.000001
|
@@ -0,0 +1,18 @@
+#!/usr/bin/python%0A
|
|
4ec3cb1ddb08e14ba3c2ba169b4c5c47c779740a
|
Add new package: unuran (#8397)
|
var/spack/repos/builtin/packages/unuran/package.py
|
var/spack/repos/builtin/packages/unuran/package.py
|
Python
| 0
|
@@ -0,0 +1,2432 @@
+##############################################################################%0A# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass Unuran(AutotoolsPackage):%0A %22%22%22Universal Non-Uniform Random number generator.%22%22%22%0A%0A homepage = %22http://statmath.wu.ac.at/unuran%22%0A url = %22http://statmath.wu.ac.at/unuran/unuran-1.8.1.tar.gz%22%0A%0A version('1.8.1', 'a5885baab53a2608c1d85517bf5d06a5')%0A%0A variant('shared', default=True,%0A description=%22Enable the build of shared libraries%22)%0A variant('rngstreams', default=True,%0A description=%22Use RNGSTREAM library for uniform random generation%22)%0A variant('gsl', default=False,%0A description=%22Use random number generators from GNU Scientific Library%22)%0A%0A depends_on('gsl', when=%22+gsl%22)%0A depends_on('rngstreams', when=%22+rngstreams%22)%0A%0A def configure_args(self):%0A%0A spec = self.spec%0A%0A args = %5B%0A '--%25s-shared' %25 ('enable' if '+shared' in spec else 'disable'),%0A '--with-urgn-default=%25s' %25 (%0A 'rngstream' if '+rngstreams' in spec else 'builtin'),%0A '--%25s-urng-gsl' %25 (%0A 'with' if '+gsl' in spec else 'without'),%0A '--%25s-urng-rngstreams' %25 (%0A 'with' if '+rngstreams' in spec else 'without')%0A %5D%0A%0A return args%0A
|
|
0c8ebb64bb138da46623449d37078be39432ec54
|
Simplify if plugin validator by using new validator features.
|
flexget/plugins/filter/if_condition.py
|
flexget/plugins/filter/if_condition.py
|
import logging
import re
import datetime
from copy import copy

from flexget.feed import Feed
from flexget.plugin import register_plugin, get_plugins_by_phase, get_plugin_by_name, priority

log = logging.getLogger('if')


def safer_eval(statement, locals):
    """A safer eval function. Does not allow __ or try statements, only includes certain 'safe' builtins."""
    allowed_builtins = ['True', 'False', 'str', 'unicode', 'int', 'float', 'len', 'any', 'all']
    for name in allowed_builtins:
        locals[name] = globals()['__builtins__'].get(name)
    if re.search(r'__|try\s*:', statement):
        raise ValueError('\'__\' or try blocks not allowed in if statements.')
    return eval(statement, {'__builtins__': None}, locals)


class FilterIf(object):
    """Can run actions on entries that satisfy a given condition.

    Actions include accept, reject, and fail, as well as the ability to run other filter plugins on the entries."""

    def validator(self):
        from flexget import validator
        root = validator.factory('list')
        key_validator = validator.factory('regexp_match',
                                          message='If statements cannot contain \'__\' or \'try\' statements')
        key_validator.reject(r'.*?(__|try\s*:)')
        key_validator.accept('.')
        action = root.accept('dict').accept_valid_keys('root', key_validator=key_validator)
        action.accept('choice').accept_choices(['accept', 'reject', 'fail'])
        filter_action = action.accept('dict')
        # Get a list of apiv2 input plugins, make sure to exclude self
        valid_filters = [plugin for plugin in get_plugins_by_phase('filter')
                         if plugin.api_ver > 1 and plugin.name != 'if']
        # Build a dict validator that accepts the available filter plugins and their settings
        for plugin in valid_filters:
            if hasattr(plugin.instance, 'validator'):
                validator = plugin.instance.validator()
                if validator.name == 'root':
                    # If a root validator is returned, grab the list of child validators
                    filter_action.valid[plugin.name] = validator.valid
                else:
                    filter_action.valid[plugin.name] = [plugin.instance.validator()]
            else:
                filter_action.valid[plugin.name] = [validator.factory('any')]
        return root

    @priority(80)
    def on_feed_filter(self, feed, config):
        entry_actions = {
            'accept': feed.accept,
            'reject': feed.reject,
            'fail': feed.fail}
        for entry in feed.entries:
            # Make entry fields and other utilities available in the eval namespace
            # We need our namespace to be an Entry instance for lazy loading to work
            eval_locals = copy(entry)
            eval_locals.update({'has_field': lambda f: f in entry,
                                'timedelta': datetime.timedelta,
                                'now': datetime.datetime.now()})
            for item in config:
                requirement, action = item.items()[0]
                try:
                    # Restrict eval namespace to have no globals and locals only from eval_locals
                    passed = safer_eval(requirement, eval_locals)
                except NameError, e:
                    # Extract the name that did not exist
                    missing_field = e.message.split('\'')[1]
                    log.debug('%s does not contain the field %s' % (entry['title'], missing_field))
                except Exception, e:
                    log.error('Error occurred in if statement: %r' % e)
                else:
                    if passed:
                        log.debug('%s matched requirement %s' % (entry['title'], requirement))
                        if isinstance(action, basestring):
                            # Simple entry action (accept, reject or fail) was specified as a string
                            entry_actions[action](entry, 'Matched requirement: %s' % requirement)
                        else:
                            # Other filters were specified to run on this entry
                            fake_feed = Feed(feed.manager, feed.name, feed.config)
                            fake_feed.session = feed.session
                            fake_feed.entries = [entry]
                            try:
                                for filter_name, filter_config in action.iteritems():
                                    filter = get_plugin_by_name(filter_name)
                                    method = filter.phase_handlers['filter']
                                    method(fake_feed, filter_config)
                            except Exception:
                                raise
                            else:
                                # Populate changes from the fake feed to the real one
                                for e in fake_feed.accepted:
                                    feed.accept(e, e.get('reason'))
                                for e in fake_feed.rejected:
                                    feed.reject(e, e.get('reason'))
                                for e in fake_feed.failed:
                                    feed.fail(e, e.get('reason'))


register_plugin(FilterIf, 'if', api_ver=2)
|
Python
| 0.999993
|
@@ -1549,231 +1549,8 @@
')%0D%0A
- # Get a list of apiv2 input plugins, make sure to exclude self%0D%0A valid_filters = %5Bplugin for plugin in get_plugins_by_phase('filter')%0D%0A if plugin.api_ver %3E 1 and plugin.name != 'if'%5D%0D%0A
@@ -1666,21 +1666,38 @@
in
-valid_
+get_plugins_by_phase('
filter
-s
+')
:%0D%0A
@@ -1714,340 +1714,95 @@
if
-hasattr(plugin.instance, 'validator'):%0D%0A validator = plugin.instance.validator()%0D%0A if validator.name == 'root':%0D%0A # If a root validator is returned, grab the list of child validators%0D%0A filter_action.valid%5Bplugin.name%5D = validator.valid%0D%0A else:%0D%0A
+plugin.api_ver %3E 1 and hasattr(plugin.instance, 'validator') and plugin.name != 'if':%0D%0A
@@ -1827,38 +1827,23 @@
_action.
-valid%5Bplugin.name%5D = %5B
+accept(
plugin.i
@@ -1863,109 +1863,26 @@
ator
-()%5D%0D%0A else:%0D%0A filter_action.valid%5Bplugin.name%5D = %5Bvalidator.factory('any')%5D
+, key=plugin.name)
%0D%0A
|
ef62d0cca3f9f28cef6891b87da7b3d9e0ade953
|
Add stub file
|
2018/python/2018_02.py
|
2018/python/2018_02.py
|
Python
| 0
|
@@ -0,0 +1,187 @@
+%22%22%22Advent of Code 2018 Day 2: Inventory Management System%22%22%22%0A%0Aimport aoc_common%0Aimport pytest%0A%0ADAY = 2%0A%0A%0A%0A%0Aif __name__ == '__main__':%0A puzzle_input = aoc_common.load_puzzle_input(DAY)%0A
|
|
d5b1dd851b87542ff215bc74f74e3b2e76fc5894
|
create traditional box score
|
nba_data/data/traditional_box_score.py
|
nba_data/data/traditional_box_score.py
|
Python
| 0.00048
|
@@ -0,0 +1,1043 @@
+class TraditionalBoxScore:%0A def __init__(self, seconds_played, field_goals_made, field_goal_attempted,%0A three_point_field_goals_made, three_point_field_goal_attempted,%0A free_throws_made, free_throws_attempted, offensive_rebounds, defensive_rebounds, assists,%0A steals, blocks, turnovers, personal_fouls):%0A self.seconds_played = seconds_played%0A self.field_goals_made = field_goals_made%0A self.field_goal_attempted = field_goal_attempted%0A self.three_point_field_goals_made = three_point_field_goals_made%0A self.three_point_field_goal_attempted = three_point_field_goal_attempted%0A self.free_throws_made = free_throws_made%0A self.free_throws_attempted = free_throws_attempted%0A self.offensive_rebounds = offensive_rebounds%0A self.defensive_rebounds = defensive_rebounds%0A self.assists = assists%0A self.steals = steals%0A self.blocks = blocks%0A self.turnovers = turnovers%0A self.personal_fouls = personal_fouls%0A
|
|
c87f42579826cf236953bc955d15a9cc98c67d05
|
Add Migration File this time.
|
applications/migrations/0029_application_proposed_development_description.py
|
applications/migrations/0029_application_proposed_development_description.py
|
Python
| 0
|
@@ -0,0 +1,493 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.6 on 2017-03-30 05:56%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('applications', '0028_auto_20170329_1445'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='application',%0A name='proposed_development_description',%0A field=models.TextField(blank=True, null=True),%0A ),%0A %5D%0A
|
|
609e143589d89b5be167914b70de99658776745f
|
add redirection test
|
ci/new_tsqa/tests/test_redirection.py
|
ci/new_tsqa/tests/test_redirection.py
|
Python
| 0.000001
|
@@ -0,0 +1,1864 @@
+# Licensed to the Apache Software Foundation (ASF) under one%0A# or more contributor license agreements. See the NOTICE file%0A# distributed with this work for additional information%0A# regarding copyright ownership. The ASF licenses this file%0A# to you under the Apache License, Version 2.0 (the%0A# %22License%22); you may not use this file except in compliance%0A# with the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Aimport requests%0Aimport logging%0A%0Aimport helpers%0A%0Aimport tsqa.test_cases%0Aimport tsqa.utils%0Aimport tsqa.endpoint%0A%0Alog = logging.getLogger(__name__)%0A%0Aclass TestRedirection(helpers.EnvironmentCase):%0A @classmethod%0A def setUpEnv(cls, env):%0A cls.configs%5B'records.config'%5D%5B'CONFIG'%5D.update(%7B%0A 'proxy.config.http.redirection_enabled': 1,%0A 'proxy.config.http.number_of_redirections': 10%0A %7D)%0A cls.configs%5B'remap.config'%5D.add_line('map / http://httpbin.org');%0A%0A def test_redirection(self):%0A server_ports = self.configs%5B'records.config'%5D%5B'CONFIG'%5D%5B'proxy.config.http.server_ports'%5D%0A%0A # By default Requests will perform location redirection%0A # Disable redirection handling with the allow_redirects parameter%0A r = requests.get('http://127.0.0.1:%7B0%7D/redirect/9'.format(server_ports), allow_redirects=False)%0A self.assertEqual(r.status_code, 200)%0A%0A r = requests.get('http://127.0.0.1:%7B0%7D/redirect/10'.format(server_ports), allow_redirects=False)%0A self.assertEqual(r.status_code, 302)%0A
|
|
2d8b0266dc1bf0eadc2737d35844f5fb45bedd12
|
Add functional tests for loading by name
|
nose2/tests/functional/test_loading.py
|
nose2/tests/functional/test_loading.py
|
Python
| 0
|
@@ -0,0 +1,2923 @@
+%22%22%22%0Apkg1%0Apkg1.test%0Apkg1.test.test_things%0Apkg1.test.test_things.test_func%0Apkg1.test.test_things.test_gen%0Apkg1.test.test_things.test_gen:3%0Apkg1.test.test_things.SomeTests%0Apkg1.test.test_things.SomeTests.test_ok%0A%0A# generator method%0A# generator method index%0A%0A# param func%0A# param func index%0A# param method%0A# param method index%0A%0A%22%22%22%0Afrom nose2.tests._common import FunctionalTestCase%0A%0A%0Aclass TestLoadTestsFromNames(FunctionalTestCase):%0A def test_module_name(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'Ran 16 tests' in stderr, stderr%0A%0A def test_function_name(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.test_func')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'test_func' in stderr%0A assert 'Ran 1 test' in stderr%0A assert 'OK' in stderr%0A%0A def test_generator_function_name(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.test_gen')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'test_gen' in stderr%0A assert 'Ran 5 tests' in stderr%0A%0A def test_generator_function_index(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.test_gen:3')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'test_gen' in stderr%0A assert 'Ran 1 test' in stderr%0A%0A def test_generator_function_index_1_based(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.test_gen:1')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'test_gen' in stderr%0A assert 'Ran 1 test' in stderr%0A assert 'OK' in stderr%0A%0A def test_testcase_name(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.SomeTests')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'SomeTests' in stderr, stderr%0A assert 'Ran 4 tests' in stderr, stderr%0A%0A def test_testcase_method(self):%0A proc = self.runIn(%0A 'scenario/tests_in_package',%0A '-v',%0A 'pkg1.test.test_things.SomeTests.test_ok')%0A stdout, stderr = proc.communicate()%0A self.assertEqual(proc.poll(), 0, stderr)%0A assert 'SomeTests' in stderr, stderr%0A assert 'Ran 1 test' in stderr, stderr%0A assert 'OK' in stderr, stderr%0A
|
|
0a610a44f0d20170ba9c3e6f9ec4eafaac937be1
|
Add unit test for Pattern filterer.
|
test/unit/filterer/test_pattern.py
|
test/unit/filterer/test_pattern.py
|
Python
| 0
|
@@ -0,0 +1,1786 @@
+# :coding: utf-8%0A# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips%0A# :license: See LICENSE.txt.%0A%0Aimport pytest%0A%0Afrom bark.log import Log%0Afrom bark.filterer.pattern import Pattern%0A%0A%0Adef test_missing_key_passes():%0A '''Test log record with missing key passes.'''%0A log = Log()%0A filterer = Pattern('bark%5C.test%5C..*')%0A assert filterer.filter(log) is False%0A%0A%0Adef test_non_string_key_fails():%0A '''Test log record with non-string key fails.'''%0A log = Log(name=None)%0A filterer = Pattern('bark%5C.test%5C..*')%0A assert filterer.filter(log) is True%0A%0A%0Adef test_include_mode():%0A '''Test only logs with matching value pass when mode is INCLUDE.'''%0A log = Log(name='bark.test.one')%0A filterer = Pattern('bark%5C.test%5C..*', mode=Pattern.INCLUDE)%0A assert filterer.filter(log) is False%0A%0A log = Log(name='bark.other.one')%0A assert filterer.filter(log) is True%0A%0A%0Adef test_exclude_mode():%0A '''Test only logs with matching value fail when mode is EXCLUDE.'''%0A log = Log(name='bark.test.one')%0A filterer = Pattern('bark%5C.test%5C..*', mode=Pattern.EXCLUDE)%0A assert filterer.filter(log) is True%0A%0A log = Log(name='bark.other.one')%0A assert filterer.filter(log) is False%0A%0A%0Adef test_different_key():%0A '''Test using key other than name.'''%0A log = Log()%0A filterer = Pattern('A message', key='message')%0A assert filterer.filter(log) is False%0A%0A log = Log(message='A message')%0A filterer = Pattern('A message', key='message')%0A assert filterer.filter(log) is False%0A%0A log = Log(message='Another message')%0A filterer = Pattern('A message', key='message')%0A assert filterer.filter(log) is True%0A%0A log = Log(message='A message')%0A filterer = Pattern('A message', key='message', mode=Pattern.EXCLUDE)%0A assert filterer.filter(log) is True%0A%0A
|
|
ef65c5eefdcbb21b83504710e3131affbeb88c88
|
Create map-reduce-advanced-count-number-of-friends.py
|
databases/nosql_xml_mapreduce/map-reduce-advanced-count-number-of-friends.py
|
databases/nosql_xml_mapreduce/map-reduce-advanced-count-number-of-friends.py
|
Python
| 0.000007
|
@@ -0,0 +1,1107 @@
+import sys%0Afrom collections import OrderedDict%0A%0Aclass MapReduce:%0A def __init__(self):%0A self.intermediate = OrderedDict()%0A self.result = %5B%5D%0A%0A%0A def emitIntermediate(self, key, value):%0A self.intermediate.setdefault(key, %5B%5D)%0A self.intermediate%5Bkey%5D.append(value)%0A%0A def emit(self, value):%0A self.result.append(value)%0A%0A def execute(self, data, mapper, reducer):%0A for record in data:%0A mapper(record)%0A%0A for key in self.intermediate:%0A reducer(key, self.intermediate%5Bkey%5D)%0A%0A self.result.sort()%0A for item in self.result:%0A print %22%7B%5C%22key%5C%22:%5C%22%22+item%5B0%5D+%22%5C%22,%5C%22value%5C%22:%5C%22%22 + str(item%5B1%5D) + %22%5C%22%7D%22%0A%0A%0AmapReducer = MapReduce()%0A%0Adef mapper(record):%0A split = record.split()%0A%0A mapReducer.emitIntermediate(split%5B0%5D, split%5B1%5D)%0A mapReducer.emitIntermediate(split%5B1%5D, split%5B0%5D)%0A%0Adef reducer(key, list_of_values):%0A mapReducer.emit((key, len(list_of_values)))%0A%0Aif __name__ == '__main__':%0A inputData = %5B%5D%0A for line in sys.stdin:%0A inputData.append(line)%0A%0A mapReducer.execute(inputData, mapper, reducer)%0A
|
|
517cb7c66f28e977bf44b7013846f50af8f673fb
|
Create QiClient.py
|
Basic/Python/QiPy/Python2/QiClient.py
|
Basic/Python/QiPy/Python2/QiClient.py
|
Python
| 0
|
@@ -0,0 +1 @@
+%0A
|
|
1c1967e5a1e941ffa6a8f32d35269f333644cd98
|
Create 1.py
|
solutions/1.py
|
solutions/1.py
|
Python
| 0.000001
|
@@ -0,0 +1,826 @@
+CODE = %7B'A': '.-', 'B': '-...', 'C': '-.-.',%0A 'D': '-..', 'E': '.', 'F': '..-.',%0A 'G': '--.', 'H': '....', 'I': '..',%0A 'J': '.---', 'K': '-.-', 'L': '.-..',%0A 'M': '--', 'N': '-.', 'O': '---',%0A 'P': '.--.', 'Q': '--.-', 'R': '.-.',%0A 'S': '...', 'T': '-', 'U': '..-',%0A 'V': '...-', 'W': '.--', 'X': '-..-',%0A 'Y': '-.--', 'Z': '--..',%0A%0A '0': '-----', '1': '.----', '2': '..---',%0A '3': '...--', '4': '....-', '5': '.....',%0A '6': '-....', '7': '--...', '8': '---..',%0A '9': '----.'%0A %7D%0A%0A%0Adef main():%0A%0A msg = raw_input('MESSAGE: ')%0A%0A for char in msg:%0A if char == ' ' :%0A print ' '%0A else :%0A print CODE%5Bchar.upper()%5D,%0A raw_input(%22When done, press %5BENTER%5D%22)%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
4a7b60d47a20084867015490cc52f3c5967b979f
|
add test file
|
AFQ/utils/tests/test_conversions.py
|
AFQ/utils/tests/test_conversions.py
|
Python
| 0.000001
|
@@ -0,0 +1,611 @@
+import numpy as np%0Aimport numpy.testing as npt%0A%0Aimport AFQ.data as afd%0Afrom AFQ.utils.conversion import matlab_tractography, matlab_mori_groups%0A%0Aimport os%0A%0Adef test_matlab_tractography():%0A sft = matlab_tractography(%0A %22AFQ/tests/data/WholeBrainFG_test.mat%22,%0A afd.read_mni_template())%0A npt.assert_equal(len(sft.streamlines), 2)%0A%0Adef test_matlab_mori_groups():%0A fiber_groups = matlab_mori_groups(%0A %22AFQ/tests/data/MoriGroups_Test.mat%22,%0A afd.read_mni_template())%0A npt.assert_equal(len(fiber_groups.keys()), 20)%0A npt.assert_equal(len(fiber_groups%5B'CST_R'%5D.streamlines), 2)%0A
|
|
c64a687f738cefd6f9461f487e76a3920d0f652c
|
Add new example
|
examples/consume_queue_until_empty.py
|
examples/consume_queue_until_empty.py
|
Python
| 0.000004
|
@@ -0,0 +1,557 @@
+import logging%0A%0Afrom amqpstorm import Connection%0A%0Alogging.basicConfig(level=logging.DEBUG)%0A%0A%0Adef consume_until_queue_is_empty():%0A with Connection('127.0.0.1', 'guest', 'guest') as connection:%0A with connection.channel() as channel:%0A while True:%0A message = channel.basic.get('simple_queue')%0A if not message:%0A print('Queue is empty')%0A break%0A print(message.body)%0A message.ack()%0A%0A%0Aif __name__ == '__main__':%0A consume_until_queue_is_empty()%0A
|
|
f09ee3772d6e15a104af284ed6864005cf8450ef
|
Add example from listing 11.4
|
ch11/radix_sort8.py
|
ch11/radix_sort8.py
|
Python
| 0
|
@@ -0,0 +1,2165 @@
+%22%22%22%0AListing 11.4: An eight-element radix sort%0A%22%22%22%0A%0Afrom io import open%0Aimport numpy as np%0Aimport pyopencl as cl%0Aimport utility%0A%0ANUM_SHORTS = 8%0A%0Akernel_src = '''%0A__kernel void radix_sort8(__global ushort8 *global_data) %7B%0A%0A typedef union %7B%0A ushort8 vec;%0A ushort array%5B8%5D;%0A %7D vec_array;%0A%0A uint one_count, zero_count;%0A uint cmp_value = 1;%0A vec_array mask, ones, data;%0A%0A data.vec = global_data%5B0%5D;%0A%0A /* Rearrange elements according to bits */%0A for(int i=0; i%3C3; i++) %7B%0A zero_count = 0;%0A one_count = 0;%0A%0A /* Iterate through each element in the input vector */%0A for(int j = 0; j %3C 8; j++) %7B%0A if(data.array%5Bj%5D & cmp_value)%0A%0A /* Place element in ones vector */%0A ones.array%5Bone_count++%5D = data.array%5Bj%5D;%0A else %7B%0A%0A /* Increment number of elements with zero */%0A mask.array%5Bzero_count++%5D = j;%0A %7D%0A %7D%0A%0A /* Create sorted vector */%0A for(int j = zero_count; j %3C 8; j++)%0A mask.array%5Bj%5D = 8 - zero_count + j;%0A data.vec = shuffle2(data.vec, ones.vec, mask.vec);%0A cmp_value %3C%3C= 1;%0A %7D%0A global_data%5B0%5D = data.vec;%0A%7D%0A'''%0A%0A# Get device and context, create command queue and program%0Adev = utility.get_default_device()%0Acontext = cl.Context(devices=%5Bdev%5D, properties=None, dev_type=None, cache_dir=None)%0Aqueue = cl.CommandQueue(context, dev, properties=None)%0A%0A# Build program in the specified context using the kernel source code%0Aprog = cl.Program(context, kernel_src)%0Atry:%0A prog.build(options=%5B'-Werror'%5D, devices=%5Bdev%5D, cache_dir=None)%0Aexcept:%0A print('Build log:')%0A print(prog.get_build_info(dev, cl.program_build_info.LOG))%0A raise%0A%0A# Data and device buffers%0Adata = np.arange(start=0, stop=NUM_SHORTS, dtype=np.uint16)%0Anp.random.shuffle(data)%0Aprint('Input: ' + str(data))%0A%0Amf = cl.mem_flags%0Adata_buffer = cl.Buffer(context, mf.READ_WRITE %7C mf.COPY_HOST_PTR, hostbuf=data)%0A%0A# Execute kernel%0A# radix_sort8(__global ushort8 *global_data)%0Akernel = prog.radix_sort8%0Akernel.set_arg(0, data_buffer)%0Acl.enqueue_task(queue, kernel)%0Acl.enqueue_copy(queue, dest=data, src=data_buffer, is_blocking=True)%0A%0Aprint('Output: ' + str(data))%0A%0A%0A%0A%0A%0A%0A
|
|
b33303779a520a0751648d63977f78890ca11d37
|
make SCRAPY_LOG_FILE and SCRAPY_FEED_URI optional
|
scrapyd/launcher.py
|
scrapyd/launcher.py
|
import sys
from datetime import datetime
from multiprocessing import cpu_count

from twisted.internet import reactor, defer, protocol, error
from twisted.application.service import Service
from twisted.python import log

from scrapy.utils.python import stringify_dict
from scrapyd.utils import get_crawl_args

from .interfaces import IPoller, IEnvironment


class Launcher(Service):

    name = 'launcher'

    def __init__(self, config, app):
        self.processes = {}
        self.finished = []
        self.finished_to_keep = config.getint('finished_to_keep', 100)
        self.max_proc = config.getint('max_proc', 0)
        if not self.max_proc:
            self.max_proc = cpu_count() * config.getint('max_proc_per_cpu', 4)
        self.runner = config.get('runner', 'scrapyd.runner')
        self.app = app

    def startService(self):
        for slot in range(self.max_proc):
            self._wait_for_project(slot)
        log.msg("%s started: max_proc=%r, runner=%r" % (self.parent.name, \
            self.max_proc, self.runner), system="Launcher")

    def _wait_for_project(self, slot):
        poller = self.app.getComponent(IPoller)
        poller.next().addCallback(self._spawn_process, slot)

    def _spawn_process(self, message, slot):
        msg = stringify_dict(message, keys_only=False)
        project = msg['_project']
        args = [sys.executable, '-m', self.runner, 'crawl']
        args += get_crawl_args(msg)
        e = self.app.getComponent(IEnvironment)
        env = e.get_environment(msg, slot)
        env = stringify_dict(env, keys_only=False)
        pp = ScrapyProcessProtocol(slot, project, msg['_spider'], \
            msg['_job'], env)
        pp.deferred.addBoth(self._process_finished, slot)
        reactor.spawnProcess(pp, sys.executable, args=args, env=env)
        self.processes[slot] = pp

    def _process_finished(self, _, slot):
        process = self.processes.pop(slot)
        process.end_time = datetime.now()
        self.finished.append(process)
        del self.finished[:-self.finished_to_keep] # keep last 100 finished jobs
        self._wait_for_project(slot)


class ScrapyProcessProtocol(protocol.ProcessProtocol):

    def __init__(self, slot, project, spider, job, env):
        self.slot = slot
        self.pid = None
        self.project = project
        self.spider = spider
        self.job = job
        self.start_time = datetime.now()
        self.end_time = None
        self.env = env
        self.logfile = env['SCRAPY_LOG_FILE']
        self.itemsfile = env['SCRAPY_FEED_URI']
        self.deferred = defer.Deferred()

    def outReceived(self, data):
        log.msg(data.rstrip(), system="Launcher,%d/stdout" % self.pid)

    def errReceived(self, data):
        log.msg(data.rstrip(), system="Launcher,%d/stderr" % self.pid)

    def connectionMade(self):
        self.pid = self.transport.pid
        self.log("Process started: ")

    def processEnded(self, status):
        if isinstance(status.value, error.ProcessDone):
            self.log("Process finished: ")
        else:
            self.log("Process died: exitstatus=%r " % status.value.exitCode)
        self.deferred.callback(self)

    def log(self, msg):
        msg += "project=%r spider=%r job=%r pid=%r log=%r items=%r" % (self.project, \
            self.spider, self.job, self.pid, self.logfile, self.itemsfile)
        log.msg(msg, system="Launcher")
|
Python
| 0
|
@@ -2478,33 +2478,37 @@
lf.logfile = env
-%5B
+.get(
'SCRAPY_LOG_FILE
@@ -2508,17 +2508,17 @@
OG_FILE'
-%5D
+)
%0A
@@ -2538,17 +2538,21 @@
le = env
-%5B
+.get(
'SCRAPY_
@@ -2560,17 +2560,17 @@
EED_URI'
-%5D
+)
%0A
|
75a0cbbd5af597c6683b6644659780c3076b835e
|
Disable test_nonlocal_symbol unit test
|
tensorflow/python/autograph/pyct/static_analysis/activity_py3_test.py
|
tensorflow/python/autograph/pyct/static_analysis/activity_py3_test.py
|
# python3
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for activity module, that only run in Python 3."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct.static_analysis import activity_test
from tensorflow.python.autograph.pyct.static_analysis import annos
from tensorflow.python.platform import test

NodeAnno = annos.NodeAnno


class ActivityAnalyzerTest(activity_test.ActivityAnalyzerTestBase):
  """Tests which can only run in Python 3."""

  def test_nonlocal_symbol(self):
    nonlocal_a = 3
    nonlocal_b = 13

    def test_fn(c):
      nonlocal nonlocal_a
      nonlocal nonlocal_b
      nonlocal_a = nonlocal_b + c

    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('nonlocal_b', 'c'), ('nonlocal_a',))

  def test_annotated_assign(self):
    b = int

    def test_fn(c):
      a: b = c
      return a

    node, _ = self._parse_and_analyze(test_fn)
    fn_node = node
    body_scope = anno.getanno(fn_node, NodeAnno.BODY_SCOPE)
    self.assertScopeIs(body_scope, ('b', 'c', 'a'), ('a',))
    ann_assign_scope = anno.getanno(fn_node.body[0], anno.Static.SCOPE)
    self.assertScopeIs(ann_assign_scope, ('b', 'c'), ('a',))


if __name__ == '__main__':
  test.main()
|
Python
| 0.000001
|
@@ -1235,24 +1235,44 @@
in Python 3.
+6 or later versions.
%22%22%22%0A%0A def t
@@ -1293,24 +1293,153 @@
ymbol(self):
+%0A # TODO(b/137761188): Remove this skipTest once fixed.%0A self.skipTest('Annotation syntax is not recognized by Python 3.5')
%0A%0A nonloc
|
f5271083eb9f90fba51bea91126d5eb7005f7c51
|
add script to delete webhooks
|
demos/webhooks_delete.py
|
demos/webhooks_delete.py
|
Python
| 0
|
@@ -0,0 +1,509 @@
+import sys%0A%0Afrom Ziggeo import Ziggeo%0A%0Aif(len(sys.argv) %3C 4):%0A print (%22Error%5Cn%22)%0A print (%22Usage: $%3Epython webhooks_delete.py YOUR_API_TOKEN YOUR_PRIVATE_KEY WEBHOOK_URL %5Cn%22)%0A print (%22Example: $%3Epython webhooks_delete.py 1234567890abcdef 1234567890abcdef http://yoursite.com %5Cn%22)%0A sys.exit()%0A%0Aapi_token = sys.argv%5B1%5D%0Aprivate_key = sys.argv%5B2%5D%0Atarget_url = sys.argv%5B3%5D%0A%0Aziggeo = Ziggeo(api_token, private_key)%0A%0Aarguments = %7B%7D%0Aarguments%5B'target_url'%5D = target_url%0A%0Aziggeo.webhooks().delete(arguments)
|
|
bbf056f834724a263dbce5c58104db296240a49c
|
add gyp file
|
pty.gyp
|
pty.gyp
|
Python
| 0.000001
|
@@ -0,0 +1,267 @@
+%7B%0A 'targets': %5B%7B%0A 'target_name': 'pty',%0A 'type': 'loadable_module',%0A 'product_extension': 'node',%0A 'product_prefix': '',%0A 'include_dirs': %5B%0A './src'%0A %5D,%0A 'sources': %5B%0A 'src/pty.cc'%0A %5D,%0A 'libraries': %5B%0A '-lutil'%0A %5D%0A %7D%5D%0A%7D%0A
|
|
d98db37dc70a1126de371bf64e89cc4f20e03511
|
Create repr.py
|
repr.py
|
repr.py
|
Python
| 0.000001
|
@@ -0,0 +1,457 @@
+%22%22%22%0AOne useful class method to override is the built-in __repr__() method, which is short for representation; by providing a return%0Avalue in this method, we can tell Python how to represent an object of our class%0A%22%22%22%0A%0Aclass Point3D(object):%0A def __init__(self,x,y,z):%0A self.x = x%0A self.y = y%0A self.z = z%0A %0A def __repr__(self):%0A return %22(%25d, %25d, %25d)%22 %25 (self.x, self.y, self.z)%0A%0Amy_point = Point3D(1,2,3)%0A%0Aprint my_point%0A
|
|
267bf6b38481b2753721e2b5245362c5e6b033cb
|
Add test for optional host argument in RedisBuffer
|
tests/sentry/buffer/redis/tests.py
|
tests/sentry/buffer/redis/tests.py
|
# -*- coding: utf-8 -*-

from __future__ import absolute_import

import mock

from sentry.buffer.redis import RedisBuffer
from sentry.models import Group, Project
from sentry.tasks.process_buffer import process_incr
from tests.base import TestCase


class RedisBufferTest(TestCase):
    def setUp(self):
        self.buf = RedisBuffer(hosts={
            0: {'db': 9}
        })
        self.buf.conn.flushdb()

    def test_map_column_handles_foreignkeys(self):
        self.assertEquals(self.buf._map_column(Group, 'project', Project(id=1)), 1)

    def test_make_key_response(self):
        column = 'times_seen'
        filters = {'pk': 1}
        self.assertEquals(self.buf._make_key(Group, filters, column), 'sentry.group:88b48b31b5f100719c64316596b10b0f:times_seen')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.maybe_delay')
    def test_incr_delays_task(self, maybe_delay):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        maybe_delay.assert_called_once_with(process_incr, model=model, columns=columns, filters=filters)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.maybe_delay', mock.Mock())
    def test_incr_does_buffer_to_conn(self):
        model = mock.Mock()
        columns = {'times_seen': 1}
        filters = {'pk': 1}
        self.buf.incr(model, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '1')

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_not_save_empty_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.process(Group, columns, filters)
        self.assertFalse(process.called)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_save_call_with_results(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        process.assert_called_once_with(Group, {'times_seen': 2}, filters)

    @mock.patch('sentry.buffer.redis.RedisBuffer._make_key', mock.Mock(return_value='foo'))
    @mock.patch('sentry.buffer.base.Buffer.process')
    def test_process_does_clear_buffer(self, process):
        group = Group.objects.create(project=Project(id=1))
        columns = {'times_seen': 1}
        filters = {'pk': group.pk}
        self.buf.conn.set('foo', 2)
        self.buf.process(Group, columns, filters)
        self.assertEquals(self.buf.conn.get('foo'), '0')
|
Python
| 0
|
@@ -405,16 +405,197 @@
shdb()%0A%0A
+ def test_default_host_is_local(self):%0A buf = RedisBuffer()%0A self.assertEquals(len(buf.conn.hosts), 1)%0A self.assertEquals(buf.conn.hosts.host, 'localhost')%0A%0A
def
|
e7faa99d9816745338ada38d1a7d974bf3a739ae
|
Create pretty table of tie averages + function for pretty averages
|
s5v3.py
|
s5v3.py
|
Python
| 0.000004
|
@@ -0,0 +1,1541 @@
+from s5v2 import *%0Afrom prettytable import PrettyTable%0A%0Adef my_table(): # no arguments are passed in, which seems a bit weird. We're hard-coding a function that only does one thing.%0A%09x = PrettyTable(%5B'Style', 'Average Price'%5D) # setup a new pretty table list and give and give it two list items%0A%09x.add_row(%5B'Print', pretty_average(print_ties)%5D) # add_row is a built-in function of prettytable. We're going to add a row and assign it the vales of 'Print' and the average price of all print ties%0A%09x.add_row(%5B'Solid', pretty_average(solid_ties)%5D)%0A%09x.add_row(%5B'Paisley', pretty_average(paisley_ties)%5D)%0A%09x.add_row(%5B'Striped', pretty_average(striped_ties)%5D)%0A%09x.add_row(%5B'Gucci', pretty_average(gucci_ties)%5D)%0A%09print(x) # print the table%0A%0Adef pretty_average(my_number):%0A%09pretty_avg = %22$%7B:03.2f%7D%22.format(find_average(my_number)) # assign a variable pretty_avg to the average of my number and then use the format specification mini-language to add three decimal places before the decimal point and 2 after the decimal point. the FSML says that 'f' is a fixed point and displays the number as a fixed-point number. That's like floating point number, but with a fixed amount of float? As far as what kind of variable it is (string, integer, float / decimal) it's still a decimal or float, just a fixed amount of float. See I was calling this string formatting, but really it's format specification mini-language and it doesn't automatically convert the result to a string (like I originally thought). %0A%09return pretty_avg%0A%0A# my_table() # run the function
|
|
dd9c96c7b12221029b7ea1a4f9748106520bd7a6
|
add syntax checker/linter/gotype support
|
gslint.py
|
gslint.py
|
Python
| 0
|
@@ -0,0 +1,2648 @@
+import sublime, sublime_plugin%0Aimport gscommon as gs%0Aimport re, threading%0A%0ALINE_PAT = re.compile(r':(%5Cd+):(%5Cd+):%5Cs+(.+)%5Cs*$', re.MULTILINE)%0A%0Aclass GsLint(sublime_plugin.EventListener):%0A rc = 0%0A errors = %7B%7D%0A%0A def on_selection_modified(self, view):%0A sel = view.sel()%5B0%5D.begin()%0A if view.score_selector(sel, 'source.go') %3E 0:%0A line = view.rowcol(sel)%5B0%5D%0A msg = self.errors.get(view.id(), %7B%7D).get(line, '')%0A view.set_status('GsLint', ('GsLint: ' + msg) if msg else '')%0A %0A def on_modified(self, view):%0A self.rc += 1%0A%0A pos = view.sel()%5B0%5D.begin()%0A scopes = view.scope_name(pos).split()%0A if 'source.go' in scopes:%0A should_run = (%0A 'string.quoted.double.go' not in scopes and%0A 'string.quoted.single.go' not in scopes and%0A 'string.quoted.raw.go' not in scopes and%0A 'comment.line.double-slash.go' not in scopes and%0A 'comment.block.go' not in scopes%0A )%0A%0A def cb():%0A self.lint(view)%0A %0A if should_run:%0A sublime.set_timeout(cb, int(gs.setting('gslint_timeout', 500)))%0A else:%0A # we want to cleanup if e.g settings changed or we caused an error entering an excluded scope%0A sublime.set_timeout(cb, 1000)%0A %0A def on_load(self, view):%0A self.on_modified(view)%0A %0A def lint(self, view):%0A self.rc -= 1%0A%0A if self.rc == 0:%0A cmd = gs.setting('gslint_cmd', 'gotype')%0A if cmd:%0A _, err = gs.runcmd(%5Bcmd%5D, view.substr(sublime.Region(0, view.size())))%0A else:%0A err = ''%0A lines = LINE_PAT.findall(err)%0A regions = %5B%5D%0A view_id = view.id() %0A self.errors%5Bview_id%5D = %7B%7D%0A if lines:%0A for m in lines:%0A line, start, err = int(m%5B0%5D)-1, int(m%5B1%5D)-1, m%5B2%5D%0A self.errors%5Bview_id%5D%5Bline%5D = err%0A lr = view.line(view.text_point(line, start))%0A regions.append(sublime.Region(lr.begin() + start, lr.end()))%0A if regions:%0A flags = sublime.DRAW_EMPTY_AS_OVERWRITE %7C sublime.DRAW_OUTLINED%0A flags = sublime.DRAW_EMPTY_AS_OVERWRITE%0A flags = sublime.DRAW_OUTLINED%0A view.add_regions('GsLint-errors', regions, 'invalid.illegal', 'cross', flags)%0A else:%0A view.erase_regions('GsLint-errors')%0A self.on_selection_modified(view)%0A
|
|
0e681d5b5e0d23517a6f050d168e1e86de9eb074
|
Add unit test for utils.termcolor (closes #4)
|
test/t_utils/test_termcolor.py
|
test/t_utils/test_termcolor.py
|
Python
| 0
|
@@ -0,0 +1,721 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0A test.t_utils.test_termcolor%0A ~~~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0A :copyright: Copyright 2014 by the RootForum.org team, see AUTHORS.%0A :license: MIT License, see LICENSE for details.%0A%22%22%22%0Afrom unittest import TestCase%0Afrom magrathea.utils.termcolor import supports_color%0A%0A%0Aclass TestMagratheaUtilsTermColor(TestCase):%0A %22%22%22%0A Unit tests for :py:mod:%60magrathea.utils.termcolor%60%0A %22%22%22%0A%0A def test_01(self):%0A %22%22%22%0A Test Case 01:%0A Test return value of :py:func:%60~magrathea.utils.termcolor.supports_color%60.%0A%0A Test is passed if return value is of type bool.%0A %22%22%22%0A return_value = supports_color()%0A self.assertIsInstance(return_value, bool)%0A
|
|
a48c52ccb6e89edbc186e4e916b8151ff0fa232f
|
Add an admin
|
sendgrid_events/admin.py
|
sendgrid_events/admin.py
|
Python
| 0
|
@@ -0,0 +1,204 @@
+from django.contrib import admin%0A%0Afrom .models import Event%0A%0A%0Aadmin.site.register(Event, list_display=%5B%22kind%22, %22email%22, %22created_at%22%5D, list_filter=%5B%22created_at%22, %22kind%22%5D, search_fields=%5B%22email%22, %22data%22%5D)%0A
|
|
601b55aca479befbdc77ea65c4c3936c81f0e92a
|
Fix Travis CI Paths
|
tests/test_html_builder.py
|
tests/test_html_builder.py
|
import os
import sys
import unittest

try:
    sys.path.insert(0, os.path.abspath('..')) #Works for local
    from sheetmaker import html_builder
    from data import test_html_constants
except:
    from sheetmaker import html_builder #Works for Travis CI
    from data import test_html_constants


class HtmlBuilderTestCase(unittest.TestCase):
    """Tests for `html_builder.py`."""

    def test_create_empty_sheet(self):
        """Is empty sheet html created succesfully?"""
        self.title = "title"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.create_empty_sheet()
        self.assertEqual(test_html[0], test_html_constants.TEST_EMPTY_SHEET)
        self.assertEqual(test_html[1], None)

    def test_set_style(self):
        """Is color style created succesfully?"""
        self.title = "title"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.set_style(1)
        self.assertEqual(test_html[0], test_html_constants.TEST_COLOR_STYLE)
        self.assertEqual(test_html[1], "<!-- css -->")

    def test_build_columns(self):
        """Are columns created succesfully?"""
        self.title = "title"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.build_columns(3)
        self.assertEqual(test_html[0], test_html_constants.TEST_COLUMNS)
        self.assertEqual(test_html[1], "<!-- columns -->")

    def test_build_header(self):
        """Is header created succesfully?"""
        self.title = "title"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.build_header("author")
        self.assertEqual(test_html[0], test_html_constants.TEST_HEADER)
        self.assertEqual(test_html[1], "<!-- header -->")

    def test_build_footer(self):
        """Is footer created succesfully?"""
        self.title = "test"
        empty_sheet = html_builder.HtmlSheet(self.title, None, "author")
        test_html = empty_sheet.build_footer("author.png", "http://author.com", "sponsor", "http://sponsor.com")
        self.assertEqual(test_html[0], test_html_constants.TEST_FOOTER)
        self.assertEqual(test_html[1], "<!-- footer -->")

    def test_build_rows_block(self):
        """Is rows block created succesfully?"""
        self.title = "test"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.build_rows_block(1, "block title", 2, ["row1", "row2"])
        self.assertEqual(test_html[0], test_html_constants.TEST_ROWS_BLOCK)
        self.assertEqual(test_html[1], "<!-- column1 -->")

    def test_build_text_block(self):
        """Is text block created succesfully?"""
        self.title = "test"
        empty_sheet = html_builder.HtmlSheet(self.title, None)
        test_html = empty_sheet.build_text_block(2, "block title", "text text text")
        self.assertEqual(test_html[0], test_html_constants.TEST_TEXT_BLOCK)
        self.assertEqual(test_html[1], "<!-- column2 -->")

    def tearDown(self):
        """Removes extra files generated during tests

        Args:
            file_name (str): File to be removed.
        """
        try:
            #os.remove(self.test_file_name + ".html")
            pass
        except OSError as oserr:
            print(oserr)


if __name__ == '__main__':
    unittest.main()
|
Python
| 0.000003
|
@@ -188,16 +188,83 @@
except:%0A
+ sys.path.insert(0, os.path.abspath('.')) #Works for Travis CI%0A
from
@@ -294,38 +294,16 @@
_builder
- #Works for Travis CI
%0A fro
|
3a7459f0f9e171954eb1f86a9e320ef889b9d1a5
|
Tidy up and en-repo my little monitor
|
watch_emotes.py
|
watch_emotes.py
|
Python
| 0
|
@@ -0,0 +1,936 @@
+import sys%0D%0Aimport time%0D%0Aimport pprint%0D%0Aimport subprocess%0D%0Aimport requests%0D%0Asys.path.append(%22../mustard-mine%22)%0D%0Aimport config%0D%0A%0D%0Aemote_count = None%0D%0Awhile emote_count is None or emote_count == len(emotes):%0D%0A%09req = requests.get(%22https://api.twitch.tv/kraken/chat/emoticon_images?emotesets=317117,317121,317122%22, headers=%7B%0D%0A%09%09%22Client-ID%22: config.CLIENT_ID,%0D%0A%09%09%22Accept%22: %22application/vnd.twitchtv.v5+json%22,%0D%0A%09%7D)%0D%0A%09resp = req.json()%0D%0A%09emotes = %7Be%5B%22code%22%5D: e%5B%22id%22%5D for s in resp%5B%22emoticon_sets%22%5D.values() for e in s%7D%0D%0A%09resp = requests.get(%22https://api.betterttv.net/2/channels/rosuav%22).json()%0D%0A%09emotes.update(%7Be%5B%22code%22%5D: e%5B%22id%22%5D for e in resp%5B%22emotes%22%5D%7D)%0D%0A%09pprint.pprint(emotes)%0D%0A%09# Whatever number there are on the first run, that's considered %22current%22.%0D%0A%09# If it then changes, report it.%0D%0A%09if emote_count is None: emote_count = len(emotes)%0D%0A%09time.sleep(60)%0D%0Asubprocess.check_call(%5B%22vlc%22, %22/video/Clips/Let It Go/Turkish - Aldirma.mkv%22%5D)%0D%0A
|
|
5e5e74e606d9be3c60fb810ed215dfa109a6ad9f
|
fix #5 Define a class that handles wav file generation
|
libs/audio/create.py
|
libs/audio/create.py
|
Python
| 0
|
@@ -0,0 +1,1051 @@
+import wave%0Aimport struct%0Afrom pylab import *%0A%0A%22%22%22%0A%E6%8C%AF%E5%B9%85amp%E3%80%81%E5%9F%BA%E6%9C%AC%E5%91%A8%E6%B3%A2%E6%95%B0frq%E3%80%81%E3%82%B5%E3%83%B3%E3%83%97%E3%83%AA%E3%83%B3%E3%82%B0%E5%91%A8%E6%B3%A2%E6%95%B0 fs%E3%80%81%0A%E3%83%93%E3%83%83%E3%83%88%E6%B7%B1%E5%BA%A6bit_depthbit%E3%80%81%E9%95%B7%E3%81%95length%E7%A7%92%E3%81%AE%E6%AD%A3%E5%BC%A6%E6%B3%A2%E3%82%92%E4%BD%9C%E6%88%90%E3%81%97%E3%81%A6%E8%BF%94%E3%81%99%0A%22%22%22%0Adef createSineWave(amp, frq, fs, bit_depth, length) : %0A data = %5B%5D%0A clip_hi = 1.0%0A clip_lo = -1.0%0A %0A #%E6%AD%A3%E8%A6%8F%E5%8C%96%E3%81%97%E3%81%9F%E6%B3%A2%E3%82%92%E6%95%B4%E6%95%B0%E5%80%A4%E3%81%AB%E5%A4%89%E6%8F%9B%E3%81%99%E3%82%8B%E6%99%82%E3%81%AE%E5%80%8D%E7%8E%87%0A mult_bit = 0%0A if bit_depth == 16 : mult_bit = 32767.0%0A elif bit_depth == 24 : mult_bit = 16777216.0%0A else : exit() #16,24bit%E6%B7%B1%E5%BA%A6%E3%81%AE%E6%99%82%E4%BB%A5%E5%A4%96%E3%81%AF%E7%B5%82%E4%BA%86%0A %0A for n in arange(length * fs) :%0A s = amp * np.sin(2*np.pi * frq * n / fs)%0A %0A #%E3%82%AF%E3%83%AA%E3%83%83%E3%83%94%E3%83%B3%E3%82%B0%E5%87%A6%E7%90%86%0A if s %3E clip_hi : s = clip_hi%0A if s %3C clip_lo : s = clip_lo%0A %0A #%E6%9B%B8%E3%81%8D%E8%BE%BC%E3%81%BF%0A data.append(s)%0A %0A #nBit%E6%B7%B1%E5%BA%A6%E3%81%AE%E9%9F%B3%E6%BA%90%E3%81%AB%E5%A4%89%E6%8F%9B%0A data = %5Bint(x * mult_bit) for x in data%5D%0A #%E3%83%90%E3%82%A4%E3%83%8A%E3%83%AA%E3%81%AB%E5%A4%89%E6%8F%9B%0A data = struct.pack(%22h%22 * len(data), *data)%0A %0A return data%0A%0Aif __name__ == %22__main__%22 :%0A #freqList = %5B262, 294, 330, 349, 392, 440, 494, 523%5D # %E3%83%89%E3%83%AC%E3%83%9F%E3%83%95%E3%82%A1%E3%82%BD%E3%83%A9%E3%82%B7%E3%83%89%0A freqList = %5B440%5D%0A for f in freqList:%0A data = createSineWave(1.0, f, 44100, 16, 5.0)%0A #print(data)%0A save(data,44100,2,%22test.wav%22)%0A %0A
|
|
acda4ae5deff6b45f56b84cdecb867a09586af4a
|
Add lc295_find_median_from_data_stream.py
|
lc295_find_median_from_data_stream.py
|
lc295_find_median_from_data_stream.py
|
Python
| 0.000084
|
@@ -0,0 +1,1452 @@
+%22%22%22Leetcode 295. Find Median from Data Stream%0AHard%0A%0AURL: https://leetcode.com/problems/find-median-from-data-stream/%0A%0AMedian is the middle value in an ordered integer list. %0AIf the size of the list is even, there is no middle value. %0ASo the median is the mean of the two middle value.%0A%0AFor example,%0A%5B2,3,4%5D, the median is 3%0A%5B2,3%5D, the median is (2 + 3) / 2 = 2.5%0A%0ADesign a data structure that supports the following two operations:%0A- void addNum(int num) - Add a integer number from the data stream to %0A the data structure.%0A- double findMedian() - Return the median of all elements so far.%0A %0AExample:%0A%0AaddNum(1)%0AaddNum(2)%0AfindMedian() -%3E 1.5%0AaddNum(3) %0AfindMedian() -%3E 2%0A %0AFollow up:%0A- If all integer numbers from the stream are between 0 and 100, %0A how would you optimize it?%0A- If 99%25 of all integer numbers from the stream are between 0 and 100, %0A how would you optimize it?%0A%22%22%22%0A%0Aclass MedianFinder(object):%0A%0A def __init__(self):%0A %22%22%22%0A initialize your data structure here.%0A %22%22%22%0A pass%0A %0A%0A def addNum(self, num):%0A %22%22%22%0A :type num: int%0A :rtype: None%0A %22%22%22%0A pass %0A%0A def findMedian(self):%0A %22%22%22%0A :rtype: float%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A # Your MedianFinder object will be instantiated and called as such:%0A # obj = MedianFinder()%0A # obj.addNum(num)%0A # param_2 = obj.findMedian()%0A pass%0A %0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
e2a47d1ef44aa6b0ccc294aae68babd8ca54eb22
|
Create search_log_files.py
|
search_log_files.py
|
search_log_files.py
|
Python
| 0.000003
|
@@ -0,0 +1,1314 @@
+#!/usr/bin/env python%0Aimport os.path%0A%0A#Author: Sumit Joshi%0A#date:5/12/2015%0A# Log File Search Script%0A# This Script is useful for searching specific keyword in log files. For specific keyword it returns matching lines.%0A%0A%0Aprint %22*******************************************************%22%0Aprint %22%22%0Aprint %22%22%0Aprint %22Welcome to the Python Log Search Program%22%0Aprint %22Search log files with specific keywords...%22%0Aprint %22%22%0Aprint %22%22%0Aprint %22*******************************************************%22%0Aprint %22%22%0A%0Alog_file=raw_input(%22Enter the path of the log file that you want to read %22)%0Aprint log_file%0A%0Aif os.path.isfile(log_file):%0A keyword=raw_input('Enter the keywords that you want to search in log file with space ').split(%22 %22)%0A #for storing final log lines%0A final=%5B%5D%0A fd=open(log_file,%22r%22)%0A%0A for line in fd.readlines():%0A for item in keyword:%0A if item in line:%0A final.append(line)%0A final.append('')%0A if len(final)==0:%0A print %22No matching lines found in%22, log_file%0A else:%0A print 'Found matching lines in ', log_file%0A for log_line in final:%0A print log_line%0Aelse:%0A print 'Please enter valid log file name'%0A
|
|
ae90cf26caa471f85d7e5e20ef2e349b78183f41
|
make python2 work
|
python/static_dependencies/__init__.py
|
python/static_dependencies/__init__.py
|
Python
| 0.999998
|
@@ -0,0 +1,20 @@
+__all__ = %5B'ecdsa'%5D%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.