commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
0319803c530ef0d42845e947be6c9510a70636d1
|
add missing import for previous commit
|
bin/addons/base/ir/ir_cron.py
|
bin/addons/base/ir/ir_cron.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
import netsvc
import tools
from tools.safe_eval import safe_eval as eval
import pooler
from osv import fields, osv
def str2tuple(s):
    """Evaluate a textual argument list (e.g. "(uid,)") into a tuple.

    Empty or None input yields the empty tuple.  Evaluation goes through
    tools.safe_eval (aliased as ``eval`` in this module), so arbitrary
    code cannot be executed.
    """
    source = s or ''
    return eval('tuple(%s)' % source)
# Map an ir.cron interval_type value to a factory building the matching
# relativedelta, used to compute a job's next execution date.
_intervalTypes = {
    # NOTE(review): 'work_days' is treated exactly like 'days' here —
    # weekends/holidays are not skipped; confirm this is intentional.
    'work_days': lambda interval: relativedelta(days=interval),
    'days': lambda interval: relativedelta(days=interval),
    'hours': lambda interval: relativedelta(hours=interval),
    'weeks': lambda interval: relativedelta(days=7*interval),
    'months': lambda interval: relativedelta(months=interval),
    'minutes': lambda interval: relativedelta(minutes=interval),
}
class ir_cron(osv.osv, netsvc.Agent):
    """Scheduled-job ("cron") registry.

    Each record describes one method call (model/function/args) plus its
    schedule (nextcall, interval_*, numbercall).  netsvc.Agent supplies
    the setAlarm()/cancel() timer primitives used to wake the scheduler
    at the right moment.
    """
    _name = "ir.cron"
    _order = 'name'
    _columns = {
        'name': fields.char('Name', size=60, required=True),
        'user_id': fields.many2one('res.users', 'User', required=True),
        'active': fields.boolean('Active'),
        'interval_number': fields.integer('Interval Number',help="Repeat every x."),
        'interval_type': fields.selection( [('minutes', 'Minutes'),
            ('hours', 'Hours'), ('work_days','Work Days'), ('days', 'Days'),('weeks', 'Weeks'), ('months', 'Months')], 'Interval Unit'),
        'numbercall': fields.integer('Number of Calls', help='Number of time the function is called,\na negative number indicates no limit'),
        'doall' : fields.boolean('Repeat Missed', help="Enable this if you want to execute missed occurences as soon as the server restarts."),
        'nextcall' : fields.datetime('Next Execution Date', required=True, help="Next planned execution date for this scheduler"),
        # NOTE(review): "res.partener" below is a typo for "res.partner";
        # left untouched here because help text is a user-visible string.
        'model': fields.char('Object', size=64, help="Name of object whose function will be called when this scheduler will run. e.g. 'res.partener'"),
        'function': fields.char('Function', size=64, help="Name of the method to be called on the object when this scheduler is executed."),
        'args': fields.text('Arguments', help="Arguments to be passed to the method. e.g. (uid,)"),
        'priority': fields.integer('Priority', help='0=Very Urgent\n10=Not urgent')
    }

    _defaults = {
        'nextcall' : lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),
        'priority' : lambda *a: 5,
        'user_id' : lambda obj,cr,uid,context: uid,
        'interval_number' : lambda *a: 1,
        'interval_type' : lambda *a: 'months',
        'numbercall' : lambda *a: 1,
        'active' : lambda *a: 1,
        'doall' : lambda *a: 1
    }

    def _check_args(self, cr, uid, ids, context=None):
        """Constraint helper: 'args' must evaluate to a tuple via str2tuple()."""
        try:
            for this in self.browse(cr, uid, ids, context):
                str2tuple(this.args)
        except:
            # any parse/eval failure marks the record invalid
            return False
        return True

    _constraints = [
        (_check_args, 'Invalid arguments', ['args']),
    ]

    def _callback(self, cr, uid, model, func, args):
        """Execute one job: pool[model].func(cr, uid, *args).

        Errors are logged and the cursor rolled back; exceptions never
        propagate to the scheduler loop.
        """
        args = str2tuple(args)
        m = self.pool.get(model)
        if m and hasattr(m, func):
            f = getattr(m, func)
            try:
                f(cr, uid, *args)
            except Exception, e:
                cr.rollback()
                self._logger.notifyChannel('timers', netsvc.LOG_ERROR,
                    "Job call of self.pool.get('%s').%s(cr, uid, *%r) failed\n%s" %
                    (model, func, args, tools.exception_to_unicode(e)))
                # NOTE(review): 'traceback' is not imported in this module;
                # this line raises NameError when reached — 'import traceback'
                # is missing at the top of the file.
                self._logger.notifyChannel('timers', netsvc.LOG_DEBUG, traceback.format_exc())

    def _poolJobs(self, db_name, check=False):
        """Run every due job for db_name, then re-arm the alarm for the
        earliest upcoming nextcall."""
        try:
            db, pool = pooler.get_db_and_pool(db_name)
        except:
            # database unreachable or not initialized: give up silently
            return False
        cr = db.cursor()
        try:
            if not pool._init:
                now = datetime.now()
                cr.execute('select * from ir_cron where numbercall<>0 and active and nextcall<=now() order by priority')
                for job in cr.dictfetchall():
                    nextcall = datetime.strptime(job['nextcall'], '%Y-%m-%d %H:%M:%S')
                    numbercall = job['numbercall']

                    ok = False
                    # catch up every missed occurrence up to 'now'
                    while nextcall < now and numbercall:
                        if numbercall > 0:
                            # negative numbercall means "no limit": don't decrement
                            numbercall -= 1
                        # run the first occurrence always; subsequent missed
                        # ones only when 'doall' (Repeat Missed) is set
                        if not ok or job['doall']:
                            self._callback(cr, job['user_id'], job['model'], job['function'], job['args'])
                        if numbercall:
                            nextcall += _intervalTypes[job['interval_type']](job['interval_number'])
                        ok = True
                    addsql = ''
                    if not numbercall:
                        # no calls left: deactivate the job
                        addsql = ', active=False'
                    cr.execute("update ir_cron set nextcall=%s, numbercall=%s"+addsql+" where id=%s", (nextcall.strftime('%Y-%m-%d %H:%M:%S'), numbercall, job['id']))
                    cr.commit()

            cr.execute('select min(nextcall) as min_next_call from ir_cron where numbercall<>0 and active')
            next_call = cr.dictfetchone()['min_next_call']
            if next_call:
                next_call = time.mktime(time.strptime(next_call, '%Y-%m-%d %H:%M:%S'))
            else:
                # no active cron job found: check again in 3600 s (one hour;
                # the original comment claimed "1 day", which did not match)
                next_call = int(time.time()) + 3600

            if not check:
                self.setAlarm(self._poolJobs, next_call, db_name, db_name)

        except Exception, ex:
            logger = netsvc.Logger()
            logger.notifyChannel('cron', netsvc.LOG_WARNING,
                'Exception in cron:'+str(ex))

        finally:
            cr.commit()
            cr.close()

    def restart(self, dbname):
        """Cancel pending alarms for dbname and reschedule processing asap."""
        self.cancel(dbname)
        # Reschedule cron processing job asap, but not in the current thread
        self.setAlarm(self._poolJobs, time.time(), dbname, dbname)

    def create(self, cr, uid, vals, context=None):
        # commit so the new job is visible to the scheduler thread, then
        # recompute the next wake-up time
        res = super(ir_cron, self).create(cr, uid, vals, context=context)
        cr.commit()
        self.restart(cr.dbname)
        return res

    def write(self, cr, user, ids, vals, context=None):
        # same commit+restart as create(): the schedule may have changed
        res = super(ir_cron, self).write(cr, user, ids, vals, context=context)
        cr.commit()
        self.restart(cr.dbname)
        return res

    def unlink(self, cr, uid, ids, context=None):
        # removing a job can change the earliest nextcall: restart scheduler
        res = super(ir_cron, self).unlink(cr, uid, ids, context=context)
        cr.commit()
        self.restart(cr.dbname)
        return res
ir_cron()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0
|
@@ -989,24 +989,41 @@
import time%0A
+import traceback%0A
from datetim
|
fea74aa88af88ea352b72525ecbf22a0fbd4e3db
|
Make a histogram and visualize it
|
ch02/histogram.py
|
ch02/histogram.py
|
Python
| 0.000114
|
@@ -0,0 +1,1097 @@
+# Load the parquet file containing flight delay records%0Aon_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')%0A%0A# Register the data for Spark SQL%0Aon_time_dataframe.registerTempTable(%22on_time_performance%22)%0A%0A# Compute a histogram of departure delays%0Aon_time_dataframe%5C%0A .select(%22DepDelay%22)%5C%0A .rdd%5C%0A .flatMap(lambda x: x)%5C%0A .histogram(10)%0A%0Aimport numpy as np%0Aimport matplotlib.mlab as mlab%0Aimport matplotlib.pyplot as plt%0A%0A# Function to plot a histogram using pyplot%0Adef create_hist(rdd_histogram_data):%0A %22%22%22Given an RDD.histogram, plot a pyplot histogram%22%22%22%0A heights = np.array(rdd_histogram_data%5B1%5D)%0A full_bins = rdd_histogram_data%5B0%5D%0A mid_point_bins = full_bins%5B:-1%5D%0A widths = %5Babs(i - j) for i, j in zip(full_bins%5B:-1%5D, full_bins%5B1:%5D)%5D%0A bar = plt.bar(mid_point_bins, heights, width=widths, color='b')%0A return bar%0A%0A# Compute a histogram of departure delays%0Adeparture_delay_histogram = on_time_dataframe%5C%0A .select(%22DepDelay%22)%5C%0A .rdd%5C%0A .flatMap(lambda x: x)%5C%0A .histogram(10, %5B-60,-30,-15,-10,-5,0,5,10,15,30,60,90,120,180%5D)%0A%0Acreate_hist(departure_delay_histogram)%0A
|
|
168f6a9d557d1813649fd060dbfa1217355443df
|
Implement main for entry
|
cheat_ext/main.py
|
cheat_ext/main.py
|
Python
| 0.000002
|
@@ -0,0 +1,943 @@
+from __future__ import print_function%0Aimport argparse%0A%0Afrom cheat_ext.installer import (%0A install, upgrade, remove%0A)%0Afrom cheat_ext.linker import link%0A%0A%0Adef _install(args):%0A install(args.repository)%0A link(args.repository)%0A%0A%0Adef _upgrade(args):%0A upgrade(args.repository)%0A%0A%0Adef _remove(args):%0A remove(args.repository)%0A%0A%0Aparser = argparse.ArgumentParser(description=%22cheat extension%22)%0A%0Asubparsers = parser.add_subparsers()%0A%0Ainstall_parser = subparsers.add_parser(%22install%22)%0Ainstall_parser.add_argument(%22repository%22, type=str)%0Ainstall_parser.set_defaults(func=_install)%0A%0A%0Aupgrade_parser = subparsers.add_parser(%22upgrade%22)%0Aupgrade_parser.add_argument(%22repository%22, type=str)%0Aupgrade_parser.set_defaults(func=_upgrade)%0A%0Aremove_parser = subparsers.add_parser(%22remove%22)%0Aremove_parser.add_argument(%22repository%22, type=str)%0Aremove_parser.set_defaults(func=_remove)%0A%0A%0Adef main():%0A options = parser.parse_args()%0A options.func(options)%0A
|
|
6420dc0127f0f33036fe0f9258d5350da5faef6d
|
Create filtering.py
|
sciquence/sequences/filtering.py
|
sciquence/sequences/filtering.py
|
Python
| 0.000001
|
@@ -0,0 +1,562 @@
+def parallel_filter(condition, *lists):%0A '''%0A %0A Parallelly filter multiple lists.%0A %0A Parameters%0A ----------%0A condition: callable%0A A function, which has as many arguments as the number of lists%0A lists: list of list%0A%0A Returns%0A -------%0A filtered_lists:%0A Filtered accordingly some criterion%0A%0A '''%0A%0A # TODO: check length%0A%0A output = %5B%5B%5D for _ in xrange(len(lists))%5D%0A%0A for d in zip(*lists):%0A if condition(*list(d)):%0A multi_append(output, *list(d))%0A print output%0A%0A return output%0A
|
|
0fb323e20cfd8662c1bdd84b0ab4d928cd6d35fc
|
Add missing unit test for NVP metadata_mode option
|
quantum/tests/unit/nicira/test_nvpopts.py
|
quantum/tests/unit/nicira/test_nvpopts.py
|
# Copyright 2013 Nicira Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import fixtures
import os
import testtools
from oslo.config import cfg
from quantum.common import config as q_config
from quantum.manager import QuantumManager
from quantum.openstack.common import uuidutils
from quantum.plugins.nicira.common import config # noqa
from quantum.plugins.nicira.common import exceptions
from quantum.plugins.nicira import nvp_cluster
# Test fixture locations, resolved relative to this test module.
BASE_CONF_PATH = os.path.join(os.path.dirname(__file__),
                              '../../etc/quantum.conf.test')
NVP_INI_PATH = os.path.join(os.path.dirname(__file__),
                            'etc/nvp.ini.basic.test')
NVP_INI_FULL_PATH = os.path.join(os.path.dirname(__file__),
                                 'etc/nvp.ini.full.test')
# the "grizzly" ini exercises the deprecated option format
NVP_INI_DEPR_PATH = os.path.join(os.path.dirname(__file__),
                                 'etc/nvp.ini.grizzly.test')
# dotted import path of the plugin class under test
NVP_PLUGIN_PATH = ('quantum.plugins.nicira.nicira_nvp_plugin.'
                   'QuantumPlugin.NvpPluginV2')
class NVPClusterTest(testtools.TestCase):
    """Unit tests for nvp_cluster.NVPCluster construction and validation."""

    # Baseline of valid cluster options shared by all tests.
    # BUG FIX: 'default_l2_gw_service_uuid' appeared twice in this dict,
    # so the second entry silently overwrote the first and the l3 gateway
    # option was never set; the second key is now the intended
    # 'default_l3_gw_service_uuid'.
    cluster_opts = {'default_tz_uuid': uuidutils.generate_uuid(),
                    'default_l2_gw_service_uuid': uuidutils.generate_uuid(),
                    'default_l3_gw_service_uuid': uuidutils.generate_uuid(),
                    'nvp_cluster_uuid': uuidutils.generate_uuid(),
                    'nvp_user': 'foo',
                    'nvp_password': 'bar',
                    'req_timeout': 45,
                    'http_timeout': 25,
                    'retries': 7,
                    'redirects': 23,
                    'default_interface_name': 'baz',
                    'nvp_controllers': ['1.1.1.1:443']}

    def setUp(self):
        super(NVPClusterTest, self).setUp()
        # each test starts from pristine oslo.config state
        self.addCleanup(cfg.CONF.reset)

    def test_create_cluster(self):
        """Every constructor kwarg is exposed as a cluster attribute."""
        cluster = nvp_cluster.NVPCluster(**self.cluster_opts)
        for (k, v) in self.cluster_opts.iteritems():
            self.assertEqual(v, getattr(cluster, k))

    def test_create_cluster_default_port(self):
        """A controller address without a port gets the default :443."""
        opts = self.cluster_opts.copy()
        opts['nvp_controllers'] = ['1.1.1.1']
        cluster = nvp_cluster.NVPCluster(**opts)
        for (k, v) in self.cluster_opts.iteritems():
            self.assertEqual(v, getattr(cluster, k))

    def test_create_cluster_missing_required_attribute_raises(self):
        """Omitting a mandatory option raises NvpInvalidClusterConfiguration."""
        opts = self.cluster_opts.copy()
        opts.pop('default_tz_uuid')
        self.assertRaises(exceptions.NvpInvalidClusterConfiguration,
                          nvp_cluster.NVPCluster, **opts)
class ConfigurationTest(testtools.TestCase):
    """Checks NVP plugin option loading from the full/basic ini fixtures
    and the built-in oslo.config defaults."""

    def setUp(self):
        super(ConfigurationTest, self).setUp()
        self.addCleanup(cfg.CONF.reset)
        # reset the QuantumManager singleton so each test loads its own plugin
        self.useFixture(fixtures.MonkeyPatch(
            'quantum.manager.QuantumManager._instance',
            None))

    def _assert_required_options(self, cluster):
        # options every deployment must provide (values from the ini fixtures)
        self.assertEqual(cluster.nvp_controllers, ['fake_1:443', 'fake_2:443'])
        self.assertEqual(cluster.default_tz_uuid, 'fake_tz_uuid')
        self.assertEqual(cluster.nvp_user, 'foo')
        self.assertEqual(cluster.nvp_password, 'bar')

    def _assert_extra_options(self, cluster):
        # optional overrides present only in the "full" ini fixture
        self.assertEqual(14, cluster.req_timeout)
        self.assertEqual(13, cluster.http_timeout)
        self.assertEqual(12, cluster.redirects)
        self.assertEqual(11, cluster.retries)
        self.assertEqual('whatever', cluster.default_l2_gw_service_uuid)
        self.assertEqual('whatever', cluster.default_l3_gw_service_uuid)
        self.assertEqual('whatever', cluster.default_interface_name)

    def test_load_plugin_with_full_options(self):
        """Required and extra options both survive plugin loading."""
        q_config.parse(['--config-file', BASE_CONF_PATH,
                        '--config-file', NVP_INI_FULL_PATH])
        cfg.CONF.set_override('core_plugin', NVP_PLUGIN_PATH)
        plugin = QuantumManager().get_plugin()
        cluster = plugin.cluster
        self._assert_required_options(cluster)
        self._assert_extra_options(cluster)

    def test_load_plugin_with_required_options_only(self):
        """The plugin loads with only the mandatory options configured."""
        q_config.parse(['--config-file', BASE_CONF_PATH,
                        '--config-file', NVP_INI_PATH])
        cfg.CONF.set_override('core_plugin', NVP_PLUGIN_PATH)
        plugin = QuantumManager().get_plugin()
        self._assert_required_options(plugin.cluster)

    def test_defaults(self):
        """Unconfigured options fall back to the registered defaults."""
        self.assertEqual(64, cfg.CONF.NVP.max_lp_per_bridged_ls)
        self.assertEqual(256, cfg.CONF.NVP.max_lp_per_overlay_ls)
        self.assertEqual(5, cfg.CONF.NVP.concurrent_connections)
        self.assertEqual('stt', cfg.CONF.NVP.default_transport_type)

        self.assertIsNone(cfg.CONF.default_tz_uuid)
        self.assertIsNone(cfg.CONF.nvp_cluster_uuid)
        self.assertEqual('admin', cfg.CONF.nvp_user)
        self.assertEqual('admin', cfg.CONF.nvp_password)
        self.assertEqual(30, cfg.CONF.req_timeout)
        self.assertEqual(10, cfg.CONF.http_timeout)
        self.assertEqual(2, cfg.CONF.retries)
        self.assertEqual(2, cfg.CONF.redirects)
        self.assertIsNone(cfg.CONF.nvp_controllers)
        self.assertIsNone(cfg.CONF.default_l3_gw_service_uuid)
        self.assertIsNone(cfg.CONF.default_l2_gw_service_uuid)
        self.assertEqual('breth0', cfg.CONF.default_interface_name)
class OldConfigurationTest(testtools.TestCase):
    """Checks that the deprecated (grizzly-era) ini option format is still
    parsed into a working cluster configuration."""

    def setUp(self):
        super(OldConfigurationTest, self).setUp()
        self.addCleanup(cfg.CONF.reset)
        # reset the QuantumManager singleton so the test loads its own plugin
        self.useFixture(fixtures.MonkeyPatch(
            'quantum.manager.QuantumManager._instance',
            None))

    def _assert_required_options(self, cluster):
        # same mandatory options as ConfigurationTest, via deprecated keys
        self.assertEqual(cluster.nvp_controllers, ['fake_1:443', 'fake_2:443'])
        self.assertEqual(cluster.default_tz_uuid, 'fake_tz_uuid')
        self.assertEqual(cluster.nvp_user, 'foo')
        self.assertEqual(cluster.nvp_password, 'bar')

    def test_load_plugin_with_deprecated_options(self):
        """Deprecated options are parsed and mapped onto the cluster."""
        q_config.parse(['--config-file', BASE_CONF_PATH,
                        '--config-file', NVP_INI_DEPR_PATH])
        cfg.CONF.set_override('core_plugin', NVP_PLUGIN_PATH)
        plugin = QuantumManager().get_plugin()
        cluster = plugin.cluster
        self._assert_required_options(cluster)
        # Verify nvp_controller_connection has been fully parsed
        self.assertEqual(4, cluster.req_timeout)
        self.assertEqual(3, cluster.http_timeout)
        self.assertEqual(2, cluster.retries)
        self.assertEqual(1, cluster.redirects)
|
Python
| 0.000002
|
@@ -5121,16 +5121,87 @@
ctions)%0A
+ self.assertEqual('access_network', cfg.CONF.NVP.metadata_mode)%0A
|
0bd93c02ab7917d570a74cf151dfb5789c3bf174
|
Add a brutal script for removing concepts in bulk while testing
|
scripts/remove_concepts_after.py
|
scripts/remove_concepts_after.py
|
Python
| 0
|
@@ -0,0 +1,1937 @@
+# An entirely untested script to delete all the concepts in the%0A# CATMAID database for a particular project.%0A%0A# Mark Longair 2010%0A%0Aimport os%0A%0Afrom jarray import array%0A%0Afrom java.sql import DriverManager, Connection, SQLException, Types%0A%0A# FIXME: Just hardcode the user_id and project_id for the moment%0A%0Auser_id = 3%0Aproject_id = 4%0A%0A# Set up the JDBC connection:%0A%0Atry:%0A Class.forName(%22org.postgresql.Driver%22)%0Aexcept:%0A IJ.log(%22Failed to find the postgresql driver...%22)%0A raise%0A%0Acatmaid_db_user = None%0Acatmaid_db_password = None%0A%0Adb_login_filename = os.path.join(os.environ%5B'HOME'%5D,'.catmaid-db')%0Afp = open(db_login_filename)%0Afor i, line in enumerate(fp):%0A if i == 0:%0A catmaid_db_user = line.strip()%0A elif i == 1:%0A catmaid_db_password = line.strip()%0A%0Ac = DriverManager.getConnection(%22jdbc:postgresql://localhost/catmaid%22,%0A catmaid_db_user,%0A catmaid_db_password)%0A%0Adef run():%0A%0A # FIXME: ask in a dialog for the ID instead%0A first_id = 3859376%0A%0A where = ' where id %3E %25d'%25(first_id,))%0A%0A s = c.createStatement('delete from treenode_class_instance'+where)%0A s.executeQuery()%0A%0A s = c.createStatement('delete from connector_class_instance'+where)%0A s.executeQuery()%0A%0A s = c.createStatement('delete from class_instance'+where)%0A s.executeQuery()%0A%0A s = c.createStatement('alter table treenode drop constraint treenode_parent_id_fkey')%0A s.executeQuery()%0A s = c.createStatement('delete from treenode'+where)%0A s.executeQuery()%0A s = c.createStatement('alter table only treenode add constraint treenode_parent_id_fkey foreign key (parent_id) REFERENCES treenode(id)');%0A s.executeQuery()%0A%0A s = c.createStatement('delete from relation'+where)%0A s.executeQuery()%0A%0A s = c.createStatement('delete from connector'+where)%0A s.executeQuery()%0A%0A s = c.createStatement('delete from class_instance_class_instance'+where)%0A s.executeQuery()%0A
|
|
39b156cb3e208c3d06ced6fb086ab171209ac346
|
add ctable fixture
|
psi/ctable_mappings.py
|
psi/ctable_mappings.py
|
Python
| 0.000001
|
@@ -0,0 +1,1072 @@
+from ctable.fixtures import CtableMappingFixture%0Afrom ctable.models import ColumnDef, KeyMatcher%0A%0A%0Aclass EventsMapping(CtableMappingFixture):%0A name = 'events'%0A domains = %5B'psi-unicef', 'psi'%5D%0A couch_view = 'psi/events'%0A schedule_active = True%0A%0A @property%0A def columns(self):%0A columns = %5B%0A ColumnDef(name=%22domain%22, data_type=%22string%22, value_source=%22key%22, value_index=2),%0A ColumnDef(name=%22state%22, data_type=%22string%22, value_source=%22key%22, value_index=3),%0A ColumnDef(name=%22district%22, data_type=%22string%22, value_source=%22key%22, value_index=4),%0A ColumnDef(name=%22date%22, data_type=%22date%22, value_source=%22key%22, value_index=1),%0A %5D%0A for c in %5B'events', 'males', 'females', 'attendees', 'leaflets', 'gifts'%5D:%0A columns.append(ColumnDef(name=c, data_type=%22integer%22, value_source=%22value%22, value_attribute='sum',%0A match_keys=%5BKeyMatcher(index=5, value=c)%5D))%0A%0A return columns%0A%0A def customize(self, mapping):%0A mapping.couch_key_prefix = %5B'ctable'%5D%0A
|
|
f3a02b3570724964f60d10a8112e0d8eb32dddc7
|
Add 4chan download script
|
4chan.py
|
4chan.py
|
Python
| 0
|
@@ -0,0 +1,2143 @@
+#!/usr/bin/python%0D%0A%0D%0A# Protip: want to monitor a thread and download all new images every 5 seconds?%0D%0A# while x= 0 ; do 4c %5B-nf%5D url; sleep 5; done%0D%0A%0D%0Aimport re, urllib, urllib2, argparse, os%0D%0A%0D%0Aparser = argparse.ArgumentParser(description='Downloads all full-size images in one or more arbitrary 4chan threads.')%0D%0A%0D%0Aparser.add_argument('urllist', metavar='url', type=str, nargs='+',%0D%0A help='the URLs of the threads')%0D%0Aparser.add_argument('-n', '--newdir', dest='newdir', action='store_true',%0D%0A help='create a new directory for each thread in the current directory')%0D%0Aparser.add_argument('-f', '--force', dest='force_redownload', action='store_true',%0D%0A help='force redownloading every image, overwriting it if it already exists')%0D%0A %0D%0Aargs = parser.parse_args()%0D%0Aoptions = vars(args)%0D%0A%0D%0Aregex = 'href=%22(%5C/%5C/images%5C.4chan%5C.org%5C/%5Ba-z%5D+%5C/src%5C/%5B0-9%5D+%5C.%5Ba-z%5D+)%22'%0D%0A%0D%0Afor url in options%5B'urllist'%5D:%0D%0A%09print %22Thread URL: %25s%22 %25 url%0D%0A%09%0D%0A%09try:%0D%0A%09%09page = urllib2.urlopen(url).read()%0D%0A%09except ValueError:%0D%0A%09%09print %22That does not look like a valid URL.%22%0D%0A%09%09continue%0D%0A%09except urllib2.HTTPError:%0D%0A%09%09print %22The given URL returns a HTTP 404 status code - the thread may have died.%22%0D%0A%09%09continue%0D%0A%0D%0A%09if options%5B'newdir'%5D == True:%0D%0A%09%09thread_id = url.split('/')%5B-1%5D%0D%0A%09%09target_dir = %22%25s/%22 %25 thread_id%0D%0A%0D%0A%09%09if not os.path.exists(thread_id):%0D%0A%09%09%09os.makedirs(thread_id)%0D%0A%09else:%0D%0A%09%09target_dir = %22%22%0D%0A%0D%0A%09search = re.compile(regex)%0D%0A%09matches = search.finditer(page)%0D%0A%0D%0A%09urls = %5B%5D%0D%0A%0D%0A%09for match in matches:%0D%0A%09%09if match.group(1) not in urls:%0D%0A%09%09%09urls.append(match.group(1))%0D%0A%0D%0A%09current = 1%0D%0A%09total = len(urls)%0D%0A%0D%0A%09print %22 Parsed thread. 
Total images: %25d%22 %25 total%0D%0A%0D%0A%09for downloadurl in urls:%0D%0A%09%09downloadurl = %22http:%25s%22 %25 downloadurl%0D%0A%09%09filename = downloadurl.split('/')%5B-1%5D%0D%0A%09%09path = target_dir + filename%0D%0A%09%09%0D%0A%09%09if os.path.exists(path) and options%5B'force_redownload'%5D == False:%0D%0A%09%09%09print %22Skipped existing file %25s (%25d/%25d).%22 %25 (filename, current, total)%0D%0A%09%09else:%0D%0A%09%09%09urllib.urlretrieve(downloadurl, path)%0D%0A%09%09%09print %22Downloaded %25s (%25d/%25d).%22 %25 (filename, current, total)%0D%0A%09%09current += 1%0D%0A%09%0D%0A%09print %22Done.%22
|
|
8add47cf7d04f2f5e9cbea4eb036eb513e481ddd
|
fix populate argument
|
manage.py
|
manage.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import click
from quokka import create_app
from quokka.ext.blueprints import blueprint_commands
from quokka.core.db import db
# Build the Quokka application once at import time so every CLI command
# below shares the same app instance.
app = create_app()

# Configure root logging only when explicitly enabled in the app config;
# level/format/datefmt all fall back to sensible defaults.
if app.config.get("LOGGER_ENABLED"):
    logging.basicConfig(
        level=getattr(logging, app.config.get("LOGGER_LEVEL", "DEBUG")),
        format=app.config.get(
            "LOGGER_FORMAT",
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'),
        datefmt=app.config.get("LOGGER_DATE_FORMAT", '%d.%m %H:%M:%S')
    )
# Root click group; dynamically discovered blueprint commands are merged
# with it below via click.CommandCollection.
@click.group()
def core_cmd():
    """ Core commands """
    pass
@core_cmd.command()
@click.option('--ipython/--no-ipython', default=True)
def shell(ipython):
    """Runs a Python shell with Quokka context"""
    import code
    import readline
    import rlcompleter

    # namespace exposed to the shell: module globals, locals, plus the
    # app and db handles for convenience
    _vars = globals()
    _vars.update(locals())
    _vars.update(dict(app=app, db=db))
    # enable tab-completion over everything in that namespace
    readline.set_completer(rlcompleter.Completer(_vars).complete)
    readline.parse_and_bind("tab: complete")
    try:
        if ipython is True:
            from IPython import start_ipython
            start_ipython(argv=[], user_ns=_vars)
        else:
            # user asked for the plain shell: reuse the ImportError
            # fallback path below
            raise ImportError
    except ImportError:
        # IPython unavailable (or explicitly disabled): plain REPL
        shell = code.InteractiveConsole(_vars)
        shell.interact()
@core_cmd.command()
def check():
    """Prints app status"""
    from pprint import pprint

    # dump each registry under a labelled heading
    sections = (("Extensions.", app.extensions),
                ("Modules.", app.blueprints))
    for heading, registry in sections:
        print(heading)
        pprint(registry)
    print("App.")
    return app
@core_cmd.command()
# BUG FIX: the option was declared as '--f', which did not match the
# documented/expected '--filename' flag.  '--filename' is now the primary
# name; '--f' is kept as an alias so existing invocations keep working.
@click.option(
    '--filename', '--f', 'filename',
    help='Fixtures JSON path',
    default='./etc/fixtures/initial_data.json')
def populate(filename):
    """Populate the database with sample data"""
    from quokka.utils.populate import Populate
    Populate(db, filepath=filename)()
@core_cmd.command()
def showconfig():
    """Print all config variables"""
    from pprint import pprint

    # snapshot the Flask config mapping into a plain dict for display
    config_snapshot = dict(app.config)
    print("Config.")
    pprint(config_snapshot)
@core_cmd.command()
@click.option('--reloader/--no-reloader', default=True)
@click.option('--host', default='127.0.0.1')
@click.option('--port', default=5000)
def runserver(reloader, host, port):
    """Run the Flask development server i.e. app.run()"""
    # development server only — not intended for production deployments
    app.run(use_reloader=reloader, host=host, port=port)
# Help text displayed by the aggregated CLI (core + blueprint commands).
help_text = """
Subcommands are loaded from the modules/commands folder dynamically.
The file must be called cmd_<command_name> with a function 'cli'
being the click.command to be loaded:
Example:
\b
import click
@click.command()
def cli():
    click.echo("Do whatever you want")
"""

# Merge the core commands with per-blueprint commands into one CLI.
manager = click.CommandCollection(help=help_text)
manager.add_source(core_cmd)
manager.add_source(blueprint_commands(app))

if __name__ == '__main__':
    # every command runs inside the Flask application context so that
    # extensions bound to the app (e.g. db) are usable
    with app.app_context():
        manager()
|
Python
| 0.000003
|
@@ -1560,13 +1560,30 @@
'-
--
f',%0A
+ '--filename',%0A
@@ -1671,16 +1671,23 @@
pulate(f
+ilename
):%0A %22
@@ -1777,16 +1777,16 @@
opulate%0A
-
Popu
@@ -1804,16 +1804,23 @@
lepath=f
+ilename
)()%0A%0A%0A@c
|
b166caa9fb0efa4aceab315fd6a945d2fe6922e4
|
Patch fixed
|
erpnext/patches/v7_2/update_salary_slips.py
|
erpnext/patches/v7_2/update_salary_slips.py
|
import frappe
from erpnext.hr.doctype.process_payroll.process_payroll import get_month_details
def execute():
    """Patch: backfill start_date/end_date on old Salary Slips that only
    carry fiscal_year/month.

    Bug fix: the original query returned plain tuples, so the attribute
    accesses below (salary_slip.fiscal_year, salary_slip.month,
    salary_slip.name) raised AttributeError on the first row; passing
    as_dict=1 makes frappe return dict-like rows supporting attribute
    access.
    """
    salary_slips = frappe.db.sql("""select fiscal_year, month, name from `tabSalary Slip`
        where (month is not null and month != '')
        and (fiscal_year is not null and fiscal_year != '') and
        (start_date is null or start_date = '') and
        (end_date is null or end_date = '') and docstatus != 2""", as_dict=1)

    for salary_slip in salary_slips:
        # derive the calendar span of the slip's fiscal month
        get_start_end_date = get_month_details(salary_slip.fiscal_year, salary_slip.month)
        start_date = get_start_end_date['month_start_date']
        end_date = get_start_end_date['month_end_date']
        frappe.db.sql("""update `tabSalary Slip` set start_date = %s, end_date = %s where name = %s""",
            (start_date, end_date, salary_slip.name))
|
Python
| 0.000001
|
@@ -411,16 +411,27 @@
!= 2%22%22%22
+, as_dict=1
)%0A%0A%09for
|
c2151ae33c44f29d15d494d4862645beb33671cb
|
Add comments tests
|
kansha/card_addons/comment/tests.py
|
kansha/card_addons/comment/tests.py
|
Python
| 0
|
@@ -0,0 +1,1272 @@
+# -*- coding:utf-8 -*-%0A#--%0A# Copyright (c) 2012-2014 Net-ng.%0A# All rights reserved.%0A#%0A# This software is licensed under the BSD License, as described in%0A# the file LICENSE.txt, which you should have received as part of%0A# this distribution.%0A#--%0A%0Afrom nagare import security%0A%0Afrom kansha.cardextension.tests import CardExtensionTestCase%0A%0Afrom .comp import Comments%0A%0A%0Aclass CommentsTest(CardExtensionTestCase):%0A def create_instance(self, card, action_log):%0A return Comments(card, action_log)%0A%0A def test_add_delete(self):%0A self.assertEqual(len(self.extension.comments), 0)%0A self.extension.add(u'test')%0A self.assertEqual(len(self.extension.comments), 1)%0A comment = self.extension.comments%5B0%5D()%0A self.assertEqual(comment.data.comment, u'test')%0A comment.set_comment(u'test2')%0A self.assertEqual(comment.data.comment, u'test2')%0A self.extension.delete_comment(self.extension.comments%5B0%5D)%0A%0A def test_comment_label(self):%0A self.extension.add(u'test')%0A label = self.extension.comments%5B0%5D().comment_label()%0A self.assertEqual(label.text, u'test')%0A label.change_text(u'test2')%0A self.assertEqual(label.text, u'test2')%0A self.assertTrue(label.is_author(security.get_user()))%0A%0A%0A
|
|
29a05075e500635b10a25931045702888b12618f
|
add main file
|
main.py
|
main.py
|
Python
| 0.000001
|
@@ -0,0 +1,170 @@
+import gas%0AGASANALOG = 0%0AALARMLED = 13%0A%0Agas = machine.ADC(GASANALOG)%0AgasLED = machine.Pin(ALARMLED, machine.Pin.OUT)%0A%0Ag = gas.CheckGas(led=gasLED, sensor=gas, time=1000)%0A
|
|
6b96008b3e89e3ff6a5616a68e49af3e41b2bc0b
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000001
|
@@ -0,0 +1,1620 @@
+#!/usr/bin/python%0A%0A%22%22%22%0A__version__ = %22$Revision: 1.3 $%22%0A__date__ = %22$Date: 2004/04/14 02:38:47 $%22%0A%22%22%22%0A%0Aimport nID%0Aimport plugin%0Aimport os%0A%0Arepository_nid=%22all_url:https://www.dropbox.com/s/tvyxx5iidodidz2/nid_sample_list.txt?dl=1@name:Sample Repo Name@owner:myselfminer%22%0A%0Afrom PythonCard import model%0A%0Aclass MyBackground(model.Background):%0A%0A def on_initialize(self, event):%0A # if you have any initialization%0A # including sizer setup, do it here%0A pass%0A def on_allPlugins_mouseClick(self, event):%0A allp=plugin.listall(repository_nid)# get a list of all plugins aviable in the repo%0A self.components.List1.items=allp#update List1 with the list%0A def on_load_mouseClick(self,event):%0A print(%22w%22)%0A a=open(%22nidlist.tmp%22,%22r%22)# open saved nids(presaved result of plugin.listall())%0A b=a.readlines()%0A a=plugin.get_nid_by_name(b,self.components.List1.stringSelection) # get nid by using the selection in List1%0A # ---Set Component values---%0A self.components.NameF.text=nID.parse(a, %22name%22)%0A self.components.VersionF.text=nID.parse(a, %22version%22)%0A self.components.OwnerF.text=nID.parse(a, %22Author%22)%0A self.components.DescriptionF.text=nID.parse(a, %22Description%22)%0A def on_Installed_mouseClick(self, event):%0A nids=plugin.get_local_nids()%0A final=%5B%5D%0A for i in nids:#create a nidlist of installed plugins%0A now=nID.parse(i, %22name%22)%0A final.append(now)%0A self.components.List1.items=final%0A %0A%0Aif __name__ == '__main__':%0A app = model.Application(MyBackground)%0A app.MainLoop()%0A
|
|
9e388ad5b78967f87a0b3b55235bd1e19183c152
|
Test for the PaladinSpellSchema values
|
tests/models/spells/test_paladin_spells.py
|
tests/models/spells/test_paladin_spells.py
|
Python
| 0
|
@@ -0,0 +1,2493 @@
+import unittest%0A%0Afrom tests.delete_test_db import delete_test_db # module that deletes the DB :)%0Aimport database.main%0Afrom tests.create_test_db import engine, session, Base%0A%0A%0Adatabase.main.engine = engine%0Adatabase.main.session = session%0Adatabase.main.Base = Base%0Aimport models.main%0Afrom models.spells.paladin_spells_template import PaladinSpellsSchema%0Afrom spells import PaladinSpell%0Afrom models.items.item_template import ItemTemplateSchema%0Afrom models.spells.spell_dots import DotSchema%0Afrom buffs import BeneficialBuff, DoT%0A%0A%0Aclass PaladinSpellsSchemaTests(unittest.TestCase):%0A def setUp(self):%0A %22%22%22%0A Test that the values in the Schema are as expected%0A And that the convert_to_paladin_spell_object function works%0A %22%22%22%0A self.spell_entry = 4%0A self.spell_name = 'Melting Strike'%0A self.expected_spell = PaladinSpell(name=self.spell_name, rank=1, damage1=3, damage2=0, damage3=0,%0A heal1=0, heal2=0, heal3=0, mana_cost=6, cooldown=3,%0A beneficial_effect=None, harmful_effect=None)%0A%0A def test_schema_values(self):%0A %22%22%22 Load a schema object and assert that every value is as expected%22%22%22%0A loaded_schema: PaladinSpellsSchema = session.query(PaladinSpellsSchema).get(self.spell_entry)%0A%0A self.assertTrue(isinstance(loaded_schema.id, int))%0A self.assertTrue(isinstance(loaded_schema.name, str))%0A self.assertTrue(isinstance(loaded_schema.rank, int))%0A self.assertTrue(isinstance(loaded_schema.level_required, int))%0A self.assertTrue(isinstance(loaded_schema.damage1, int))%0A self.assertTrue(isinstance(loaded_schema.damage2, int))%0A self.assertTrue(isinstance(loaded_schema.damage3, int))%0A self.assertTrue(isinstance(loaded_schema.heal1, int))%0A self.assertTrue(isinstance(loaded_schema.heal2, int))%0A self.assertTrue(isinstance(loaded_schema.heal3, int))%0A self.assertTrue(isinstance(loaded_schema.mana_cost, int))%0A self.assertIsNone(loaded_schema.beneficial_effect)%0A 
self.assertTrue(isinstance(loaded_schema.harmful_effect, int))%0A self.assertTrue(isinstance(loaded_schema.cooldown, int))%0A self.assertTrue(isinstance(loaded_schema.comment, str))%0A self.assertIsNone(loaded_schema.buff)%0A self.assertTrue(isinstance(loaded_schema.dot, DotSchema))%0A%0A%0A%0A%0Adef tearDownModule():%0A delete_test_db()%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
803201baa32fb847f363b6807f92f2d0b6a51c51
|
Test that an error in pre_gen_project aborts generation
|
tests/test_abort_generate_on_hook_error.py
|
tests/test_abort_generate_on_hook_error.py
|
Python
| 0.000002
|
@@ -0,0 +1,607 @@
+# -*- coding: utf-8 -*-%0A%0Aimport pytest%0A%0Afrom cookiecutter import generate%0Afrom cookiecutter import exceptions%0A%0A%0A@pytest.mark.usefixtures('clean_system')%0Adef test_pre_gen_hook(tmpdir):%0A context = %7B%0A 'cookiecutter': %7B%0A %22repo_dir%22: %22foobar%22,%0A %22abort_pre_gen%22: %22yes%22,%0A %22abort_post_gen%22: %22no%22%0A %7D%0A %7D%0A%0A with pytest.raises(exceptions.FailedHookException):%0A generate.generate_files(%0A repo_dir='tests/hooks-abort-render',%0A context=context,%0A output_dir=str(tmpdir)%0A )%0A%0A assert not tmpdir.join('foobar').isdir()%0A%0A
|
|
b079edc37cd8abb68194637ee90b9fecc51b9b98
|
Add basic test for document quickcaching
|
corehq/apps/cachehq/tests.py
|
corehq/apps/cachehq/tests.py
|
Python
| 0
|
@@ -0,0 +1,1199 @@
+from copy import deepcopy%0Afrom mock import patch, MagicMock%0Afrom django.test import SimpleTestCase%0Afrom dimagi.ext import couchdbkit as couch%0Afrom corehq.apps.cachehq.mixins import CachedCouchDocumentMixin%0A%0A%0Aclass BlogPost(CachedCouchDocumentMixin, couch.Document):%0A title = couch.StringProperty()%0A body = couch.StringProperty()%0A%0A%0Aclass TestCachedCouchDocumentMixin(SimpleTestCase):%0A @patch('dimagi.ext.couchdbkit.Document.save', MagicMock())%0A @patch('dimagi.ext.couchdbkit.Document.get')%0A def test_get(self, doc_get):%0A blog_post = BlogPost(title=%22My favorite colors%22, body=%22blue%22)%0A blog_post._id = 'idssrgglcfoyxdtrunbcae'%0A doc_get.return_value = deepcopy(blog_post)%0A blog_post.save()%0A blog_post.clear_caches()%0A%0A # Make two %60get%60s and assert that only one made it to Document.get%0A BlogPost.get(blog_post._id)%0A BlogPost.get(blog_post._id)%0A doc_get.assert_called_once_with(blog_post._id)%0A%0A # Update the doc, save, and assert that Document.get was hit again%0A blog_post.body = %22Actually, it's purple%22%0A blog_post.save()%0A BlogPost.get(blog_post._id)%0A self.assertEqual(doc_get.call_count, 2)%0A
|
|
c8cc6f4fc111d5dd2d55295e569a10cd5739ceee
|
test : fix for python 2.6
|
lib/spack/spack/test/environment.py
|
lib/spack/spack/test/environment.py
|
import unittest
import os
from spack.environment import EnvironmentModifications, apply_environment_modifications
class EnvironmentTest(unittest.TestCase):
    """Exercises EnvironmentModifications against a scratch os.environ."""

    def setUp(self):
        # Start from an empty environment so each test only sees its fixtures.
        os.environ.clear()
        os.environ['UNSET_ME'] = 'foo'
        os.environ['EMPTY_PATH_LIST'] = ''
        os.environ['PATH_LIST'] = '/path/second:/path/third'
        os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'

    def test_set_env(self):
        """set_env stores strings verbatim and stringifies non-string values."""
        mods = EnvironmentModifications()
        mods.set_env('A', 'dummy value')
        mods.set_env('B', 3)
        apply_environment_modifications(mods)
        self.assertEqual(os.environ['A'], 'dummy value')
        self.assertEqual(os.environ['B'], str(3))

    def test_unset_env(self):
        """unset_env removes the variable entirely."""
        mods = EnvironmentModifications()
        self.assertEqual(os.environ['UNSET_ME'], 'foo')
        mods.unset_env('UNSET_ME')
        apply_environment_modifications(mods)
        self.assertRaises(KeyError, os.environ.__getitem__, 'UNSET_ME')

    def test_path_manipulation(self):
        """append/prepend/remove work on existing, empty and missing path lists."""
        mods = EnvironmentModifications()
        # Existing, populated list.
        mods.append_path('PATH_LIST', '/path/last')
        mods.prepend_path('PATH_LIST', '/path/first')
        # Existing but empty list.
        mods.append_path('EMPTY_PATH_LIST', '/path/middle')
        mods.append_path('EMPTY_PATH_LIST', '/path/last')
        mods.prepend_path('EMPTY_PATH_LIST', '/path/first')
        # List that does not exist yet.
        mods.append_path('NEWLY_CREATED_PATH_LIST', '/path/middle')
        mods.append_path('NEWLY_CREATED_PATH_LIST', '/path/last')
        mods.prepend_path('NEWLY_CREATED_PATH_LIST', '/path/first')
        # Removal, including a duplicated entry.
        mods.remove_path('REMOVE_PATH_LIST', '/remove/this')
        mods.remove_path('REMOVE_PATH_LIST', '/duplicate/')
        apply_environment_modifications(mods)
        self.assertEqual(os.environ['PATH_LIST'],
                         '/path/first:/path/second:/path/third:/path/last')
        self.assertEqual(os.environ['EMPTY_PATH_LIST'],
                         '/path/first:/path/middle:/path/last')
        self.assertEqual(os.environ['NEWLY_CREATED_PATH_LIST'],
                         '/path/first:/path/middle:/path/last')
        self.assertEqual(os.environ['REMOVE_PATH_LIST'], '/a/b:/a/c:/a/d:/f/g')

    def test_extra_arguments(self):
        """Unknown keyword arguments (e.g. provenance info) are tolerated."""
        mods = EnvironmentModifications()
        mods.set_env('A', 'dummy value', who='Pkg1')
        apply_environment_modifications(mods)
        self.assertEqual(os.environ['A'], 'dummy value')

    def test_extend(self):
        """Copy construction shares the underlying modification objects."""
        mods = EnvironmentModifications()
        mods.set_env('A', 'dummy value')
        mods.set_env('B', 3)
        duplicate = EnvironmentModifications(mods)
        self.assertEqual(len(duplicate), 2)
        for original, copied in zip(mods, duplicate):
            self.assertIs(original, copied)
|
Python
| 0.000013
|
@@ -2651,24 +2651,18 @@
-self.
assert
-Is(x,
+ x is
y
-)
%0A
|
ce5ba72605e93e4fd83f36cced28d7c813c95e54
|
Create myfile.py
|
myfile.py
|
myfile.py
|
Python
| 0.000011
|
@@ -0,0 +1,3496 @@
+import os%0Aimport re%0A%0A%0Adef searchByExt(rootpath, ext):%0A print '----- File List -----'%0A results = %5B%5D%0A for root, dirs, files in os.walk(rootpath):%0A for filename in files:%0A if re.search(r'.*%5C.%25s' %25 ext, filename):%0A result = os.path.join(root, filename)%0A results.append(result)%0A print 'Find: %25s' %25 result%0A print '-- End of File List--'%0A return results%0A%0A%0Adef modifyPrefix(filelist, oldPrefix='', newPrefix=''):%0A # add prefix%0A if oldPrefix == '' and newPrefix != '':%0A action = 'Add'%0A%0A # remove prefix%0A if oldPrefix != '' and newPrefix == '':%0A action = 'Remove'%0A%0A # change prefix%0A if oldPrefix != '' and newPrefix != '':%0A action = 'Change'%0A%0A # stay unchanged%0A if oldPrefix == newPrefix:%0A print 'The prefix stay unchanged.'%0A return%0A%0A for oldFile in filelist:%0A if os.path.exists(oldFile):%0A if os.path.isfile(oldFile):%0A dirname, filename = os.path.split(oldFile)%0A if filename%5B:len(oldPrefix)%5D == oldPrefix:%0A newFilename = newPrefix + filename%5Blen(oldPrefix):%5D%0A newFile = os.path.join(dirname, newFilename)%0A os.rename(oldFile, newFile)%0A if os.path.exists(newFile):%0A print '%25s prefix: %25s -%3E %25s. Succeed' %25 (action, filename, newFilename)%0A else:%0A print '%25s prefix: %25s -%3E %25s. Fail' %25 (action, filename, newFilename)%0A else:%0A print 'Warning: Invalid old prefix for file: %25s (The requested prefix to be %25sd is %22%25s%22). Skip' %25 (filename, action.lower(), oldPrefix)%0A continue%0A else:%0A print 'Warning: %25s is not a valid file. Skip' %25 oldFile%0A else:%0A print 'Warning: %25s does not exist. 
Skip' %25 oldFile%0A%0A%0Adef modifySuffix(filelist, oldSuffix='', newSuffix=''):%0A # add suffix%0A if oldSuffix == '' and newSuffix != '':%0A action = 'Add'%0A%0A # remove suffix%0A if oldSuffix != '' and newSuffix == '':%0A action = 'Remove'%0A%0A # change suffix%0A if oldSuffix != '' and newSuffix != '':%0A action = 'Change'%0A%0A # stay unchanged%0A if oldSuffix == newSuffix:%0A print 'The suffix stay unchanged.'%0A return%0A%0A for oldFile in filelist:%0A if os.path.exists(oldFile):%0A if os.path.isfile(oldFile):%0A dirname, fullfilename = os.path.split(oldFile)%0A filename, ext = os.path.splitext(fullfilename)%0A if filename%5Blen(filename)-len(oldSuffix):%5D == oldSuffix:%0A newFilename = filename%5B:len(filename)-len(oldSuffix)%5D + newSuffix + ext%0A newFile = os.path.join(dirname, newFilename)%0A os.rename(oldFile, newFile)%0A if os.path.exists(newFile):%0A print '%25s suffix: %25s -%3E %25s. Succeed' %25 (action, fullfilename, newFilename)%0A else:%0A print '%25s suffix: %25s -%3E %25s. Fail' %25 (action, fullfilename, newFilename)%0A else:%0A print 'Warning: Invalid old suffix for file: %25s (The requested suffix to be %25sd is %22%25s%22). Skip' %25 (fullfilename, action.lower(), oldSuffix)%0A continue%0A else:%0A print 'Warning: %25s is not a valid file. Skip' %25 oldFile%0A else:%0A print 'Warning: %25s does not exist. Skip' %25 oldFile%0A
|
|
b159433375714c67ac36e58d4323196222759f30
|
Add missing migration from 096092b.
|
babybuddy/migrations/0003_add_refresh_help_text.py
|
babybuddy/migrations/0003_add_refresh_help_text.py
|
Python
| 0
|
@@ -0,0 +1,939 @@
+# Generated by Django 2.0.5 on 2018-07-15 14:16%0A%0Aimport datetime%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('babybuddy', '0002_add_settings'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='settings',%0A name='dashboard_refresh_rate',%0A field=models.DurationField(blank=True, choices=%5B(None, 'disabled'), (datetime.timedelta(0, 60), '1 min.'), (datetime.timedelta(0, 120), '2 min.'), (datetime.timedelta(0, 180), '3 min.'), (datetime.timedelta(0, 240), '4 min.'), (datetime.timedelta(0, 300), '5 min.'), (datetime.timedelta(0, 600), '10 min.'), (datetime.timedelta(0, 900), '15 min.'), (datetime.timedelta(0, 1800), '30 min.')%5D, default=datetime.timedelta(0, 60), help_text='This setting will only be used when a browser does not support refresh on focus.', null=True, verbose_name='Refresh rate'),%0A ),%0A %5D%0A
|
|
0104f898a4a54027688411dd20d39aeecfc31f6d
|
Create player.py
|
player.py
|
player.py
|
Python
| 0
|
@@ -0,0 +1,1545 @@
+import pygame%0Aimport ss%0A%0Aclass Player(pygame.sprite.Sprite):%0A%09def __init__(self, level, *groups):%0A%09%09super(Player, self).__init__(*groups)%0A%09%09self.Rimg = pygame.image.load('RangerDanR.png')%0A%09%09self.Limg = pygame.image.load('RangerDanL.png')%0A%09%09self.image = self.Rimg%0A%09%09self.rect = pygame.rect.Rect((100,100), self.image.get_size())#320,240%0A%09%09self.level = level%0A%09%09%0A%09def command(self, cmd):%0A%09%09prev = self.rect.copy()%0A%09%09if cmd == %22U%22:%0A%09%09%09if self.rect.y %3C= 100:%0A%09%09%09%09self.level.move_BG(%22D%22)%0A%09%09%09else:%0A%09%09%09%09self.rect.y -= 100%0A%09%09if cmd == %22D%22:%0A%09%09%09if self.rect.y %3E= self.level.mastery - 200:%0A%09%09%09%09self.level.move_BG(%22U%22)%0A%09%09%09else:%0A%09%09%09%09self.rect.y += 100%0A%09%09if cmd == %22L%22:%0A%09%09%09self.image = self.Limg%0A%09%09%09if self.rect.x %3C= 100:%0A%09%09%09%09self.level.move_BG(%22R%22)%0A%09%09%09else:%0A%09%09%09%09self.rect.x -= 100%0A%09%09if cmd == %22R%22:%0A%09%09%09self.image = self.Rimg%0A%09%09%09if self.rect.x %3E= self.level.masterx - 200:%0A%09%09%09%09self.level.move_BG(%22L%22)%0A%09%09%09else:%0A%09%09%09%09self.rect.x += 100%0A%09%09new = self.rect%0A%09%09for loc in pygame.sprite.spritecollide(self, self.level.unpassable, False):%0A%09%09%09loc = loc.rect%0A%09%09%09if prev.right %3C= loc.left and new.right %3E loc.left:%0A%09%09%09%09new.right = loc.left%0A%09%09%09if prev.left %3E= loc.right and new.left %3C loc.right:%0A%09%09%09%09new.left = loc.right%0A%09%09%09if prev.bottom %3C= loc.top and new.bottom %3E loc.top:%0A%09%09%09%09new.bottom = loc.top%0A%09%09%09if prev.top %3E= loc.bottom and new.top %3C loc.bottom:%0A%09%09%09%09new.top = loc.bottom%0A%09%09%09%09%0A%09%09for thing in pygame.sprite.spritecollide(self, self.level.items, True):%0A%09%09%09if thing.flavor == 'gem':%0A%09%09%09%09pygame.mixer.Sound('tadaa.wav').play()%0A%09%09%09%09%0A%09%09%0A%09def 
update(self):%0A%09%09pass%0A%09%09%0A%09%09%0A%09%09%0A
|
|
59fa328c62cc7808bce365ddb1e0e1c0d744913b
|
add a basic reader
|
reader.py
|
reader.py
|
Python
| 0.000022
|
@@ -0,0 +1,126 @@
+import feedparser%0A%0Arss_url = %22http://towerjoo.github.io/feed.xml%22%0Afeed= feedparser.parse(rss_url)%0Aimport pdb;pdb.set_trace()%0A%0A
|
|
7a75185e7a7e7f5b3a1c78a21a6b75b24da1911a
|
Update __init__.py
|
tendrl/commons/flows/create_cluster/__init__.py
|
tendrl/commons/flows/create_cluster/__init__.py
|
# flake8: noqa
import json
import uuid
from tendrl.commons import flows
from tendrl.commons.event import Event
from tendrl.commons.message import Message
from tendrl.commons.flows import utils
from tendrl.commons.flows.create_cluster import ceph_help
from tendrl.commons.flows.create_cluster import gluster_help
from tendrl.commons.flows.import_cluster.ceph_help import import_ceph
from tendrl.commons.flows.import_cluster.gluster_help import import_gluster
from tendrl.commons.objects.job import Job
class CreateCluster(flows.BaseFlow):
    """Flow that provisions a new ceph or gluster cluster on a set of nodes,
    then schedules a follow-up ImportCluster job for the remaining nodes.

    NOTE(review): relies on ``NS``, a process-global namespace object populated
    elsewhere (tendrl convention) — not visible from this file.
    """

    def run(self):
        # Bind this flow's tendrl context to the target cluster.
        integration_id = self.parameters['TendrlContext.integration_id']
        NS.tendrl_context = NS.tendrl_context.load()
        NS.tendrl_context.integration_id = integration_id
        NS.tendrl_context.save()

        # Kick off per-node SSH key setup jobs for the chosen sds backend.
        ssh_job_ids = []
        if self.parameters['sds_type'] == "ceph":
            ssh_job_ids = utils.ceph_create_ssh_setup_jobs(self.parameters)
        else:
            ssh_job_ids = utils.gluster_create_ssh_setup_jobs(self.parameters)

        # Poll etcd until every SSH setup job reports "finished".
        # NOTE(review): this is a busy-wait with no sleep, no timeout and no
        # handling of a failed job — if any job never reaches "finished" this
        # loop spins forever. Confirm whether job status can be "failed" here.
        all_ssh_jobs_done = False
        while not all_ssh_jobs_done:
            all_status = []
            for job_id in ssh_job_ids:
                all_status.append(NS.etcd_orm.client.read("/queue/%s/status" %
                                                          job_id).value)
            if all(status == "finished" for status in all_status):
                all_ssh_jobs_done = True

        # SSH setup jobs finished above, now install sds bits and create cluster
        if self.parameters['sds_type'] == "ceph":
            ceph_help.create_ceph(self.parameters)
        else:
            gluster_help.create_gluster(self.parameters)

        # Start jobs for importing cluster on every node except this one.
        node_list = self.parameters['Node[]']
        try:
            node_list.remove(NS.node_context.node_id)
        except ValueError:
            # key not found. ignore
            pass
        # NOTE(review): self.parameters.copy() is shallow, and node_list above
        # was mutated in place — callers holding the original 'Node[]' list see
        # the removal too.
        new_params = self.parameters.copy()
        new_params['Node[]'] = node_list
        # Get node context for one of the nodes from list
        sds_pkg_name = NS.etcd_orm.client.read(
            "nodes/%s/DetectedCluster/sds_pkg_name" % node_list[0]
        ).value
        sds_pkg_version = NS.etcd_orm.client.read(
            "nodes/%s/DetectedCluster/sds_pkg_version" % node_list[0]
        ).value
        new_params['DetectedCluster.sds_pkg_name'] = \
            sds_pkg_name
        new_params['DetectedCluster.sds_pkg_version'] = \
            sds_pkg_version
        # Queue the import as a child job of this flow's job.
        payload = {"integration_id": integration_id,
                   "node_ids": node_list,
                   "run": "tendrl.flows.ImportCluster",
                   "status": "new",
                   "parameters": new_params,
                   "parent": self.parameters['job_id'],
                   "type": "node"
                   }
        Job(job_id=str(uuid.uuid4()),
            status="new",
            payload=json.dumps(payload)).save()
|
Python
| 0.000072
|
@@ -799,32 +799,42 @@
= %5B%5D%0A if
+%22ceph%22 in
self.parameters%5B
@@ -833,37 +833,41 @@
ameters%5B
-'sds_type'%5D == %22ceph%22
+%22TendrlContext.sds_name%22%5D
:%0A
@@ -1514,16 +1514,26 @@
if
+%22ceph%22 in
self.par
@@ -1544,29 +1544,33 @@
ers%5B
-'sds_type'%5D == %22ceph%22
+%22TendrlContext.sds_name%22%5D
:%0A
|
33ac2d94afb48e1ddeeefbe3f840c5f66a82fd98
|
call get_object_or_404 outside our try block
|
memopol2/main/views.py
|
memopol2/main/views.py
|
import time
from datetime import datetime
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.utils import simplejson
from django.core import serializers
from django.contrib.admin.views.decorators import staff_member_required
from couchdbkit import Server
from memopol2.main.models import Mep, Position
from memopol2 import settings
from memopol2.util import get_couch_doc_or_404
def index_names(request):
    """Front page listing: every MEP's first/last name and group abbreviation."""
    couch = Server(settings.COUCHDB)
    # Temp view: one row per MEP document, keyed on null (no grouping needed).
    map_fun = """
    function(d) {
        emit(null, {first: d.infos.name.first, last: d.infos.name.last, group: d.infos.group.abbreviation});
    }
    """
    view = couch["meps"].temp_view({"map": map_fun})
    view.fetch()
    context = {'meps_list': view.all()}
    return render_to_response('index.html', context,
                              context_instance=RequestContext(request))
def index_groups(request):
    """Front page listing: political groups with their member counts."""
    couch = Server(settings.COUCHDB)
    # Map each MEP onto its group abbreviation with a count of 1...
    map_fun = """
    function(d) {
        emit(d.infos.group.abbreviation, { name: d.infos.group.name, count: 1 });
    }
    """
    # ...then reduce by summing the counts per group.
    reduce_fun = """function(keys, values) {
        var sum = 0;
        for (var idx in values)
        {
            sum += values[idx].count;
        }
        return {name: values[0].name , count: sum};
    }"""
    view = couch["meps"].temp_view({"map": map_fun, "reduce": reduce_fun},
                                   group="true")
    view.fetch()
    return render_to_response('index.html', {'groups': view.all()},
                              context_instance=RequestContext(request))
def index_countries(request):
    """Front page listing: constituency countries with their MEP counts."""
    couch = Server(settings.COUCHDB)
    # Map each MEP onto its country name with a count of 1...
    map_fun = """
    function(d) {
        emit(d.infos.constituency.country.name, { code: d.infos.constituency.country.code, count: 1 });
    }
    """
    # ...then reduce by summing the counts per country.
    reduce_fun = """function(keys, values) {
        var sum = 0;
        for (var idx in values)
        {
            sum += values[idx].count;
        }
        return {code: values[0].code, count: sum};
    }"""
    view = couch["meps"].temp_view({"map": map_fun, "reduce": reduce_fun},
                                   group=True)
    view.fetch()
    return render_to_response('index.html', {'countries': view.all()},
                              context_instance=RequestContext(request))
def index_by_country(request, country_code):
    """List MEPs elected in the given country (code matched case-insensitively)."""
    couch = Server(settings.COUCHDB)
    # Keys are upper-case ISO country codes; skip documents without one.
    map_fun = """
    function(d) {
        if (d.infos.constituency.country.code)
        {
            emit(d.infos.constituency.country.code, {first: d.infos.name.first, last: d.infos.name.last, group: d.infos.group.abbreviation});
        }
    }
    """
    view = couch["meps"].temp_view({"map": map_fun}, key=country_code.upper())
    view.fetch()
    return render_to_response('index.html', {'meps_list': view.all()},
                              context_instance=RequestContext(request))
def index_by_group(request, group):
    """List MEPs belonging to the given political group abbreviation."""
    couch = Server(settings.COUCHDB)
    # Keys are group abbreviations; skip documents without one.
    map_fun = """
    function(d) {
        if (d.infos.group.abbreviation)
        {
            emit(d.infos.group.abbreviation, {first: d.infos.name.first, last: d.infos.name.last, group: d.infos.group.abbreviation});
        }
    }
    """
    view = couch["meps"].temp_view({"map": map_fun}, key=group)
    view.fetch()
    return render_to_response('index.html', {'meps_list': view.all()},
                              context_instance=RequestContext(request))
def mep(request, mep_id):
    """Render the detail page for a single MEP.

    Raises Http404 (via get_couch_doc_or_404) when the document is missing.
    """
    data = get_couch_doc_or_404(Mep, mep_id)
    # BUG FIX: the context previously passed the view function itself under
    # the 'mep' key ('mep': mep) — inside this function the name `mep` is the
    # function, not the document. Pass the fetched document instead, matching
    # mep_raw() below. 'd' is kept for backward compatibility with templates.
    positions = Position.objects.filter(mep_id=mep_id)
    ctx = {
        'mep_id': mep_id,
        'mep': data,
        'd': data,
        'positions': positions,
        'visible_count': len([x for x in positions if x.visible]),
    }
    return render_to_response('mep.html', ctx, context_instance=RequestContext(request))
def mep_raw(request, mep_id):
    """Debug page showing an MEP's raw CouchDB document as pretty-printed JSON."""
    document = get_couch_doc_or_404(Mep, mep_id)
    context = {
        'mep_id': mep_id,
        'mep': document,
        'jsonstr': simplejson.dumps(document, indent=4),
    }
    return render_to_response('mep_raw.html', context,
                              context_instance=RequestContext(request))
def mep_addposition(request, mep_id):
    """AJAX endpoint: record a user-submitted position on an MEP.

    Returns JSON {'success': bool}. Newly created positions start
    unmoderated and invisible; a staff member approves them later.
    """
    if not request.is_ajax():
        return HttpResponseServerError()
    results = {'success': False}
    # make sure the mep exists — kept outside the try so a missing MEP
    # raises Http404 instead of being reported as success=False.
    mep_ = get_couch_doc_or_404(Mep, mep_id)
    try:
        text = request.GET[u'text']
        if settings.DEBUG:
            # Test hooks: 'slow' simulates latency, 'fail' forces the error path.
            if 'slow' in text:
                time.sleep(10)
            if 'fail' in text:
                raise TestFailure()
        pos = Position(mep_id=mep_id, content=text)
        pos.submitter_username = request.user.username
        pos.submitter_ip = request.META["REMOTE_ADDR"]
        pos.submit_datetime = datetime.today()
        pos.moderated = False
        pos.visible = False
        pos.save()
        results = {'success': True}
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception. Any validation/save
        # error still deliberately reports success=False to the client.
        pass
    return HttpResponse(simplejson.dumps(results), mimetype='application/json')
@staff_member_required
def moderation(request):
    """Staff-only page listing positions that still await moderation."""
    pending = Position.objects.filter(moderated=False)
    return render_to_response('moderation.html', {'positions': pending},
                              context_instance=RequestContext(request))
@staff_member_required
def moderation_get_unmoderated_positions(request):
    """AJAX polling endpoint: unmoderated positions newer than last_id, as JSON."""
    if not request.is_ajax():
        return HttpResponseServerError()
    newer_than = request.GET[u'last_id']
    pending = Position.objects.filter(moderated=False, id__gt=newer_than)
    return HttpResponse(serializers.serialize('json', pending),
                        mimetype='application/json')
@staff_member_required
def moderation_moderate_positions(request):
    """AJAX endpoint: approve or reject one position.

    GET params: pos_id (int pk), decision ("1" = make visible).
    Returns JSON {'success': bool}.
    """
    if not request.is_ajax():
        return HttpResponseServerError()
    results = {'success': False}
    # BUG FIX: the lookup used to sit inside the try block below, so an
    # unknown pos_id's Http404 was swallowed and silently reported as
    # success=False. Look it up first and let Http404 propagate.
    position = get_object_or_404(Position, pk=int(request.GET[u'pos_id']))
    try:
        position.moderated = True
        position.visible = (request.GET[u'decision'] == "1")
        position.save()
        results = {'success': True}
    except Exception:
        # Save failures still report success=False to the client.
        pass
    return HttpResponse(simplejson.dumps(results), mimetype='application/json')
|
Python
| 0
|
@@ -5838,29 +5838,16 @@
:False%7D%0A
- try:%0A
posi
@@ -5913,16 +5913,25 @@
_id'%5D))%0A
+ try:%0A
|
374f516be38e9630ff1ff6cda4146d0ebd2a9537
|
remove model
|
corehq/apps/sms/migrations/0048_delete_sqlicdsbackend.py
|
corehq/apps/sms/migrations/0048_delete_sqlicdsbackend.py
|
Python
| 0.000001
|
@@ -0,0 +1,301 @@
+# Generated by Django 2.2.13 on 2020-10-28 09:55%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('sms', '0047_merge_20200918_1641'),%0A %5D%0A%0A operations = %5B%0A migrations.DeleteModel(%0A name='SQLICDSBackend',%0A ),%0A %5D%0A
|
|
e41b79855e966977c4484efd4ad6a02475833b3e
|
Add ex4.4: tornado multiple requests with asyncio integration
|
code/ex4.4-tornado_with_asyncio.py
|
code/ex4.4-tornado_with_asyncio.py
|
Python
| 0.000001
|
@@ -0,0 +1,616 @@
+from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future%0Afrom tornado.httpclient import AsyncHTTPClient%0Aimport asyncio%0Aimport time%0A%0A%0AURL = 'http://127.0.0.1:8000'%0A%0A%0A@asyncio.coroutine%0Adef get_greetings():%0A http_client = AsyncHTTPClient()%0A response = yield from to_asyncio_future(http_client.fetch(URL))%0A return response.body.decode('utf-8')%0A%0A%0Aif __name__ == %22__main__%22:%0A AsyncIOMainLoop().install()%0A loop = asyncio.get_event_loop()%0A t1 = time.time()%0A texts = loop.run_until_complete(get_greetings())%0A print(time.time() - t1, %22seconds passed%22)%0A print(texts)%0A loop.close()%0A
|
|
a8e66380cb63e52ad57f66cb9e1a652dca5b32b9
|
Create __init__.py
|
puppet/__init__.py
|
puppet/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
e81426b1f7890c056f926281c5a445bc6e74c80b
|
Create py-参数传递.py
|
py-参数传递.py
|
py-参数传递.py
|
Python
| 0.000005
|
@@ -0,0 +1,144 @@
+# %E5%8C%85%E8%A3%B9%E5%85%B3%E9%94%AE%E5%AD%97%E4%BC%A0%E9%80%92 dic%E6%98%AF%E4%B8%80%E4%B8%AA%E5%AD%97%E5%85%B8 %E6%94%B6%E9%9B%86%E6%89%80%E6%9C%89%E7%9A%84%E5%85%B3%E9%94%AE%E5%AD%97%E4%BC%A0%E9%80%92%E7%BB%99%E5%87%BD%E6%95%B0func_t%0Adef func_t(**dic):%0A print type(dic)%0A print dic%0A%0Aprint func_t(a=1, b=2)%0Aprint func_t(a=3, b=4, c=5)%0A
|
|
4f265b626c9ff5c333ea6c27cb08b45c2cecc7f3
|
Add plugin code
|
gitcommitautosave.py
|
gitcommitautosave.py
|
Python
| 0.000001
|
@@ -0,0 +1,554 @@
+%22%22%22Git Commit Auto Save.%0A%0ASublime Text 3 package to auto save commit messages when the window is closed.%0AThis allows the user to close the window without having to save before,%0Aor having to deal with the %22Save File%22 popup.%0A%22%22%22%0Aimport sublime_plugin%0A%0A%0Aclass GitCommitAutoSave(sublime_plugin.EventListener):%0A%09def on_load(self, view):%0A%09%09if view.file_name().endswith('COMMIT_EDITMSG'):%0A%09%09%09view.set_scratch(True) # disable save file dialog on exit%0A%0A%09def on_pre_close(self, view):%0A%09%09if view.file_name().endswith('COMMIT_EDITMSG'):%0A%09%09%09view.run_command(%22save%22)%0A
|
|
ac482caafe8c63de2606bb4894462f7b2e2bcb70
|
Add initial script to print rosbag files
|
python/printbag.py
|
python/printbag.py
|
Python
| 0
|
@@ -0,0 +1,1078 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22Convert a rosbag file to legacy lidar binary format.%0A%22%22%22%0A%0A%22%22%22LIDAR datatype format is:%0A (%0A timestamp (long),%0A flag (bool saved as int),%0A accelerometer%5B3%5D (double),%0A gps%5B3%5D (double),%0A distance%5BLIDAR_NUM_ANGLES%5D (long),%0A )%0A%0A 'int' and 'long' are the same size on the raspberry pi (32 bits).%0A%22%22%22%0Aimport sys%0Aimport rosbag%0A%0Adef decode_bag(bag):%0A topics = %5B'/scan', '/flagbutton_pressed'%5D%0A return %5Bmessage for message in bag.read_messages(topics=topics)%5D%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) %3C 2:%0A print(('Usage: %7B%7D %3Crosbag%3E %5B%3Coutfile%3E%5D %5Cn%5Cn'%0A 'Print contents of rosbag file. If %3Coutfile%3E is provided, %5Cn'%0A 'write contents of rosbag file to %3Coutfile%3E in the legacy %5Cn'%0A 'lidar binary format.').format(__file__))%0A sys.exit(1)%0A%0A outfile = None%0A filename = sys.argv%5B1%5D%0A%0A if len(sys.argv) == 3:%0A outfile = sys.argv%5B2%5D%0A%0A with rosbag.Bag(filename) as bag:%0A print(decode_bag(bag))%0A%0A sys.exit()%0A%0A
|
|
5cf2c2c4dcbc9e0cca57a7634e5118c2dc278c75
|
Add media compatibility
|
twilio/rest/resources/compatibility/media.py
|
twilio/rest/resources/compatibility/media.py
|
Python
| 0
|
@@ -0,0 +1,305 @@
+from twilio.rest.resources import InstanceResource, ListResource%0A%0A%0Aclass Media(InstanceResource):%0A pass%0A%0A%0Aclass MediaList(ListResource):%0A%0A def __call__(self, message_sid):%0A base_uri = %22%25s/Messages/%25s%22 %25 (self.base_uri, message_sid)%0A return MediaList(base_uri, self.auth, self.timeout)%0A
|
|
a2e566cc0b925f80c30602141e890cdf9b13306b
|
Migrate to latest version of db.
|
migrations/versions/1003fd6fc47_.py
|
migrations/versions/1003fd6fc47_.py
|
Python
| 0
|
@@ -0,0 +1,958 @@
+%22%22%22empty message%0A%0ARevision ID: 1003fd6fc47%0ARevises: 1a54c4cacbe%0ACreate Date: 2015-03-24 13:33:50.898511%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '1003fd6fc47'%0Adown_revision = '1a54c4cacbe'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('project', sa.Column('git_url', sa.String(length=400), nullable=True))%0A op.drop_column('project', 'github_url')%0A op.drop_column('project', 'age')%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('project', sa.Column('age', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))%0A op.add_column('project', sa.Column('github_url', sa.VARCHAR(length=400), autoincrement=False, nullable=True))%0A op.drop_column('project', 'git_url')%0A ### end Alembic commands ###%0A
|
|
4aced6fea8ff8ccd087362cb237a9f00d111d0d8
|
Add command to turn on locations flag
|
corehq/apps/commtrack/management/commands/toggle_locations.py
|
corehq/apps/commtrack/management/commands/toggle_locations.py
|
Python
| 0.000001
|
@@ -0,0 +1,976 @@
+from django.core.management.base import BaseCommand%0Afrom corehq.apps.domain.models import Domain%0Afrom corehq.feature_previews import LOCATIONS%0Afrom corehq.toggles import NAMESPACE_DOMAIN%0Afrom toggle.shortcuts import update_toggle_cache, namespaced_item%0Afrom toggle.models import Toggle%0A%0A%0Aclass Command(BaseCommand):%0A def handle(self, *args, **options):%0A domains = Domain.get_all()%0A%0A for domain in domains:%0A if domain.commtrack_enabled:%0A toggle = Toggle.get(LOCATIONS.slug)%0A toggle_user_key = namespaced_item(domain.name, NAMESPACE_DOMAIN)%0A%0A if toggle_user_key not in toggle.enabled_users:%0A toggle.enabled_users.append(toggle_user_key)%0A toggle.save()%0A update_toggle_cache(LOCATIONS.slug, toggle_user_key, True)%0A%0A if not domain.locations_enabled:%0A domain.locations_enabled = True%0A domain.save()%0A
|
|
1eaab9f929dc748e57865fb4c8717158e6c47fa5
|
Add more index on contact activities
|
ureport/stats/migrations/0018_better_indexes.py
|
ureport/stats/migrations/0018_better_indexes.py
|
Python
| 0
|
@@ -0,0 +1,530 @@
+# Generated by Django 3.2.6 on 2021-10-13 12:37%0A%0Afrom django.db import migrations%0A%0A# language=SQL%0AINDEX_SQL_CONTACTACTIVITY_ORG_DATE_SCHEME_NOT_NULL = %22%22%22%0ACREATE INDEX IF NOT EXISTS stats_contactactivity_org_id_date_scheme_not_null on stats_contactactivity (org_id, date, scheme) WHERE scheme IS NOT NULL;%0A%22%22%22%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22stats%22, %220017_better_indexes%22),%0A %5D%0A%0A operations = %5B%0A migrations.RunSQL(INDEX_SQL_CONTACTACTIVITY_ORG_DATE_SCHEME_NOT_NULL, %22%22),%0A %5D%0A
|
|
9e947d389e68ff7d2653681c065bf55544874ba6
|
move progress_bar.finish to after pool.join to avoid ipython zmq problems
|
qutip/parallel.py
|
qutip/parallel.py
|
# This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
__all__ = ['parfor', 'parallel_map', 'serial_map']
from scipy import array
from multiprocessing import Pool
from functools import partial
import os
import sys
import signal
import qutip.settings as qset
from qutip.ui.progressbar import BaseProgressBar, TextProgressBar
def _task_wrapper(args):
try:
return args[0](*args[1])
except KeyboardInterrupt:
os.kill(args[2], signal.SIGINT)
sys.exit(1)
def _task_wrapper_with_args(args, user_args):
try:
return args[0](*args[1], **user_args)
except KeyboardInterrupt:
os.kill(args[2], signal.SIGINT)
sys.exit(1)
def parfor(func, *args, **kwargs):
    """Executes a multi-variable function in parallel on the local machine.
    Parallel execution of a for-loop over function `func` for multiple input
    arguments and keyword arguments.
    Parameters
    ----------
    func : function_type
        A function to run in parallel on the local machine. The function 'func'
        accepts a series of arguments that are passed to the function as
        variables. In general, the function can have multiple input variables,
        and these arguments must be passed in the same order as they are
        defined in the function definition. In addition, the user can pass
        multiple keyword arguments to the function.
    The following keyword argument is reserved:
    num_cpus : int
        Number of CPU's to use. Default uses maximum number of CPU's.
        Performance degrades if num_cpus is larger than the physical CPU
        count of your machine.
    Returns
    -------
    result : list
        A ``list`` with length equal to number of input parameters
        containing the output from `func`.
    """
    # Split the reserved 'num_cpus' option out of the user kwargs; anything
    # left over is forwarded to `func` itself via the with-args wrapper.
    kw = _default_kwargs()
    if 'num_cpus' in kwargs.keys():
        kw['num_cpus'] = kwargs['num_cpus']
        del kwargs['num_cpus']
    if len(kwargs) != 0:
        task_func = partial(_task_wrapper_with_args, user_args=kwargs)
    else:
        task_func = _task_wrapper
    if kw['num_cpus'] > qset.num_cpus:
        print("Requested number of CPUs (%s) " % kw['num_cpus'] +
              "is larger than physical number (%s)." % qset.num_cpus)
        print("Reduce 'num_cpus' for greater performance.")
    pool = Pool(processes=kw['num_cpus'])
    # Transpose the positional sequences: the i-th work item bundles the
    # i-th element from every positional argument sequence.
    args = [list(arg) for arg in args]
    var = [[args[j][i] for j in range(len(args))]
           for i in range(len(list(args[0])))]
    try:
        # os.getpid() lets a worker signal this parent process on Ctrl-C.
        map_args = ((func, v, os.getpid()) for v in var)
        par_return = list(pool.map(task_func, map_args))
        pool.close()
        pool.join()
        # When `func` returns tuples, regroup the results by tuple position
        # into one array per element instead of returning a list of tuples.
        if isinstance(par_return[0], tuple):
            par_return = [elem for elem in par_return]
            num_elems = len(par_return[0])
            dt = [type(ii) for ii in par_return[0]]
            return [array([elem[ii] for elem in par_return], dtype=dt[ii])
                    for ii in range(num_elems)]
        else:
            return list(par_return)
    except KeyboardInterrupt:
        # Best-effort shutdown; note the interrupted call returns None.
        pool.terminate()
def serial_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):
    """
    Serial mapping function with the same call signature as parallel_map, for
    easy switching between serial and parallel execution.
    Parameters
    ----------
    task: a Python function
        The function that is to be called for each value in ``values``.
    values: array / list
        The list or array of values for which the ``task`` function is to be
        evaluated.
    task_args: tuple / list
        The optional additional positional arguments to the ``task`` function.
    task_kwargs: dictionary
        The optional additional keyword arguments to the ``task`` function.
    progress_bar: ProgressBar
        Progress bar class instance for showing progress.
    Returns
    --------
    result : list
        The result list contains the value of
        ``task(value, *task_args, **task_kwargs)`` for each
        value in ``values``.
    """
    try:
        progress_bar = kwargs['progress_bar']
        if progress_bar is True:
            progress_bar = TextProgressBar()
    except KeyError:
        # No progress bar requested: fall back to the silent base class.
        progress_bar = BaseProgressBar()
    progress_bar.start(len(values))
    results = []
    for n, value in enumerate(values):
        progress_bar.update(n)
        # Bug fix: pass the value itself to the task, not its index `n`
        # (the docstring promises task(value, ...), and parallel_map does
        # exactly that).
        result = task(value, *task_args, **task_kwargs)
        results.append(result)
    progress_bar.finished()
    return results
def parallel_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):
    """
    Parallel execution of a mapping of `values` to the function `task`.
    Parameters
    ----------
    task: a Python function
        The function that is to be called for each value in ``values``.
    values: array / list
        The list or array of values for which the ``task`` function is to be
        evaluated.
    task_args: list / dictionary
        The optional additional argument to the ``task`` function.
    task_kwargs: list / dictionary
        The optional additional keyword argument to the ``task`` function.
    progress_bar: ProgressBar
        Progress bar class instance for showing progress.
    Returns
    --------
    result : list
        The result list contains the value of
        ``task(value, task_args, task_kwargs)`` for each
        value in ``values``.
    """
    kw = _default_kwargs()
    if 'num_cpus' in kwargs:
        kw['num_cpus'] = kwargs['num_cpus']
    try:
        progress_bar = kwargs['progress_bar']
        if progress_bar is True:
            progress_bar = TextProgressBar()
    except:
        # No usable progress bar requested: fall back to the silent base class.
        progress_bar = BaseProgressBar()
    progress_bar.start(len(values))
    # One-element list so the nested callback can mutate the counter without
    # needing `nonlocal` (keeps Python 2 compatibility).
    nfinished = [0]

    def _update_progress_bar(x):
        # Runs in the parent process as apply_async's success callback.
        nfinished[0] += 1
        progress_bar.update(nfinished[0])
    pool = Pool(processes=kw['num_cpus'])
    async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs,
                                  _update_progress_bar)
                 for value in values]
    # Poll with short timeouts instead of blocking on get() so that
    # KeyboardInterrupt remains responsive while tasks run.
    while not all([ar.ready() for ar in async_res]):
        for ar in async_res:
            ar.wait(timeout=0.1)
    # Finish the progress bar before joining the pool (avoids display issues
    # under IPython/zmq frontends).
    progress_bar.finished()
    pool.close()
    pool.join()
    # get() re-raises any exception that occurred inside a worker.
    return [ar.get() for ar in async_res]
def _default_kwargs():
    """Return the default option dict shared by parfor and parallel_map."""
    return {'num_cpus': qset.num_cpus}
|
Python
| 0
|
@@ -7927,66 +7927,61 @@
p
-rogress_bar.finished()%0A %0A pool.close()%0A pool.join
+ool.close()%0A pool.join()%0A progress_bar.finished
()%0A%0A
|
9becada645e9680974dbb18fee10983d204dfd3d
|
Create low-res cubes, masks, and moment arrays
|
14B-088/HI/analysis/cube_pipeline_lowres.py
|
14B-088/HI/analysis/cube_pipeline_lowres.py
|
Python
| 0.000001
|
@@ -0,0 +1,2811 @@
+%0A'''%0AConvolve the VLA + GBT data to 2 * beam and 5 * beam, then run the%0Amasking and moments pipeline.%0A%0AMake signal masks and compute the moments.%0A'''%0A%0Afrom astropy import log%0Aimport os%0Afrom radio_beam import Beam%0Afrom spectral_cube import SpectralCube%0Afrom cube_analysis import run_pipeline%0A%0Afrom paths import (fourteenB_wGBT_HI_file_dict, fourteenB_HI_data_wGBT_path)%0A%0Afile_path = fourteenB_HI_data_wGBT_path(%22smooth_2beam%22, no_check=True)%0Aif not os.path.exists(file_path):%0A os.mkdir(file_path)%0A%0Acube = SpectralCube.read(fourteenB_wGBT_HI_file_dict%5B%22Cube%22%5D)%0A# Convolve to 2 * beam. May as well make it circular.%0Abeam2 = Beam(2 * cube.beams.largest_beam().major)%0A%0Aconv_cube = cube.convolve_to(beam2)%0A%0Afile_name = os.path.join(file_path, %22M33_14B-088_HI.clean.image.GBT_feathered.38arcsec.fits%22)%0Aconv_cube.write(file_name)%0A%0Adel conv_cube%0Adel cube%0A%0Alog.info(%22Masking and moments for the VLA+GBT 2 * beam cube%22)%0Arun_pipeline(file_name,%0A file_path,%0A masking_kwargs=%7B%22method%22: %22ppv_connectivity%22,%0A %22save_cube%22: True,%0A %22is_huge%22: True,%0A %22noise_map%22: None,%0A %22smooth_chans%22: 31,%0A %22min_chan%22: 10,%0A %22peak_snr%22: 5.,%0A %22min_snr%22: 2,%0A %22edge_thresh%22: 1,%0A %22verbose%22: False,%0A %22show_plots%22: False,%0A %7D,%0A moment_kwargs=%7B%22num_cores%22: 6,%0A %22verbose%22: True%7D)%0A%0Aprint(%22Running 5 beam convolution and masking.%22)%0A%0Afile_path = fourteenB_HI_data_wGBT_path(%22smooth_5beam%22, no_check=True)%0Aif not os.path.exists(file_path):%0A os.mkdir(file_path)%0A%0Acube = SpectralCube.read(fourteenB_wGBT_HI_file_dict%5B%22Cube%22%5D)%0A%0Abeam5 = Beam(5 * cube.beams.largest_beam().major)%0A%0Aconv_cube = cube.convolve_to(beam5)%0A%0Afile_name = os.path.join(file_path, %22M33_14B-088_HI.clean.image.GBT_feathered.95arcsec.fits%22)%0Aconv_cube.write(file_name)%0A%0Adel conv_cube%0Adel cube%0A%0A# VLA+GBT cube%0Alog.info(%22Masking and moments 
for the VLA+GBT 5 * beam cube%22)%0Arun_pipeline(file_name,%0A file_path,%0A masking_kwargs=%7B%22method%22: %22ppv_connectivity%22,%0A %22save_cube%22: True,%0A %22is_huge%22: True,%0A %22noise_map%22: None,%0A %22smooth_chans%22: 31,%0A %22min_chan%22: 10,%0A %22peak_snr%22: 5.,%0A %22min_snr%22: 2,%0A %22edge_thresh%22: 1,%0A %7D,%0A moment_kwargs=%7B%22num_cores%22: 1,%0A %22verbose%22: True,%0A %22chunk_size%22: 2e5%7D)%0A
|
|
d0a053acf6773c24b5fce2ec1ac56a5800ca1a28
|
Add discord return types to VoiceStateUpdate props
|
musicbot/constructs.py
|
musicbot/constructs.py
|
import discord
from .utils import objdiff
class SkipState:
    """Tracks which members have voted to skip the current entry."""

    def __init__(self):
        self.skippers = set()
        self.skip_msgs = set()

    def add_skipper(self, skipper, msg):
        """Record a skip vote (and its message) and return the new count."""
        self.skippers.add(skipper)
        self.skip_msgs.add(msg)
        return self.skip_count

    def reset(self):
        """Forget all recorded skip votes and their messages."""
        self.skippers.clear()
        self.skip_msgs.clear()

    @property
    def skip_count(self):
        """Number of unique members that have requested a skip."""
        return len(self.skippers)
class Response:
    """A command's reply: message content plus delivery options."""

    def __init__(self, content, reply=False, delete_after=0):
        # delete_after: seconds before the sent message is removed (0 = keep).
        self.content = content
        self.reply = reply
        self.delete_after = delete_after
# Alright this is going to take some actual thinking through
class AnimatedResponse(Response):
    """A Response intended to be animated through a sequence of frames.

    NOTE(review): ``sequence`` is accepted but never stored, so the frames
    are currently discarded -- this class looks unfinished (see the
    comment above).
    """
    def __init__(self, content, *sequence, delete_after=0):
        super().__init__(content, delete_after=delete_after)
class VoiceStateUpdate:
    """Wraps the (before, after) Member pair from discord's
    on_voice_state_update event and answers common questions about what
    changed (joined, left, moved, channel emptiness)."""

    def __init__(self, before: discord.Member, after: discord.Member):
        self.before = before
        self.after = after
        # True when either member object is missing; the derived channel
        # attributes below are skipped in that case.
        self.broken = False
        if not all([before, after]):
            self.broken = True
            return
        self.old_voice_channel = before.voice_channel
        self.new_voice_channel = after.voice_channel
        # joining: True if the member ended up in the relevant channel,
        # False if they were in it before the update, None if neither
        # side matches.
        if before.voice_channel == self.voice_channel:
            self.joining = False
        elif after.voice_channel == self.voice_channel:
            self.joining = True
        else:
            self.joining = None

    @property
    def me(self) -> discord.Member:
        # The bot's own Member on the server this update came from.
        return self.after.server.me

    @property
    def is_about_me(self) -> bool:
        return self.after == self.me

    @property
    def my_voice_channel(self) -> discord.Channel:
        return self.me.voice_channel

    @property
    def voice_channel(self) -> discord.Channel:
        # Prefer the post-update channel, falling back to the old one
        # (covers the "member just disconnected" case).
        return self.new_voice_channel or self.old_voice_channel

    @property
    def moving(self) -> bool:
        # I had this as "... == before.server.me" and i'm not sure why, hope it wasn't important
        return self.before == self.me

    def empty(self, *, excluding_me=False, excluding_deaf=False) -> bool:
        """Return True when the relevant voice channel has no counted members.

        ``excluding_me`` drops the bot itself from the count;
        ``excluding_deaf`` drops server- or self-deafened members.
        """
        def check(member):
            if excluding_me and member == self.me:
                return False
            if excluding_deaf and any([member.deaf, member.self_deaf]):
                return False
            return True
        return not sum(1 for m in self.voice_channel.voice_members if check(m))

    @property
    def change(self):
        # Diff of the raw voice-state slots between before and after.
        return objdiff(self.before.voice, self.after.voice, access_attr='__slots__')
|
Python
| 0
|
@@ -1458,32 +1458,50 @@
def me(self)
+ -%3E discord.Member
:%0A return
@@ -1638,32 +1638,51 @@
ce_channel(self)
+ -%3E discord.Channel
:%0A return
@@ -1742,24 +1742,43 @@
hannel(self)
+ -%3E discord.Channel
:%0A re
|
8fbd7421e9517ead4293c62086f3305810c93b1b
|
Add initial manage/fabfile/ci.py (sketch)
|
manage/fabfile/ci.py
|
manage/fabfile/ci.py
|
Python
| 0
|
@@ -0,0 +1,481 @@
+from fabric.api import *%0A%0A@task%0A@role('ci')%0Adef install():%0A sudo(%22apt-get install git%22)%0A sudo(%22apt-get install maven2%22) # TODO: maven3%0A sudo(%22apt-get install groovy%22) # TODO: groovy-1.8, or gradle...%0A configure_groovy_grapes()%0A sudo(%22apt-get install python-dev%22)%0A sudo(%22apt-get install python-pip%22)%0A sudo(%22pip install fabric%22)%0A%0Adef configure_groovy_grapes():%0A run(%22mkdir -p ~/.groovy/%22)%0A # TODO:%0A #put(%22grapeConfig.xml%22, %22~/.groovy/grapeConfig.xml%22)%0A%0A
|
|
95d87c541ebf82109b882daebcb5b387f0f1cdb8
|
Read the american physics society graph
|
exp/influence2/ReputationExp2.py
|
exp/influence2/ReputationExp2.py
|
Python
| 0.000047
|
@@ -0,0 +1,2522 @@
+import numpy %0Atry: %0A ctypes.cdll.LoadLibrary(%22/usr/local/lib/libigraph.so%22)%0Aexcept: %0A pass %0Aimport igraph %0Afrom apgl.util.PathDefaults import PathDefaults %0Afrom exp.util.IdIndexer import IdIndexer %0Aimport xml.etree.ElementTree as ET%0Aimport array %0A%0AmetadataDir = PathDefaults.getDataDir() + %22aps/aps-dataset-metadata-2010/%22%0AmetadataFilename = metadataDir + %22PRSTAB.xml%22%0A%0AcitationsDir = PathDefaults.getDataDir() + %22aps/aps-dataset-citations-2010/%22%0AcitatonsFilename = citationsDir + %22citing_cited.csv%22%0A%0Atree = ET.parse(metadataFilename)%0Aroot = tree.getroot()%0A%0AauthorIndexer = IdIndexer(%22i%22)%0AarticleIndexer = IdIndexer(%22i%22)%0A%0Afor child in root: %0A authorGroups = child.findall('authgrp') %0A %0A for authorGroup in authorGroups: %0A authors = authorGroup.findall(%22author%22) %0A %0A for author in authors: %0A if author.find(%22givenname%22) != None: %0A fullname = author.find(%22givenname%22).text%0A else: %0A fullname = %22%22%0A %0A for middlename in author.findall(%22middlename%22): %0A fullname += %22 %22 + middlename.text%0A %0A fullname += %22 %22 + author.find(%22surname%22).text%0A %0A authorId = fullname%0A articleId = child.attrib%5B%22doi%22%5D%0A %0A authorIndexer.append(authorId) %0A articleIndexer.append(articleId)%0A %0AauthorInds = authorIndexer.getArray()%0AarticleInds = articleIndexer.getArray()%0A%0A#We now need to read the citations file and add those edges %0Aarticle1Inds = array.array(%22i%22) %0Aarticle2Inds = array.array(%22i%22)%0A%0AcitationsFile = open(citatonsFilename)%0AcitationsFile.readline()%0A%0Afor line in citationsFile: %0A vals = line.split(%22,%22)%0A articleId1 = vals%5B0%5D.strip()%0A articleId2 = vals%5B1%5D.strip()%0A %0A #print(articleId1, articleId2)%0A %0A articleIdDict = articleIndexer.getIdDict()%0A %0A if articleId1 in articleIdDict and articleId2 in articleIdDict: %0A article1Inds.append(articleIdDict%5BarticleId1%5D)%0A 
article2Inds.append(articleIdDict%5BarticleId2%5D)%0A%0Aarticle1Inds = numpy.array(article1Inds)%0Aarticle2Inds = numpy.array(article2Inds)%0A%0AauthorArticleEdges = numpy.c_%5BauthorInds, articleInds%5D%0Aprint(authorArticleEdges)%0A%0AarticleArticleEdges = numpy.c_%5Barticle1Inds, article2Inds%5D%0Aprint(articleArticleEdges)%0A%0Aprint(articleArticleEdges.shape)%0A %0Agraph = igraph.Graph()%0Agraph.add_vertices(numpy.max(authorInds) + numpy.max(articleInds))%0Agraph.add_edges(authorArticleEdges)%0A%0Aprint(graph.summary()) %0A
|
|
8f391cfd541f68a3c4bfc20be68c32d4e2d6798f
|
Add server script
|
server.py
|
server.py
|
Python
| 0.000001
|
@@ -0,0 +1,1167 @@
+#!/usr/bin/python3%0A%0A# import the necessary components%0Afrom flask import Flask, request, jsonify%0Aapp = Flask(__name__)%0A%0A# define a dictionary to store our information in%0Ainfo = %7B%7D%0A%0A# listen for data at /data%0A@app.route(%22/data%22, methods=%5B%22GET%22, %22POST%22%5D)%0Adef api():%0A%09# convert the data to a dict%0A%09data = request.get_json(silent=True)%0A%0A%09# pretty debug%0A%09print(%22%25s sent: %25s%22 %25 (data%5B'uuid'%5D, data%5B'stats'%5D))%0A%0A%09# replace the old data with the new data%0A%09info%5Bdata%5B'uuid'%5D%5D = %7B'cpu': data%5B'stats'%5D%5B'cpu'%5D, 'mem': data%5B'stats'%5D%5B'memory'%5D%7D%0A%0A%09# keep count of the CPU and memory across all devices%0A%09totalCpu = 0%0A%09totalMem = 0%0A%0A%09# loop through all devices%0A%09for key, stats in info.items():%0A%09%09# add the CPU and memory to their respective counters%0A%09%09totalCpu += stats%5B'cpu'%5D%0A%09%09totalMem += stats%5B'mem'%5D%0A%0A%09# calculate means%0A%09meanCpu = totalCpu / len(info)%0A%09meanMem = totalMem / len(info)%0A%0A%09# more nice debugging%0A%09print(%22Mean CPU: %25s%22 %25 meanCpu)%0A%09print(%22Mean memory: %25s%22 %25 meanMem)%0A%0A%09# tell the client that all is well%0A%09return(jsonify(%7B%22message%22: %22ok%22%7D))%0A%0A# if file is called directly...%0Aif __name__ == %22__main__%22:%0A%09# ...start the server on port 8742%0A%09app.run(port=8742, host=%220.0.0.0%22)%0A
|
|
a49d1d96b49eb6006e864bbaf2757cd5358b0110
|
Create func.py
|
func.py
|
func.py
|
Python
| 0.000037
|
@@ -0,0 +1,392 @@
+#it's fun, c?%0Adef read_poi(file):%0A poi_dict = %7B%7D%0A%0A #taken from project2%0A for line in file:%0A line = line.rstrip()%0A if len(line) == 0:%0A continue%0A parts = line.split(' ', 2)%0A print(parts%5B0%5D,parts%5B1%5D,parts%5B2%5D)%0A%0A poi_dict%5Bparts%5B2%5D%5D = parts%5B0%5D,parts%5B1%5D%0A return poi_dict%0A%0Aif __name__ == %22__main__%22:%0A read_poi(open(%22static/data/poi.txt%22))%0A
|
|
4810c88d484bc02fe5f7983dbf9cac0be5a440cd
|
Create reverse_word_order.py
|
09-revisao/practice_python/reverse_word_order.py
|
09-revisao/practice_python/reverse_word_order.py
|
Python
| 0.998843
|
@@ -0,0 +1,611 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%22%22%22Exercise 15: Reverse Word Order%0A%0AWrite a program (using functions!) that asks the user for a long string%0Acontaining multiple words. Print back to the user the same string,%0Aexcept with the words in backwards order. For example, say I type the%0Astring:%0A%0A My name is Michele%0A Then I would see the string:%0A%0A Michele is name My%0A shown back to me.%0A%22%22%22%0A%0A%0Adef reverse_word_order(word: str):%0A return %22 %22.join(word.split(%22 %22)%5B::-1%5D)%0A%0A%0Along_string = input(%22Write a long string: %22)%0Aprint(%22The string in backwards order: %25s%22 %25 reverse_word_order(long_string))%0A
|
|
c460874436ee087a50f9f7ec06c15ae9a110a656
|
Initialize web spider class definition & imports
|
spider.py
|
spider.py
|
Python
| 0
|
@@ -0,0 +1,156 @@
+from scrapy.contrib.spiders import CrawlSpider, Rule%0Afrom scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor%0Afrom scrapy.selector import Selector%0A%0A
|
|
3f6e3d60588dec447fffbfc7e5fc65f34cbd3aa7
|
fix bug in version.py subprocess call
|
cobra/version.py
|
cobra/version.py
|
#!/usr/bin/env python
"""
Tracks the version number. If git is installed and file script
is located within a git repository, git describe is used to get
the version information. This version string is sanitized to
comply with PEP 386 and stored in the RELEASE-VERSION file.
If git describe can not be run, the RELEASE-VERSION file is used
for version information instead.
"""
__all__ = ("get_git_version")
from subprocess import check_output
from os import path
current_dir = path.dirname(path.abspath(__file__))
version_file = path.join(current_dir, "RELEASE-VERSION")
def call_git_describe(abbrev=4):
    """Return the ``git describe --tags`` string for this source tree.

    Parameters
    ----------
    abbrev : int
        Number of hex digits used to abbreviate the commit sha.

    Returns
    -------
    The stripped version string, or None when git is unavailable or the
    directory is not inside a git repository (the caller then falls back
    to the RELEASE-VERSION file).
    """
    try:
        # Bug fix: check_output selects the working directory with "cwd",
        # not "dir"; the old keyword raised TypeError on every call, so
        # git information was silently never used.
        return check_output(["git", "describe", "--tags",
                             "--abbrev=%d" % abbrev],
                            cwd=current_dir).strip()
    except Exception:
        # git missing, not a repository, etc. -- deliberately best-effort,
        # but no longer swallows SystemExit/KeyboardInterrupt.
        return None
def read_release_version():
    """Return the version stored in RELEASE-VERSION, or None if the file
    is missing, unreadable, or empty."""
    try:
        with open(version_file, "r") as infile:
            stored = infile.read().strip()
        # An empty file counts as "no version recorded".
        return stored if stored else None
    except:
        # Deliberately best-effort: any read failure means "no version".
        return None
def write_release_version(version):
    """Persist *version* to the RELEASE-VERSION file, newline-terminated."""
    with open(version_file, "w") as fh:
        fh.write("%s\n" % version)
def get_git_version(abbrev=4):
    """Return the current version string, preferring live git metadata.

    The version from ``git describe`` (PEP 386-adapted) wins when
    available; otherwise the cached RELEASE-VERSION file is used.  The
    file is rewritten whenever the two disagree, so tarballs built from
    a git checkout ship the right version.

    Raises ValueError when neither source yields a version.
    """
    # Read in the version that's currently in RELEASE-VERSION.
    release_version = read_release_version()
    # First try to get the current version using "git describe".
    version = call_git_describe(abbrev)
    # adapt to PEP 386 compatible versioning scheme
    version = pep386adapt(version)
    # If that doesn't work, fall back on the value that's in
    # RELEASE-VERSION.
    if version is None:
        version = release_version
    # If we still don't have anything, that's an error.
    if version is None:
        raise ValueError("Cannot find the version number!")
    # If the current version is different from what's in the
    # RELEASE-VERSION file, update the file to be current.
    if version != release_version:
        write_release_version(version)
    # Finally, return the current version.
    return version
def pep386adapt(version):
    """Convert a ``git describe`` string to a PEP 386-style version.

    ``"0.1.2-5-gabcdef"`` becomes ``"0.1.2.post5"``.  Strings without a
    dash are returned unchanged, and None is passed through.
    """
    if version is None:
        return None
    if '-' not in version:
        return version
    # git describe output: <tag>-<commits-since-tag>-g<sha>
    pieces = version.split('-')
    pieces[-2] = 'post' + pieces[-2]
    return '.'.join(pieces[:-1])
# Allow running this module directly to print the computed version.
# NOTE(review): Python 2 print statement -- this file predates Python 3.
if __name__ == "__main__":
    print get_git_version()
|
Python
| 0.000001
|
@@ -707,19 +707,19 @@
bbrev%5D,
-dir
+cwd
=current
|
957490251e5038d9fb963f0c43ea3973e763c134
|
Add test
|
tests/test_plotter.py
|
tests/test_plotter.py
|
Python
| 0.000005
|
@@ -0,0 +1,1262 @@
+from moca.plotter import create_plot%0Aimport os%0Aimport pytest%0A%0A@pytest.mark.mpl_image_compare(baseline_dir='data/images',%0A filename='ENCSR000AKB_PhyloP_1.png')%0A%0Adef join_path(head, leaf):%0A return os.path.join(head, leaf)%0A%0Adef test_image():%0A base_path = 'tests/data/ENCSR000AKB/'%0A meme_file = base_path+'moca_output/meme_out/meme.txt'%0A plot_title = 'ENCSR000AKB.sorted'%0A oc = 'tests/data/generated_out'%0A motif_number = 1%0A flank_motif = 5%0A sample_score_files = %5Bbase_path+'moca_output/fimo_out_1/phylop.mean.txt'%5D%0A control_score_files = %5Bbase_path+'moca_output/fimo_random_1/phylop.mean.txt'%5D%0A plot_titles = %5B'PhyloP'%5D%0A centrimo_dir = base_path + 'moca_output/centrimo_out'%0A figs = create_plot(meme_file,%0A plot_title,%0A output_dir=oc,%0A centrimo_dir=centrimo_dir,%0A motif_number=motif_number,%0A flank_length=flank_motif,%0A sample_score_files=sample_score_files,%0A control_score_files=control_score_files,%0A reg_plot_titles=plot_titles,%0A annotate=None,%0A save=False)%0A return figs%5B0%5D%0A%0A%0A
|
|
ed0a2a8fc20a44499d9db03d2eb8fcd58c1b0cd3
|
Add unit tests
|
tests/test_session.py
|
tests/test_session.py
|
Python
| 0.000001
|
@@ -0,0 +1,1240 @@
+# Local imports%0Afrom uplink import session%0A%0A%0Adef test_base_url(uplink_builder_mock):%0A # Setup%0A uplink_builder_mock.base_url = %22https://api.github.com%22%0A sess = session.Session(uplink_builder_mock)%0A%0A # Run & Verify%0A assert uplink_builder_mock.base_url == sess.base_url%0A%0A%0Adef test_headers(uplink_builder_mock):%0A # Setup%0A sess = session.Session(uplink_builder_mock)%0A%0A # Run%0A sess.headers%5B%22key%22%5D = %22value%22%0A%0A # Verify%0A uplink_builder_mock.add_hook.assert_called()%0A assert sess.headers == %7B%22key%22: %22value%22%7D%0A%0A%0Adef test_params(uplink_builder_mock):%0A # Setup%0A sess = session.Session(uplink_builder_mock)%0A%0A # Run%0A sess.params%5B%22key%22%5D = %22value%22%0A%0A # Verify%0A uplink_builder_mock.add_hook.assert_called()%0A assert sess.params == %7B%22key%22: %22value%22%7D%0A%0A%0Adef test_auth(uplink_builder_mock):%0A # Setup%0A uplink_builder_mock.auth = (%22username%22, %22password%22)%0A sess = session.Session(uplink_builder_mock)%0A%0A # Run & Verify%0A assert uplink_builder_mock.auth == sess.auth%0A%0A%0Adef test_auth_set(uplink_builder_mock):%0A # Setup%0A sess = session.Session(uplink_builder_mock)%0A%0A # Run%0A sess.auth = (%22username%22, %22password%22)%0A%0A # Verify%0A assert (%22username%22, %22password%22) == uplink_builder_mock.auth%0A
|
|
f1b22c952dabb3b66638000078e1ab2d0b7acea2
|
Add missing utils file
|
homedisplay/homedisplay/utils.py
|
homedisplay/homedisplay/utils.py
|
Python
| 0
|
@@ -0,0 +1,193 @@
+import redis%0Aimport json%0A%0Aredis_instance = redis.StrictRedis()%0A%0Adef publish_ws(key, content):%0A redis_instance.publish(%22home:broadcast:generic%22, json.dumps(%7B%22key%22: key, %22content%22: content%7D))%0A
|
|
f338d34e750fd4d06cd0992c7f457c403b1cff3b
|
add a simple tool to dump the GETLBASTATUS provisioning status
|
tools/getlbastatus.py
|
tools/getlbastatus.py
|
Python
| 0
|
@@ -0,0 +1,1103 @@
+#!/usr/bin/env python%0A# coding: utf-8%0A%0Aimport sys%0A%0Afrom pyscsi.pyscsi.scsi import SCSI%0Afrom pyscsi.pyscsi.scsi_device import SCSIDevice%0Afrom pyscsi.pyscsi.scsi_enum_getlbastatus import P_STATUS%0A%0A%0Adef usage():%0A print 'Usage: getlbastatus.py %5B--help%5D %5B-l %3Clba%3E%5D %3Cdevice%3E'%0A%0A%0Adef main():%0A i = 1%0A lba = 0%0A while i %3C len(sys.argv):%0A if sys.argv%5Bi%5D == '--help':%0A return usage()%0A if sys.argv%5Bi%5D == '-l':%0A del sys.argv%5Bi%5D%0A lba = int(sys.argv%5Bi%5D, 10)%0A del sys.argv%5Bi%5D%0A continue%0A i += 1%0A%0A if len(sys.argv) %3C 2:%0A return usage()%0A%0A device = sys.argv%5B1%5D%0A%0A sd = SCSIDevice(device)%0A s = SCSI(sd)%0A%0A r = s.readcapacity16().result%0A if not r%5B'lbpme'%5D:%0A print 'LUN is fully provisioned.'%0A return%0A%0A r = s.getlbastatus(lba).result%0A for i in range(len(r%5B'lbas'%5D)):%0A print 'LBA:%25d-%25d %25s' %25 (%0A r%5B'lbas'%5D%5Bi%5D%5B'lba'%5D,%0A r%5B'lbas'%5D%5Bi%5D%5B'lba'%5D + r%5B'lbas'%5D%5Bi%5D%5B'num_blocks'%5D - 1,%0A P_STATUS%5Br%5B'lbas'%5D%5Bi%5D%5B'p_status'%5D%5D%0A )%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A%0A
|
|
f7f122be60e8ffe03f8d449d619b21ec314b37a1
|
Include total sync time used in sync_status_stats.
|
stats_cron.py
|
stats_cron.py
|
from tapiriik.database import db
from datetime import datetime, timedelta

# Cron job: snapshot site-wide synchronization statistics into the
# sync_status_stats (time series) and stats (singleton) collections.


def _first_agg_value(collection, pipeline, key, default=0):
    """Run *pipeline* on *collection* and return ``result[0][key]``.

    MongoDB's aggregate() returns ``{"result": [...]}``; when the pipeline
    matches no documents the list is empty, so *default* is returned.
    """
    res = collection.aggregate(pipeline)
    if len(res["result"]) > 0:
        return res["result"][0][key]
    return default

# total distance synced
distanceSynced = _first_agg_value(
    db.sync_stats,
    [{"$group": {"_id": None, "total": {"$sum": "$Distance"}}}],
    "total")

# sync time utilization -- drop worker records older than an hour first
db.sync_worker_stats.remove({"Timestamp": {"$lt": datetime.utcnow() - timedelta(hours=1)}})
timeUsed = _first_agg_value(
    db.sync_worker_stats,
    [{"$group": {"_id": None, "total": {"$sum": "$TimeTaken"}}}],
    "total")

# number of user records currently locked by a synchronization worker
lockedSyncRecords = _first_agg_value(
    db.users,
    [{"$match": {"SynchronizationWorker": {"$ne": None}}},
     {"$group": {"_id": None, "count": {"$sum": 1}}}],
    "count")

# number of users whose next synchronization is overdue
pendingSynchronizations = _first_agg_value(
    db.users,
    [{"$match": {"NextSynchronization": {"$lt": datetime.utcnow()}}},
     {"$group": {"_id": None, "count": {"$sum": 1}}}],
    "count")

# number of users with at least one sync error
usersWithErrors = _first_agg_value(
    db.users,
    [{"$match": {"SyncErrorCount": {"$gt": 0}}},
     {"$group": {"_id": None, "count": {"$sum": 1}}}],
    "count")

# total error count across all users.
# Bug fix: the $group stage names this field "total", but the old code read
# result[0]["sum"], which raised KeyError whenever any errors existed.
totalErrors = _first_agg_value(
    db.users,
    [{"$group": {"_id": None, "total": {"$sum": "$SyncErrorCount"}}}],
    "total")

db.sync_status_stats.insert({
    "Timestamp": datetime.utcnow(),
    "Locked": lockedSyncRecords,
    "Pending": pendingSynchronizations,
    "ErrorUsers": usersWithErrors,
    "TotalErrors": totalErrors
})

db.stats.update({}, {"$set": {"TotalDistanceSynced": distanceSynced, "TotalSyncTimeUsed": timeUsed, "Updated": datetime.utcnow()}}, upsert=True)
|
Python
| 0
|
@@ -2207,16 +2207,59 @@
alErrors
+,%0A %22SyncTimeUsed%22: TotalSyncTimeUsed
%0A%7D)%0A%0Adb.
|
46c816f169b29a8fe91f14ab477222873d9bed88
|
Add DocTestParser
|
robot/docparser.py
|
robot/docparser.py
|
Python
| 0
|
@@ -0,0 +1,778 @@
+import re%0A%0A%0Aclass DocTestParser(object):%0A %22%22%22Find all externaltestcaseid's in a test's docstring.%0A%0A If your externaltestcaseid prefix is abc and the test has 'abc-123' in it's docstring.%0A %60DocTestParser('abc').get_testcases()%60 would return %60%5B'abc-123'%5D%60.%0A %22%22%22%0A def __init__(self, doc_matcher=None, doc_matchers=None):%0A %22%22%22%0A :param doc_matchers: List of regex to find in docstring%0A %22%22%22%0A self.doc_matchers = doc_matchers if doc_matchers is not None else %5B%5D%0A if doc_matcher:%0A self.doc_matchers.append(doc_matcher)%0A%0A def get_testcases(self, test):%0A testcases = set()%0A for matcher in self.doc_matchers:%0A testcases %7C= set(re.findall('%7B%7D-%5Cd+'.format(matcher), test.doc))%0A return testcases%0A
|
|
4f2c91c06ab13eec02ef0199ef45d0eeaf555ea7
|
Add dunder init for lowlevel.
|
astrodynamics/lowlevel/__init__.py
|
astrodynamics/lowlevel/__init__.py
|
Python
| 0
|
@@ -0,0 +1,81 @@
+# coding: utf-8%0Afrom __future__ import absolute_import, division, print_function%0A
|
|
6473cf17576fb0f52c653d96e22c4c9bf316250a
|
Remove elipses from examples instead of commenting them out
|
src/etc/extract-tests.py
|
src/etc/extract-tests.py
|
# Script for extracting compilable fragments from markdown
# documentation. See prep.js for a description of the format
# recognized by this tool. Expects a directory fragements/ to exist
# under the current directory, and writes the fragments in there as
# individual .rs files.
import sys, re;

# Usage: extract-tests.py <input.md> <dest-dir>
# NOTE(review): the error message mentions only the input filename, but a
# destination directory argument is required as well.
if len(sys.argv) < 3:
    print("Please provide an input filename")
    sys.exit(1)
filename = sys.argv[1]
dest = sys.argv[2]
f = open(filename)
lines = f.readlines()
f.close()
cur = 0
line = ""
chapter = ""       # sanitized name of the current "# ..." chapter heading
chapter_n = 0      # 1-based index of the next fragment within the chapter
while cur < len(lines):
    line = lines[cur]
    cur += 1
    # A markdown H1 heading starts a new chapter; fragments are named after it.
    chap = re.match("# (.*)", line);
    if chap:
        chapter = re.sub(r"\W", "_", chap.group(1)).lower()
        chapter_n = 1
    elif re.match("~~~", line):
        # Parse the tags that open a code block in the pandoc format:
        # ~~~ {.tag1 .tag2}
        tags = re.findall("\.([\w-]*)", line)
        block = ""
        ignore = "notrust" in tags or "ignore" in tags
        # Some tags used by the language ref that indicate not rust
        ignore |= "ebnf" in tags
        ignore |= "abnf" in tags
        ignore |= "keyword" in tags
        ignore |= "field" in tags
        ignore |= "precedence" in tags
        xfail = "xfail-test" in tags
        # Collect the block body up to the closing ~~~ fence.
        while cur < len(lines):
            line = lines[cur]
            cur += 1
            if re.match("~~~", line):
                break
            else:
                # Lines beginning with '# ' are turned into valid code
                line = re.sub("^# ", "", line)
                # Allow elipses in code snippets
                line = re.sub("\.\.\.", "/*...*/", line)
                block += line
    # Wrap bare fragments into a complete, compilable program.
        if not ignore:
            if not re.search(r"\bfn main\b", block):
                block = "fn main() {\n" + block + "\n}\n"
            if not re.search(r"\bextern mod std\b", block):
                block = "extern mod std;\n" + block;
            if xfail:
                block = "// xfail-test\n" + block
            filename = (dest + "/" + str(chapter)
                        + "_" + str(chapter_n) + ".rs")
            chapter_n += 1
            f = open(filename, 'w')
            f.write(block)
            f.close()
|
Python
| 0
|
@@ -1610,15 +1610,8 @@
%22, %22
-/*...*/
%22, l
|
de10593be3c513d41423dffcedd220c02dd37d6c
|
Add config_default.py
|
config_default.py
|
config_default.py
|
Python
| 0.000004
|
@@ -0,0 +1,851 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on 2015-10-23 08:06:00%0A%0A@author: Tran Huu Cuong %3Ctranhuucuong91@gmail.com%3E%0A%0A%22%22%22%0Aimport os%0A%0A# Blog configuration values.%0A%0A# You may consider using a one-way hash to generate the password, and then%0A# use the hash again in the login view to perform the comparison. This is just%0A# for simplicity.%0AADMIN_PASSWORD = 'admin@secret'%0AAPP_DIR = os.path.dirname(os.path.realpath(__file__))%0A%0A# The playhouse.flask_utils.FlaskDB object accepts database URL configuration.%0ADATABASE = 'sqliteext:///%25s' %25 os.path.join(APP_DIR, 'blog.db')%0ADEBUG = False%0A%0A# The secret key is used internally by Flask to encrypt session data stored%0A# in cookies. Make this unique for your app.%0ASECRET_KEY = 'shhh, secret!'%0A%0A# This is used by micawber, which will attempt to generate rich media%0A# embedded objects with maxwidth=800.%0ASITE_WIDTH = 800%0A%0A
|
|
95bb2d362e6a41b4e6421b5e8752b5040ea23d3f
|
Test file
|
main.py
|
main.py
|
Python
| 0.000001
|
@@ -0,0 +1,293 @@
+from flir.stream import Stream%0Afrom flir.flir import FLIR%0Aimport time%0A%0A%0A#h = Stream(%22129.219.136.149%22, 4000)%0A#h.connect()%0A#time.sleep(5)%0A#h.write(%22PP-500%22.encode(%22ascii%22))%0A%0Ax = FLIR(%22129.219.136.149%22, 4000)%0Ax.connect()%0Ax.pan(30)%0Aprint(x.pan())%0Ax.pan_offset(10)%0Aprint(x.pan())%0Ax.stream.close()%0A
|
|
ea422aabe19e13ad6811496470fcd7c341820333
|
add reject quote route
|
main.py
|
main.py
|
from flask import *
from flask.json import JSONEncoder
from flask.ext.cors import CORS
from flask.ext.login import LoginManager, login_user , logout_user , current_user , login_required
import simplejson as json
import os, sys
import datetime
# Application and extension setup: config, database, session login.
app = Flask(__name__)
sess = Session()
app.config.from_object('config')
# Refuse to start without a database connection string configured.
# ("is None" instead of the old identity-unsafe "== None" comparison.)
if app.config['SQLALCHEMY_DATABASE_URI'] is None:
    print("Need database config")
    sys.exit(1)
# Imported after the config check so the models bind to a configured app.
# NOTE(review): mid-file import -- presumably deliberate ordering; confirm.
from models import db, Quote, Vote, User
db.init_app(app)
login_manager = LoginManager()
login_manager.init_app(app)
# Unauthenticated requests to @login_required views redirect to 'login'.
login_manager.login_view = 'login'
@app.before_request
def before_request():
    # Expose the logged-in user on flask.g for the duration of the request.
    g.user = current_user
@login_manager.user_loader
def load_user(id):
    """Flask-Login callback: map a session-stored user id back to a User row."""
    return User.query.get(int(id))
# registers user
@app.route('/register', methods = ['POST'])
def register():
    """Create a new user from a JSON body of ``email``, ``password`` and
    ``secret``; the secret must match ADMIN_REGISTRATION_SECRET_KEY."""
    body = request.get_json()
    # NOTE(review): this dumps the entire app config (including secrets) to
    # stdout -- looks like leftover debugging; consider removing.
    print app.config
    if 'secret' not in body or body['secret'] != app.config['ADMIN_REGISTRATION_SECRET_KEY']:
        return jsonify({"Error": "Secret key is wrong"})
    email = body['email']
    password = body['password']
    # NOTE(review): the password is stored in plain text -- it should be
    # hashed (e.g. werkzeug.security) before persisting; login() depends on
    # the current plain-text scheme.
    user = User(email=email, password=password)
    db.session.add(user)
    db.session.commit()
    return jsonify(user.serialize)
# user login
@app.route('/login', methods = ['POST'])
def login():
    """Authenticate from a JSON ``email``/``password`` body and start a
    login session on success."""
    body = request.get_json()
    email = body['email']
    password = body['password']
    # NOTE(review): plain-text password comparison in the query -- works only
    # because registration stores passwords unhashed; see register().
    registered_user = User.query.filter_by(email=email,password=password).first()
    if registered_user is None:
        return jsonify({"Error": "Email or Password invalid"})
    login_user(registered_user)
    return jsonify({"Success": "User is logged in"})
# user logout
@app.route('/logout', methods = ['GET'])
def logout():
    """End the current user's login session."""
    logout_user()
    return jsonify({"Success": "User is logged out"})
# get all approved/ active quotes and votecount
@app.route("/quote", methods = ['GET'])
def get_quote():
    """Return every active quote id paired with its summed vote score,
    highest score first."""
    score = db.func.sum(Vote.value).label("score")
    active_scores = (
        db.session.query(Vote.quote_id, score)
        .group_by(Vote.quote_id)
        .order_by("score DESC")
        .join(Quote)
        .filter(Quote.active == True)
    )
    return jsonify(active_scores.all())
# submits a new quote
@app.route("/quote", methods = ['POST'])
def post_new_quote():
    """Create an inactive quote from the request body and auto-upvote it."""
    payload = request.get_json()
    conditions = payload.get('conditions', {})
    # Prefer the proxy-forwarded address when present.
    client_ip = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
    quote = Quote(
        text=payload['text'],
        conditions=json.dumps(conditions),
        view_count=1,
        ip=client_ip,
        active=False,
    )
    db.session.add(quote)
    db.session.commit()
    # Every new quote starts with a single upvote from its submitter.
    first_vote = Vote(ip=client_ip, value=1, quote_id=quote.id)
    db.session.add(first_vote)
    db.session.commit()
    return jsonify(quote.serialize)
# get all unapproved/ inactive quotes and votecount
@app.route("/quote/unapproved", methods = ['GET'])
@login_required
def get_unapproved_quotes():
    """Return every inactive quote id paired with its summed vote score,
    highest score first. Admin-only."""
    score = db.func.sum(Vote.value).label("score")
    pending_scores = (
        db.session.query(Vote.quote_id, score)
        .group_by(Vote.quote_id)
        .order_by("score DESC")
        .join(Quote)
        .filter(Quote.active == False)
    )
    return jsonify(pending_scores.all())
# gets details of single quote
@app.route("/quote/<int:id>", methods = ['GET'])
def get_single_quote(id):
    """Return one quote by id, incrementing its view counter.

    Previously an unknown id crashed with AttributeError on None; it now
    returns the same error payload used by the DELETE route.
    """
    quote = Quote.query.get(id)
    if quote is None:
        return jsonify({"Error": "Quote does not exist"})
    quote.view_count += 1
    db.session.commit()
    return jsonify(quote.serialize)
# approves/ activates a single quote
@app.route("/quote/<int:id>/approve", methods = ['PUT'])
@login_required
def approve_quote(id):
    """Mark a quote as active. Admin-only.

    Previously an unknown id crashed with AttributeError on None; it now
    returns the same error payload used by the DELETE route.
    """
    quote = Quote.query.get(id)
    if quote is None:
        return jsonify({"Error": "Quote does not exist"})
    quote.active = True
    db.session.commit()
    return jsonify(quote.serialize)
# deletes a single quote
@app.route("/quote/<int:id>", methods = ['DELETE'])
@login_required
def unapprove_quote(id):
    """Delete a quote and all of its votes. Admin-only.

    Returns an error payload when the quote does not exist.
    """
    quotes = Quote.query.filter_by(id = id).all()
    if quotes == []:
        return jsonify({"Error":"Quote does not exist"})
    votes = Vote.query.filter_by(quote_id = id).all()
    for vote in votes:
        db.session.delete(vote)
    for quote in quotes:
        db.session.delete(quote)
    # One commit for the whole operation instead of one commit per deleted
    # row, which the original performed inside both loops.
    db.session.commit()
    return jsonify({"Success":"Quote has been deleted"})
# submits a new vote for a single quote
@app.route("/quote/<int:quote_id>/vote", methods = ['POST'])
def post_new_vote(quote_id):
    """Record a vote (JSON key 'value') for the given quote."""
    payload = request.get_json()
    voter_ip = request.environ.get('HTTP_X_FORWARDED_FOR', request.remote_addr)
    vote = Vote(ip=voter_ip, value=payload['value'], quote_id=quote_id)
    db.session.add(vote)
    db.session.commit()
    return jsonify(vote.serialize)
# Enable CORS on every route so browser clients on other origins can call
# the API.
cors = CORS(app)
# Development entry point. NOTE(review): debug=True must not reach production.
if __name__ == "__main__":
    app.debug = True
    app.run()
|
Python
| 0
|
@@ -3577,16 +3577,265 @@
alize)%0A%0A
+# unapproves/ rejects a single quote%0A@app.route(%22/quote/%3Cint:id%3E/reject%22, methods = %5B'PUT'%5D)%0A@login_required%0Adef reject_quote(id):%0A quote = Quote.query.get(id)%0A quote.active = False%0A db.session.commit()%0A return jsonify(quote.serialize)%0A%0A
# delete
@@ -3843,32 +3843,32 @@
a single quote%0A
-
@app.route(%22/quo
@@ -3923,24 +3923,21 @@
red%0Adef
-unapprov
+delet
e_quote(
|
e2020af5ccd41f8571a2d0db4f5345ca9a8b561e
|
Add migration for db changes
|
gmn/src/d1_gmn/app/migrations/0010_auto_20170805_0107.py
|
gmn/src/d1_gmn/app/migrations/0010_auto_20170805_0107.py
|
Python
| 0
|
@@ -0,0 +1,1949 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.2 on 2017-08-05 01:07%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('app', '0009_auto_20170603_0546'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='Chain',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('head_pid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='chain_head_pid', to='app.IdNamespace')),%0A ('sid', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='chain_sid', to='app.IdNamespace')),%0A %5D,%0A ),%0A migrations.CreateModel(%0A name='ChainMember',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('chain', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='app.Chain')),%0A ('pid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='chainmember_pid', to='app.IdNamespace')),%0A %5D,%0A ),%0A migrations.RemoveField(%0A model_name='chainidtoseriesid',%0A name='head_pid',%0A ),%0A migrations.RemoveField(%0A model_name='chainidtoseriesid',%0A name='sid',%0A ),%0A migrations.RemoveField(%0A model_name='persistentidtochainid',%0A name='chain',%0A ),%0A migrations.RemoveField(%0A model_name='persistentidtochainid',%0A name='pid',%0A ),%0A migrations.DeleteModel(%0A name='ChainIdToSeriesID',%0A ),%0A migrations.DeleteModel(%0A name='PersistentIdToChainID',%0A ),%0A %5D%0A
|
|
a0cd167b9f19e2a4a9d1f2a80bc3586cce15c6ab
|
Add GMN DB migration to current
|
gmn/src/d1_gmn/app/migrations/0019_auto_20190418_1512.py
|
gmn/src/d1_gmn/app/migrations/0019_auto_20190418_1512.py
|
Python
| 0
|
@@ -0,0 +1,428 @@
+# Generated by Django 2.2 on 2019-04-18 20:12%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('app', '0018_auto_20180901_0115'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='eventlog',%0A options=%7B%7D,%0A ),%0A migrations.AlterModelOptions(%0A name='scienceobject',%0A options=%7B%7D,%0A ),%0A %5D%0A
|
|
a6ef1e2456f84b50102c4192984b0c18b9c81a27
|
Create scriptGenerator.py
|
scriptGenerator.py
|
scriptGenerator.py
|
Python
| 0.000001
|
@@ -0,0 +1,1603 @@
+#!/usr/bin/env python%0A#GENERATE A NEW SCRIPT %0Adef Creation():%0Afichero=open('generator.py','w')%0Afichero.close()%0Adef Save():%0Afichero=open('generator.py','a')%0Afichero.write('from PyQt4.QtGui import *%5Cn')%0Afichero.write('import sys%5Cn')%0Afichero.write('class Window(QWidget):%5Cn')%0Afichero.write('%5Ctdef __init__(self,parent=None):%5Cn')%0Afichero.write('%5Ct%5CtQWidget.__init__(self)%5Cn')%0Afichero.write('%5Ct%5Ctself.label=QLabel(%22Show a message%22)%5Cn')%0Afichero.write('%5Ct%5Ctself.lineEdit=QLineEdit()%5Cn')%0Afichero.write('%5Ct%5Ctself.lineEdit.setPlaceholderText(%22Write Here%22)%5Cn')%0Afichero.write('%5Ct%5Ctself.button=QPushButton(%22Show message%22)%5Cn') %0Afichero.write('%5Ct%5Ctself.layout=QVBoxLayout()%5Cn')%0Afichero.write('%5Ct%5Ctself.layout.addWidget(self.label)%5Cn')%0Afichero.write('%5Ct%5Ctself.layout.addWidget(self.lineEdit)%5Cn')%0Afichero.write('%5Ct%5Ctself.layout.addWidget(self.button)%5Cn')%0Afichero.write('%5Ct%5Ctself.setLayout(self.layout)%5Cn')%0Afichero.write('%5Ct%5Ctself.button.clicked.connect(self.Mensaje)%5Cn')%0Afichero.write('%5Ctdef Mensaje(self):%5Cn')%0Afichero.write('%5Ct%5Ctmensaje=QMessageBox.information(self,%22mensaje%22,self.lineEdit.text())%5Cn')%0Afichero.write('if __name__==%22__main__%22:%5Cn')%0Afichero.write('%5Cttry:%5Cn')%0Afichero.write('%5Ct%5Ctapp=QApplication(sys.argv)%5Cn')%0Afichero.write('%5Ct%5Ctventana=Window()%5Cn')%0Afichero.write('%5Ct%5Ctventana.show()%5Cn')%0Afichero.write('%5Ct%5Ctsys.exit(app.exec_())%5Cn')%0Afichero.write('%5Ctexcept SystemExit:%5Cn')%0Afichero.write('%5Ct%5Ctpass%5Cn')%0Afichero.close()%0An=raw_input(%22Do you wish create a new script %5BY/N%5D?: %22)%0Aif n=='Y' or n=='y':%0ACreation()%0ASave()%0Aprint(%22the file has been created%22)%0Araw_input()%0Aelse:%0Aprint(%22file doesn't created%22)%0Araw_input()%0A
|
|
844810f393724684d855e6e12fd20c392b6f06a0
|
check if key even exists before going into os.environ
|
src/pyechonest/config.py
|
src/pyechonest/config.py
|
"""
Global configuration variables for accessing the Echo Nest web API.
"""
ECHO_NEST_API_KEY = None
__version__ = "$Revision: 0 $"
# $Source$
import os
if(os.environ['ECHO_NEST_API_KEY']):
ECHO_NEST_API_KEY = os.environ['ECHO_NEST_API_KEY']
else:
ECHO_NEST_API_KEY = None
API_HOST = 'developer.echonest.com'
API_SELECTOR = '/api/'
"Locations for the Analyze API calls."
HTTP_USER_AGENT = 'PyENAPI'
"""
You may change this to be a user agent string of your
own choosing.
"""
MP3_BITRATE = 192
"""
Default bitrate for MP3 output. Conventionally an
integer divisible by 32kbits/sec.
"""
CACHE = True
"""
You may change this to False to prevent local caching
of API results.
"""
OBEY_RATE_LIMIT = True
"""
The Echo Nest limits users to 120 api calls per minute.
By default, pyechonest enforces this limit locally. Set this
variable to False to turn of local enforcement. The Echo Nest
api will still throttle you.
"""
|
Python
| 0.000001
|
@@ -153,27 +153,16 @@
os%0A%0Aif(
-os.environ%5B
'ECHO_NE
@@ -172,17 +172,30 @@
API_KEY'
-%5D
+ in os.environ
):%0A E
@@ -922,12 +922,13 @@
tle you.%0A%22%22%22
+%0A
|
1c2ba73eb0405dcfd427574c197e6a0588390f67
|
Simplify shipping template tags
|
oscar/templatetags/shipping_tags.py
|
oscar/templatetags/shipping_tags.py
|
from django import template
register = template.Library()
@register.tag
def shipping_charge(parse, token):
    """
    Template tag for calculating the shipping charge for a given shipping
    method and basket, and injecting it into the template context.

    Usage: {% shipping_charge method basket as name %}
    """
    return build_node(ShippingChargeNode, token)
@register.tag
def shipping_charge_discount(parse, token):
    """
    Template tag for calculating the shipping discount for a given shipping
    method and basket, and injecting it into the template context.

    Usage: {% shipping_charge_discount method basket as name %}
    """
    return build_node(ShippingChargeDiscountNode, token)
@register.tag
def shipping_charge_excl_discount(parse, token):
    """
    Template tag for calculating the shipping charge (excluding discounts) for
    a given shipping method and basket, and injecting it into the template
    context.

    Usage: {% shipping_charge_excl_discount method basket as name %}
    """
    return build_node(ShippingChargeExclDiscountNode, token)
def build_node(node_class, token):
    """Parse '{% tag method basket as name %}' and return a node_class
    instance built from the method, basket and target-name tokens."""
    bits = token.split_contents()
    if len(bits) != 5 or bits[3] != 'as':
        raise template.TemplateSyntaxError(
            "%(tag)r tag uses the following syntax: "
            "{%% %(tag)r method basket as "
            "name %%}" % {'tag': bits[0]})
    return node_class(bits[1], bits[2], bits[4])
class ShippingNode(template.Node):
    """Base template node: resolves the shipping-method and basket variables,
    calls ``method_name`` on the method with the basket, and stores the
    result in the context under the requested name."""
    # Name of the method to invoke on the resolved shipping-method object;
    # set by each subclass.
    method_name = None

    def __init__(self, method_var, basket_var, name_var):
        self.method_var = template.Variable(method_var)
        self.basket_var = template.Variable(basket_var)
        self.name_var = name_var

    def render(self, context):
        """Inject the computed value into the context; render no output."""
        try:
            method = self.method_var.resolve(context)
            basket = self.basket_var.resolve(context)
        except template.VariableDoesNotExist:
            # Silently render nothing when either variable is missing.
            return ''
        context[self.name_var] = getattr(
            method, self.method_name)(basket)
        return ''
class ShippingChargeNode(ShippingNode):
    """Injects ``method.calculate(basket)`` into the context."""
    method_name = 'calculate'
class ShippingChargeDiscountNode(ShippingNode):
    """Injects ``method.discount(basket)`` into the context."""
    method_name = 'discount'
class ShippingChargeExclDiscountNode(ShippingNode):
    """Injects ``method.calculate_excl_discount(basket)`` into the context."""
    method_name = 'calculate_excl_discount'
|
Python
| 0
|
@@ -57,32 +57,43 @@
y()%0A%0A%0A@register.
+assignment_
tag%0Adef shipping
@@ -100,28 +100,30 @@
_charge(
-parse, token
+method, basket
):%0A %22
@@ -289,44 +289,31 @@
urn
-build_node(ShippingChargeNode, token
+method.calculate(basket
)%0A%0A%0A
@@ -314,32 +314,43 @@
et)%0A%0A%0A@register.
+assignment_
tag%0Adef shipping
@@ -362,36 +362,38 @@
ge_discount(
-parse, token
+method, basket
):%0A %22%22%22%0A
@@ -557,52 +557,30 @@
urn
-build_node(ShippingChargeDiscountNode, token
+method.discount(basket
)%0A%0A%0A
@@ -589,16 +589,27 @@
egister.
+assignment_
tag%0Adef
@@ -642,20 +642,22 @@
unt(
-parse, token
+method, basket
):%0A
@@ -853,1328 +853,43 @@
urn
-build_node(ShippingChargeExclDiscountNode, token)%0A%0A%0Adef build_node(node_class, token):%0A tokens = token.split_contents()%0A if len(tokens) != 5 or tokens%5B3%5D != 'as':%0A raise template.TemplateSyntaxError(%0A %22%25(tag)r tag uses the following syntax: %22%0A %22%7B%25%25 %25(tag)r method basket as %22%0A %22name %25%25%7D%22 %25 %7B'tag': tokens%5B0%5D%7D)%0A%0A method_var, basket_var, name_var = tokens%5B1%5D, tokens%5B2%5D, tokens%5B4%5D%0A return node_class(method_var, basket_var, name_var)%0A%0A%0Aclass ShippingNode(template.Node):%0A method_name = None%0A%0A def __init__(self, method_var, basket_var, name_var):%0A self.method_var = template.Variable(method_var)%0A self.basket_var = template.Variable(basket_var)%0A self.name_var = name_var%0A%0A def render(self, context):%0A try:%0A method = self.method_var.resolve(context)%0A basket = self.basket_var.resolve(context)%0A except template.VariableDoesNotExist:%0A return ''%0A context%5Bself.name_var%5D = getattr(%0A method, self.method_name)(basket)%0A return ''%0A%0A%0Aclass ShippingChargeNode(ShippingNode):%0A method_name = 'calculate'%0A%0A%0Aclass ShippingChargeDiscountNode(ShippingNode):%0A method_name = 'discount'%0A%0A%0Aclass ShippingChargeExclDiscountNode(ShippingNode):%0A method_name = 'calculate_excl_discount'
+method.calculate_excl_discount(basket)
%0A
|
3ef6a9dbe2916d669d3e7e7cfab86a365237bc19
|
Make octane result format match the old v8_benchmark output.
|
tools/perf/perf_tools/octane.py
|
tools/perf/perf_tools/octane.py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json

from telemetry import multi_page_benchmark
from telemetry import util
class Octane(multi_page_benchmark.MultiPageBenchmark):
  """Runs the Octane JavaScript benchmark page and records per-suite scores."""

  def MeasurePage(self, _, tab, results):
    """Waits for Octane to finish, then records each suite's result.

    Suites still showing the '...' placeholder are skipped.
    """
    js_is_done = """
completed && !document.getElementById("progress-bar-container")"""
    def _IsDone():
      return bool(tab.runtime.Evaluate(js_is_done))
    # Octane runs for minutes; poll every 5s with a 300s timeout.
    util.WaitFor(_IsDone, 300, poll_interval=5)

    js_get_results = """
var results = {}
var result_divs = document.querySelectorAll('.p-result');
for (var r in result_divs) {
  if (result_divs[r].id && result_divs[r].id.indexOf('Result-') == 0)
    results[result_divs[r].id] = result_divs[r].innerHTML;
}
var main_banner = document.getElementById("main-banner").innerHTML;
var octane_score = main_banner.substr(main_banner.lastIndexOf(':') + 2);
results['Result-Octane'] = octane_score;
JSON.stringify(results);
"""
    # The page returns a JSON.stringify'd object; parse it with json.loads
    # rather than eval(), so page-controlled content can never execute as
    # Python code in the harness.
    result_dict = json.loads(tab.runtime.Evaluate(js_get_results))
    for key, value in result_dict.iteritems():
      if value == '...':
        continue
      results.Add(key, '', value)
|
Python
| 0.999996
|
@@ -739,24 +739,26 @@
0)%0A
-results%5B
+var key =
result_d
@@ -766,16 +766,56 @@
vs%5Br%5D.id
+.replace('Result-', '');%0A results%5Bkey
%5D = resu
@@ -992,20 +992,12 @@
ts%5B'
-Result-Octan
+scor
e'%5D
@@ -1201,32 +1201,164 @@
-results.Add(key, '', valu
+data_type = 'unimportant'%0A if key == 'score':%0A data_type = 'default'%0A results.Add(key, 'score (bigger is better)', value, data_type=data_typ
e)%0A
|
238f5788211ed117ceedbb234e7404bc02716d60
|
add serializer.py
|
apps/zblog/serializers.py
|
apps/zblog/serializers.py
|
Python
| 0.000004
|
@@ -0,0 +1,368 @@
+%0Afrom rest_framework import serializers%0A%0A# Serializers define the API representation.%0Aclass ArticleSerializer(serializers.HyperlinkedModelSerializer):%0A class Meta:%0A model = Article%0A #fields = ('title', 'content', 'hits', 'created_time', 'updated_time', 'category', 'tags')%0A fields = ('title', 'content', 'hits', 'created_time', 'updated_time')%0A
|
|
4e962d97b6a9d97db915c92c5a388a3a36573d63
|
add 32
|
p032.py
|
p032.py
|
Python
| 0.999999
|
@@ -0,0 +1,345 @@
+import itertools%0A%0Amatches = set()%0Afor i in itertools.permutations('123456789', 9):%0A for s in xrange(1, 4):%0A for s2 in xrange(s + 1, (14 - s) / 2):%0A a = int(''.join(i%5B:s%5D))%0A b = int(''.join(i%5Bs:s2%5D))%0A c = int(''.join(i%5Bs2:%5D))%0A if a * b == c:%0A matches.add(c)%0A%0Aprint sum(matches)
|
|
691a5873596c487eece704bad991270c6b275dde
|
Create Game_of_Master_Mind.py
|
Cracking_Coding_Interview/Game_of_Master_Mind.py
|
Cracking_Coding_Interview/Game_of_Master_Mind.py
|
Python
| 0.000001
|
@@ -0,0 +1,845 @@
+class Result:%0A def __init__(self):%0A self.hit = 0%0A self.pseudohit = 0%0A%0Adef estimate(guess, solution):%0A if len(guess) != len(solution): return None%0A result = Result()%0A idict = %7B%7D%0A for i, char in enumerate(guess):%0A if char == solution%5Bi%5D:%0A result.hit += 1%0A else:%0A idict%5Bsolution%5Bi%5D%5D = idict.get(solution%5Bi%5D, 0) + 1 # Note: here we use the idict to record the solution frequency!%0A %0A for i, char in enumerate(guess):%0A print %22char: %25s%22, char%0A print %22solution: %22, solution%0A if (char != solution%5Bi%5D) and (idict.get(char, 0) %3E 0):%0A result.pseudohit += 1%0A idict%5Bchar%5D -= 1%0A %0A print %22hit: %25d, pseudohit: %25d%22 %25(result.hit, result.pseudohit)%0A return result%0A%0Aguess = 'GGRR'%0Asolution = 'RGGY'%0Aestimate(guess, solution)%0A
|
|
b5906545121d4d5229552d3e2243a290810d1c1c
|
add self-connect.py
|
python/self-connect.py
|
python/self-connect.py
|
Python
| 0.000001
|
@@ -0,0 +1,458 @@
+#!/usr/bin/python%0A%0Aimport errno%0Aimport socket%0Aimport sys%0Aimport time%0A%0Aif len(sys.argv) %3C 2:%0A%09print %22Usage: %25s port%22 %25 sys.argv%5B0%5D%0A%09print %22port should in net.ipv4.ip_local_port_range%22%0Aelse:%0A%09port = int(sys.argv%5B1%5D)%0A%09for i in range(65536):%0A%09%09try:%0A%09%09%09sock = socket.create_connection(('localhost', port))%0A%09%09%09print %22connected%22, sock.getsockname(), sock.getpeername()%0A%09%09%09time.sleep(60*60)%0A%09%09except socket.error, e:%0A%09%09%09if e.errno != errno.ECONNREFUSED:%0A%09%09%09%09break%0A%0A%0A
|
|
d4151bf2a30fc8a497f7d4cb3f6eba4b6913447e
|
Create tester.py
|
tester.py
|
tester.py
|
Python
| 0.000002
|
@@ -0,0 +1,14 @@
+print(%22hey!%22)%0A
|
|
2d5fa61c1edc91621befe54d7fd08642f67b68f8
|
add jar_dir
|
recipes/pilon/pilon.py
|
recipes/pilon/pilon.py
|
#!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
#
# Program Parameters
#
import os
import sys
import subprocess
from os import access, getenv, X_OK
# Jar shipped alongside this wrapper, resolved relative to the script's
# real (symlink-resolved) directory at runtime.
jar_file = 'pilon-1.20.jar'
# JVM heap defaults applied only when the caller passes no -Xm* option and
# _JAVA_OPTIONS is unset (see jvm_opts).
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Canonicalize *path* (resolving symlinks) and return its directory."""
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the Java interpreter to invoke.

    Prefers $JAVA_HOME/bin/java when it exists and is executable; otherwise
    falls back to plain 'java' resolved via PATH.
    """
    java_home = os.getenv('JAVA_HOME')
    if not java_home:
        return 'java'
    candidate = os.path.join(java_home, os.path.join('bin', 'java'))
    return candidate if os.access(candidate, os.X_OK) else 'java'
def jvm_opts(argv):
    """Construct lists of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 3-tuple of lists of strings of the form:
      (memory_options, prop_options, passthrough_options)
    """
    mem_opts = []
    prop_opts = []
    pass_args = []

    for arg in argv:
        if arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith(('-D', '-XX')):
            prop_opts.append(arg)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce that behaviour the explicit `is None` check matters: an
    # empty-but-set _JAVA_OPTIONS must still suppress the default memory
    # options.  (Was `== None`; `is None` is the correct identity test.)
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args)
def main():
    """Assemble the java command line for the bundled jar and run it,
    exiting with the JVM's return code."""
    java = java_executable()
    jar_dir = real_dirname(sys.argv[0])
    (mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])
    # A first passthrough argument starting with 'eu' is treated as a class
    # name to run via the classpath instead of the jar's main class --
    # presumably pilon's 'eu.*' packages; TODO(review) confirm upstream.
    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    jar_path = os.path.join(jar_dir, jar_file)
    java_args = [java]+ mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
    sys.exit(subprocess.call(java_args))
# Script entry point.
if __name__ == '__main__':
    main()
|
Python
| 0.000003
|
@@ -2491,16 +2491,90 @@
s_args%0A%0A
+ if '--jar_dir' in sys.argv%5B1:%5D:%0A print(jar_path)%0A else:%0A
sys.
|
e1ceaa62c7e6f0974b21a23105280da49e9657bf
|
Send push notifications
|
regrowl/bridge/push.py
|
regrowl/bridge/push.py
|
Python
| 0
|
@@ -0,0 +1,1631 @@
+%22%22%22%0ASend push notifications%0A%0AUses pushnotify to send notifications to iOS and Android devices%0A%0ARequires https://pypi.python.org/pypi/pushnotify%0A%0ASample config%0A%0A%5Bregrowl.bridge.push%5D%0Alabel = prowl,%3Capikey%3E%0Aother = nma,%3Capikey%3E%0Aexample = pushover,%3Capikey%3E%0A%22%22%22%0A%0Afrom __future__ import absolute_import%0A%0Atry:%0A import pushnotify%0Aexcept ImportError:%0A raise ImportError('Requires https://pypi.python.org/pypi/pushnotify Please install from PyPi')%0A%0Aimport logging%0A%0Afrom regrowl.regrowler import ReGrowler%0A%0A%0Alogger = logging.getLogger(__name__)%0Alogging.getLogger('requests').setLevel(logging.WARNING)%0A%0A%0A__all__ = %5B'PushNotifier'%5D%0A%0A%0Aclass PushNotifier(ReGrowler):%0A valid = %5B'NOTIFY'%5D%0A%0A _kwargs = %7B%0A 'Notification-Callback-Target': 'url',%0A 'Notification-Priority': 'pritory',%0A %7D%0A%0A def notify(self, packet):%0A for label, settings in self.config.items(__name__):%0A notifier, apikey = settings.split(',')%0A%0A client = pushnotify.get_client(%0A notifier, packet.headers%5B'Application-Name'%5D)%0A%0A if not client:%0A logger.error('Error loading push provider %25s', notifier)%0A return%0A%0A logger.info('Sending push to %25s with %25s', label, notifier)%0A kwargs = %7B%7D%0A for key, target in self._kwargs.items():%0A if key in packet.headers:%0A kwargs%5Btarget%5D = packet.headers%5Bkey%5D%0A%0A client.add_key(apikey)%0A client.notify(%0A packet.headers%5B'Notification-Text'%5D,%0A packet.headers%5B'Notification-Title'%5D,%0A kwargs=kwargs%0A )%0A
|
|
f423a32dac3b3232a03e6eebdb0664d2b5cdf87e
|
Add test for ordinal
|
tests/app/utils/test_time.py
|
tests/app/utils/test_time.py
|
Python
| 0.000418
|
@@ -0,0 +1,183 @@
+from app.utils.time import make_ordinal%0A%0A%0Aclass WhenMakingOrdinal:%0A%0A def it_returns_an_ordinal_correctly(self):%0A ordinal = make_ordinal(11)%0A assert ordinal == '11th'%0A
|
|
5647dfbf3aa2b2c5cb7f32b60b21a47ad2ee6f20
|
add google foobar exercise
|
solution_level1.py
|
solution_level1.py
|
Python
| 0.000001
|
@@ -0,0 +1,397 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0Adef answer(s):%0A re = ''%0A a = ord('a')%0A z = ord('z')%0A for c in s:%0A ascii_code = ord(c)%0A if ascii_code %3E= a and ascii_code %3C= z:%0A tmp = chr(a + z -ascii_code)%0A re = re + tmp%0A else:%0A re = re + c%0A%0A return re%0A%0Aif __name__ == '__main__':%0A str = raw_input(%22Inputs:%22)%0A print(answer(str))%0A%0A
|
|
e1b23ecf168d397da373c4441c67e655da58e3e9
|
Add basic Log class to represent a log record.
|
source/bark/log.py
|
source/bark/log.py
|
Python
| 0
|
@@ -0,0 +1,958 @@
+# :coding: utf-8%0A# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips%0A# :license: See LICENSE.txt.%0A%0Afrom collections import MutableMapping%0A%0A%0Aclass Log(MutableMapping):%0A '''Hold individual log data.'''%0A%0A def __init__(self, *args, **kw):%0A '''Initialise log.'''%0A super(Log, self).__init__()%0A self._mapping = dict(*args, **kw)%0A%0A def __str__(self):%0A '''Return string representation.'''%0A return str(self._mapping)%0A%0A def __len__(self):%0A '''Return number of keys.'''%0A%0A def __iter__(self):%0A '''Return iterator over object.'''%0A return iter(self._mapping)%0A%0A def __getitem__(self, key):%0A '''Return value referenced by *key*.'''%0A return self._mapping%5Bkey%5D%0A%0A def __setitem__(self, key, value):%0A '''Set *key* to reference *value*.'''%0A self._mapping%5Bkey%5D = value%0A%0A def __delitem(self, key):%0A '''Remove *key* reference.'''%0A del self._mapping%5Bkey%5D%0A%0A
|
|
6c6214681ee89f2f67b09542fcf7690aa61954b9
|
Add maybe_make_dir()
|
file/maybe_make_dir.py
|
file/maybe_make_dir.py
|
Python
| 0.000001
|
@@ -0,0 +1,530 @@
+import os%0A%0A# ==============================================================================%0A# MAYBE_MAKE_DIR%0A# ==============================================================================%0Adef maybe_make_dir(path):%0A %22%22%22 Checks if a directory path exists on the system, if it does not, then%0A it creates that directory (and any parent directories needed to%0A create that directory)%0A %22%22%22%0A if not os.path.exists(path):%0A os.makedirs(path)%0A%0A%0A
|
|
c57bfdfae235e7ed7b5f13922a7fbc64dbd112f1
|
Add a missing migration
|
junction/proposals/migrations/0025_auto_20200321_0049.py
|
junction/proposals/migrations/0025_auto_20200321_0049.py
|
Python
| 0.013292
|
@@ -0,0 +1,469 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9 on 2020-03-20 19:19%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('proposals', '0024_auto_20170610_1857'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterIndexTogether(%0A name='proposalcomment',%0A index_together=set(%5B('is_spam', 'marked_as_spam_by'), ('commenter', 'is_spam')%5D),%0A ),%0A %5D%0A
|
|
0779277486a6812f5b58e1fc1ab6fe1e5dc35559
|
add dummy api tests
|
nc/tests/test_api.py
|
nc/tests/test_api.py
|
Python
| 0.000001
|
@@ -0,0 +1,1201 @@
+from django.core.urlresolvers import reverse%0Afrom rest_framework import status%0Afrom rest_framework.test import APITestCase%0Afrom nc.models import Agency%0A%0A%0Aclass AgencyTests(APITestCase):%0A%0A def test_list_agencies(self):%0A %22%22%22Test Agency list%22%22%22%0A Agency.objects.create(name=%22Durham%22)%0A url = reverse('agency-api-list')%0A data = %5B%7B'id': 1, 'name': 'Durham'%7D%5D%0A response = self.client.get(url, format='json')%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A self.assertEqual(response.data, data)%0A%0A def test_stops_api(self):%0A %22%22%22Test Agency stops API endpoint%22%22%22%0A agency = Agency.objects.create(name=%22Durham%22)%0A url = reverse('agency-api-stops', args=%5Bagency.pk%5D)%0A response = self.client.get(url, format='json')%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A%0A def test_stops_by_reason(self):%0A %22%22%22Test Agency stops_by_reason API endpoint%22%22%22%0A agency = Agency.objects.create(name=%22Durham%22)%0A url = reverse('agency-api-stops-by-reason', args=%5Bagency.pk%5D)%0A response = self.client.get(url, format='json')%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A
|
|
9b68ee1e0ffb60ebffe0bb90da2512da4bbbeb99
|
add split_from_listobj_and_tuple_it func
|
utils/split_listobj_and_tuple_it.py
|
utils/split_listobj_and_tuple_it.py
|
Python
| 0.000018
|
@@ -0,0 +1,52 @@
+%0Adef split_from_listobj_and_tuple_it():%0A return 0
|
|
ece4598e6297ef071b1c928435efb3bee73e3ddb
|
Backup script.
|
scripts/backup.py
|
scripts/backup.py
|
Python
| 0
|
@@ -0,0 +1,367 @@
+import json%0Afrom app.firebase import db%0A%0A%22%22%22%0ABackup script for saving contents at the path in Firebase.%0A%22%22%22%0Adef backup(path):%0A backup = db().child(path).get().val()%0A with open(%22out.json%22, %22w%22) as f: json.dump(backup, f)%0A%0Aif __name__ == '__main__':%0A # See app/constants for table prefixes and suffixes%0A path = %22branches/02-chicago/venues%22%0A backup(path)%0A
|
|
2ad0a7f50b6b120c6e769033037c0e1661d2480d
|
Add spacer/exp_name.py to generate names for experiments
|
spacer/exp_name.py
|
spacer/exp_name.py
|
Python
| 0.000016
|
@@ -0,0 +1,1144 @@
+#! /usr/bin/env python3%0A%0A# Name for experiments directory%0A%0Aimport sys%0Aimport words%0Aimport argparse%0Aimport os.path%0Afrom datetime import datetime%0Aimport platform%0A%0A%0Aclass ExpNamer(object):%0A%0A def __init__(self):%0A self._name = 'exp_name'%0A self._help = 'Name experiment'%0A%0A def mk_arg_parser(self, ap):%0A ap.add_argument('idx',%0A metavar='FILE',%0A help='Index of benchmarks for this experiment')%0A return ap%0A%0A def run(self, args=None):%0A idx = os.path.splitext(os.path.basename(args.idx))%5B0%5D%0A date = datetime.now().strftime('%25d_%25m_%25Y-t%25H-%25M-%25S')%0A noun = words.get_a_noun(length=7, bound='atmost', seed=date)%0A noun = noun.lower()%0A node = platform.node().split('.')%5B0%5D%0A%0A print(f%22%7Bidx%7D.%7Bnode%7D.%7Bnoun%7D.%7Bdate%7D%22)%0A%0A def main(self, argv):%0A ap = argparse.ArgumentParser(prog=self._name, description=self._help)%0A ap = self.mk_arg_parser(ap)%0A args = ap.parse_args(argv)%0A return self.run(args)%0A%0A%0Adef main():%0A cmd = ExpNamer()%0A return cmd.main(sys.argv%5B1:%5D)%0A%0A%0Aif __name__ == '__main__':%0A sys.exit(main())%0A
|
|
8b467efd1f998d05da0272a284773501f0b330ff
|
Add a test file which was missing from a recent branch
|
djangae/tests/test_meta_queries.py
|
djangae/tests/test_meta_queries.py
|
Python
| 0.000001
|
@@ -0,0 +1,1324 @@
+from django.db import models%0Afrom djangae.test import TestCase%0A%0Afrom djangae.contrib import sleuth%0A%0A%0Aclass MetaQueryTestModel(models.Model):%0A field1 = models.CharField(max_length=32)%0A%0A%0Aclass PrimaryKeyFilterTests(TestCase):%0A%0A def test_pk_in_with_slicing(self):%0A i1 = MetaQueryTestModel.objects.create();%0A%0A self.assertFalse(%0A MetaQueryTestModel.objects.filter(pk__in=%5Bi1.pk%5D)%5B9999:%5D%0A )%0A%0A self.assertFalse(%0A MetaQueryTestModel.objects.filter(pk__in=%5Bi1.pk%5D)%5B9999:10000%5D%0A )%0A%0A def test_limit_correctly_applied_per_branch(self):%0A MetaQueryTestModel.objects.create(field1=%22test%22)%0A MetaQueryTestModel.objects.create(field1=%22test2%22)%0A%0A with sleuth.watch('google.appengine.api.datastore.Query.Run') as run_calls:%0A%0A list(MetaQueryTestModel.objects.filter(field1__in=%5B%22test%22, %22test2%22%5D)%5B:1%5D)%0A%0A self.assertEqual(1, run_calls.calls%5B0%5D.kwargs%5B'limit'%5D)%0A self.assertEqual(1, run_calls.calls%5B1%5D.kwargs%5B'limit'%5D)%0A%0A with sleuth.watch('google.appengine.api.datastore.Query.Run') as run_calls:%0A%0A list(MetaQueryTestModel.objects.filter(field1__in=%5B%22test%22, %22test2%22%5D)%5B1:2%5D)%0A%0A self.assertEqual(2, run_calls.calls%5B0%5D.kwargs%5B'limit'%5D)%0A self.assertEqual(2, run_calls.calls%5B1%5D.kwargs%5B'limit'%5D)%0A
|
|
62c04b70178f3df8a8c7cbf01de0896d3e808698
|
Create __init__.py
|
mass_mailing_themes_boilerplate/__init__.py
|
mass_mailing_themes_boilerplate/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
615247c28d58fbbff40f5e4122441d77acb19003
|
Integrate notification app in settings and add basic structure of files
|
notification/urls.py
|
notification/urls.py
|
Python
| 0
|
@@ -0,0 +1,504 @@
+from django.conf.urls import url%0Afrom link.views import LinkView, LinkReactionView, LinkCommentView%0A%0A%0Aurlpatterns = %5B%0A url(r'%5E$', LinkView.new, name='link_new'),%0A url(r'%5E(?P%3Cpost_id%3E%5B0-9%5D+)/add/$', LinkView.add, name='link_add'),%0A url(r'%5E(?P%3Cpost_id%3E%5B0-9%5D+)/react/$', LinkReactionView.react, name='link_react'),%0A url(r'%5E(?P%3Cpost_id%3E%5B0-9%5D+)/unreact/$', LinkReactionView.unreact, name='link_unreact'),%0A url(r'%5E(?P%3Cpost_id%3E%5B0-9%5D+)/comment/$', LinkCommentView.comment, name='link_comment'),%0A%5D%0A
|
|
ca1cfd2514d382b1187eab880014b6a611d3568d
|
add some testing for Resources and ResourceAttributesMixin
|
tests/resource.py
|
tests/resource.py
|
Python
| 0
|
@@ -0,0 +1,1523 @@
+import mock%0Aimport unittest%0Aimport httplib2%0Aimport slumber%0A%0A%0Aclass ResourceAttributesMixinTestCase(unittest.TestCase):%0A%0A def test_attribute_fallback_to_resource(self):%0A class ResourceMixinTest(slumber.ResourceAttributesMixin, slumber.MetaMixin, object):%0A class Meta:%0A authentication = None%0A base_url = None%0A format = %22json%22%0A%0A rmt = ResourceMixinTest(base_url=%22http://example.com/%22)%0A self.assertTrue(isinstance(rmt.example, slumber.Resource))%0A%0A%0Aclass ResourceTestCase(unittest.TestCase):%0A%0A def setUp(self):%0A self.base_resource = slumber.Resource(base_url=%22http://example/api/v1/test%22)%0A%0A def test_get_serializer(self):%0A self.assertTrue(isinstance(self.base_resource.get_serializer(), slumber.Serializer))%0A%0A def test_request_200(self):%0A # Mock a Response Object%0A r = mock.Mock(spec=httplib2.Response)%0A r.status = 200%0A%0A # Mock The httplib2.Http class%0A self.base_resource._http = mock.Mock(spec=httplib2.Http)%0A self.base_resource._http.request.return_value = (r, %22Mocked Content%22)%0A%0A resp, content = self.base_resource._request(%22GET%22)%0A%0A self.assertTrue(resp is r)%0A self.assertEqual(content, %22Mocked Content%22)%0A%0A self.base_resource._http.request.assert_called_once_with(%0A %22http://example/api/v1/test%22,%0A %22GET%22,%0A body=None,%0A headers=%7B%22content-type%22: self.base_resource.get_serializer().get_content_type()%7D%0A )%0A
|
|
7f4cfe09a29202475b0941558f8ab722e63cee7e
|
Add MPL 2.0 to license trove
|
scripts/migrations/023-add-new-trove-license-category.py
|
scripts/migrations/023-add-new-trove-license-category.py
|
Python
| 0
|
@@ -0,0 +1,570 @@
+import sys%0Aimport logging%0A%0Afrom ming.orm.ormsession import ThreadLocalORMSession%0A%0Afrom allura import model as M%0A%0Alog = logging.getLogger(__name__)%0A%0Adef main():%0A M.TroveCategory(trove_cat_id=905,%0A trove_parent_id=14,%0A shortname='mpl20',%0A fullname='Mozilla Public License 2.0 (MPL 2.0)',%0A fullpath='License :: OSI-Approved Open Source :: Mozilla Public License 2.0 (MPL 2.0)')%0A%0A ThreadLocalORMSession.flush_all()%0A ThreadLocalORMSession.close_all()%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
07cccdbb7fdc6919503c5b11bca8604e1f7a0d59
|
Create roman_numeral_convert.py
|
projects/roman_numeral_convert.py
|
projects/roman_numeral_convert.py
|
Python
| 0.000383
|
@@ -0,0 +1,501 @@
+%0A#Roman numerals are: %5Bi v x l c d m%5D%0A%0Adef stringer (x):%0A number_string = str(x)%0A a = number_string%5B0%5D%0A b = number_string%5B1%5D %0A c = number_string%5B2%5D %0A d = number_string%5B3%5D %0A%0A%0Aa_list = %5B I, II, III, IV, V, VI, VII, VIII, IX%5D%0Ab_list = %5B X, XX, XXX, XL, L, LX, LXX, LXXX, XC%5D%0Ac_list = %5B C, CC, CCC, CD, D, DC, DCC, DCCC, CM%5D%0Ad_list = %5B M, MM, MMM%5D%0A%0A%0Ax = int(input(%22Your Number(up to 3999): %22))%0A%0Astringer%0A%0Aprint a_list%5Ba-1%5D%0Aprint b_list%5Bb-1%5D%0Aprint c_list%5Bc-1%5D%0Aprint d_list%5Bd-1%5D%0A%0A%0A%0A%0A %0A %0A %0A %0A%0A
|
|
4e02394f87bec9f73364738550c0b441beb80696
|
Build Tower
|
Codewars/BuildTower.py
|
Codewars/BuildTower.py
|
Python
| 0.000001
|
@@ -0,0 +1,129 @@
+def tower_builder(n_floors):%0A return %5B((n_floors - i)*' ' + (2*i - 1)*'*' + (n_floors - i)*' ') for i in range(1,n_floors+1)%5D%0A
|
|
acedeb97935c53d0e7f1e39b2282f8a90bf379ee
|
add test case
|
test_flask.py
|
test_flask.py
|
Python
| 0.000059
|
@@ -0,0 +1,543 @@
+from pytest import fixture%0Afrom flask import Flask%0A%0Afrom flask_slack import Slack%0A%0A%0Aclass App(object):%0A%0A def __init__(self):%0A self.app = Flask(__name__)%0A self.app.debug = True%0A self.slack = Slack(self.app)%0A self.app.add_url_rule('/', view_func=self.slack.dispatch)%0A self.client = self.app.test_client()%0A%0A%0A@fixture%0Adef app():%0A return App()%0A%0A%0Adef test_register_command(app):%0A res = app.client.get('/')%0A assert res.status_code == 200%0A assert res.data == b'Command None is not found in team None'%0A
|
|
6beccf0c0b4e7788403415c05ae9f31e6c0a89eb
|
Add tests for Generalized Procrustes Analysis (GPA)
|
tests/test_gpa.py
|
tests/test_gpa.py
|
Python
| 0
|
@@ -0,0 +1,2442 @@
+import unittest%0A%0Aimport numpy as np%0Afrom sklearn import datasets%0Afrom sklearn import decomposition%0Afrom sklearn.utils import estimator_checks%0A%0Aimport prince%0A%0A%0Aclass TestGPA(unittest.TestCase):%0A%0A # def setUp(self):%0A def __init__(self):%0A # Create a list of 2-D circles with different locations and rotations%0A n_shapes = 4%0A n_points = 12%0A n_dims = 2%0A%0A shape_sizes = np.arange(1, n_shapes + 1)%0A shape_angle_offsets = 10 * np.arange(n_shapes)%0A shape_center_offsets = np.tile(np.arange(n_shapes), (n_dims, 1))%0A%0A base_angles = np.linspace(0, 2 * np.pi, num=n_points, endpoint=False)%0A # Size (n_shapes, n_points)%0A angles = base_angles%5Bnp.newaxis, :%5D + shape_angle_offsets%5B:, np.newaxis%5D%0A%0A # Calculate along dimensions%0A x = (%0A np.cos(angles) * shape_sizes%5B:, np.newaxis%5D%0A + shape_center_offsets%5B0%5D%5B:, np.newaxis%5D%0A )%0A y = (%0A np.sin(angles) * shape_sizes%5B:, np.newaxis%5D%0A + shape_center_offsets%5B1%5D%5B:, np.newaxis%5D%0A )%0A%0A self.shapes = np.stack(%5Bx, y%5D, axis=-1)%0A%0A def test_fit(self):%0A gpa = prince.GPA()%0A self.assertIsInstance(gpa.fit(self.shapes), prince.GPA)%0A%0A def test_transform(self):%0A gpa = prince.GPA(copy=True)%0A aligned_shapes = gpa.fit(self.shapes).transform(self.shapes)%0A self.assertIsInstance(aligned_shapes, np.ndarray)%0A self.assertEqual(self.shapes.shape, aligned_shapes.shape)%0A%0A def test_fit_transform(self):%0A gpa = prince.GPA()%0A aligned_shapes = gpa.fit_transform(self.shapes)%0A self.assertIsInstance(aligned_shapes, np.ndarray)%0A%0A def test_fit_transform_single(self):%0A %22%22%22Aligning a single shape should return the same shape.%22%22%22%0A gpa = prince.GPA()%0A shapes = self.shapes.shape%5B0:1%5D%0A aligned_shapes = gpa.fit_transform(shapes)%0A np.testing.assert_array_equal(shapes, aligned_shapes)%0A%0A def test_copy(self):%0A shapes_copy = np.copy(self.shapes)%0A%0A gpa = prince.GPA(copy=True)%0A gpa.fit(shapes_copy)%0A 
np.testing.assert_array_equal(self.shapes, shapes_copy)%0A%0A gpa = prince.GPA(copy=False)%0A gpa.fit(shapes_copy)%0A self.assertRaises(%0A AssertionError, np.testing.assert_array_equal, self.shapes, shapes_copy%0A )%0A%0A def test_check_estimator(self):%0A estimator_checks.check_estimator(prince.GPA(as_array=True))%0A
|
|
f86da5eddec2dd37f4797ea1caf404e8fec82701
|
add unit tests for query parsing
|
test_query.py
|
test_query.py
|
Python
| 0
|
@@ -0,0 +1,1378 @@
+from query import parse_query%0Aimport copy%0A%0Adefault_parsed_query = %7B%0A 'from': '-24hours',%0A 'to': 'now',%0A 'min': None,%0A 'max': None,%0A 'avg_by': %7B%7D,%0A 'limit_targets': 500,%0A 'avg_over': None,%0A 'patterns': %5B'target_type=', 'unit='%5D,%0A 'group_by': %5B'target_type=', 'unit=', 'server'%5D,%0A 'sum_by': %7B%7D,%0A 'statement': 'graph'%0A%7D%0A%0A%0Adef test_query_basic():%0A query = parse_query(%22%22)%0A assert query == default_parsed_query%0A query = parse_query(%22foo bar%22)%0A new = copy.deepcopy(default_parsed_query)%0A new%5B'patterns'%5D.extend(%5B'foo', 'bar'%5D)%0A assert query == new%0A%0A%0Adef test_query_advanced():%0A query = parse_query(%22octo -20hours unit=b/s memory group by foo avg by barsum by baz%22)%0A new = copy.deepcopy(default_parsed_query)%0A new%5B'patterns'%5D.extend(%5B'octo', '-20hours', 'unit=b/s', 'memory', 'by', 'baz'%5D)%0A new%5B'avg_by'%5D = %7B'barsum': %5B''%5D%7D%0A new%5B'group_by'%5D = %5B'target_type=', 'unit=', 'foo'%5D%0A assert query == new%0A query = parse_query(%22stack from -20hours to -10hours avg over 10M sum by foo:bucket1%7Cbucket2,bar min 100 max 200%22)%0A new = copy.deepcopy(default_parsed_query)%0A new%5B'statement'%5D = 'stack'%0A new%5B'avg_over'%5D = (10, 'M')%0A new%5B'from'%5D = '-20hours'%0A new%5B'to'%5D = '-10hours'%0A new%5B'min'%5D = '100'%0A new%5B'max'%5D = '200'%0A new%5B'sum_by'%5D = %7B'foo': %5B'bucket1', 'bucket2'%5D, 'bar': %5B''%5D%7D%0A assert query == new%0A
|
|
0baca9564c9df7b06645f71abdda0fe3090f46a6
|
Add a test-case for lit xunit output
|
utils/lit/tests/xunit-output.py
|
utils/lit/tests/xunit-output.py
|
Python
| 0.999995
|
@@ -0,0 +1,374 @@
+# Check xunit output%0A# RUN: %25%7Blit%7D --xunit-xml-output %25t.xunit.xml %25%7Binputs%7D/test-data%0A# RUN: FileCheck %3C %25t.xunit.xml %25s%0A%0A# CHECK: %3C?xml version=%221.0%22 encoding=%22UTF-8%22 ?%3E%0A# CHECK: %3Ctestsuites%3E%0A# CHECK: %3Ctestsuite name='test-data' tests='1' failures='0'%3E%0A# CHECK: %3Ctestcase classname='test-data.' name='metrics.ini' time='0.00'/%3E%0A# CHECK: %3C/testsuite%3E%0A# CHECK: %3C/testsuites%3E
|
|
e818989604ddaf34dd5730cc3b73093744b59a29
|
Create themes.py
|
timpani/themes.py
|
timpani/themes.py
|
Python
| 0
|
@@ -0,0 +1,110 @@
+from . import database%0A%0ATHEME_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), %22../../themes%22))%0A
|
|
2e90787a245d6a30c733699a819a1ff888c308b7
|
add simple boot script
|
src/japronto/__main__.py
|
src/japronto/__main__.py
|
Python
| 0
|
@@ -0,0 +1,1260 @@
+from argparse import ArgumentParser%0Afrom importlib import import_module%0Aimport sys%0A%0Afrom .app import Application%0A%0A%0Adef main():%0A parser = ArgumentParser(prog='python -m japronto')%0A parser.add_argument('--host', dest='host', type=str, default='0.0.0.0')%0A parser.add_argument('--port', dest='port', type=int, default=8080)%0A parser.add_argument('--worker-num', dest='worker_num', type=int, default=1)%0A parser.add_argument('application')%0A%0A args = parser.parse_args()%0A%0A try:%0A module, attribute = args.application.rsplit('.', 1)%0A except ValueError:%0A print(%0A %22Application specificer must contain at least one '.', got '%7B%7D'.%22%0A .format(args.application))%0A return 1%0A%0A try:%0A module = import_module(module)%0A except ModuleNotFoundError as e:%0A print(e.args%5B0%5D + ' on Python search path.')%0A return 1%0A%0A try:%0A attribute = getattr(module, attribute)%0A except AttributeError:%0A print(%22Module '%7B%7D' does not have an attribute '%7B%7D'.%22%0A .format(module.__name__, attribute))%0A return 1%0A%0A if not isinstance(attribute, Application):%0A print(%22%7B%7D is not an instance of 'japronto.Application'.%22)%0A return 1%0A%0A attribute.run()%0A%0A%0Asys.exit(main())%0A
|
|
bfa84c54166a606e4c7b587aeb10e5e79a2d0e50
|
Add __init__
|
tmdb3/__init__.py
|
tmdb3/__init__.py
|
Python
| 0.000917
|
@@ -0,0 +1,207 @@
+#!/usr/bin/env python%0A%0Afrom tmdb_api import Configuration, searchMovie, searchPerson, Person, %5C%0A Movie, Collection, __version__%0Afrom request import set_key%0Afrom tmdb_exceptions import *%0A%0A
|
|
8c1fe9193f55fd758af74d3270e5630db630fa2c
|
Fix constraints tests
|
test/test_constraints.py
|
test/test_constraints.py
|
from __future__ import absolute_import
from __future__ import print_function
import keras
from keras.datasets import mnist
import keras.models
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.regularizers import l2, l1
from keras.constraints import maxnorm, nonneg
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils, generic_utils
import theano
import theano.tensor as T
import numpy as np
import scipy
batch_size = 100
nb_classes = 10
nb_epoch = 10
# the data, shuffled and split between tran and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train=X_train.reshape(60000,784)
X_test=X_test.reshape(10000,784)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(784, 20, W_constraint=maxnorm(1)))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 20, W_constraint=nonneg))
model.add(Activation('relu'))
model.add(Dropout(0.1))
model.add(Dense(20, 10, W_constraint=maxnorm(1)))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0)
a=model.params[0].eval()
if np.isclose(np.max(np.sqrt(np.sum(a**2, axis=0))),1):
print('Maxnorm test passed')
else:
raise ValueError('Maxnorm test failed!')
b=model.params[2].eval()
if np.min(b)==0 and np.min(a)!=0:
print('Nonneg test passed')
else:
raise ValueError('Nonneg test failed!')
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu', target=.4))
model.add(Dropout(0.1))
model.add(Dense(20, 20))
model.add(Activation('relu', target=.3))
model.add(Dropout(0.1))
model.add(Dense(20, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)
get_activations1 = theano.function([model.layers[0].input], model.layers[1].output(train=False), allow_input_downcast=True)
activations1 = get_activations1(X_train)
get_activations2 = theano.function([model.layers[0].input], model.layers[4].output(train=False), allow_input_downcast=True)
activations2 = get_activations2(X_train)
if np.isclose(np.mean(activations1), .4, atol=.02) and np.isclose(np.mean(activations2), .3, atol=.02):
print('KL penalty test passed')
else:
raise ValueError('KL penalty test failed!')
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l1(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)
a=model.params[2].eval().reshape(400)
(D, p1) = scipy.stats.kurtosistest(a)
model = Sequential()
model.add(Dense(784, 20))
model.add(Activation('relu'))
model.add(Dense(20, 20, W_regularizer=l2(.01)))
model.add(Activation('relu'))
model.add(Dense(20, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)
a=model.params[2].eval().reshape(400)
(D, p2) = scipy.stats.kurtosistest(a)
if p1<.01 and p2>.01:
print('L1 and L2 regularization tests passed')
else:
raise ValueError('L1 and L2 regularization tests failed!')
|
Python
| 0.000012
|
@@ -1766,969 +1766,8 @@
)%0A%09%0A
-model = Sequential()%0Amodel.add(Dense(784, 20))%0Amodel.add(Activation('relu', target=.4))%0Amodel.add(Dropout(0.1))%0Amodel.add(Dense(20, 20))%0Amodel.add(Activation('relu', target=.3))%0Amodel.add(Dropout(0.1))%0Amodel.add(Dense(20, 10))%0Amodel.add(Activation('softmax'))%0A%0A%0Arms = RMSprop()%0Amodel.compile(loss='categorical_crossentropy', optimizer=rms)%0A%0Amodel.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=20, show_accuracy=True, verbose=0)%0A%0A%0A%0Aget_activations1 = theano.function(%5Bmodel.layers%5B0%5D.input%5D, model.layers%5B1%5D.output(train=False), allow_input_downcast=True)%0Aactivations1 = get_activations1(X_train)%0Aget_activations2 = theano.function(%5Bmodel.layers%5B0%5D.input%5D, model.layers%5B4%5D.output(train=False), allow_input_downcast=True)%0Aactivations2 = get_activations2(X_train)%0A%0Aif np.isclose(np.mean(activations1), .4, atol=.02) and np.isclose(np.mean(activations2), .3, atol=.02):%0A%09print('KL penalty test passed')%0Aelse:%0A%09raise ValueError('KL penalty test failed!')%0A%09%0A%0A
%0Amod
|
60f5dbe34884683626ca7f045fa79d2c247197fe
|
add test for class checks
|
pychecker/pychecker2/tests/class.py
|
pychecker/pychecker2/tests/class.py
|
Python
| 0
|
@@ -0,0 +1,909 @@
+import compiler.ast%0A%0Aclass B(compiler.ast.Const):%0A%0A def x(self):%0A self.inherited = 1%0A%0Aclass A(B):%0A def __init__(self):%0A self.x = 1 # define x on A%0A self.w.q = 1%0A%0A def f(s, self): # unusual self%0A print self%0A s.self = 1%0A s = 7%0A%0A def x(): # no self, redefine x on object%0A pass%0A%0A def y(self):%0A self.a, self.b = (1, 2) # define a, b%0A%0A def z(self):%0A print self.z # method%0A print self.x # assigned%0A print self.a # unpacked%0A print self.w # unknown%0A print self.known # known is known from B%0A print self.value # from compiler.ast.Const%0A print self.goofy # defined in class scope%0A%0A goofy = x%0A %0A
|
|
a5e599f4a7c2f20c4f0ed79366db985cba7ae85e
|
Add template context debugging templatetag
|
pylab/website/templatetags/debug.py
|
pylab/website/templatetags/debug.py
|
Python
| 0
|
@@ -0,0 +1,183 @@
+from django import template%0A%0Aregister = template.Library()%0A%0A%0A@register.simple_tag(name='pdb', takes_context=True)%0Adef pdb(context, *args, **kwargs):%0A import ipdb; ipdb.set_trace()%0A
|
|
e7852c457da3cea0f8a20773cc3a355f559b845e
|
Update version to 1.7
|
src/dashboard/src/main/migrations/0047_version_number.py
|
src/dashboard/src/main/migrations/0047_version_number.py
|
Python
| 0
|
@@ -0,0 +1,531 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Adef data_migration(apps, schema_editor):%0A Agent = apps.get_model('main', 'Agent')%0A Agent.objects %5C%0A .filter(identifiertype='preservation system', name='Archivematica') %5C%0A .update(identifiervalue='Archivematica-1.7')%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('main', '0046_optional_normative_structmap'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(data_migration)%0A %5D%0A
|
|
a4a73ac2e5a15e53a0935987911c5905890bfab8
|
Add Overwatch command.
|
orchard/overwatch.py
|
orchard/overwatch.py
|
Python
| 0.000001
|
@@ -0,0 +1,2590 @@
+%22%22%22Get stats for Overwatch.%22%22%22%0A%0Afrom plumeria.command import commands, CommandError%0Afrom plumeria.command.parse import Word%0Afrom plumeria.message.lists import build_list%0Afrom plumeria.util import http%0Afrom plumeria.util.http import BadStatusCodeError%0Afrom plumeria.util.ratelimit import rate_limit%0A%0AGENERAL_STATS = (%0A ('Played', 'time_played', '%7B:.0f%7D'),%0A ('K/D', 'kpd', '%7B:.2f%7D'),%0A ('Dmg/t', 'all_damage_done_avg_per_10_min', '%7B:.0f%7D'),%0A ('Best Streak', 'kill_streak_best', '%7B:.0f%7D'),%0A ('Obj Time/g', 'objective_time_most_in_game', '%7B:.5f%7D'),%0A ('Most Dmg/g', 'all_damage_done_most_in_game', '%7B:.0f%7D'),%0A ('Most Heal/g', 'healing_done_most_in_game', '%7B:.0f%7D'),%0A ('Medals', 'medals', '%7B:.0f%7D'),%0A ('Gold Med', 'medals_gold', '%7B:.0f%7D'),%0A ('Won', 'games_won', '%7B:.0f%7D'),%0A)%0A%0A%0Adef generate_stats_from_keys(data, stats):%0A entries = %5B%5D%0A for label, key, format in stats:%0A if key in data:%0A entries.append(('%7B%7D: **' + format + '**').format(label, float(data%5Bkey%5D)))%0A return ' %7C '.join(entries)%0A%0A%0A@commands.create('overwatch', 'ow', cost=2, category='Games', params=%5BWord('battletag'), Word('region', fallback=None)%5D)%0A@rate_limit()%0Aasync def overwatch(message, battletag, region=None):%0A %22%22%22%0A Get someone's Overwatch stats.%0A%0A The name is case-sensitive.%0A%0A Example::%0A%0A /overwatch booo#0000%0A%0A %22%22%22%0A try:%0A r = await http.get(%22https://owapi.net/api/v3/u/%7Bname%7D/blob%22.format(name=battletag.replace(%22#%22, %22-%22)))%0A data = r.json()%0A except BadStatusCodeError as e:%0A if e.http_code == 404:%0A raise CommandError(%22Battletag '%7B%7D' not found. 
The name is CASE-SENSITIVE.%22.format(battletag))%0A raise%0A%0A regions = %5B%5D%0A for key, value in data.items():%0A if key%5B0%5D != %22_%22 and value:%0A regions.append(key)%0A%0A if not len(regions):%0A raise CommandError(%22Battle tag found but there are no stats for '%7B%7D'.%22.format(battletag))%0A if not region and len(regions) %3E 1:%0A raise CommandError(%22Please specify a region (one of %7B%7D) for '%7B%7D'.%22.format(', '.join(regions), battletag))%0A if region and region.lower() not in regions:%0A raise CommandError(%22Please specify a region in one of %7B%7D for '%7B%7D'.%22.format(', '.join(regions), battletag))%0A%0A if not region:%0A region = regions%5B0%5D%0A stats = data%5Bregion%5D%0A%0A lines = %5B%5D%0A for type, s in stats%5B'stats'%5D.items():%0A lines.append(%0A %22**%7B%7D**: %7B%7D%22.format(type.capitalize(), generate_stats_from_keys(s%5B'game_stats'%5D, GENERAL_STATS)))%0A return build_list(lines)%0A%0A%0Adef setup():%0A commands.add(overwatch)%0A
|
|
010dd0366ddb62e52f295ec1648c1bab38f9e437
|
move python wrappers to their own file
|
tremendous/api.py
|
tremendous/api.py
|
Python
| 0
|
@@ -0,0 +1,167 @@
+from tremendous.bindings import lib%0Afrom tremendous.bindings import ffi%0A%0Adef apply_format(color, body):%0A s = lib.apply_format(color, body)%0A return ffi.string(s)%0A
|
|
69905bbee59bb2f65e389ba3c57d8d80bcd55407
|
Fix typo in setup.py
|
scikits/learn/setup.py
|
scikits/learn/setup.py
|
from os.path import join
import warnings
import numpy
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
from numpy.distutils.system_info import get_info, BlasNotFoundError
config = Configuration('learn',parent_package,top_path)
config.add_subpackage('em')
config.add_subpackage('datasets')
config.add_subpackage('feature_selection')
config.add_subpackage('glm')
config.add_subpackage('manifold')
config.add_subpackage('utils')
config.add_extension('libsvm',
sources=[join('src', 'svm.cpp'),
join('src', 'libsvm.c'),
],
include_dirs=[numpy.get_include()],
depends=[join('src', 'svm.h'),
join('src', 'libsvm_helper.c'),
])
### liblinear module
blas_sources = [join('src', 'blas', 'daxpy.c'),
join('src', 'blas', 'ddot.c'),
join('src', 'blas', 'dnrm2.c'),
join('src', 'blas', 'dscal.c')]
liblinear_sources = [join('src', 'linear.cpp'),
join('src', 'liblinear.c'),
join('src', 'tron.cpp')]
# we try to link agains system-wide blas
blas_info = get_info('blas_opt')
if not blas_info:
warnings.warn(BlasNotFoundError.__doc__)
liblinear_souces.append(blas_sources)
config.add_extension('liblinear',
sources=liblinear_sources,
libraries = blas_info.pop('libraries', []),
include_dirs=['src',
numpy.get_include(),
blas_info.pop('include_dirs', [])],
depends=[join('src', 'linear.h'),
join('src', 'tron.h'),
join('src', 'blas', 'blas.h'),
join('src', 'blas', 'blasp.h')],
**blas_info)
## end liblinear module
config.add_extension('BallTree',
sources=[join('src', 'BallTree.cpp')],
include_dirs=[numpy.get_include()]
)
config.add_subpackage('utils')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Python
| 0.999224
|
@@ -1485,16 +1485,17 @@
near_sou
+r
ces.appe
|
19c5600486ea7bee68eb1098636b21757938d799
|
Add is_prime python
|
math/is_prime/python/is_prime.py
|
math/is_prime/python/is_prime.py
|
Python
| 0.999978
|
@@ -0,0 +1,351 @@
+import math%0A%0Adef is_prime(number):%0A if number %3C= 1: return False%0A if number == 2: return True%0A if (number %25 2) == 0: return False%0A%0A for i in range(3, int(math.sqrt(number)) +1,2):%0A if number %25 1 == 0: return False%0A return True%0A%0Anumber = input (%22Enter number :%22)%0Aif is_prime(number):%0A print(%22It is prime%22)%0Aelse:%0A print(%22It is not prime%22)%0A %0A%0A
|
|
582128f1061ab74da76d26a366bfd3c8fee8f007
|
Add scripts/fill_events.py to generate mock data
|
scripts/fill_events.py
|
scripts/fill_events.py
|
Python
| 0.000001
|
@@ -0,0 +1,507 @@
+#!/usr/bin/env python%0Aimport sys%0Aimport os%0A%0Asys.path.append(os.path.join(os.path.dirname('__file__'), '..', 'src'))%0A%0Afrom random import randint%0Afrom datetime import datetime, timedelta%0A%0Afrom logsandra.model.client import CassandraClient%0A%0A%0Aclient = CassandraClient('test', 'localhost', 9160, 3)%0A%0Atoday = datetime.now()%0Akeywords = %5B'foo', 'bar', 'baz'%5D%0Afor i in range(1000):%0A d = today + timedelta(randint(-7, -1), randint(-3600*24, 3600*24))%0A client.add_log(d, 'test entry', 'here', %5Bkeywords%5Bi %25 3%5D%5D)%0A
|
|
a102fb888b60454d7efbe26e4afb38a59c212769
|
Add script to delete spam users.
|
p3/management/commands/delete_spam_users.py
|
p3/management/commands/delete_spam_users.py
|
Python
| 0
|
@@ -0,0 +1,1760 @@
+# -*- coding: utf-8 -*-%0A%22%22%22 Delete users creating by spambots.%0A%0A%22%22%22%0A%0Aimport logging as log%0Afrom optparse import make_option%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.db import transaction%0Afrom assopy import models as amodels%0A%0A###%0A%0Aclass Command(BaseCommand):%0A%0A # Options%0A option_list = BaseCommand.option_list + (%0A make_option('--dry-run',%0A action='store_true',%0A dest='dry_run',%0A help='Do everything except delete users',%0A ),%0A )%0A%0A args = '%3Cconference%3E'%0A%0A # Dry run ?%0A dry_run = False%0A%0A @transaction.atomic%0A def handle(self, *args, **options):%0A%0A%09# Handle options %0A self.dry_run = options.get('dry_run', False)%0A%0A # Between June 1-4 2018, a Chinese spammer create 30k fake user%0A # accounts%0A spam_users = amodels.User.objects.filter(%0A user__first_name = '%E9%87%91%E8%AF%9A%E9%80%8138%E5%85%83',%0A )%0A print ('Found %25i (potential) spam users.' %25 len(spam_users))%0A %0A count = 0%0A for user in spam_users:%0A %0A # Filter out users with tickets%0A tickets = user.tickets()%0A if tickets:%0A print ('Spam user %25r has %25i tickets: skipping.' %25 (%0A user.user.get_username(), len(tickets)))%0A continue%0A %0A # Delete user and all related objects%0A if not self.dry_run:%0A user.delete()%0A count += 1%0A if count %25 1000 == 0:%0A print ('Deleted %25i spam users.' %25 count)%0A %0A if self.dry_run:%0A print ('Would have deleted %25i spam users.' %25 count)%0A else:%0A print ('Deleted %25i spam users.' %25 count)%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.