commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
56ee8843c355ffa56f7c2583d8d524e1ecfd29c3 | Create __init__.py | module/submodule/tests/__init__.py | module/submodule/tests/__init__.py | Python | 0.000429 | ||
2cae3441a0a1889ef19a3d120c48f9b30a36e787 | add tests for query and result post process | matgendb/tests/test_postfuncs.py | matgendb/tests/test_postfuncs.py |
import pymongo
from pymongo import MongoClient
import uuid
import unittest
import pprint
from matgendb.query_engine import QueryEngine, QueryResults
# Determine if MongoDB is up and usable
has_mongo = False
try:
    conn = pymongo.MongoClient()
    has_mongo = True
# A bare 'except:' would also swallow KeyboardInterrupt/SystemExit; only
# ordinary failures should disable the tests.
except Exception:
    # NOTE(review): MongoClient() connects lazily in pymongo >= 3, so this
    # probe may report success without a reachable server -- confirm the
    # pinned driver version.
    pass
class SandboxTest(unittest.TestCase):
    """Exercise QueryEngine query/result post-processing hooks against a
    throwaway MongoDB collection (all tests skip when no server is up)."""

    SBX = "testing"  # sandbox id injected into every inserted document
    N = 100          # number of documents inserted per test

    def qtx(self, crit, props):
        """Query post-processor: rewrite a top-level 'e_above_hull'
        criterion into its sandboxed 'sbxd.e_above_hull' form in place."""
        if props is None:
            # NOTE(review): rebinding the local means a caller that passed
            # None never sees the 'sbxd' projection -- confirm QueryEngine
            # always passes a dict here.
            props = {}
        crit['sbxd.e_above_hull'] = crit['e_above_hull']
        props['sbxd'] = {"$elemMatch": {"id": self.SBX}}
        del crit['e_above_hull']

    def rtx(self, doc):
        """Result post-processor: hoist this sandbox's e_above_hull to the
        top level and tag the doc with a marker field."""
        doc['add_fake_field'] = 'test value'
        for item in doc['sbxd']:
            if item['id'] == self.SBX:
                doc['e_above_hull'] = item['e_above_hull']
        return doc

    def setUp(self):
        # Try a real mongodb
        if has_mongo:
            self.conn = pymongo.MongoClient()
            self.db_name = 'test'
            self.db = self.conn[self.db_name]
            # Unique collection name so concurrent runs cannot collide.
            self.coll_name = 'sandboxes_{}'.format(uuid.uuid4())
            self.coll = self.db[self.coll_name]
            for i in range(self.N):
                # core values positive, sandbox values negative, so tests
                # can tell which sbxd entry was surfaced.
                core_v, sbx_v = 0.1 * (i + 1), -0.1 * (i + 1)
                doc = {'task_id': 'mp-{:d}'.format(1000 + i),
                       'sbxd': [{'id': 'core', 'e_above_hull': core_v},
                                {'id': self.SBX, 'e_above_hull': sbx_v}],
                       'sbxn': ['core', self.SBX]}
                doc.update({'state': 'successful'})
                if i < 2:
                    pprint.pprint(doc)
                # NOTE(review): insert() was removed in pymongo 4
                # (insert_one) -- confirm the pinned driver version.
                self.coll.insert(doc)

    def tearDown(self):
        if has_mongo:
            self.db.drop_collection(self.coll_name)

    @unittest.skipUnless(has_mongo, 'requires MongoDB server')
    def test_no_post_funcs(self):
        qe = QueryEngine(connection=self.conn,
                         database=self.db_name,
                         collection=self.coll_name,
                         aliases={},
                         query_post=[], result_post=[])
        cursor = qe.query()
        self.assertTrue(isinstance(cursor, QueryResults))
        n = 0
        for rec in cursor:
            pprint.pprint("RESULT: {}".format(rec))
            # No post-processing should be done
            self.assertTrue('e_above_hull' not in rec)
            self.assertTrue('add_fake_field' not in rec)
            self.assertTrue('sbxd' in rec)
            n += 1
        # should find all tasks (assertEqual replaces failUnlessEqual,
        # which was deprecated and removed in Python 3.12)
        self.assertEqual(n, self.N)

    @unittest.skipUnless(has_mongo, 'requires MongoDB server')
    def test_mongo_find(self):
        qe = QueryEngine(connection=self.conn,
                         database=self.db_name,
                         collection=self.coll_name,
                         aliases={},
                         query_post=[self.qtx], result_post=[self.rtx])
        result = self._test_find(qe,
                                 criteria={'e_above_hull': {'$lte': 0.0}},
                                 properties={})

    @unittest.skipUnless(has_mongo, 'requires MongoDB server')
    def test_with_properties(self):
        qe = QueryEngine(connection=self.conn,
                         database=self.db_name,
                         collection=self.coll_name,
                         aliases={},
                         query_post=[self.qtx], result_post=[self.rtx])
        result = self._test_find(qe,
                                 criteria={'e_above_hull': {'$lte': 0.0}},
                                 properties=['e_above_hull', 'sbxd'])

    def _test_find(self, qe, properties, criteria):
        """Shared assertions for post-processed queries."""
        cursor = qe.query(properties=properties,
                          criteria=criteria)
        self.assertTrue(isinstance(cursor, QueryResults))
        n = 0
        for rec in cursor:
            pprint.pprint("RESULT: {}".format(rec))
            # rtx surfaces the sandbox value, which is always negative
            self.assertLess(rec['e_above_hull'], 0)
            self.assertTrue('add_fake_field' in rec)
            self.assertEqual(rec['add_fake_field'], 'test value')
            n += 1
        # should find all tasks
        self.assertEqual(n, self.N)

    @unittest.skipUnless(has_mongo, 'requires MongoDB server')
    def test_queryresult(self):
        qe = QueryEngine(connection=self.conn,
                         database=self.db_name,
                         collection=self.coll_name,
                         aliases={},
                         query_post=[self.qtx], result_post=[self.rtx])
        result = qe.query(criteria={'e_above_hull': {'$lte': 0.0}}).sort('sbxd.e_above_hull', pymongo.ASCENDING)
        self.assertTrue(isinstance(result, QueryResults))
        self.assertEqual(len(result), self.N)
        self.assertTrue(result[0]['e_above_hull'] < 0)
if __name__ == '__main__':
unittest.main() | Python | 0 | |
5e1d5644b2279b31191870b4a8099f3f6f31e851 | Enable admin for Project, Platform, Dataset | ncharts/admin.py | ncharts/admin.py | from django.contrib import admin
from ncharts.models import Project, Platform, Dataset
class ProjectAdmin(admin.ModelAdmin):
    # Default ModelAdmin behaviour; exists so Project gets an admin page.
    pass
class PlatformAdmin(admin.ModelAdmin):
    # Default ModelAdmin behaviour; exists so Platform gets an admin page.
    pass
class DatasetAdmin(admin.ModelAdmin):
    # Default ModelAdmin behaviour; exists so Dataset gets an admin page.
    pass
# Expose the three ncharts models in the Django admin site.
admin.site.register(Project,ProjectAdmin)
admin.site.register(Platform,PlatformAdmin)
admin.site.register(Dataset,DatasetAdmin)
| Python | 0 | |
23808a3d65db23163969aeb08adaa29f6403e720 | Fix a test | test/lib/lint/policy/test_abstract_policy.py | test/lib/lint/policy/test_abstract_policy.py | import unittest
from lib.lint.policy.abstract_policy import AbstractPolicy
class ConcretePolicy(AbstractPolicy):
    """Minimal AbstractPolicy subclass used as a fixture by the tests below."""
    def __init__(self):
        super().__init__()
        self.description = 'Found something invalid'
        self.reference = 'http://example.com'
        self.level = 0
class TestAbstractPolicy(unittest.TestCase):
    def test_listen_node_types(self):
        # The base policy listens to no node types by default.
        policy = AbstractPolicy()
        self.assertEqual(policy.listen_node_types(), [])

    def test_create_violation_report(self):
        # Position info as produced by the VimL parser.
        pos = {
            'col': 3,
            'i': 24,
            'lnum': 3,
        }
        node = {'pos': pos}
        env = {'path': 'path/to/file.vim'}
        # The report should flatten node position + env path into 'position'.
        expected_violation = {
            'name': 'ConcretePolicy',
            'level': 0,
            'description': 'Found something invalid',
            'reference': 'http://example.com',
            'position': {
                'column': 3,
                'line': 3,
                'path': 'path/to/file.vim',
            },
        }
        policy = ConcretePolicy()
        self.assertEqual(
            policy.create_violation_report(node, env),
            expected_violation)
if __name__ == '__main__':
unittest.main()
| import unittest
from lib.lint.policy.abstract_policy import AbstractPolicy
class ConcretePolicy(AbstractPolicy):
    """Minimal AbstractPolicy subclass used as a fixture by the tests below."""
    def __init__(self):
        super().__init__()
        self.description = 'Found something invalid'
        self.reference = 'http://example.com'
        self.level = 0
class TestAbstractPolicy(unittest.TestCase):
    def test_listen_node_types(self):
        # The base policy listens to no node types by default.
        policy = AbstractPolicy()
        self.assertEqual(policy.listen_node_types(), [])

    def test_create_violation_report(self):
        pos = {
            'col': 3,
            'i': 24,
            'lnum': 3,
        }
        env = {'path': 'path/to/file.vim'}
        # Older report shape: raw parser position plus a top-level path.
        expected_violation = {
            'name': 'ConcretePolicy',
            'level': 0,
            'description': 'Found something invalid',
            'reference': 'http://example.com',
            'path': 'path/to/file.vim',
            'position': pos,
        }
        policy = ConcretePolicy()
        self.assertEqual(
            policy.create_violation_report(pos, env),
            expected_violation)
if __name__ == '__main__':
unittest.main()
| Python | 0.999999 |
343a98d8a400467016a70e53d7e6d261bf9f75d8 | add regress.py | suite/regress.py | suite/regress.py | #!/usr/bin/env python
# Capstone Python bindings, by Nguyen Anh Quynnh <aquynh@gmail.com>
from __future__ import print_function
import sys
from capstone import *
all_tests = (
# arch, mode, syntax, address, hexcode, expected output
# issue 456 https://github.com/aquynh/capstone/issues/456
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_INTEL, 0xfc16, b"\xE8\x35\x64", "call 0x604e"),
(CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_INTEL, 0x9123fc1b, b"\x66\xE8\x35\x64", "call 0x6054"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_INTEL, 0x9123fc1b, b"\x66\xE8\x35\x64", "call 0x6054"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_INTEL, 0xfc26, b"\xE9\x35\x64", "jmp 0x605e"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_INTEL, 0xfff6, b"\x66\xE9\x35\x64\x93\x53", "jmp 0x53946431"),
(CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_INTEL, 0x9123fff1, b"\xE9\x35\x64\x93\x53", "jmp 0xe4b7642b"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_INTEL, 0x649123fff1, b"\xE9\x35\x64\x93\x53", "jmp 0x64e4b7642b"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_INTEL, 0xffe1, b"\x66\xe8\x35\x64\x93\x53", "call 0x5394641c"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_INTEL, 0x649123ffe1, b"\x66\xe8\x35\x64", "call 0x641a"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_INTEL, 0x649123ffe1, b"\x66\xe9\x35\x64", "jmp 0x641a"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_INTEL, 0xffe1, b"\x66\xe9\x35\x64\x93\x53", "jmp 0x5394641c"),
# AT&T syntax
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_ATT, 0xfc16, b"\xE8\x35\x64", "callw 0x604e"),
(CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT, 0x9123fc1b, b"\x66\xE8\x35\x64", "callw 0x6054"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT, 0x9123fc1b, b"\x66\xE8\x35\x64", "callw 0x6054"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_ATT, 0xfc26, b"\xE9\x35\x64", "jmp 0x605e"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_ATT, 0xfff6, b"\x66\xE9\x35\x64\x93\x53", "jmp 0x53946431"),
(CS_ARCH_X86, CS_MODE_32, CS_OPT_SYNTAX_ATT, 0x9123fff1, b"\xE9\x35\x64\x93\x53", "jmp 0xe4b7642b"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT, 0x649123fff1, b"\xE9\x35\x64\x93\x53", "jmp 0x64e4b7642b"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_ATT, 0xffe1, b"\x66\xe8\x35\x64\x93\x53", "calll 0x5394641c"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT, 0x649123ffe1, b"\x66\xe8\x35\x64", "callw 0x641a"),
(CS_ARCH_X86, CS_MODE_64, CS_OPT_SYNTAX_ATT, 0x649123ffe1, b"\x66\xe9\x35\x64", "jmp 0x641a"),
(CS_ARCH_X86, CS_MODE_16, CS_OPT_SYNTAX_ATT, 0xffe1, b"\x66\xe9\x35\x64\x93\x53", "jmp 0x5394641c"),
)
_python3 = sys.version_info.major == 3
def to_hex(s):
    """Format a byte string as space-separated '0x..' octets.

    bytearray yields ints on both Python 2 and Python 3, so no interpreter
    version switch (and no module-level flag) is needed.
    """
    return " ".join("0x{0:02x}".format(b) for b in bytearray(s))
def str_syntax(syntax):
    """Map a CS_OPT_SYNTAX_* constant (or 0 for the default) to a label."""
    slist = {
        0: "",
        CS_OPT_SYNTAX_INTEL: "intel",
        CS_OPT_SYNTAX_ATT: "att",
    }
    # KeyError on an unknown syntax is acceptable for this test harness.
    return slist[syntax]
def str_arch_mode(a, m):
    """Map an (arch, mode) constant pair to a human-readable label."""
    amlist = {
        (CS_ARCH_X86, CS_MODE_16): "X86-16bit",
        (CS_ARCH_X86, CS_MODE_32): "X86-32bit",
        (CS_ARCH_X86, CS_MODE_64): "X86-64bit",
    }
    # KeyError on an unsupported pair is acceptable for this test harness.
    return amlist[(a, m)]
# ## Test cs_disasm_quick()
def test_regression():
    """Disassemble each vector in all_tests and print a mismatch marker
    whenever the output differs from the expected mnemonic/operands."""
    for (arch, mode, syntax, address, code, expected_output) in all_tests:
        print("%s %s: %s = " %(str_arch_mode(arch, mode), str_syntax(syntax), to_hex(code)), end=""),
        md = Cs(arch, mode)
        if syntax != 0:
            md.syntax = syntax
        # NOTE(review): assumes every vector decodes to at least one
        # instruction; an undecodable vector would raise IndexError here.
        insn = list(md.disasm(code, address))[0]
        output = "%s %s" % (insn.mnemonic, insn.op_str)
        print(output)
        if output != expected_output:
            print("\t --> ERROR: expected output = %s" %(expected_output))
        print()
if __name__ == '__main__':
test_regression()
| Python | 0.000003 | |
e1c6f344e804f0d972dbc685b9492a126d74a7d3 | Create new management app | usingnamespace/management/__init__.py | usingnamespace/management/__init__.py | from pyramid.config import Configurator
from pyramid.session import SignedCookieSessionFactory
from pyramid.settings import asbool
from pyramid.wsgi import wsgiapp2
default_settings = (
('route_path', str, '/management'),
('domain', str, ''),
)
# Stolen from pyramid_debugtoolbar
def parse_settings(settings):
    """Resolve every known management option from *settings*.

    Each entry of default_settings is (name, converter, default); the
    returned dict maps the fully-prefixed option name to its converted
    value, falling back to the default when the key is absent.
    """
    prefix = 'usingnamespace.management.'
    parsed = {}
    for name, convert, default in default_settings:
        key = prefix + name
        parsed[key] = convert(settings.get(key, default))
    return parsed
def includeme(config):
    """Wire the management sub-application into the parent Pyramid app."""
    # Go parse the settings
    settings = parse_settings(config.registry.settings)
    # Update the config
    config.registry.settings.update(settings)
    # Create the application
    application = make_application(config.registry.settings, config.registry)
    # Add the API route
    route_kw = {}
    # NOTE(review): indentation reconstructed -- predicate registration is
    # assumed to belong inside this guard (only needed when a domain is set).
    if config.registry.settings['usingnamespace.management.domain'] != '':
        route_kw['is_management_domain'] = config.registry.settings['usingnamespace.management.domain']
        config.add_route_predicate('is_management_domain', config.maybe_dotted('.predicates.route.Management'))
    config.add_route('usingnamespace.management',
                     config.registry.settings['usingnamespace.management.route_path'] + '/*subpath',
                     **route_kw)
    # Add the API view
    config.add_view(wsgiapp2(application), route_name='usingnamespace.management')
def make_application(settings, parent_registry):
    """Build the standalone management WSGI app, sharing the parent app's
    settings and keeping a back-reference to its registry."""
    config = Configurator()
    config.registry.settings.update(settings)
    config.registry.parent_registry = parent_registry
    config.include('pyramid_mako')
    # Create the session factory, we are using the stock one
    _session_factory = SignedCookieSessionFactory(
        settings['pyramid.secret.session'],
        httponly=True,
        max_age=864000
    )
    config.set_session_factory(_session_factory)
    config.include('..security')
    config.add_static_view('static', 'usingnamespace:static/', cache_max_age=3600)

    def is_management(request):
        # Request helper: true only for the management 'main' route.
        if request.matched_route is not None and request.matched_route.name == 'usingnamespace.management.main':
            return True
        return False
    config.add_request_method(callable=is_management, name='is_management', reify=True)
    config.add_subscriber_predicate('is_management', config.maybe_dotted('.predicates.subscriber.IsManagement'))
    config.add_route('management',
                     '/*traverse',
                     factory='.traversal.Root',
                     use_global_views=False,
                     )
    config.scan('.views')
    config.scan('.subscribers')
    return config.make_wsgi_app()
def main(global_config, **settings):
    # Paster entry-point placeholder; the app is built via includeme() /
    # make_application() instead.
    pass
| Python | 0.000001 | |
4fbb9ca1b055b040214c82dc307f69793947b800 | Add handler for syncing wallets to server | api/sync_wallet.py | api/sync_wallet.py | import urlparse
import os, sys
import json
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
from msc_apps import *
data_dir_root = os.environ.get('DATADIR')
def sync_wallet_response(request_dict):
    """Dispatch a parsed request dict to the matching handler.

    Returns (json_string, None) on success or (None, error_message) when
    the request is malformed or the type is unsupported.
    """
    # 'in' works on both Python 2 and 3; dict.has_key() was removed in Py3.
    if 'type' not in request_dict:
        return (None, 'No field type in response dict '+str(request_dict))
    # Debug trace of the incoming request (single-arg print is valid in
    # both Python 2 and 3).
    print(request_dict)
    req_type = request_dict['type'][0].upper()
    if req_type == "SYNCWALLET":
        response_data = syncWallets(request_dict['masterWallets'][0])
    else:
        return (None, req_type + ' is not supported')
    response = { 'status': 'OK', 'data': response_data }
    return (json.dumps(response), None)
def syncWallets(master_wallets_json):
    """Persist each master wallet (JSON list) to <DATADIR>/wallets/<uuid>.json."""
    master_wallets = json.loads(master_wallets_json)
    print master_wallets
    for wallet in master_wallets:
        uuid = wallet['uuid']
        # NOTE(review): uuid comes straight from the client; a malicious
        # value could escape the wallets directory -- confirm upstream
        # validation before trusting it in a filesystem path.
        filename = data_dir_root + '/wallets/' + uuid + '.json'
        with open(filename, 'w') as f:
            json.dump(wallet, f)
    return "OK"
def sync_wallet_handler(environ, start_response):
    # WSGI entry point: delegate to the shared handler with our responder.
    return general_handler(environ, start_response, sync_wallet_response)
| Python | 0 | |
eefa26090a4ff8fc23908afa83c87c2d54568929 | add pager duty sample alert plugin, closes #249 | alerts/plugins/pagerDutyTriggerEvent.py | alerts/plugins/pagerDutyTriggerEvent.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Jeff Bryner jbryner@mozilla.com
import requests
import json
import os
import sys
from configlib import getConfig, OptionParser
class message(object):
    def __init__(self):
        '''
        takes an incoming alert
        and uses it to trigger an event using
        the pager duty event api
        '''
        # Only fire for alerts registered against the 'bro' category.
        self.registration = ['bro']
        self.priority = 2
        # set my own conf file
        # relative path to the rest index.py file
        self.configfile = './plugins/pagerDutyTriggerEvent.conf'
        self.options = None
        if os.path.exists(self.configfile):
            sys.stdout.write('found conf file {0}\n'.format(self.configfile))
            self.initConfiguration()

    def initConfiguration(self):
        myparser = OptionParser()
        # setup self.options by sending empty list [] to parse_args
        (self.options, args) = myparser.parse_args([])
        # fill self.options with plugin-specific options
        # change this to your default zone for when it's not specified
        self.options.serviceKey = getConfig('serviceKey', 'APIKEYHERE', self.configfile)

    def onMessage(self, message):
        # here is where you do something with the incoming alert message
        # NOTE(review): indentation reconstructed -- the whole PagerDuty
        # call is assumed to sit inside the 'summary' guard; confirm.
        if 'summary' in message.keys():
            print message['summary']
            headers = {
                'Content-type': 'application/json',
            }
            payload = json.dumps({
                "service_key": "{0}".format(self.options.serviceKey),
                "incident_key": "bro",
                "event_type": "trigger",
                "description": "{0}".format(message['summary']),
                "client": "mozdef",
                "client_url": "http://mozdef.rocks",
                "details": message['events']
            })
            # NOTE(review): no timeout= on this request -- a hung PagerDuty
            # endpoint would block the alert pipeline; confirm intended.
            r = requests.post(
                'https://events.pagerduty.com/generic/2010-04-15/create_event.json',
                headers=headers,
                data=payload,
            )
            print r.status_code
            print r.text
        # you can modify the message if needed
        # plugins registered with lower (>2) priority
        # will receive the message and can also act on it
        # but even if not modified, you must return it
        return message
32ea116ff172da3e7f0eeb7d9dea6b9a0378be08 | Add persistance | persistance.py | persistance.py | import numpy as np
import os
from itertools import izip
T_FILE = "t.npy"
C_FILE = "c.npy"
K_FILE = "k.npy"
U_FILE = "u.npy"
FP_FILE = "fp.npy"
IER_FILE = "ier.npy"
MSG_FILE = "msg.txt"
def saveSplines(directory, splines):
    """Serialize spline fits to .npy arrays plus a text file of messages.

    Each element of *splines* has the shape (((t, c, k), u), fp, ier, msg)
    -- the result-tuple layout produced by scipy.interpolate.splprep.  The
    per-field lists are stacked into one numpy array per component and
    written under the module-level file names inside *directory*.
    """
    # (The previous revision unpacked splines[0] here; the values were
    # immediately shadowed by the loop, so that dead code is removed.)
    tlst = []
    clst = []
    klst = []
    ulst = []
    fplst = []
    ierlst = []
    msglst = []
    for ((t, c, k), u), fp, ier, msg in splines:
        tlst.append(t)
        clst.append(c)
        klst.append(k)
        ulst.append(u)
        fplst.append(fp)
        ierlst.append(ier)
        msglst.append(msg + '\n')
    np.save(os.path.join(directory, T_FILE), np.array(tlst))
    np.save(os.path.join(directory, C_FILE), np.array(clst))
    np.save(os.path.join(directory, K_FILE), np.array(klst))
    np.save(os.path.join(directory, U_FILE), np.array(ulst))
    np.save(os.path.join(directory, FP_FILE), np.array(fplst))
    np.save(os.path.join(directory, IER_FILE), np.array(ierlst))
    with open(os.path.join(directory, MSG_FILE), 'w') as f:
        f.writelines(msglst)
def loadSplines(directory):
    """Inverse of saveSplines: rebuild the list of
    (([t, c, k], u), fp, ier, msg) tuples from *directory*."""
    tarr = np.load(os.path.join(directory, T_FILE))
    carr = np.load(os.path.join(directory, C_FILE))
    karr = np.load(os.path.join(directory, K_FILE))
    uarr = np.load(os.path.join(directory, U_FILE))
    fparr = np.load(os.path.join(directory, FP_FILE))
    ierarr = np.load(os.path.join(directory, IER_FILE))
    with open(os.path.join(directory, MSG_FILE)) as f:
        # readlines keeps the trailing '\n' that saveSplines appended.
        msglst = f.readlines()
    # izip is Python 2 itertools; under Python 3 this would be plain zip.
    return [(([t, c, k], u), fp, ier, msg) for t, c, k, u, fp, ier, msg in izip(tarr, carr, karr, uarr, fparr, ierarr, msglst)]
| Python | 0.998588 | |
a34318312199e6dab8ca3db92f247f0bda369e17 | Add missing testcase file | exercises/tests/testcase.py | exercises/tests/testcase.py | # This file is part of Workout Manager.
#
# Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.test import TestCase
class WorkoutManagerTestCase(TestCase):
    """Base test case that loads the shared user and exercise fixtures."""

    fixtures = ['tests-user-data', 'test-exercises', ]

    def user_login(self, user='admin'):
        """Login the user, by default as 'admin'
        """
        # Fixture convention: the password is the username repeated twice.
        self.client.login(username=user, password='%(user)s%(user)s' % {'user': user})

    def user_logout(self):
        """Visit the logout page
        """
        self.client.logout()
| Python | 0.000003 | |
b36ef2473c70ff16f4033f24ffc69a3b30b0ce26 | add edit-prowjobs.py | experiment/edit-prowjobs.py | experiment/edit-prowjobs.py | #!/usr/bin/env python3
# Copyright 2021 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Edit prowjobs en-masse by round-tripping them through ruamel.yaml
This is not intended for general usage, because:
- not all jobs can or should be edited
- many jobs have different formatting, and we're not at a point where
we can enforce formatting standards, so this is almost guaranteed
to introduce formatting change noise
- the idea is to manually edit this file with the specific edit to be
done, rather that developing a general purpose language to do this
"""
import re
import argparse
import glob
from os import path, walk
import ruamel.yaml
# Prow files that will be ignored
EXCLUDED_JOB_CONFIGS = [
# Ruamel won't be able to successfully dump fejta-bot-periodics
# See https://bitbucket.org/ruamel/yaml/issues/258/applying-json-patch-breaks-comment
"fejta-bot-periodics.yaml",
# generated.yaml is generated by generate_tests.py, and will be overwritten.
"generated.yaml",
]
# A hilariously large line length to ensure we never line-wrap
MAX_WIDTH = 2000000000
def setup_yaml():
    """Return a round-trip ruamel.yaml parser configured to minimise
    formatting churn when job files are re-dumped."""
    # Setup the ruamel.yaml parser
    yaml = ruamel.yaml.YAML(typ='rt')
    yaml.preserve_quotes = True
    # Observed indent styles per job directory:
    # GoogleCloudPlatform/ - no yaml.indent
    # bazelbuild/ - no yaml.indent
    # cadvisor/ - no yaml.indent
    # containerd/ - no yaml.indent
    # image-pushing/ - yaml.indent(mapping=2, sequence=4, offset=2)
    # kubernetes/ - yaml.indent(mapping=2) seems to cause the least change
    # kubernetes-client - TBD
    # kubernetes-csi - TBD
    # kubernetes-sigs - TBD
    yaml.indent(mapping=2, sequence=4, offset=2)
    # Effectively disable line wrapping (see MAX_WIDTH above).
    yaml.width = MAX_WIDTH
    return yaml
def edit_job_config(yaml, prow_job_file_name):
    """Round-trip one Prow job config file through *yaml*, applying edit()
    to every job selected by should_edit(), then write the file back."""
    with open(prow_job_file_name, "r") as job_fp:
        prow_config = yaml.load(job_fp)

    def should_edit(job):
        # Edit this predicate (and edit() below) for the change being made.
        return job["name"] == "a-specific-job-to-edit"

    def edit(job):
        return job

    def edit_jobs(jobs):
        # Shared loop body for all three job types.
        for job in jobs:
            if should_edit(job):
                edit(job)

    # presubmits/postsubmits map <any repository> -> [{name: prowjob}];
    # periodics is a flat [{name: prowjob}] list.
    for repo_jobs in prow_config.get("presubmits", {}).values():
        edit_jobs(repo_jobs)
    for repo_jobs in prow_config.get("postsubmits", {}).values():
        edit_jobs(repo_jobs)
    edit_jobs(prow_config.get("periodics", []))

    # Dump ProwConfig back to prow_job_file_name
    with open(prow_job_file_name, "w") as job_fp:
        yaml.dump(prow_config, job_fp)
        job_fp.truncate()
def main(prow_job_dir):
    """Edit every non-excluded prowjob YAML under *prow_job_dir* in place."""
    yaml = setup_yaml()
    for f in glob.glob(f'{prow_job_dir}/**/*.yaml', recursive=True):
        if path.basename(f) not in EXCLUDED_JOB_CONFIGS:
            try:
                print(f'editing {f}')
                edit_job_config(yaml, f)
            # A bare 'except:' would also trap KeyboardInterrupt/SystemExit
            # and hid the failure cause; catch Exception and report it.
            except Exception as exc:
                print(f'ERROR: could not edit {f}: {exc}')
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(
description='Does things to prowjob configs')
PARSER.add_argument(
'--prow-job-dir',
default='../config/jobs',
help='Path to Prow Job Directory')
ARGS = PARSER.parse_args()
main(ARGS.prow_job_dir)
| Python | 0 | |
ac8c78682e77d77be44910c36057e0217477b0a4 | Test OAI endpoint model | tests/test_models/test_oai_endpoint.py | tests/test_models/test_oai_endpoint.py | from django.test import TestCase
from core.models import OAIEndpoint
class OAIEndpointTestCase(TestCase):
    """Round-trip the OAIEndpoint model through save, __str__ and as_dict."""

    @classmethod
    def setUpTestData(cls):
        # One shared, saved endpoint for all tests in this case.
        cls.attributes = {
            'name': 'Test OAI Endpoint',
            'endpoint': 'http://oai.example.com',
            'verb': 'ListRecords',
            'metadataPrefix': 'mods',
            'scope_type': 'setList',
            'scope_value': 'someset, anotherset'
        }
        cls.oai_endpoint = OAIEndpoint(**cls.attributes)
        cls.oai_endpoint.save()

    def test_str(self):
        self.assertEqual('OAI endpoint: Test OAI Endpoint', format(OAIEndpointTestCase.oai_endpoint))

    def test_as_dict(self):
        # Every constructor attribute must survive the as_dict round trip.
        as_dict = OAIEndpointTestCase.oai_endpoint.as_dict()
        for k, v in OAIEndpointTestCase.attributes.items():
            self.assertEqual(as_dict[k], v) | Python | 0 |
613a0056e12a28232542aaf561831d276868e413 | Add parametric map generator, good for wrinkles | programs/kinbody-creator/openraveMapGenerator.py | programs/kinbody-creator/openraveMapGenerator.py | #!/usr/bin/python
#import lxml.etree
#import lxml.builder
from lxml import etree
#E = lxml.builder.ElementMaker()
#KINBODY=E.KinBody
#BODY=E.Body
#GEOM=E.Geom
#EXTENTS=E.Extents
#TRANSLATION=E.Translation
#DIFUSSECOLOR=E.diffuseColor

# User variables
nX = 3
nY = 2
boxHeight = 1.0
resolution = 2.0 # Just to make similar to MATLAB [pixel/meter]
meterPerPixel = 1 / resolution # [meter/pixel]

# Program
Ez = boxHeight / 2.0 # Box size is actually double the extent
Ex = meterPerPixel / 2.0
Ey = meterPerPixel / 2.0
KinBody = etree.Element("KinBody", name="map")
# Emit one static box ("pixel") per grid cell of the nX x nY map.
for iY in range(nY):
    # print "iY:",iY
    for iX in range(nX):
        # print "* iX:",iX
        #-- Add E___ to each to force begin at 0,0,0 (centered by default)
        x = Ex + iX*meterPerPixel
        y = Ey + iY*meterPerPixel
        z = Ez # Add this to raise to floor level (centered by default)
        Number = iX + (iY * nX)
        #Create pixel
        Body = etree.SubElement(KinBody, "Body", name="square"+str(Number), type="static")
        Geom = etree.SubElement(Body, "Geom", type="box")
        Extents = etree.SubElement(Geom, "Extents").text= str(Ex)+" "+ str(Ey)+" "+str(Ez)
        Translation = etree.SubElement(Geom, "Translation").text= str(x)+" "+str(y)+" "+str(z)
        DifusseColor = etree.SubElement(Geom, "diffuseColor").text= ".5 .5 .5"
'''
the_doc = KINBODY(
BODY(
GEOM(
EXTENTS("0.001 0.115 0.065"),
TRANSLATION("0.6 "+ "-0.8 0.32"),
DIFUSSECOLOR(".5 .5 .5"),
type="box",
),
name="square"+str(i), type="static"
),
name="wall",
)
'''
# Write the assembled kinbody XML next to the script.
myStr = etree.tostring(KinBody, pretty_print=True)
outFile = open('map.kinbody.xml', 'w')
outFile.write(myStr)
outFile.close()
668c28fd55daa93e0024e14e7137f78919e93e2c | Add python client script | docs/basement_weather.py | docs/basement_weather.py | #!/usr/bin/python
import sys
import commands
import Adafruit_DHT
import twitter
import requests
import json

# Local timestamp (Atlantic Canada) for the tweet text.
date = commands.getoutput('TZ=":Canada/Atlantic" date')
#Get temp and humidity
humidity, temperature = Adafruit_DHT.read_retry(11, 4)
message = 'Temp: {0:0.1f} C Humidity: {1:0.1f} %'.format(temperature, humidity)
#send to basementweather API
url = 'https://basementweather.herokuapp.com/readings.json'
payload = {'temperature': '{0:0.1f}'.format(temperature), 'humidity': '{0:0.1f}'.format(humidity)}
headers = {'content-type': 'application/json'}
r = requests.post(url, data=json.dumps(payload), headers=headers)
#send to twitter
# SECURITY(review): live Twitter API credentials are hard-coded and
# committed here -- they should be revoked and loaded from config or the
# environment instead.
api = twitter.Api(consumer_key="QeT4mgIqGqAi6y7sKEgkcR8HQ",
                  consumer_secret="zM2dFpIk3YojKBdlZOwTCC82tEP3RxffLZG6MQJQwTBeckG8Pk",
                  access_token_key="771330006068830209-4QTn99ThbM6V2DT0hxlNymQOLykbmMM",
                  access_token_secret="akoFlkoNgov5aDJrmkCJTtSqQgvs2Q2Phl0rjVgVjh9Zi")
status = api.PostUpdate(message+" "+date)
print "%s just posted: %s" % (status.user.name, status.text) | Python | 0.000003 |
5a221296e9e7cc59e4fe4c85b178db06c1376f13 | Add product streamfield migrations | demo/apps/catalogue/migrations/0012_auto_20160617_1115.py | demo/apps/catalogue/migrations/0012_auto_20160617_1115.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
    """Adds the StreamField 'body' to Category and re-declares 'name'."""

    dependencies = [
        ('catalogue', '0011_auto_20160616_1335'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='body',
            field=wagtail.wagtailcore.fields.StreamField([(b'heading', wagtail.wagtailcore.blocks.CharBlock(classname=b'full title')), (b'paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), (b'image', wagtail.wagtailimages.blocks.ImageChooserBlock())]),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='category',
            name='name',
            field=models.CharField(max_length=255, verbose_name='Name', db_index=True),
        ),
    ]
| Python | 0 | |
a9b35aff92c099aa52ce9e1ca1cb0df169a54ef5 | Add author to header. | publisher/writer.py | publisher/writer.py | __all__ = ['writer']
import docutils.core as dc
import docutils.writers
from docutils import nodes
from docutils.writers.latex2e import (Writer, LaTeXTranslator,
PreambleCmds)
class Translator(LaTeXTranslator):
    """LaTeX translator that collects author docinfo fields and emits a
    \\title/\\author/\\maketitle preamble when the document is departed."""

    def __init__(self, *args, **kwargs):
        LaTeXTranslator.__init__(self, *args, **kwargs)
        # Per-instance author state.  These were previously class
        # attributes; mutable class-level lists are shared by every
        # Translator instance, so authors would leak between documents.
        self.current_field = ''
        self.author_names = []
        self.author_institutions = []
        self.author_emails = []

    # Handle author declarations
    def visit_docinfo(self, node):
        pass

    def depart_docinfo(self, node):
        pass

    def visit_author(self, node):
        self.author_names.append(self.encode(node.astext()))
        raise nodes.SkipNode

    def depart_author(self, node):
        pass

    def visit_classifier(self, node):
        pass

    def depart_classifier(self, node):
        pass

    def visit_field_name(self, node):
        # Remember which docinfo field we are in; its body follows.
        self.current_field = node.astext()
        raise nodes.SkipNode

    def visit_field_body(self, node):
        text = self.encode(node.astext())
        if self.current_field == 'email':
            self.author_emails.append(text)
        elif self.current_field == 'institution':
            self.author_institutions.append(text)
        self.current_field = ''
        raise nodes.SkipNode

    def depart_field_body(self, node):
        raise nodes.SkipNode

    def depart_document(self, node):
        LaTeXTranslator.depart_document(self, node)
        doc_title = '\\title{Test 1 2 3}'
        doc_title += '\\author{%s}' % ', '.join(self.author_names)
        doc_title += '\\maketitle'
        self.body_pre_docinfo = [doc_title]
writer = Writer()
writer.translator_class = Translator
| __all__ = ['writer']
import docutils.core as dc
import docutils.writers
from docutils import nodes
from docutils.writers.latex2e import (Writer, LaTeXTranslator,
PreambleCmds)
class Translator(LaTeXTranslator):
    """Older revision: fixed \\title/\\author preamble; institutions are
    collected as \\thanks{} footnotes on an author stack."""

    def __init__(self, *args, **kwargs):
        LaTeXTranslator.__init__(self, *args, **kwargs)

    # Handle author declarations
    current_field = ''

    def visit_docinfo(self, node):
        pass

    def depart_docinfo(self, node):
        pass

    def visit_author(self, node):
        # NOTE(review): self.author_stack is never initialised in this
        # class -- presumably provided by LaTeXTranslator; confirm.
        self.author_stack.append([self.encode(node.astext())])
        raise nodes.SkipNode

    def depart_author(self, node):
        pass

    def visit_classifier(self, node):
        pass

    def depart_classifier(self, node):
        pass

    def visit_field_name(self, node):
        # Remember the docinfo field name; its body is handled next.
        self.current_field = node.astext()
        raise nodes.SkipNode

    def visit_field_body(self, node):
        if self.current_field == 'email':
            pass
        elif self.current_field == 'institution':
            institute = '\\thanks{%s}' % self.encode(node.astext())
            self.author_stack[-1].append(institute)
        self.current_field = ''
        raise nodes.SkipNode

    def depart_field_body(self, node):
        raise nodes.SkipNode

    def depart_document(self, node):
        LaTeXTranslator.depart_document(self, node)
        doc_title = r'\title{Test 1 2 3}\author{Me}\maketitle'
        self.body_pre_docinfo = [doc_title]
writer = Writer()
writer.translator_class = Translator
| Python | 0 |
6fb3c87f0f9b238eab71df6880568005e3a2b461 | add DuckDuckGo Instant Answer plugin | plugins/ddg.py | plugins/ddg.py | # coding=utf-8
import json
import urllib.request
@yui.threaded
@yui.command('duckduckgo', 'ddg')
def ddg(argv):
    '''Returns the Instant Answer for a given query. Usage: ddg -lang <query>'''
    # quote lives in urllib.parse; urllib.request merely re-exports it.
    from urllib.parse import quote

    lang = 'en_US'
    if len(argv) < 1:
        return
    # drop the command name, then check if a language was given
    argv = argv[1:]
    if len(argv) > 1 and argv[0].startswith('-'):
        lang = argv[0][1:]
        argv = argv[1:]
    # Nothing left to ask about -- don't query the API with an empty string.
    if not argv:
        return

    q = quote(' '.join(argv).encode('utf-8'))
    url = f'https://api.duckduckgo.com/?q={q}&format=json&no_html=1&skip_disambig=1&no_redirect=1'
    h = { 'Accept-Language' : lang }
    req = urllib.request.Request(url, headers=h)
    with urllib.request.urlopen(req) as r:
        js = json.loads(r.read().decode('utf-8'))

    Type = js.get('Type')
    AbstractText = js.get('AbstractText')
    AbstractURL = js.get('AbstractURL')
    Heading = js.get('Heading')
    Answer = js.get('Answer')
    Redirect = js.get('Redirect')

    reply = 'No results.'
    if Type == 'D' or Type == 'C': # disambiguation or category
        reply = f'{Heading}: {AbstractURL}'
    elif Type == 'A': # article
        reply = f'{Heading}: {AbstractText} - {AbstractURL}'
    elif Type == 'E': # exclusive, e.g. calc/conversion and redirects
        if type(Answer) is str and Answer != '':
            reply = Answer
        elif type(Answer) is dict and 'result' in Answer:
            reply = Answer['result']
        elif Redirect != '':
            reply = f'Redirect: {Redirect}'
    return reply
| Python | 0.000002 | |
e8576b67f8a3778c43a85e24d1cbc0e1985fe2ca | Add float.is_integer() example | trypython/basic/builtinfunc/float01.py | trypython/basic/builtinfunc/float01.py | """
組み込みクラス float のサンプルです.
float.is_integer() について
"""
from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import pr
class Sample(SampleBase):
    """Demonstrates float.is_integer()."""

    def exec(self):
        num = 1.00
        pr('type(num)', type(num))
        pr('is_integer', num.is_integer())  # ==> True (the value can be represented as an integer)
        pr('int()', int(num))
        num = 1.05
        pr('is_integer', num.is_integer())  # ==> False (the value cannot be represented as an integer)
        pr('int()', int(num))
def go():
    """Instantiate the sample and run it."""
    Sample().exec()
# Allow running this sample directly as a script.
if __name__ == '__main__':
    go()
| Python | 0.002001 | |
3b41e3bcc721d47ce5a7314af2ac4d9006598164 | add word class | pythonfiles/word.py | pythonfiles/word.py |
# Contains models for storing words and data returned from parsing websites.
#
class word(object):
    """Model for a single word extracted while parsing a website.

    Stores the word itself plus optional metadata: part of speech,
    definition, the context it appeared in, related sub-word forms,
    an occurrence count and a rating.

    The lowercase class name and the getter/setter interface are kept
    for backward compatibility with existing callers.
    """

    def __init__(self, name=None, part_of_speech=None, definition=None,
                 context=None, subwords=None, count=None, rating=None):
        self.name = name
        self.part_of_speech = part_of_speech
        self.definition = definition
        self.context = context
        self.subwords = subwords
        self.count = count
        self.rating = rating

    def __str__(self):
        return self.name

    def get_name(self):
        return self.name

    def get_part_of_speech(self):
        return self.part_of_speech

    def get_definition(self):
        return self.definition

    def get_context(self):
        return self.context

    def get_forms(self):
        # Bug fix: the word forms are stored in `subwords`; there has never
        # been a `forms` attribute, so the old `return self.forms` always
        # raised AttributeError.
        return self.subwords

    def get_count(self):
        return self.count

    def get_rating(self):
        return self.rating

    def set_count(self, value):
        self.count = value

    def incr_count(self, value):
        # Assumes count was initialised to a number; += on None would raise.
        self.count += value
| Python | 0.006846 | |
4cedd5cf992c180da7d778a9a8adc225a7d8c496 | test various odd characters in the filenames, including some UTF-8 | lib/test_filenames.py | lib/test_filenames.py | from smashbox.utilities import *
from smashbox.utilities.hash_files import count_files
__doc__ = """ Test various characters in the file names.
bug #104648: add UTF-8 escaping in PROPFIND response body (https://savannah.cern.ch/bugs/?104648)
Notes:
- unescaped % characters in propfind response crashes csync
"""
# Size of each generated test file, in kB (overridable via smashbox config).
filesizeKB = int(config.get('filenames_filesizeKB', 1))

# Characters the sync client refuses to sync (see: mirall/csync/src/csync_exclude.c);
# file names containing any of these are expected NOT to propagate.
charsets_excluded_from_sync = {
    'backslash': '\\',
    'colon': ':',
    'questionmark': '?',
    'asterisk': '*',
    'doublequote': '"',
    'greater': '>',
    'smaller': '<',
    'pipe': '|'
}
@add_worker
def creator(step):
    # Worker 1: create files whose names exercise awkward characters, sync
    # them repeatedly and verify nothing is lost locally.
    reset_owncloud_account()
    reset_rundir()

    step(1, 'create initial content and sync')

    d = make_workdir()

    # Each "space" in these patterns is substituted by a test character;
    # the literal word "space" in the name is replaced by a charset tag.
    namepatterns = [
        "space1 testfile.dat",
        "space2testfile .dat",
        " space3testfile .dat",
        "space4testfile.dat ",
        "space5testfile. dat",
        " space6 testfile . dat ",
        " "
    ]

    charsets = {'space': ' ',
                'plus': '+',
                'underscore': '_',
                'moscicki': '\xc5\x9b',  # some UTF-8 unicode character...
                'singlequote': "'"}

    # Also include the characters the client is expected to refuse to sync.
    charsets.update(charsets_excluded_from_sync)

    filenames = []

    for c in charsets:
        for n in namepatterns:
            nn = n.replace('space', "_" + c + "_").replace(' ', charsets[c])
            # print nn
            filenames.append(nn)
            createfile(os.path.join(d, nn), '1', count=filesizeKB, bs=1000)

    # generic charsets -- let's take a hammer and test (almost) all ANSI characters
    # we don't test for the forward slash
    char_range = range(32, 47) + range(58, 65) + range(91, 97) + range(123, 127)
    # char_range.remove(37)  # to see the tests to completion temporarily remove this character as it crashes csync
    # char_range=[]
    for i in char_range:
        for n in namepatterns:
            nn = n.replace('space', '_chr' + str(i) + '_').replace(' ', chr(i))
            if nn == '.':  # skip this
                continue
            filenames.append(nn)
            createfile(os.path.join(d, nn), '1', count=filesizeKB, bs=1000)

    files_1 = os.listdir(d)
    N = count_files(d)

    # Publish the expected file list/count for the propagator worker.
    shared = reflection.getSharedObject()
    shared['files_1'] = files_1
    shared['N'] = N

    # Sync several times; the local file count must stay stable each pass.
    for i in range(3):  # 2 is enough but 3 is better ;-)
        list_files(d)
        run_ocsync(d)
        error_check(count_files(d) == N, "some files lost!")

    files_2 = os.listdir(d)

    # Report every file that vanished from the local directory after syncing.
    for fn in set(files_1) - set(files_2):
        error_check(False, "the file has disappeared: %s" % repr(fn))
@add_worker
def propagator(step):
    # Worker 2: sync into a fresh directory and verify that every syncable
    # file created by `creator` was propagated.
    step(2, 'check propagation of files')

    d = make_workdir()

    shared = reflection.getSharedObject()
    files_1 = shared['files_1']

    # take the original file list produced by creator and remove all file names containing characters excluded from sync
    expected_files = [fn for fn in files_1 if not any((c in charsets_excluded_from_sync.values()) for c in fn)]

    logger.info("expected %d files to be propagated (excluding the ones with unsyncable characters %s)", len(expected_files), repr(charsets_excluded_from_sync.values()))

    run_ocsync(d)

    N2 = count_files(d)
    files_3 = os.listdir(d)

    # Report every expected file that did not arrive.
    for fn in set(expected_files) - set(files_3):
        error_check(False, "the file has not been propagated: %s" % repr(fn))
| Python | 0.000315 | |
83f6febfc051c8d7d3462e9a60f4eae036fa0529 | guessing the number genetic algorithm | numberGuesser.py | numberGuesser.py | import random
import time
def go(lower_bound, upper_bound, bots):
    """Randomly guess a target in [lower_bound, upper_bound] with `bots`
    guessers per round, narrowing the bounds after each round.

    Returns the target once some guess hits it exactly.

    Fixes over the original: removed the unused `curr_low`/`curr_high`
    locals, and paired each answer with its score directly instead of the
    O(n) `scores.index(score)` lookup per element (score == answer - target,
    so equal scores always meant equal answers; the output is unchanged).
    """
    iterations = 1
    target = random.randint(lower_bound, upper_bound)
    while True:
        print(f"Target number: {target}")
        answers = [random.randint(lower_bound, upper_bound)
                   for _ in range(bots)]
        for answer in answers:
            score = answer - target
            if score == 0:
                print(
                    f"Answer found: {answer}\nIterations required: {iterations} ")
                return answer
            if score < 0 and answer > lower_bound:
                # Guess was too low: it becomes the new lower bound.
                lower_bound = answer
                print(f"Shifting lower bound to {lower_bound}")
            elif score > 0 and answer < upper_bound:
                # Guess was too high: it becomes the new upper bound.
                upper_bound = answer
                print(f"Shifting upper bound to {upper_bound}")
        time.sleep(0.5)  # pacing so the console output is readable
        print(f"Searching in new range {lower_bound} to {upper_bound}")
        iterations += 1
# Demo run: guess a number between 1 and 100 with 5 guessers per round.
if __name__ == "__main__":
    go(1, 100, 5)
| Python | 0.999916 | |
6ac6202837cade41b1dad5af9a474be171255993 | Check that Constant is given Moments instance, not class | bayespy/inference/vmp/nodes/constant.py | bayespy/inference/vmp/nodes/constant.py | ################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
from .node import Node, Moments
class Constant(Node):
    r"""
    Node for presenting constant values.

    The node wraps arrays into proper node type.
    """

    def __init__(self, moments, x, **kwargs):
        """Wrap the array `x` as a constant node.

        `moments` must be a Moments *instance* (not the class) describing
        how fixed values are converted into moments.
        """
        # Reject a Moments subclass passed where an instance is required.
        if not isinstance(moments, Moments) and issubclass(moments, Moments):
            raise ValueError("Give moments as an object instance instead of a class")
        self._moments = moments
        x = np.asanyarray(x)
        # Compute moments
        self.u = self._moments.compute_fixed_moments(x)
        # Dimensions of the moments
        dims = self._moments.compute_dims_from_values(x)
        # Resolve plates: the leading axes of the first moment beyond its
        # variable dimensions are the plate axes.
        D = len(dims[0])
        if D > 0:
            plates = np.shape(self.u[0])[:-D]
        else:
            plates = np.shape(self.u[0])
        # Parent constructor
        super().__init__(dims=dims, plates=plates, **kwargs)

    def _get_id_list(self):
        """
        Returns the stochastic ID list.

        This method is used to check that same stochastic nodes are not direct
        parents of a node several times. It is only valid if there are
        intermediate stochastic nodes.

        To put it another way: each ID corresponds to one factor q(..) in the
        posterior approximation. Different IDs mean different factors, thus they
        mean independence. The parents must have independent factors.

        Stochastic nodes should return their unique ID. Deterministic nodes
        should return the IDs of their parents. Constant nodes should return
        empty list of IDs.
        """
        return []

    def get_moments(self):
        # Moments are fixed at construction time (or via set_value).
        return self.u

    def set_value(self, x):
        """Replace the wrapped value; its moments must keep the same shapes."""
        x = np.asanyarray(x)
        shapes = [np.shape(ui) for ui in self.u]
        self.u = self._moments.compute_fixed_moments(x)
        for (i, shape) in enumerate(shapes):
            if np.shape(self.u[i]) != shape:
                raise ValueError("Incorrect shape for the array")

    def lower_bound_contribution(self, gradient=False, **kwargs):
        # Deterministic functions are delta distributions so the lower bound
        # contribution is zero.
        return 0
| ################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
from .node import Node
class Constant(Node):
    r"""
    Node for presenting constant values.

    The node wraps arrays into proper node type.
    """

    def __init__(self, moments, x, **kwargs):
        """Wrap the array `x` as a constant node.

        NOTE(review): `moments` is used as an instance below but there is no
        check that an instance (rather than the Moments class) was passed.
        """
        self._moments = moments
        x = np.asanyarray(x)
        # Compute moments
        self.u = self._moments.compute_fixed_moments(x)
        # Dimensions of the moments
        dims = self._moments.compute_dims_from_values(x)
        # Resolve plates: the leading axes of the first moment beyond its
        # variable dimensions are the plate axes.
        D = len(dims[0])
        if D > 0:
            plates = np.shape(self.u[0])[:-D]
        else:
            plates = np.shape(self.u[0])
        # Parent constructor
        super().__init__(dims=dims, plates=plates, **kwargs)

    def _get_id_list(self):
        """
        Returns the stochastic ID list.

        This method is used to check that same stochastic nodes are not direct
        parents of a node several times. It is only valid if there are
        intermediate stochastic nodes.

        To put it another way: each ID corresponds to one factor q(..) in the
        posterior approximation. Different IDs mean different factors, thus they
        mean independence. The parents must have independent factors.

        Stochastic nodes should return their unique ID. Deterministic nodes
        should return the IDs of their parents. Constant nodes should return
        empty list of IDs.
        """
        return []

    def get_moments(self):
        # Moments are fixed at construction time (or via set_value).
        return self.u

    def set_value(self, x):
        """Replace the wrapped value; its moments must keep the same shapes."""
        x = np.asanyarray(x)
        shapes = [np.shape(ui) for ui in self.u]
        self.u = self._moments.compute_fixed_moments(x)
        for (i, shape) in enumerate(shapes):
            if np.shape(self.u[i]) != shape:
                raise ValueError("Incorrect shape for the array")

    def lower_bound_contribution(self, gradient=False, **kwargs):
        # Deterministic functions are delta distributions so the lower bound
        # contribution is zero.
        return 0
| Python | 0.998847 |
435004cebce00510db3bf36ae21b2cbf37020f32 | add petpvc specs file | nipype/interfaces/tests/test_auto_PETPVC.py | nipype/interfaces/tests/test_auto_PETPVC.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ...testing import assert_equal
from ..petpvc import PETPVC
def test_PETPVC_inputs():
    # AUTO-GENERATED: expected trait metadata for every PETPVC input,
    # compared against the interface's live input_spec below.
    # NOTE(review): alpha and stop_crit both declare argstr '-a %.4f' --
    # looks suspect, verify against the PETPVC interface definition.
    input_map = dict(alpha=dict(argstr='-a %.4f',),
                     args=dict(argstr='%s',),
                     debug=dict(argstr='-d', usedefault=True,),
                     environ=dict(nohash=True, usedefault=True,),
                     fwhm_x=dict(argstr='-x %.4f', mandatory=True,),
                     fwhm_y=dict(argstr='-y %.4f', mandatory=True,),
                     fwhm_z=dict(argstr='-z %.4f', mandatory=True,),
                     ignore_exception=dict(nohash=True, usedefault=True,),
                     in_file=dict(argstr='-i %s', mandatory=True,),
                     mask_file=dict(argstr='-m %s', mandatory=True,),
                     n_deconv=dict(argstr='-k %d',),
                     n_iter=dict(argstr='-n %d',),
                     out_file=dict(argstr='-o %s', genfile=True, hash_files=False,),
                     pvc=dict(argstr='-p %s', mandatory=True,),
                     stop_crit=dict(argstr='-a %.4f',),
                     terminal_output=dict(nohash=True,),
                     )
    inputs = PETPVC.input_spec()

    # Yield one assert_equal per (trait, metadata) pair (nose-style tests).
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_PETPVC_outputs():
    # AUTO-GENERATED: expected trait metadata for every PETPVC output.
    # out_file carries no metadata, so the loop currently yields no checks.
    output_map = dict(out_file=dict(),
                      )
    outputs = PETPVC.output_spec()

    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| Python | 0 | |
99e531ec0e86d7c1d34de154cc49584821f85904 | Make all metavars uppercased | powerline/commands/config.py | powerline/commands/config.py | # vim:fileencoding=utf-8:noet
from __future__ import (division, absolute_import, print_function)
import argparse
import powerline.bindings.config as config
class StrFunction(object):
    """Wrap a callable so that str() yields a friendly name.

    Used so the action callables display nicely as argparse choices while
    remaining directly callable.

    :param function: the callable to wrap.
    :param name: display name; defaults to ``function.__name__``.
    """

    def __init__(self, function, name=None):
        self.name = name or function.__name__
        self.function = function

    def __call__(self, *args, **kwargs):
        # Propagate the wrapped function's return value (it was previously
        # discarded; callers that ignored the result are unaffected).
        return self.function(*args, **kwargs)

    def __str__(self):
        return self.name
# Map subcommand action names to their implementations; StrFunction makes
# each callable display as its name in argparse choice lists.
TMUX_ACTIONS = {
    'source': StrFunction(config.source_tmux_files, 'source'),
}

SHELL_ACTIONS = {
    'command': StrFunction(config.shell_command, 'command'),
    'uses': StrFunction(config.uses),
}
class ConfigArgParser(argparse.ArgumentParser):
    """Argument parser that rejects invocations lacking a subcommand."""

    def parse_args(self, *args, **kwargs):
        namespace = super(ConfigArgParser, self).parse_args(*args, **kwargs)
        if hasattr(namespace, 'function'):
            return namespace
        # On Python 3, running `powerline-config` without arguments would
        # otherwise surface an AttributeError; there is no standard way to
        # reproduce Python 2's error message, so emit it explicitly.
        self.error('too few arguments')
def get_argparser(ArgumentParser=ConfigArgParser):
    """Build the powerline-config argument parser.

    `ArgumentParser` is injectable (e.g. for testing).
    """
    parser = ArgumentParser(description='Script used to obtain powerline configuration.')

    subparsers = parser.add_subparsers()

    # `tmux` subcommand: ACTION is resolved to its callable via TMUX_ACTIONS.
    tmux_parser = subparsers.add_parser('tmux', help='Tmux-specific commands')
    tmux_parser.add_argument(
        'function',
        choices=tuple(TMUX_ACTIONS.values()),
        metavar='ACTION',
        type=(lambda v: TMUX_ACTIONS.get(v)),
        help='If action is `source\' then version-specific tmux configuration files are sourced.'
    )

    # `shell` subcommand: ACTION resolved via SHELL_ACTIONS, plus an optional
    # COMPONENT and the shell being queried.
    shell_parser = subparsers.add_parser('shell', help='Shell-specific commands')
    shell_parser.add_argument(
        'function',
        choices=tuple(SHELL_ACTIONS.values()),
        type=(lambda v: SHELL_ACTIONS.get(v)),
        metavar='ACTION',
        help='If action is `command\' then preferred powerline command is output, if it is `uses\' then powerline-config script will exit with 1 if specified component is disabled and 0 otherwise.',
    )
    shell_parser.add_argument(
        'component',
        nargs='?',
        choices=('tmux', 'prompt'),
        metavar='COMPONENT',
    )
    shell_parser.add_argument(
        '-s', '--shell',
        metavar='SHELL',
        help='Shell for which query is run',
    )

    return parser
| # vim:fileencoding=utf-8:noet
from __future__ import (division, absolute_import, print_function)
import argparse
import powerline.bindings.config as config
class StrFunction(object):
    # Wraps a callable so str() yields a friendly name (used as argparse
    # choice labels below).
    def __init__(self, function, name=None):
        self.name = name or function.__name__
        self.function = function

    def __call__(self, *args, **kwargs):
        # NOTE(review): the wrapped function's return value is discarded.
        self.function(*args, **kwargs)

    def __str__(self):
        return self.name
# Map subcommand action names to their implementations; StrFunction makes
# each callable display as its name in argparse choice lists.
TMUX_ACTIONS = {
    'source': StrFunction(config.source_tmux_files, 'source'),
}

SHELL_ACTIONS = {
    'command': StrFunction(config.shell_command, 'command'),
    'uses': StrFunction(config.uses),
}
class ConfigArgParser(argparse.ArgumentParser):
    # ArgumentParser that fails uniformly when no subcommand (and hence no
    # `function` attribute) was supplied.
    def parse_args(self, *args, **kwargs):
        ret = super(ConfigArgParser, self).parse_args(*args, **kwargs)
        if not hasattr(ret, 'function'):
            # In Python-3* `powerline-config` (without arguments) raises
            # AttributeError. I have not found any standard way to display same
            # error message as in Python-2*.
            self.error('too few arguments')
        return ret
def get_argparser(ArgumentParser=ConfigArgParser):
    """Build the powerline-config argument parser.

    `ArgumentParser` is injectable (e.g. for testing).
    """
    parser = ArgumentParser(description='Script used to obtain powerline configuration.')

    subparsers = parser.add_subparsers()

    # `tmux` subcommand: action is resolved to its callable via TMUX_ACTIONS.
    tmux_parser = subparsers.add_parser('tmux', help='Tmux-specific commands')
    tmux_parser.add_argument(
        'function',
        choices=tuple(TMUX_ACTIONS.values()),
        metavar='action',
        type=(lambda v: TMUX_ACTIONS.get(v)),
        help='If action is `source\' then version-specific tmux configuration files are sourced.'
    )

    # `shell` subcommand: action resolved via SHELL_ACTIONS, plus an optional
    # component and the shell being queried.
    shell_parser = subparsers.add_parser('shell', help='Shell-specific commands')
    shell_parser.add_argument(
        'function',
        choices=tuple(SHELL_ACTIONS.values()),
        type=(lambda v: SHELL_ACTIONS.get(v)),
        metavar='action',
        help='If action is `command\' then preferred powerline command is output, if it is `uses\' then powerline-config script will exit with 1 if specified component is disabled and 0 otherwise.',
    )
    shell_parser.add_argument(
        'component',
        nargs='?',
        choices=('tmux', 'prompt'),
        metavar='component',
    )
    shell_parser.add_argument(
        '-s', '--shell',
        metavar='SHELL',
        help='Shell for which query is run',
    )

    return parser
| Python | 0.999999 |
c94653382a5a8aa2ec9af94f3aaa02ed5cae9b4c | Create test_send_sms.py | test_send_sms.py | test_send_sms.py | import serial
from curses import ascii
# since we need ascii code from CTRL-Z
import time
# here we are testing sending an SMS via virtual serial port ttyUSB0 that was created by a USB serial modem
# Bug fix: the original `phonenumber = #comment` line assigned no value at
# all, which is a SyntaxError. A placeholder is supplied instead.
phonenumber = "+441234123123"  # TODO: replace with the real destination number
SMS = "here's your SMS!"

# 460800 is baud rate, ttyUSB0 is the virtual serial port created by the
# USB serial modem.
ser = serial.Serial('/dev/ttyUSB0', 460800, timeout=1)

# Basic liveness check: send AT, expect OK back.
ser.write("AT\r\n")
line = ser.readline()
print(line)

# AT+CMGF=1 switches the modem into SMS text mode.
ser.write("AT+CMGF=1\r\n")
line = ser.readline()
print(line)

# AT+CMGS="<number>"<CR>, then the message body, terminated by CTRL-Z
# (sent via the curses ascii helper).
ser.write('AT+CMGS="%s"\r\n' % phonenumber)
ser.write(SMS)
ser.write(ascii.ctrl('z'))

# Give the modem time to deliver, then drain and show its responses.
# (print(...) with a single argument works under both Python 2 and 3;
# the original mixed print forms.)
time.sleep(10)
for _ in range(4):
    print(ser.readline())
| Python | 0.000002 | |
5e723223b5206e6e92a6140f184d41f1abfdbb3d | Create candy-crush.py | Python/candy-crush.py | Python/candy-crush.py | # Time: O((R * C)^2)
# Space: O(1)
# This question is about implementing a basic elimination algorithm for Candy Crush.
#
# Given a 2D integer array board representing the grid of candy,
# different positive integers board[i][j] represent different types of candies.
# A value of board[i][j] = 0 represents that the cell at position (i, j) is empty.
# The given board represents the state of the game following the player's move.
# Now, you need to restore the board to a stable state by crushing candies according to the following rules:
#
# If three or more candies of the same type are adjacent vertically or horizontally,
# "crush" them all at the same time - these positions become empty.
#
# After crushing all candies simultaneously,
# if an empty space on the board has candies on top of itself,
# then these candies will drop until they hit a candy or bottom at the same time.
# (No new candies will drop outside the top boundary.)
#
# After the above steps, there may exist more candies that can be crushed.
# If so, you need to repeat the above steps.
# If there does not exist more candies that can be crushed (ie. the board is stable),
# then return the current board.
# You need to perform the above rules until the board becomes stable, then return the current board.
#
# Example 1:
# Input:
# board =
# [[110,5,112,113,114],[210,211,5,213,214],[310,311,3,313,314],[410,411,412,5,414],[5,1,512,3,3],[610,4,1,613,614],[710,1,2,713,714],[810,1,2,1,1],[1,1,2,2,2],[4,1,4,4,1014]]
# Output:
# [[0,0,0,0,0],[0,0,0,0,0],[0,0,0,0,0],[110,0,0,0,114],[210,0,0,0,214],[310,0,0,113,314],[410,0,0,213,414],[610,211,112,313,614],[710,311,412,613,714],[810,411,512,713,1014]]
# Note:
# The length of board will be in the range [3, 50].
# The length of board[i] will be in the range [3, 50].
# Each board[i][j] will initially start as an integer in the range [1, 2000].
class Solution(object):
    def candyCrush(self, board):
        """
        Repeatedly crush all horizontal/vertical runs of 3+ equal candies,
        apply gravity, and return the stabilised board (mutated in place).

        Cells to crush are first marked by negating them (so one pass can
        mark overlapping runs), then gravity compacts positive values to
        the bottom of each column and zero-fills the rest.

        Bug fix: the original used ``xrange``, which does not exist on
        Python 3; ``range`` is behaviourally identical here.

        :type board: List[List[int]]
        :rtype: List[List[int]]
        """
        R, C = len(board), len(board[0])
        changed = True
        while changed:
            changed = False
            # Mark horizontal runs of 3+ (abs() so already-marked cells
            # still match).
            for r in range(R):
                for c in range(C - 2):
                    if abs(board[r][c]) == abs(board[r][c + 1]) == abs(board[r][c + 2]) != 0:
                        board[r][c] = board[r][c + 1] = board[r][c + 2] = -abs(board[r][c])
                        changed = True
            # Mark vertical runs of 3+.
            for r in range(R - 2):
                for c in range(C):
                    if abs(board[r][c]) == abs(board[r + 1][c]) == abs(board[r + 2][c]) != 0:
                        board[r][c] = board[r + 1][c] = board[r + 2][c] = -abs(board[r][c])
                        changed = True
            # Gravity: compact surviving (positive) candies to the bottom
            # of each column; everything above becomes empty (0).
            for c in range(C):
                i = R - 1
                for r in reversed(range(R)):
                    if board[r][c] > 0:
                        board[i][c] = board[r][c]
                        i -= 1
                for r in reversed(range(i + 1)):
                    board[r][c] = 0
        return board
| Python | 0.9993 | |
c4f0d10e2112a391f716f58ffa449a490549fac6 | find kth smallest | Python/kthsmallest.py | Python/kthsmallest.py | '''
Find the k-th smallest element in a union of two given sorted arrays,
a and b
'''
# O(k) implementation
def findkth(a, b, k):
    """Return the k-th smallest element (1-based) in the union of two
    sorted sequences `a` and `b`.

    Returns None when k is out of range (k < 1 or k > len(a) + len(b)),
    matching the implicit fall-through behaviour of the original
    implementation. Runs in O(k) time and O(1) extra space.

    The original hand-rolled the merge and repeated the counting logic
    three times (merge phase plus two tail phases); heapq.merge performs
    the same lazy merge in one expression.
    """
    import heapq
    import itertools

    if k < 1:
        return None
    # islice skips the first k-1 merged elements, so next() yields the
    # k-th smallest, or the default None if the union is shorter than k.
    return next(itertools.islice(heapq.merge(a, b), k - 1, None), None)
# Smoke tests: k-th smallest across both arrays, including picks from the
# tail of one array and a negative element.
assert findkth([1, 2, 4], [3, 7, 11], 3) == 3
assert findkth([1, 2, 4, 5], [3, 7, 11], 5) == 5
assert findkth([1, 2], [3, 7, 11], 4) == 7
assert findkth([1, 2], [3, 7, 11], 5) == 11
assert findkth([1, 2], [-1, 7, 11], 1) == -1
| Python | 0.999975 | |
1dad9de4cf94d7f321176cf5b339b1a68f4ac6d2 | Add template to LanguageAdmin; #551 | judge/admin/runtime.py | judge/admin/runtime.py | from django.db.models import TextField
from django.forms import TextInput, ModelForm, ModelMultipleChoiceField
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from reversion.admin import VersionAdmin
from judge.models import Problem
from judge.widgets import AdminPagedownWidget, HeavySelect2MultipleWidget
class LanguageForm(ModelForm):
    # Inverse selection: admins pick the problems a language may NOT be
    # used for; LanguageAdmin.save_model persists the complement.
    problems = ModelMultipleChoiceField(
        label=_('Disallowed problems'),
        queryset=Problem.objects.all(),
        required=False,
        help_text=_('These problems are NOT allowed to be submitted in this language'),
        widget=HeavySelect2MultipleWidget(data_view='problem_select2'))
class LanguageAdmin(VersionAdmin):
    fields = ('key', 'name', 'short_name', 'common_name', 'ace', 'pygments', 'info', 'description', 'template', 'problems')
    list_display = ('key', 'name', 'common_name', 'info')
    form = LanguageForm

    if AdminPagedownWidget is not None:
        formfield_overrides = {
            TextField: {'widget': AdminPagedownWidget},
        }

    def save_model(self, request, obj, form, change):
        super(LanguageAdmin, self).save_model(request, obj, form, change)
        # The form stores *disallowed* problems; persist the complement as
        # the language's allowed problem set.
        obj.problem_set = Problem.objects.exclude(id__in=form.cleaned_data['problems'].values('id'))

    def get_form(self, request, obj=None, **kwargs):
        # Pre-populate the "disallowed" field with the problems NOT
        # currently allowed for this language.
        self.form.base_fields['problems'].initial = \
            Problem.objects.exclude(id__in=obj.problem_set.values('id')).values_list('pk', flat=True) if obj else []
        return super(LanguageAdmin, self).get_form(request, obj, **kwargs)
class GenerateKeyTextInput(TextInput):
    # Text input widget with a client-side "Regenerate" button that fills
    # the field with a random 100-character key.
    def render(self, name, value, attrs=None):
        # NOTE(review): super(TextInput, self) skips TextInput's own render
        # and calls its base class -- confirm this is intentional.
        text = super(TextInput, self).render(name, value, attrs)
        # format_html doubles braces ({{ }}) to emit literal JS braces;
        # {0} is the field name.
        return mark_safe(text + format_html(
            '''\
<a href="#" onclick="return false;" class="button" id="id_{0}_regen">Regenerate</a>
<script type="text/javascript">
    (function ($) {{
        $(document).ready(function () {{
            $('#id_{0}_regen').click(function () {{
                var length = 100,
                    charset = "abcdefghijklnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789`~!@#$%^&*()_+-=|[]{{}};:,<>./?",
                    key = "";
                for (var i = 0, n = charset.length; i < length; ++i) {{
                    key += charset.charAt(Math.floor(Math.random() * n));
                }}
                $('#id_{0}').val(key);
            }});
        }});
    }})(django.jQuery);
</script>
''', name))
class JudgeAdminForm(ModelForm):
    class Meta:
        # auth_key gets the regenerate-button widget defined above.
        widgets = {'auth_key': GenerateKeyTextInput}
        if AdminPagedownWidget is not None:
            widgets['description'] = AdminPagedownWidget
class JudgeAdmin(VersionAdmin):
    form = JudgeAdminForm
    readonly_fields = ('created', 'online', 'start_time', 'ping', 'load', 'last_ip', 'runtimes', 'problems')
    fieldsets = (
        (None, {'fields': ('name', 'auth_key')}),
        (_('Description'), {'fields': ('description',)}),
        (_('Information'), {'fields': ('created', 'online', 'last_ip', 'start_time', 'ping', 'load')}),
        (_('Capabilities'), {'fields': ('runtimes', 'problems')}),
    )
    list_display = ('name', 'online', 'start_time', 'ping', 'load', 'last_ip')
    ordering = ['-online', 'name']

    def get_readonly_fields(self, request, obj=None):
        # Lock the name of a judge that is currently online.
        if obj is not None and obj.online:
            return self.readonly_fields + ('name',)
        return self.readonly_fields

    def has_delete_permission(self, request, obj=None):
        # Never allow deleting a judge that is currently online.
        result = super(JudgeAdmin, self).has_delete_permission(request, obj)
        if result and obj is not None:
            return not obj.online
        return result

    if AdminPagedownWidget is not None:
        formfield_overrides = {
            TextField: {'widget': AdminPagedownWidget},
        }
| from django.db.models import TextField
from django.forms import TextInput, ModelForm, ModelMultipleChoiceField
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from reversion.admin import VersionAdmin
from judge.models import Problem
from judge.widgets import AdminPagedownWidget, HeavySelect2MultipleWidget
class LanguageForm(ModelForm):
    # Inverse selection: admins pick the problems a language may NOT be
    # used for; LanguageAdmin.save_model persists the complement.
    problems = ModelMultipleChoiceField(
        label=_('Disallowed problems'),
        queryset=Problem.objects.all(),
        required=False,
        help_text=_('These problems are NOT allowed to be submitted in this language'),
        widget=HeavySelect2MultipleWidget(data_view='problem_select2'))
class LanguageAdmin(VersionAdmin):
    fields = ('key', 'name', 'short_name', 'common_name', 'ace', 'pygments', 'info', 'description', 'problems')
    list_display = ('key', 'name', 'common_name', 'info')
    form = LanguageForm

    if AdminPagedownWidget is not None:
        formfield_overrides = {
            TextField: {'widget': AdminPagedownWidget},
        }

    def save_model(self, request, obj, form, change):
        super(LanguageAdmin, self).save_model(request, obj, form, change)
        # The form stores *disallowed* problems; persist the complement as
        # the language's allowed problem set.
        obj.problem_set = Problem.objects.exclude(id__in=form.cleaned_data['problems'].values('id'))

    def get_form(self, request, obj=None, **kwargs):
        # Pre-populate the "disallowed" field with the problems NOT
        # currently allowed for this language.
        self.form.base_fields['problems'].initial = \
            Problem.objects.exclude(id__in=obj.problem_set.values('id')).values_list('pk', flat=True) if obj else []
        return super(LanguageAdmin, self).get_form(request, obj, **kwargs)
class GenerateKeyTextInput(TextInput):
    # Text input widget with a client-side "Regenerate" button that fills
    # the field with a random 100-character key.
    def render(self, name, value, attrs=None):
        # NOTE(review): super(TextInput, self) skips TextInput's own render
        # and calls its base class -- confirm this is intentional.
        text = super(TextInput, self).render(name, value, attrs)
        # format_html doubles braces ({{ }}) to emit literal JS braces;
        # {0} is the field name.
        return mark_safe(text + format_html(
            '''\
<a href="#" onclick="return false;" class="button" id="id_{0}_regen">Regenerate</a>
<script type="text/javascript">
    (function ($) {{
        $(document).ready(function () {{
            $('#id_{0}_regen').click(function () {{
                var length = 100,
                    charset = "abcdefghijklnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789`~!@#$%^&*()_+-=|[]{{}};:,<>./?",
                    key = "";
                for (var i = 0, n = charset.length; i < length; ++i) {{
                    key += charset.charAt(Math.floor(Math.random() * n));
                }}
                $('#id_{0}').val(key);
            }});
        }});
    }})(django.jQuery);
</script>
''', name))
class JudgeAdminForm(ModelForm):
    class Meta:
        # auth_key gets the regenerate-button widget defined above.
        widgets = {'auth_key': GenerateKeyTextInput}
        if AdminPagedownWidget is not None:
            widgets['description'] = AdminPagedownWidget
class JudgeAdmin(VersionAdmin):
    form = JudgeAdminForm
    readonly_fields = ('created', 'online', 'start_time', 'ping', 'load', 'last_ip', 'runtimes', 'problems')
    fieldsets = (
        (None, {'fields': ('name', 'auth_key')}),
        (_('Description'), {'fields': ('description',)}),
        (_('Information'), {'fields': ('created', 'online', 'last_ip', 'start_time', 'ping', 'load')}),
        (_('Capabilities'), {'fields': ('runtimes', 'problems')}),
    )
    list_display = ('name', 'online', 'start_time', 'ping', 'load', 'last_ip')
    ordering = ['-online', 'name']

    def get_readonly_fields(self, request, obj=None):
        # Lock the name of a judge that is currently online.
        if obj is not None and obj.online:
            return self.readonly_fields + ('name',)
        return self.readonly_fields

    def has_delete_permission(self, request, obj=None):
        # Never allow deleting a judge that is currently online.
        result = super(JudgeAdmin, self).has_delete_permission(request, obj)
        if result and obj is not None:
            return not obj.online
        return result

    if AdminPagedownWidget is not None:
        formfield_overrides = {
            TextField: {'widget': AdminPagedownWidget},
        }
| Python | 0 |
e262d176ecd7d8871a9e06ebc542cf473acf0925 | Add migration for transnational weights | reports/migrations/0004_transnational_weights.py | reports/migrations/0004_transnational_weights.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django_countries import countries
def populate_weights(apps, schema_editor):
Weights = apps.get_model("reports", "Weights")
db_alias = schema_editor.connection.alias
for item in COUNTRY_WEIGHTS:
country = item['Country']
item.pop('Country')
for media_type, weight in item.iteritems():
w = Weights.objects.using(db_alias).create(
country=country,
media_type=media_type,
weight=weight)
w.save()
def backwards(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('reports', '0003_indonesia-weights'),
]
operations = [
migrations.RunPython(
populate_weights,
backwards,
),
]
COUNTRY_WEIGHTS= [
{'Country': 'T1',
'Internet': '1',
'Print': '1',
'Radio': '1',
'Television': '1',
'Twitter': '1'}]
| Python | 0.000016 | |
6ff99aa939bf07d18595507efeca6ada7fc267a5 | add a test thing for wilk | tools/ast-env.py | tools/ast-env.py | #!/usr/bin/env python
if __name__ == '__main__':
import os
import sys
import copy
import json
sys.path.append(os.path.dirname(__file__))
import logging
logging.basicConfig(level=logging.DEBUG)
import roush.db.database
from roush.db import api as db_api
from roush.db.database import init_db
from sqlalchemy.orm import sessionmaker, create_session, scoped_session
from sqlalchemy.ext.declarative import declarative_base
from roushclient.client import RoushEndpoint
from roush.webapp.ast import FilterBuilder, FilterTokenizer
from roush.webapp.solver import Solver
ep = RoushEndpoint()
init_db('sqlite:///roush.db')
db_session = scoped_session(lambda: create_session(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
##########################
ast_logger = logging.getLogger('roush.webapp.ast')
ast_logger.setLevel(logging.WARNING)
expr1 = 'facts.woof = "goober"'
expr2 = 'facts.arf = "woof"'
api = db_api.api_from_models()
solver = Solver(api, 4, ['facts.ostype="hi"'])
solved, requires_input, plan = solver.solve()
print 'Solver plan: %s' % plan
solver_from_plan = Solver.from_plan(api, 4,
['facts.ostype="hi"'],
plan)
new_plan = solver_from_plan.plan()
print 'Solver plan: %s' % new_plan
print 'plans identical: %s' % new_plan == plan
print plan
print new_plan
print json.dumps(solver_from_plan.adventure(), sort_keys=True, indent=4)
# foo = FilterBuilder(FilterTokenizer(),
# 'nodes: "test" in union(facts.blah, "test")')
# root_node = foo.build()
# print 'expression: %s' % root_node.to_s()
# print 'inverted: %s' % root_node.invert()
| Python | 0.000003 | |
b0577ce3b8b162ce3702430b189905f9beaae8d5 | Add script to clean up all FD phone and fax numbers. | firecares/firestation/management/commands/cleanup_phonenumbers.py | firecares/firestation/management/commands/cleanup_phonenumbers.py | from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from phonenumber_field.modelfields import PhoneNumber
import re
"""
This command is for cleaning up every phone and fax number in the
database. It removes all non-numeric characters, such as parenthesis,
hyphens, spaces, etc. It also removes prefixed 1s These numbers should
be made human-readable on the client side.
"""
def cleanNumber(no1):
no2 = re.sub('[^0-9]','', no1)
if no2.startswith("1"):
no2 = no2[1:]
return no2
class Command(BaseCommand):
def handle(self, *args, **kwargs):
print("Don't worry, it always takes this long.")
for fd in FireDepartment.objects.all():
# If the FD has a phone number, clean it up
if fd.headquarters_phone and not fd.headquarters_phone.raw_input == "Invalid Input":
newPhone = cleanNumber(fd.headquarters_phone.raw_input)
print(newPhone)
fd.headquarters_phone = newPhone
# If the FD has a fax number, clean it up
if fd.headquarters_fax and not fd.headquarters_fax.raw_input == "Invalid Input":
newFax = cleanNumber(fd.headquarters_fax.raw_input)
print(newFax)
fd.headquarters_fax = newFax
# Save and continue to the next FD (if any)
fd.save()
print("Completed successfully!")
| Python | 0 | |
370fa8682b09de76af36129817e412270c8206c8 | add a new mgmt command for data analysis: compute the intersections of every pair of boundaries | boundaries/management/commands/compute_intersections.py | boundaries/management/commands/compute_intersections.py | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.gis.geos import MultiPolygon, Polygon
from boundaries.models import BoundarySet, Boundary
class Command(BaseCommand):
help = 'Create a report of the area of intersection of every pair of boundaries from two boundary sets specified by their slug.'
args = 'boundaryset1 boundaryset1'
def handle(self, *args, **options):
if len(args) < 2:
print "Specify two boundaryset slugs."
return
bset_a = BoundarySet.objects.get(slug=args[0])
bset_b = BoundarySet.objects.get(slug=args[1])
print bset_a.slug, "area_1", bset_b.slug, "area_2", "area_intersection", "pct_of_1", "pct_of_2"
# For each boundary in the first set...
for a_slug in bset_a.boundaries.order_by("slug").values_list('slug', flat=True):
a_bdry = bset_a.boundaries.get(slug=a_slug)
a_area = a_bdry.shape.area
# Find each intersecting boundary in the second set...
for b_bdry in bset_b.boundaries\
.filter(shape__intersects=a_bdry.shape):
geometry = a_bdry.shape.intersection(b_bdry.shape)
int_area = geometry.area
if geometry.empty: continue
b_area = b_bdry.shape.area
# Skip overlaps that are less than .1% of the area of either of the shapes.
# These are probably not true overlaps.
if int_area/a_area < .001 or int_area/b_area < .001:
continue
print a_slug, a_area, b_bdry.slug, b_area, int_area, int_area/a_area, int_area/b_area
| Python | 0.000158 | |
66306f46417e8b0caa6b4e6d13d72e5ae6b226ef | include builds | corehq/apps/app_manager/management/commands/migrate_to_conditional_case_update.py | corehq/apps/app_manager/management/commands/migrate_to_conditional_case_update.py | from datetime import datetime
import traceback
from corehq.apps.app_manager.dbaccessors import wrap_app
from corehq.apps.app_manager.management.commands.helpers import AppMigrationCommandBase
from corehq.apps.domain.models import Domain
import random
def get_new_case_update_json(name_path):
return {
'question_path': name_path,
'update_mode': 'always'
}
class Command(AppMigrationCommandBase):
help = """
One-time migration to transition form action models to use ConditionalCaseUpdate as part of the new
"save only if edited" feature: https://github.com/dimagi/commcare-hq/pull/30910.
"""
include_linked_apps = True
include_builds = True
chunk_size = 5
DOMAIN_LIST_FILENAME = "migrate_to_cond_case_update_cmd_domain_list.txt"
DOMAIN_PROGRESS_NUMBER_FILENAME = "migrate_to_cond_case_update_cmd_domain_progress.txt"
APP_WRAPPING_ERRORS_LOG = "migrate_to_cond_case_update_wrapping_errors.txt"
def add_arguments(self, parser):
super().add_arguments(parser)
# Used for a dry run on 1000 domains to get a taste of how long a full migration would take.
parser.add_argument(
'--num-domains-test',
action='store',
default=None,
help='''For a dry run, use this argument to test on X number of domains. Dry run flag must be
included and domain flag cannot be included.''',
)
def _has_been_migrated(self, app_doc):
for module in app_doc['modules']:
for form in module['forms']:
if module['module_type'] == "basic":
actions = form.get('actions', '')
if actions:
open_case_action = actions.get('open_case', '')
if open_case_action:
if (open_case_action.get('name_update', '')
and not open_case_action.get('name_path', '')):
return True
if open_case_action.get('name_path', ''):
return False
elif module['module_type'] == "advanced":
for form in module['forms']:
if form['form_type'] == 'advanced_form':
actions = form.get('actions', '')
if actions:
open_case_action = actions.get('open_cases', '')[0] \
if actions.get('open_cases', '') else None
if open_case_action:
if (open_case_action.get('name_update', '')
and not open_case_action.get('name_path', '')):
return True
if open_case_action.get('name_path', ''):
return False
# Catch-all; if it's all surveys or something else strange, migrate it by default
return False
def migrate_app(self, app_doc):
if self._has_been_migrated(app_doc):
return None
else:
try:
wrapped_app = wrap_app(app_doc)
return wrapped_app
except Exception as e:
print(e)
self.log_error(app_doc)
return None
@property
def num_domains_test(self):
return self.options.get('num_domains_test', None)
def get_domains(self):
if self.is_dry_run and self.num_domains_test:
print(self.num_domains_test)
all_domain_names = Domain.get_all_names()
random.shuffle(all_domain_names)
return all_domain_names[:int(self.num_domains_test)]
else:
return Domain.get_all_names()
def log_error(self, app_doc):
with open(self.APP_WRAPPING_ERRORS_LOG, 'a') as f:
error_string = (f"{datetime.now()}\nOn domain: {app_doc['domain']}, "
f"App ID: {app_doc['_id']}\n{traceback.format_exc().strip()}\n")
f.write(error_string)
| from datetime import datetime
import traceback
from corehq.apps.app_manager.dbaccessors import wrap_app
from corehq.apps.app_manager.management.commands.helpers import AppMigrationCommandBase
from corehq.apps.domain.models import Domain
import random
def get_new_case_update_json(name_path):
return {
'question_path': name_path,
'update_mode': 'always'
}
class Command(AppMigrationCommandBase):
help = """
One-time migration to transition form action models to use ConditionalCaseUpdate as part of the new
"save only if edited" feature: https://github.com/dimagi/commcare-hq/pull/30910.
"""
include_linked_apps = True
include_builds = False
chunk_size = 5
DOMAIN_LIST_FILENAME = "migrate_to_cond_case_update_cmd_domain_list.txt"
DOMAIN_PROGRESS_NUMBER_FILENAME = "migrate_to_cond_case_update_cmd_domain_progress.txt"
APP_WRAPPING_ERRORS_LOG = "migrate_to_cond_case_update_wrapping_errors.txt"
def add_arguments(self, parser):
super().add_arguments(parser)
# Used for a dry run on 1000 domains to get a taste of how long a full migration would take.
parser.add_argument(
'--num-domains-test',
action='store',
default=None,
help='''For a dry run, use this argument to test on X number of domains. Dry run flag must be
included and domain flag cannot be included.''',
)
def _has_been_migrated(self, app_doc):
for module in app_doc['modules']:
for form in module['forms']:
if module['module_type'] == "basic":
actions = form.get('actions', '')
if actions:
open_case_action = actions.get('open_case', '')
if open_case_action:
if (open_case_action.get('name_update', '')
and not open_case_action.get('name_path', '')):
return True
if open_case_action.get('name_path', ''):
return False
elif module['module_type'] == "advanced":
for form in module['forms']:
if form['form_type'] == 'advanced_form':
actions = form.get('actions', '')
if actions:
open_case_action = actions.get('open_cases', '')[0] \
if actions.get('open_cases', '') else None
if open_case_action:
if (open_case_action.get('name_update', '')
and not open_case_action.get('name_path', '')):
return True
if open_case_action.get('name_path', ''):
return False
# Catch-all; if it's all surveys or something else strange, migrate it by default
return False
def migrate_app(self, app_doc):
if self._has_been_migrated(app_doc):
return None
else:
try:
wrapped_app = wrap_app(app_doc)
return wrapped_app
except Exception as e:
print(e)
self.log_error(app_doc)
return None
@property
def num_domains_test(self):
return self.options.get('num_domains_test', None)
def get_domains(self):
if self.is_dry_run and self.num_domains_test:
print(self.num_domains_test)
all_domain_names = Domain.get_all_names()
random.shuffle(all_domain_names)
return all_domain_names[:int(self.num_domains_test)]
else:
return Domain.get_all_names()
def log_error(self, app_doc):
with open(self.APP_WRAPPING_ERRORS_LOG, 'a') as f:
error_string = (f"{datetime.now()}\nOn domain: {app_doc['domain']}, "
f"App ID: {app_doc['_id']}\n{traceback.format_exc().strip()}\n")
f.write(error_string)
| Python | 0 |
7a75174716ecbc0757c84f29ebbb3fd309521936 | add management command to fire all repeaters in a domain | corehq/apps/cleanup/management/commands/fire_repeaters.py | corehq/apps/cleanup/management/commands/fire_repeaters.py | import datetime
from django.core.management.base import BaseCommand, CommandError
from corehq.apps.receiverwrapper.models import RepeatRecord
from dimagi.utils.post import simple_post
class Command(BaseCommand):
args = '<domain>'
help = 'Fire all repeaters in a domain.'
def handle(self, *args, **options):
if len(args) == 1:
domain = args[0]
else:
raise CommandError('Usage: %s\n%s' % (self.args, self.help))
next_year = datetime.datetime.now() + datetime.timedelta(days=365)
records = RepeatRecord.all(domain=domain, due_before=next_year)
for record in records:
record.fire(post_fn=simple_post)
record.save()
print '{} {}'.format(record._id, 'successful' if record.succeeded else 'failed')
| Python | 0 | |
d5d3fcfb331c1486acbfb004705b94b1923a0db8 | Add code to dump features into libsvm file format | Codes/SuperEdge/SuperEdge/dump_libsvm.py | Codes/SuperEdge/SuperEdge/dump_libsvm.py | import numpy as np
from datetime import datetime
from sklearn.datasets import dump_svmlight_file
import os.path as path
def main():
cache_path = 'largecache/'
feat_name = 'feat.dat'
lbl_name = 'lbl.dat'
feat_len = 4224 #1088
now = datetime.now()
lbl_memmap = np.memmap(path.join(cache_path, lbl_name), dtype='uint8', mode='r')
feat_memmap = np.memmap(path.join(cache_path, feat_name), dtype='float32', mode='r', shape=(lbl_memmap.shape[0], feat_len))
print 'loading dataset took ', (datetime.now() - now)
now = datetime.now()
print 'starting dumping feature files to libsvm format'
dump_svmlight_file(feat_memmap, lbl_memmap, 'largecache/data.train.txt')
if __name__ == '__main__':
main() | Python | 0 | |
269f1b743583609ecdc7658e35073db9b985634c | rename http.py | examples/http_example.py | examples/http_example.py | from poyonga.client import Groonga
g = Groonga()
cmds = [("status", {}),
("log_level", {"level": "warning"}),
#("table_create", {"name": "Site", "flags": "TABLE_HASH_KEY"}),
("select", {"table": "Site"})]
for cmd, kwargs in cmds:
ret = g.call(cmd, **kwargs)
print(ret.status)
print(ret.body)
print("*" * 40)
| Python | 0.00223 | |
fef9b29a9072ce6bc184592c240558ba01fbeb6b | Copy to github | git-credential-libsecret.py | git-credential-libsecret.py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 Tim Hughes <thughes@thegoldfish.org>
#
# Distributed under terms of the MIT license.
"""
Handles storing and providing usernames and passwords to Git using libsecret.
"""
import os
import sys
if __name__ == '__main__':
githelper = __import__(os.path.splitext(os.path.basename(__file__))[0])
raise SystemExit(githelper.main(sys.argv))
import sys
import argparse
from urllib.parse import urlparse
import gi
gi.require_version('Secret', '1')
from gi.repository import Secret
GIT_CREDENTIALS_SCHEMA = Secret.Schema.new("org.timhughes.git.Credentials.",
Secret.SchemaFlags.NONE,
{
"protocol": Secret.SchemaAttributeType.STRING,
"host": Secret.SchemaAttributeType.STRING,
"path": Secret.SchemaAttributeType.STRING,
"username": Secret.SchemaAttributeType.STRING,
}
)
def main(argv):
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
parser_get = subparsers.add_parser('get', help='get help')
parser_get.set_defaults(func=get)
parser_store = subparsers.add_parser('store', help='store shelp')
parser_store.set_defaults(func=store)
parser_reject = subparsers.add_parser('reject', help='reject help')
parser_reject.set_defaults(func=reject)
args = parser.parse_args(argv[1:])
if hasattr(args, 'func'):
try:
args.func()
except KeyboardInterrupt:
print('Interrupted')
sys.exit(0)
def get_attributes():
attributes = {}
for line in sys.stdin:
key, var = line.partition("=")[::2]
if key == "\n":
break
if key in ['protocol','host','path','username','password','url']:
if key == 'url':
o = urlparse(var.strip())
if o.scheme:
attributes['protocol'] = o.scheme
if o.netloc:
attributes['host'] = o.netloc
if o.path:
attributes['path'] = o.path
if o.username:
attributes['username'] = o.username
if o.password:
attributes['password'] = o.password
else:
attributes[key.strip()] = var.strip()
if len(attributes) > 0:
return attributes
else:
return
def get():
attributes = get_attributes()
if 'password' in attributes:
del attributes['password']
password = Secret.password_lookup_sync(
GIT_CREDENTIALS_SCHEMA,
attributes,
None
)
if password:
secret_item = find_secret_item(attributes)
print('protocol=%s' % secret_item['protocol'])
print('host=%s' % secret_item['host'])
print('username=%s' % secret_item['username'])
print('password=%s' % secret_item['password'])
def store():
attributes = get_attributes()
if 'password' in attributes:
password = attributes['password']
del attributes['password']
else:
sys.exit(1)
Secret.password_store_sync(
GIT_CREDENTIALS_SCHEMA,
attributes,
Secret.COLLECTION_DEFAULT,
"%s://%s@%s" %(attributes['protocol'], attributes['username'], attributes['host'] ),
password,
None
)
def reject():
attributes = get_attributes()
if 'password' in attributes:
del attributes['password']
Secret.password_clear_sync(
GIT_CREDENTIALS_SCHEMA,
attributes,
None
)
def find_secret_item(attributes):
service = Secret.Service.get_sync(Secret.ServiceFlags.LOAD_COLLECTIONS)
collection = Secret.Collection.for_alias_sync(service,Secret.COLLECTION_DEFAULT,Secret.CollectionFlags.LOAD_ITEMS,None)
item = collection.search_sync(GIT_CREDENTIALS_SCHEMA,attributes,Secret.SearchFlags.LOAD_SECRETS,None)[0]
item.load_secret_sync()
ret_attributes = item.get_attributes()
ret_attributes['password'] = item.get_secret().get().decode('utf-8')
return ret_attributes
| Python | 0 | |
95b304d2f0a9dc851926506795310f96c3312682 | Add SQL example. | examples/sql_rockstar.py | examples/sql_rockstar.py | from RockStar import RockStar
sql_code = "SELECT 'Hello World!';"
rock_it_bro = RockStar(days=400, file_name='hello_world.sql', code=sql_code)
rock_it_bro.make_me_a_rockstar()
| Python | 0.000001 | |
d5cf05e40b638afbf12fd95cf721a22ad0f3281d | Create appointments.py | backend/appointments.py | backend/appointments.py | Python | 0.000001 | ||
b017c43c9bf46cd2e1fd7380904a1b022b6930ba | Create __init__.py | beprof/__init__.py | beprof/__init__.py | pass
| Python | 0.000006 | |
bb462e78221344d71a2605d4af042e5037db3e79 | add colorize.py script | colorize.py | colorize.py | import numpy as np
import os
import skimage.color as color
import matplotlib.pyplot as plt
import scipy.ndimage.interpolation as sni
import caffe
import argparse
def parse_args():
parser = argparse.ArgumentParser(description='iColor: deep interactive colorization')
parser.add_argument('-img_in',dest='img_in',help='grayscale image to read in', type=str)
parser.add_argument('-img_out',dest='img_out',help='colorized image to save off', type=str)
parser.add_argument('--gpu', dest='gpu', help='gpu id', type=int, default=0)
parser.add_argument('--prototxt',dest='prototxt',help='prototxt filepath', type=str, default='./models/colorization_deploy_v2.prototxt')
parser.add_argument('--caffemodel',dest='caffemodel',help='caffemodel filepath', type=str, default='./models/colorization_release_v2.caffemodel')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
caffe.set_mode_gpu()
caffe.set_device(args.gpu)
# Select desired model
net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST)
(H_in,W_in) = net.blobs['data_l'].data.shape[2:] # get input shape
(H_out,W_out) = net.blobs['class8_ab'].data.shape[2:] # get output shape
pts_in_hull = np.load('./resources/pts_in_hull.npy') # load cluster centers
net.params['class8_ab'][0].data[:,:,0,0] = pts_in_hull.transpose((1,0)) # populate cluster centers as 1x1 convolution kernel
# print 'Annealed-Mean Parameters populated'
# load the original image
img_rgb = caffe.io.load_image(args.img_in)
img_lab = color.rgb2lab(img_rgb) # convert image to lab color space
img_l = img_lab[:,:,0] # pull out L channel
(H_orig,W_orig) = img_rgb.shape[:2] # original image size
# create grayscale version of image (just for displaying)
img_lab_bw = img_lab.copy()
img_lab_bw[:,:,1:] = 0
img_rgb_bw = color.lab2rgb(img_lab_bw)
# resize image to network input size
img_rs = caffe.io.resize_image(img_rgb,(H_in,W_in)) # resize image to network input size
img_lab_rs = color.rgb2lab(img_rs)
img_l_rs = img_lab_rs[:,:,0]
net.blobs['data_l'].data[0,0,:,:] = img_l_rs-50 # subtract 50 for mean-centering
net.forward() # run network
ab_dec = net.blobs['class8_ab'].data[0,:,:,:].transpose((1,2,0)) # this is our result
ab_dec_us = sni.zoom(ab_dec,(1.*H_orig/H_out,1.*W_orig/W_out,1)) # upsample to match size of original image L
img_lab_out = np.concatenate((img_l[:,:,np.newaxis],ab_dec_us),axis=2) # concatenate with original image L
img_rgb_out = (255*np.clip(color.lab2rgb(img_lab_out),0,1)).astype('uint8') # convert back to rgb
plt.imsave(args.img_out, img_rgb_out)
| Python | 0.000001 | |
753388550e4ea7a8b09ddb22189021be3585a5e5 | Clean PlexUpdate plugin | beetsplug/plexupdate.py | beetsplug/plexupdate.py | """Updates an Plex library whenever the beets library is changed.
Put something like the following in your config.yaml to configure:
plex:
host: localhost
port: 32400
"""
import requests
from urlparse import urljoin
import xml.etree.ElementTree as ET
from beets import config
from beets.plugins import BeetsPlugin
def get_music_section(host, port):
"""Getting the section key for the music library in Plex.
"""
api_endpoint = 'library/sections'
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
# Sends request.
r = requests.get(url)
# Parse xml tree and extract music section key.
tree = ET.fromstring(r.text)
for child in tree.findall('Directory'):
if child.get('title') == 'Music':
return child.get('key')
def update_plex(host, port):
"""Sends request to the Plex api to start a library refresh.
"""
# Getting section key and build url.
section_key = get_music_section(host, port)
api_endpoint = 'library/sections/{0}/refresh'.format(section_key)
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
# Sends request and returns requests object.
r = requests.get(url)
return r
class PlexUpdate(BeetsPlugin):
def __init__(self):
super(PlexUpdate, self).__init__()
# Adding defaults.
config['plex'].add({
u'host': u'localhost',
u'port': 32400})
self.register_listener('database_change', self.listen_for_db_change)
def listen_for_db_change(self, lib):
"""Listens for beets db change and register the update for the end"""
self.register_listener('cli_exit', self.update)
def update(self, lib):
"""When the client exists try to send refresh request to Plex server.
"""
self._log.info('Updating Plex library...')
# Try to send update request.
try:
update_plex(
config['plex']['host'].get(),
config['plex']['port'].get())
self._log.info('... started.')
except requests.exceptions.RequestException:
self._log.warning('Update failed.')
| """Updates an Plex library whenever the beets library is changed.
Put something like the following in your config.yaml to configure:
plex:
host: localhost
port: 32400
"""
import requests
from urlparse import urljoin
import xml.etree.ElementTree as ET
from beets import config
from beets.plugins import BeetsPlugin
# Global variable to detect if database is changed that the update
# is only run once before beets exists.
database_changed = False
def get_music_section(host, port):
"""Getting the section key for the music library in Plex.
"""
api_endpoint = 'library/sections'
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
# Sends request.
r = requests.get(url)
# Parse xml tree and extract music section key.
tree = ET.fromstring(r.text)
for child in tree.findall('Directory'):
if child.get('title') == 'Music':
return child.get('key')
def update_plex(host, port):
"""Sends request to the Plex api to start a library refresh.
"""
# Getting section key and build url.
section_key = get_music_section(host, port)
api_endpoint = 'library/sections/{0}/refresh'.format(section_key)
url = urljoin('http://{0}:{1}'.format(host, port), api_endpoint)
# Sends request and returns requests object.
r = requests.get(url)
return r
class PlexUpdate(BeetsPlugin):
def __init__(self):
super(PlexUpdate, self).__init__()
# Adding defaults.
config['plex'].add({
u'host': u'localhost',
u'port': 32400})
@PlexUpdate.listen('database_change')
def listen_for_db_change(lib=None):
"""Listens for beets db change and set global database_changed
variable to True.
"""
global database_changed
database_changed = True
@PlexUpdate.listen('cli_exit')
def update(lib=None):
"""When the client exists and the database_changed variable is True
trying to send refresh request to Plex server.
"""
if database_changed:
print('Updating Plex library...')
# Try to send update request.
try:
update_plex(
config['plex']['host'].get(),
config['plex']['port'].get())
print('... started.')
except requests.exceptions.RequestException:
print('Update failed.')
| Python | 0 |
e5bdbfb075bf95b7834f2db8c7d6ffa69cb03bc0 | Create convnet_basic.py | Classifying_datasets/statoil/convnet_basic.py | Classifying_datasets/statoil/convnet_basic.py | adi
| Python | 0.000004 | |
6f3bb73dd387fd73022a20c3a45adf05213339cf | add new package (#20109) | var/spack/repos/builtin/packages/py-rosinstall-generator/package.py | var/spack/repos/builtin/packages/py-rosinstall-generator/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class PyRosinstallGenerator(PythonPackage):
"""A tool for generating rosinstall files."""
homepage = "https://wiki.ros.org/rosinstall_generator"
url = "https://pypi.io/packages/source/r/rosinstall-generator/rosinstall_generator-0.1.22.tar.gz"
version('0.1.22', sha256='22d22599cd3f08a1f77fb2b1d9464cc8062ede50752a75564d459fcf5447b8c5')
depends_on('py-catkin-pkg@0.1.28:', type=('build', 'run'))
depends_on('py-rosdistro@0.7.3:', type=('build', 'run'))
depends_on('py-rospkg', type=('build', 'run'))
depends_on('py-pyyaml', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
| Python | 0 | |
0cbb114a70bcbef266c6df776681d1c664d8bdae | Create superClock.py | superClock.py | superClock.py | #This is My Nest but it will slowly be converted to superClock!
import urllib
import urllib2
import sys
import json
import time
# Make sure your higher level directory has the JSON file called passwordFile.json
# The file should contain the information in the JSON format. See below for an example
# {"username": "email@somewhere.com", "password": "yourSuperSecretPassword!!!"}
# all temps from the Nest site are stored in degrees Celsius
fileData = open('../passwordFile.json')
usernameAndPassword = json.load(fileData)
#print "username:" + str(usernameAndPassword['username'])
#print "password:" + str(usernameAndPassword['password'])
def c_to_f(c):
return c * 9.0 / 5.0 + 32.0
class Nest:
def __init__(self, username, password, serial=None, index=0):
self.username = username
self.password = password
self.serial = serial
self.index = index
def loads(self, res):
if hasattr(json, "loads"):
res = json.loads(res)
else:
res = json.read(res)
return res
def login(self):
data = urllib.urlencode({"username": self.username, "password": self.password})
req = urllib2.Request("https://home.nest.com/user/login",
data,
{"user-agent":"Nest/1.1.0.10 CFNetwork/548.0.4"})
res = urllib2.urlopen(req).read()
res = self.loads(res)
self.transport_url = res["urls"]["transport_url"]
self.access_token = res["access_token"]
self.userid = res["userid"]
def get_status(self):
req = urllib2.Request(self.transport_url + "/v2/mobile/user." + self.userid,
headers={"user-agent":"Nest/1.1.0.10 CFNetwork/548.0.4",
"Authorization":"Basic " + self.access_token,
"X-nl-user-id": self.userid,
"X-nl-protocol-version": "1"})
res = urllib2.urlopen(req).read()
res = self.loads(res)
self.structure_id = res["structure"].keys()[0]
if (self.serial is None):
self.device_id = res["structure"][self.structure_id]["devices"][self.index]
self.serial = self.device_id.split(".")[1]
self.status = res
def show_status(self):
shared = self.status["shared"][self.serial]
device = self.status["device"][self.serial]
allvars = shared
allvars.update(device)
for k in sorted(allvars.keys()):
print k + "."*(32-len(k)) + ":", allvars[k]
# This assumes you have two Nest Thermostats. If you have more than 2, the number, index, after "None"
# below will increment accordingly. If you only have one, it should just be 0. You have to create an object
# for each nest thermostat. You could also specify the thermostats by serial number instead of the index.
print"My Nest Data"
n0 = Nest(usernameAndPassword['username'],usernameAndPassword['password'], None, 0) #Downstairs
n1 = Nest(usernameAndPassword['username'],usernameAndPassword['password'], None, 1) #Upstairs
print " Logging On"
n1.login()
n0.login()
print " Getting Status"
n1.get_status()
n0.get_status()
print""
print "Upstairs Temperature"
print c_to_f(n1.status["shared"][n1.serial]["current_temperature"])
print "Upstairs Humidity"
print n1.status["device"][n1.serial]["current_humidity"]
print ""
print "Downstairs Temperature"
print c_to_f(n0.status["shared"][n0.serial]["current_temperature"])
print "Downstairs Humidity"
print n0.status["device"][n0.serial]["current_humidity"]
| Python | 0.000024 | |
a2516d28c86fd23efcb893e59de42b33526bfe6f | Add a Python Tkinter example showing how to map a scale widget. | swig/tkgui.py | swig/tkgui.py | #!/usr/bin/env python
import Tkinter
import sys
import mapper
def on_gui_change(x):
# print 'on_gui_change',x,x.__class__
sig_out.update_scalar(int(x))
def on_mapper_change(sig, x):
# print 'on_mapper_change', x, x.__class__
w.set(int(x))
dev = mapper.device("tkgui", 9000)
sig_in = mapper.signal(1, "/signal0", None, 'f', on_mapper_change)
dev.register_input(sig_in)
sig_out = mapper.signal(1, "/signal0", None, 'f', lambda x: x)
dev.register_output(sig_out)
master = Tkinter.Tk()
master.title("libmapper Python GUI demo")
w = Tkinter.Scale(master, from_=0, to=100, label='signal0',
orient=Tkinter.HORIZONTAL, length=300,
command=on_gui_change)
w.pack()
def do_poll():
dev.poll(20)
master.after(5, do_poll)
do_poll()
master.mainloop()
| Python | 0 | |
99578401585435c08aed6f29e090fbde955423fd | Create good2d.py | rocksetta-examples/good2d.py | rocksetta-examples/good2d.py | import tensorflow as tf
import numpy as np
# Create 100 phony x, y data points in NumPy, y = x * 0.1 + 0.3
x_data = np.random.rand(100).astype("float32")
y_data = x_data * 0.1 + 0.3
# Try to find values for W and b that compute y_data = W * x_data + b
# (We know that W should be 0.1 and b 0.3, but Tensorflow will
# figure that out for us.)
W = tf.Variable(tf.random_uniform([1], -1.0, 1.0))
b = tf.Variable(tf.zeros([1]))
y = W * x_data + b
# Minimize the mean squared errors.
loss = tf.reduce_mean(tf.square(y - y_data))
optimizer = tf.train.GradientDescentOptimizer(0.5)
train = optimizer.minimize(loss)
# Before starting, initialize the variables. We will 'run' this first.
init = tf.initialize_all_variables()
# Launch the graph.
sess = tf.Session()
sess.run(init)
# Fit the line.
for step in xrange(201):
sess.run(train)
if step % 20 == 0:
print(step, sess.run(W), sess.run(b))
# Learns best fit is W: [0.1], b: [0.3]
| Python | 0.000004 | |
35e51f55adfbe8383bfb3d34688eb7aee89a8351 | add shuffle_by_year script, to toss messages into yearly folders | shuffle_by_year.py | shuffle_by_year.py | #!/usr/bin/env python
# Q&D script to sort mail into subfolders by year.
# Reduces the burden upon the filesystem gnomes.
DIRPATH = "/stor0/backups/imapbak/rtucker/Fastmail-rey_fmgirl_com"
import email
import mailbox
import imap2maildir
import sys
import time
import os
def main():
db = imap2maildir.open_sql_session(DIRPATH + "/.imap2maildir.sqlite")
mbox = mailbox.Maildir(DIRPATH, False)
try:
counter = 0
c = db.cursor()
for result in db.execute("select mailfile,folder from seenmessages where folder is null or folder = ''"):
key = result[0]
msg = mbox.get_message(key)
year = None
if 'Date' in msg:
ttup = email.utils.parsedate(msg['Date'])
if ttup:
year = ttup[0]
if year is None:
tstamp = msg.get_date()
year = time.gmtime(tstamp).tm_year
print(key + " has no valid Date header; going with " + str(year))
ybox = mbox.add_folder(str(year))
ybox.lock()
newkey = ybox.add(msg)
ybox.flush()
ybox.unlock()
c.execute("update seenmessages set mailfile = ?, folder = ? where mailfile = ?", (newkey, year, key))
mbox.lock()
mbox.discard(key)
mbox.flush()
mbox.unlock()
print("moved " + key + " to " + str(year) + "/" + newkey)
counter += 1
if counter % 25 == 0:
print("committing db")
db.commit()
sys.stdout.flush()
if os.path.exists(".STOP"):
print("stop requested")
os.unlink(".STOP")
break
finally:
mbox.unlock()
db.commit()
if __name__ == "__main__":
main()
| Python | 0 | |
7423de8d2222e81693fe380a6b4c8638a96a9db8 | Create urls.py | urls.py | urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from pysoldev import settings
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'pysoldev.views.home', name='home'),
url(r'^$', 'pysoldev.app.views.index', name='index'),
)
if settings.DEBUG:
urlpatterns += patterns('',
(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
)
| Python | 0.000017 | |
0079676729fa8023bea93fcf03bb48c4ff24a495 | add partition | resource-4/combinatorics/integer-partitions/partition1.py | resource-4/combinatorics/integer-partitions/partition1.py | # counting partitions
def partition1(n,k=-1):
if (k == -1):
return sum([partition1(n,i) for i in range(1,n+1)])
if (n < k):
return 0
if((n==0) or (n==1)):
return 1
if((k==1) or (n==k)):
return 1
return sum([partition1(n-k,i) for i in range(1,min(k,n-k)+1)])
| Python | 0.000007 | |
a787ceea91abf1c0fbeb3b97e063d3ec1aa61b57 | Create implement-rand10-using-rand7.py | Python/implement-rand10-using-rand7.py | Python/implement-rand10-using-rand7.py | # Time: O(1)
# Space: O(1)
# Given a function rand7 which generates a uniform random integer in the range 1 to 7,
# write a function rand10 which generates a uniform random integer in the range 1 to 10.
#
# Do NOT use system's Math.random().
#
# Example 1:
#
# Input: 1
# Output: [7]
# Example 2:
#
# Input: 2
# Output: [8,4]
# Example 3:
#
# Input: 3
# Output: [8,1,10]
#
# Note:
#
# rand7 is predefined.
# Each testcase has one argument: n, the number of times that rand10 is called.
#
# Follow up:
# - What is the expected value for the number of calls to rand7() function?
# - Could you minimize the number of calls to rand7()?
#
# The rand7() API is already defined for you.
# def rand7():
# @return a random integer in the range 1 to 7
class Solution(object):
def rand10(self):
"""
:rtype: int
"""
while True:
x = (rand7()-1)*7 + (rand7()-1)
if x < 40:
return x%10 + 1
| Python | 0.000022 | |
b659dd572bd92e10bde8899540792bdb26529a45 | add qdb_test | scripts/qdb_test.py | scripts/qdb_test.py | from qdb import set_trace, RemoteCommandManager
def f():
in_f = True
return 'getting out of f'
def main():
set_trace(
uuid='qdb',
host='localhost',
port=8001,
cmd_manager=RemoteCommandManager(),
)
mutable_object = {}
print 'Hello world!'
f()
print mutable_object
if __name__ == '__main__':
main() | Python | 0.000001 | |
d6ce19d526f07b843cbe5c79cc4e292181e6679f | add dumper per requested format #80 | scripts/gridorder2/dumper.py | scripts/gridorder2/dumper.py | """Generate the requested output."""
from pyiem.dep import read_env
from pyiem.util import get_dbconn
from tqdm import tqdm
import pandas as pd
from pandas.io.sql import read_sql
HUCS = (
"102400090102 102300020307 102400020604 102801020801 071000040902 "
"070600040608 070802040604 070802090402"
).split()
XREF = {
91: 1,
92: 2,
93: 3,
94: 4,
95: 5,
96: 6,
97: 7,
98: 9,
99: 8,
100: 11,
101: 10,
}
def get_flowpath_lengths(pgconn):
"""Load some metadata."""
return read_sql(
"SELECT scenario, huc_12, fpath, bulk_slope,"
"ST_LENGTH(geom) as len from flowpaths "
"WHERE scenario >= 91 and scenario <= 101 and huc_12 in %s"
"ORDER by scenario ASC, huc_12 ASC",
pgconn,
params=(tuple(HUCS),),
index_col=None,
)
def compute_yearly(df, bymax=False, individual=False):
"""Compute yearly 2011-2020 totals."""
envs = []
for _, row in tqdm(df.iterrows(), total=len(df.index)):
envfn = (
f"/i/{row['scenario']}/env/{row['huc_12'][:8]}/"
f"{row['huc_12'][8:]}/{row['huc_12']}_{row['fpath']}.env"
)
if bymax:
envdf = (
read_env(envfn)
.sort_values("precip", ascending=False)
.groupby("year")
.nth(0)
.reset_index()
)
else:
envdf = read_env(envfn).groupby("year").sum().copy().reset_index()
envdf["scenario"] = row["scenario"]
envdf["huc_12"] = row["huc_12"]
envdf["flowpath"] = row["fpath"]
envdf["delivery"] = envdf["sed_del"] / row["len"]
envs.append(envdf)
# We now have dataframe with yearly flowpath totals
envdf = pd.concat(envs)
key = ["scenario", "huc_12", "year"]
if individual:
key = ["scenario", "huc_12", "flowpath", "year"]
if individual:
genvdf = envdf.set_index(key)
else:
genvdf = envdf.groupby(by=key).mean().copy()
if bymax:
# add back in a good date, maybe
genvdf["date"] = envdf.groupby(by=key).nth(0)["date"]
return genvdf
def main():
"""Do great things."""
pgconn = get_dbconn("idep")
df = get_flowpath_lengths(pgconn)
# df221 = df[df['len'] > 22.1]
# avg_len_full = df.groupby(["scenario", "huc_12"]).mean()
# avg_len_221 = df221.groupby(["scenario", "huc_12"]).mean()
# flowpath_counts_full = df.groupby(["scenario", "huc_12"]).count()
# flowpath_counts_221 = df221.groupby(["scenario", "huc_12"]).count()
# For all hillslopes
result_full = compute_yearly(df, bymax=True, individual=True)
# Only those over > 22.1m
# result_221 = compute_yearly(df[df['len'] > 22.1], individual=True)
# Result time
rows = []
for huc_12 in tqdm(HUCS):
for scenario in XREF:
fdf = df[(df["huc_12"] == huc_12) & (df["scenario"] == scenario)]
for flowpath in fdf["fpath"].values:
for year in range(2011, 2021):
key = (scenario, huc_12, flowpath, year)
if key not in result_full.index:
print(key)
continue
rows.append(
{
"Flowpath ID": flowpath,
"HUC12": huc_12,
"Situation": XREF[scenario],
# "Year": year,
# "Date for daily max rainfall": result_full.at[
# (scenario, huc_12, year), "date"],
"Date for daily max rainfall": result_full.at[
key, "date"
],
"Flowpath length(m)": float(
fdf[(fdf["fpath"] == flowpath)]["len"]
),
"Daily Rainfall (mm)": result_full.at[
key, "precip"
],
"Daily Runoff (mm)": result_full.at[key, "runoff"],
"Daily Delivery (kg/ha)": result_full.at[
key, "delivery"
],
"Daily Detachment (kg/ha)": result_full.at[
key, "av_det"
],
"Slope Gradient(%)": float(
fdf[(fdf["fpath"] == flowpath)]["bulk_slope"]
)
* 100.0,
# "Number of flowpath": flowpath_counts_full.at[
# (scenario, huc_12), 'len'],
# "Avg Flowpath Length(m)": avg_len_full.at[
# (scenario, huc_12), "len"],
# "Daily Rainfall (mm)": result_full.at[
# (scenario, huc_12, year), "precip"],
# "Daily Runoff (mm)": result_full.at[
# (scenario, huc_12, year), "runoff"],
# "Daily Delivery (kg/ha)": result_full.at[
# (scenario, huc_12, year), "delivery"],
# "Daily Detachment (kg/ha)": result_full.at[
# (scenario, huc_12, year), "av_det"],
# "Number of flowpath >22.1": flowpath_counts_221.at[
# (scenario, huc_12), 'len'],
# "Avg Flowpath Length(m) >22.1": avg_len_221.at[
# (scenario, huc_12), "len"],
# "Daily Runoff (mm) >22.1": result_221.at[
# (scenario, huc_12, year), "runoff"],
# "Daily Delivery (kg/ha) >22.1": result_221.at[
# (scenario, huc_12, year), "delivery"],
# "Daily Detachment (kg/ha) >22.1": result_221.at[
# (scenario, huc_12, year), "av_det"],
}
)
resdf = pd.DataFrame(rows)
with pd.ExcelWriter("daily.xlsx") as writer:
resdf.to_excel(writer, "Single Flowpath-yearly", index=False)
if __name__ == "__main__":
main()
| Python | 0 | |
5c02d7ccb9721e0b02cdd31edc36be095a4568e2 | test the Meta and the MetaMixin classes | tests/meta.py | tests/meta.py | import unittest
import slumber
class MetaTestCase(unittest.TestCase):
def test_init_kwargs_to_attributes(self):
m = slumber.Meta(item1="test", item2=41, item3="example")
self.assertEqual(m.item1, "test")
self.assertEqual(m.item2, 41)
self.assertEqual(m.item3, "example")
class MetaMixinTestCase(unittest.TestCase):
def test_init_kwargs_to_meta(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest(item1="test", item2=41, item3="example")
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, "test")
self.assertEqual(mmt._meta.item2, 41)
self.assertEqual(mmt._meta.item3, "example")
def test_meta_to_meta_defaults(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest()
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, None)
self.assertEqual(mmt._meta.item2, None)
self.assertEqual(mmt._meta.item3, None)
def test_meta_to_meta_defaults_with_init_kwargs(self):
class MetaMixinTest(slumber.MetaMixin, object):
class Meta:
item1 = None
item2 = None
item3 = None
mmt = MetaMixinTest(item2=41)
self.assertTrue(hasattr(mmt, "_meta"))
self.assertTrue(isinstance(mmt._meta, slumber.Meta))
self.assertEqual(mmt._meta.item1, None)
self.assertEqual(mmt._meta.item2, 41)
self.assertEqual(mmt._meta.item3, None)
| Python | 0 | |
4c3c9c6929ebc3f439ccf3bb7d3696f484b154bc | Add missing noop-migrations for PositiveIntegerField | karspexet/ticket/migrations/0017_positive_integers_20180322_2056.py | karspexet/ticket/migrations/0017_positive_integers_20180322_2056.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2018-03-22 19:56
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ticket', '0016_add_voucher_note_20180213_2307'),
]
operations = [
migrations.AlterField(
model_name='discount',
name='amount',
field=models.PositiveIntegerField(validators=[django.core.validators.MinValueValidator(100), django.core.validators.MaxValueValidator(5000)]),
),
migrations.AlterField(
model_name='reservation',
name='ticket_price',
field=models.PositiveIntegerField(),
),
migrations.AlterField(
model_name='reservation',
name='total',
field=models.PositiveIntegerField(),
),
migrations.AlterField(
model_name='ticket',
name='price',
field=models.PositiveIntegerField(),
),
migrations.AlterField(
model_name='voucher',
name='amount',
field=models.PositiveIntegerField(help_text='Rabatt i SEK'),
),
]
| Python | 0.00008 | |
af508daaf016b824c7518a36f9b92f571f0f65af | Implement management command for creating demo records of balance history (NC-842) | nodeconductor/structure/management/commands/init_balance_history.py | nodeconductor/structure/management/commands/init_balance_history.py | from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from nodeconductor.structure.models import BalanceHistory
from nodeconductor.structure.models import Customer
class Command(BaseCommand):
help = """ Initialize demo records of balance history """
def handle(self, *args, **options):
self.stdout.write('Creating demo records of balance history for all customers')
for customer in Customer.objects.all():
for i in range(10):
BalanceHistory.objects.create(customer=customer,
created=timezone.now() - timedelta(days=i),
amount=100 + i * 10)
self.stdout.write('... Done')
| Python | 0 | |
7f1883275e9aa0b489de99947db7daf87f616ed4 | solve k58 | chap06/k58.py | chap06/k58.py | #
# usage: python k58.py {file name} {number}
#
import sys
from xml.etree import ElementTree as ET
def get_tuple(nll, dll):
return [[p, [n[1] for n in nl if n[0] == p][0],
[d[1] for d in dl if d[0] == p][0]]
for nl, dl in zip(nll, dll)
for p in list({n[0] for n in nl} & {d[0] for d in dl})]
if __name__ == '__main__':
fn = sys.argv[1]
root = ET.parse(fn).getroot()
cdl = [d for d in root.findall('document/sentences/*/dependencies')
if d.get('type') == 'collapsed-dependencies']
nll = [[[n.find('governor').text, n.find('dependent').text]
for n in e.findall('*[@type="nsubj"]')]
for e in cdl]
dll = [[[d.find('governor').text, d.find('dependent').text]
for d in e.findall('*[@type="dobj"]')]
for e in cdl]
for t in get_tuple(nll, dll):
print('\t'.join(t))
| Python | 0.999919 | |
ee8b22fbec74b81521c89c162c47c390cfca1618 | Add unittest with mnist model to test dygraph_to_static (#22777) | python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py | python/paddle/fluid/tests/unittests/dygraph_to_static/test_mnist.py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from time import time
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.optimizer import AdamOptimizer
from paddle.fluid.dygraph.nn import Conv2D, Pool2D, Linear
from paddle.fluid.dygraph.jit import dygraph_to_static_output
import unittest
class SimpleImgConvPool(fluid.dygraph.Layer):
def __init__(self,
num_channels,
num_filters,
filter_size,
pool_size,
pool_stride,
pool_padding=0,
pool_type='max',
global_pooling=False,
conv_stride=1,
conv_padding=0,
conv_dilation=1,
conv_groups=1,
act=None,
use_cudnn=False,
param_attr=None,
bias_attr=None):
super(SimpleImgConvPool, self).__init__()
self._conv2d = Conv2D(
num_channels=num_channels,
num_filters=num_filters,
filter_size=filter_size,
stride=conv_stride,
padding=conv_padding,
dilation=conv_dilation,
groups=conv_groups,
param_attr=None,
bias_attr=None,
act=act,
use_cudnn=use_cudnn)
self._pool2d = Pool2D(
pool_size=pool_size,
pool_type=pool_type,
pool_stride=pool_stride,
pool_padding=pool_padding,
global_pooling=global_pooling,
use_cudnn=use_cudnn)
@dygraph_to_static_output
def forward(self, inputs):
x = self._conv2d(inputs)
x = self._pool2d(x)
return x
class MNIST(fluid.dygraph.Layer):
def __init__(self):
super(MNIST, self).__init__()
self._simple_img_conv_pool_1 = SimpleImgConvPool(
1, 20, 5, 2, 2, act="relu")
self._simple_img_conv_pool_2 = SimpleImgConvPool(
20, 50, 5, 2, 2, act="relu")
self.pool_2_shape = 50 * 4 * 4
SIZE = 10
scale = (2.0 / (self.pool_2_shape**2 * SIZE))**0.5
self._fc = Linear(
self.pool_2_shape,
10,
param_attr=fluid.param_attr.ParamAttr(
initializer=fluid.initializer.NormalInitializer(
loc=0.0, scale=scale)),
act="softmax")
@dygraph_to_static_output
def forward(self, inputs, label=None):
x = self.inference(inputs)
if label is not None:
acc = fluid.layers.accuracy(input=x, label=label)
loss = fluid.layers.cross_entropy(x, label)
avg_loss = fluid.layers.mean(loss)
return x, acc, avg_loss
else:
return x
@dygraph_to_static_output
def inference(self, inputs):
x = self._simple_img_conv_pool_1(inputs)
x = self._simple_img_conv_pool_2(x)
x = fluid.layers.reshape(x, shape=[-1, self.pool_2_shape])
x = self._fc(x)
return x
class TestMNIST(unittest.TestCase):
def setUp(self):
self.epoch_num = 1
self.batch_size = 64
self.place = fluid.CUDAPlace(0) if fluid.is_compiled_with_cuda(
) else fluid.CPUPlace()
self.train_reader = paddle.batch(
paddle.dataset.mnist.train(),
batch_size=self.batch_size,
drop_last=True)
class TestMNISTWithStaticMode(TestMNIST):
"""
Tests model when using `dygraph_to_static_output` to convert dygraph into static
model. It allows user to add customized code to train static model, such as `with`
and `Executor` statement.
"""
def test_train(self):
main_prog = fluid.Program()
with fluid.program_guard(main_prog):
mnist = MNIST()
adam = AdamOptimizer(
learning_rate=0.001, parameter_list=mnist.parameters())
exe = fluid.Executor(self.place)
start = time()
img = fluid.data(
name='img', shape=[None, 1, 28, 28], dtype='float32')
label = fluid.data(name='label', shape=[None, 1], dtype='int64')
label.stop_gradient = True
prediction, acc, avg_loss = mnist(img, label)
adam.minimize(avg_loss)
exe.run(fluid.default_startup_program())
for epoch in range(self.epoch_num):
for batch_id, data in enumerate(self.train_reader()):
dy_x_data = np.array([x[0].reshape(1, 28, 28)
for x in data]).astype('float32')
y_data = np.array(
[x[1] for x in data]).astype('int64').reshape(-1, 1)
out = exe.run(main_prog,
fetch_list=[avg_loss, acc],
feed={'img': dy_x_data,
'label': y_data})
if batch_id % 100 == 0:
print(
"Loss at epoch {} step {}: loss: {:}, acc: {}, cost: {}"
.format(epoch, batch_id,
np.array(out[0]),
np.array(out[1]), time() - start))
if batch_id == 300:
# The accuracy of mnist should converge over 0.9 after 300 batch.
accuracy = np.array(out[1])
self.assertGreater(
accuracy,
0.9,
msg="The accuracy {} of mnist should converge over 0.9 after 300 batch."
.format(accuracy))
break
# TODO: TestCase with cached program is required when building program in `for` loop.
if __name__ == "__main__":
unittest.main()
| Python | 0 | |
a9d0a3dcc5221adbca5142a0cd7548ef874afd45 | add script to make matrix | matrix/build_maxrix.py | matrix/build_maxrix.py | WIDTH = 10
HEIGHT = 10
HORIZONTAL_NEXT = "hnext"
VERTICAL_NEXT = "vnext"
BASE = "Cell"
print """
abstract sig %(BASE)s {
%(HORIZONTAL_NEXT)s: lone %(BASE)s,
%(VERTICAL_NEXT)s: lone %(BASE)s
}
""" % globals()
for x in range(WIDTH):
for y in range(HEIGHT):
print "one sig Cell_%d_%d extends %s {}" % (x, y, BASE)
# fact
print "fact matrix_adj {"
for x in range(WIDTH):
for y in range(HEIGHT - 1):
next = y + 1
print " Cell_%(x)d_%(y)d.%(VERTICAL_NEXT)s = Cell_%(x)d_%(next)d" % globals()
print " no Cell_%(x)d_%(next)d.%(VERTICAL_NEXT)s" % globals()
for y in range(HEIGHT):
for x in range(WIDTH - 1):
next = x + 1
print " Cell_%(x)d_%(y)d.%(HORIZONTAL_NEXT)s = Cell_%(next)d_%(y)d" % globals()
print " no Cell_%(next)d_%(y)d.%(HORIZONTAL_NEXT)s" % globals()
print "}"
| Python | 0.000001 | |
9efc48eadfb896a70197f8963caa6034d9b4856d | store res_log in database instead of memory, add index | bin/addons/base/res/res_log.py | bin/addons/base/res/res_log.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
import tools
import time
class res_log(osv.osv):
_name = 'res.log'
_columns = {
'name': fields.char('Message', size=128, help='The logging message.', required=True),
'user_id': fields.many2one('res.users','User', required=True),
'res_model': fields.char('Object', size=128),
'context': fields.char('Context', size=250),
'res_id': fields.integer('Object ID'),
'secondary': fields.boolean('Secondary Log', help='Do not display this log if it belongs to the same object the user is working on'),
'create_date': fields.datetime('Created Date', readonly=True),
'read': fields.boolean('Read', help="If this log item has been read, get() should not send it to the client")
}
_defaults = {
'user_id': lambda self,cr,uid,ctx: uid,
'context': "{}",
'read': False
}
_order='create_date desc'
_index_name = 'res_log_uid_read'
def _auto_init(self, cr, context={}):
super(res_log, self)._auto_init(cr, context)
cr.execute('SELECT 1 FROM pg_indexes WHERE indexname=%s',
(self._index_name,))
if not cr.fetchone():
cr.execute('CREATE INDEX %s ON res_log (user_id, read)' %
self._index_name)
# TODO: do not return secondary log if same object than in the model (but unlink it)
def get(self, cr, uid, context=None):
unread_log_ids = self.search(cr, uid, [('user_id','=',uid),
('read', '=', False)],
context=context)
unread_logs = self.read(cr, uid, unread_log_ids,
['name','res_model','res_id'],
context=context)
self.write(cr, uid, unread_log_ids, {'read': True}, context=context)
return unread_logs
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
res = []
log_ids = super(res_log, self).search(cr, uid, args, offset, limit, order, context, count)
logs = {}
for log in self.browse(cr, uid, log_ids, context=context):
res_dict = logs.get(log.res_model, {})
res_dict.update({log.res_id: log.id})
logs.update({log.res_model: res_dict})
res = map(lambda x: x.values(), logs.values())
return tools.flatten(res)
res_log()
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
import tools
import time
class res_log(osv.osv_memory):
_name = 'res.log'
_columns = {
'name': fields.char('Message', size=128, help='The logging message.', required=True),
'user_id': fields.many2one('res.users','User', required=True),
'res_model': fields.char('Object', size=128),
'context': fields.char('Context', size=250),
'res_id': fields.integer('Object ID'),
'secondary': fields.boolean('Secondary Log', help='Do not display this log if it belongs to the same object the user is working on'),
'create_date': fields.datetime('Created Date', readonly=True),
'read': fields.boolean('Read', help="If this log item has been read, get() should not send it to the client")
}
_defaults = {
'user_id': lambda self,cr,uid,ctx: uid,
'create_date': fields.datetime.now,
'context': "{}",
'read': False
}
_order='create_date desc'
# TODO: do not return secondary log if same object than in the model (but unlink it)
def get(self, cr, uid, context=None):
unread_log_ids = self.search(cr, uid, [('user_id','=',uid),
('read', '=', False)],
context=context)
unread_logs = self.read(cr, uid, unread_log_ids,
['name','res_model','res_id'],
context=context)
self.write(cr, uid, unread_log_ids, {'read': True}, context=context)
return unread_logs
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
res = []
log_ids = super(res_log, self).search(cr, uid, args, offset, limit, order, context, count)
logs = {}
for log in self.browse(cr, uid, log_ids, context=context):
res_dict = logs.get(log.res_model, {})
res_dict.update({log.res_id: log.id})
logs.update({log.res_model: res_dict})
res = map(lambda x: x.values(), logs.values())
return tools.flatten(res)
res_log()
| Python | 0 |
98cbb29d008fc7abf1a066d9ecf7b3399395aefe | add users api | mcp/interface/users.py | mcp/interface/users.py | import json
from mcp import users
from mcp.interface import common
class UsersHandler(common.AuthorizedHandler):
def forbidden(self):
return True
def do_get(self):
return 200, json.dumps(list(iter(users.user_db)))
class UserHandler(common.AuthorizedHandler):
def __init__(self, request, response, groups):
common.AuthorizedHandler.__init__(self, request, response, groups)
self.userentry = users.get(self.groups[0])
def forbidden(self):
return self.user.name != self.userentry.name
class UserInfoHandler(UserHandler):
def do_get(self):
return 200, json.dumps({'name': self.userentry.name, 'key': self.userentry.key, 'admin': self.userentry.admin, 'active': self.userentry.active, 'servers': self.userentry.servers})
users_base = '/users/'
user_base = users_base + '(' + users.users_allowed + ')'
routes = {users_base: UsersHandler, user_base: UserInfoHandler}
| Python | 0.000001 | |
6e165165974f99a30ba6dce528d97c03152eeab6 | Add tracegraph script | tracegraph.py | tracegraph.py | #!/usr/bin/python
import sys
import re
class Host(object):
def __init__(self, step, name, ip):
self.step = step
self.name = name
self.ip = ip
self.pings = []
def add_ping(self, ping):
self.pings.append(ping)
def max_ping(self):
return max(self.pings) if self.pings else 0
def min_ping(self):
return min(self.pings) if self.pings else float('inf')
class Step(object):
def __init__(self):
self.hosts = []
def to_host(line):
parts = re.split(r'\s+', line.strip())
step = ''
if parts[0].isdigit():
step = int(parts[0])
parts = parts[1:]
x = 0
while x < len(parts) and parts[x] == '*':
x += 1
name = '?'
ip = '?'
if x < len(parts):
name = parts[x]
x += 1
if x < len(parts):
ip = parts[x]
x += 1
pings = [float(t) for t in parts[x:] if t != 'ms' and t != '*']
host = Host(step, name, ip)
for ping in pings:
host.add_ping(ping)
return host
def barplot(host, scale):
if not host.pings:
return ''
p0 = int(host.min_ping() * scale)
p1 = int(host.max_ping() * scale)
return (max(0, p0 - 1) * ' '
+ 'o'
+ (p1 - p0 - 1) * '-'
+ ('o' if p1 > p0 else ''))
def rchop_to(s, l):
if len(s) <= l:
return s
return '...' + s[-l+3:]
if len(sys.argv) > 1:
# Pass arguments to traceroute
import subprocess
#lines = subprocess.check_output(['traceroute'] + sys.argv[1:]).splitlines()
p = subprocess.Popen(['traceroute'] + sys.argv[1:], stdout=subprocess.PIPE, stderr=None)
lines = []
for line in iter(p.stdout.readline,''):
lines.append(line.rstrip())
print line.rstrip()
else:
print 'Reading from stdin.'
lines = sys.stdin.read().splitlines()
print 'traceroute:'
print '\n'.join(lines)
print ''
print 'tracegraph:'
hosts = list(to_host(line) for line in lines)
max_ping = max(h.max_ping() for h in hosts)
WIDTH = 60
scale = float(WIDTH) / max_ping
for host in hosts:
print '%2s %-20s | %s' % (host.step, rchop_to(host.name, 20), barplot(host, scale))
print 25 * ' ' + ' 0ms' + (WIDTH - 8) * ' ' + ' %.1fms' % max_ping
| Python | 0.000001 | |
8bc3c2c82b1b8f7e4582fe401c05c4c4f34cc345 | create structure for python | src/boat_servo_sail/setup.py | src/boat_servo_sail/setup.py | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
## http://docs.ros.org/api/catkin/html/howto/format2/installing_python.html
## pdf download of the page in sources folder:
## docs-ros_installing_python.pdf
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
packages=['boat_servo_sail'],
package_dir={'': 'src'})
setup(**setup_args)
| Python | 0.000261 | |
dc3df810c3c6ffea429e43ec0f8e6f006a9c1c6f | Create tensorCase.py | tensorCase.py | tensorCase.py | import tensorflow as tf
sess = tf.InteractiveSession()
x = tf.random_uniform([])
y = tf.random_uniform([])
out1 = tf.cond(tf.greater(x,y), lambda:tf.add(x,y), lambda:(tf.subtract(x,y)))
print(x.eval(), y.eval(), out1.eval())
x = tf.random_uniform([],-1,1)
y = tf.random_uniform([],-1,1)
def f1(): return tf.cast(tf.add(x,y), tf.float32)
def f2(): return tf.cast(tf.subtract(x,y), tf.float32)
def f3(): return tf.cast(tf.constant(0), tf.float32)
out2 = tf.case({tf.less(x, y):f2, tf.greater(x,y):f1}, default=f3)
print(x.eval(), y.eval(), out2.eval())
| Python | 0.000002 | |
6584d9e71c82097f65f316bf85b2f019350cfa58 | Add run_aiotest.py | run_aiotest.py | run_aiotest.py | import aioeventlet
import aiotest.run
import eventlet
config = aiotest.TestConfig()
config.new_event_pool_policy = aioeventlet.EventLoopPolicy
config.sleep = eventlet.sleep
aiotest.run.main(config)
| Python | 0.000003 | |
50983c0a6d18e1ec8fcaed076f3c82b5935fe913 | Solve problem 23 | problem023.py | problem023.py | #!/usr/bin/env python3
from itertools import *
def divisors(n):
for d in takewhile(lambda d: d * d <= n, count(1)):
if n % d == 0:
yield d
if n // d != d:
yield n // d
def is_abundant_number(n):
return n < sum(divisors(n)) - n
all_abundant = set(filter(is_abundant_number, range(1, 28123+1)))
print(sum(n for n in range(1, 28123+1)
if not any(n-d in all_abundant for d in all_abundant)))
| Python | 0.999965 | |
8db65dc2a6a99a0e6287b12f7bfdcd423a62e515 | Add test running harness | run_harness.py | run_harness.py | import sys
import os
import subprocess
import shutil
import shlex
import collections
from babi_train import TrainExitStatus
from graceful_interrupt import GracefulInterruptHandler
TaskSpec = collections.namedtuple("TaskSpec", ["task_name", "variant_name", "run_params"])
def run(tasks_dir, output_dir, base_params, specs):
base_params_split = shlex.split(base_params)
for spec in specs:
print("### Task {} ({}) ###".format(spec.task_name, spec.variant_name))
run_params_split = shlex.split(spec.run_params)
task_folder_train = os.path.join(tasks_dir, "{}_train".format(spec.task_name))
if not os.path.isdir(task_folder_train):
print("Train directory doesn't exist. Parsing text file...")
textfile = task_folder_train + ".txt"
subprocess.run(["python3","babi_graph_parse.py",textfile], check=True)
task_folder_valid = os.path.join(tasks_dir, "{}_valid".format(spec.task_name))
if not os.path.isdir(task_folder_valid):
print("Validation directory doesn't exist. Parsing text file...")
textfile = task_folder_valid + ".txt"
subprocess.run(["python3","babi_graph_parse.py",textfile], check=True)
task_output_dir = os.path.join(output_dir, spec.task_name, spec.variant_name)
if not os.path.isdir(task_output_dir):
os.makedirs(task_output_dir)
completed_file = os.path.join(task_output_dir, "completed.txt")
if os.path.exists(completed_file):
print("Task is already completed! Skipping...")
continue
stdout_fn = os.path.join(task_output_dir, "stdout.txt")
all_params = [task_folder_train] + run_params_split + base_params_split
all_params.extend(["--outputdir", task_output_dir])
all_params.extend(["--validation", task_folder_valid])
all_params.extend(["--set-exit-status"])
all_params.extend(["--resume-auto"])
with open(stdout_fn, 'a', 1) as stdout_file:
proc = subprocess.Popen(all_params, stdout=stdout_file, stderr=subprocess.STDOUT)
with GracefulInterruptHandler():
returncode = proc.wait()
task_status = TrainExitStatus(returncode)
if task_status == TrainExitStatus.accuracy_success:
print("SUCCESS! Reached desired accuracy.")
with open(completed_file,'w') as f:
f.write("SUCCESS\n")
elif task_status == TrainExitStatus.reached_update_limit:
print("FAIL! Reached update limit without attaining desired accuracy.")
with open(completed_file,'w') as f:
f.write("FAIL_UPDATE_LIMIT\n")
elif task_status == TrainExitStatus.error:
print("Got an error; skipping for now. See {} for details.".format(stdout_fn))
elif task_status == TrainExitStatus.nan_loss:
print("NaN loss detected; skipping for now.")
elif task_status == TrainExitStatus.interrupted:
print("Process was interrupted! Stopping now")
break
| Python | 0 | |
a423b0eb6277c260a16dc11f37088e496964cc0c | Create count-neighbours.py | home/count-neighbours.py | home/count-neighbours.py | def checkio ( data ) :
if len( data ) < 10 : return False
if not any( c in data for c in "abcdefghijklmnopqrstuvwxyz" ) : return False
if not any( c in data for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ" ) : return False
if not any( c in data for c in "1234567890" ) : return False
return True
#Some hints
#Just check all conditions
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert checkio('A1213pokl') == False, "1st example"
assert checkio('bAse730onE4') == True, "2nd example"
assert checkio('asasasasasasasaas') == False, "3rd example"
assert checkio('QWERTYqwerty') == False, "4th example"
assert checkio('123456123456') == False, "5th example"
assert checkio('QwErTy911poqqqq') == True, "6th example"
| Python | 0.000009 | |
557d21ffbbbf5cb8a452d6bc0b4c013daf8eabdc | Add new migration | server/store/migrations/0003_alter_testimonial_reviewer_location.py | server/store/migrations/0003_alter_testimonial_reviewer_location.py | # Generated by Django 4.1.3 on 2022-11-14 02:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("store", "0002_initial"),
]
operations = [
migrations.AlterField(
model_name="testimonial",
name="reviewer_location",
field=models.CharField(max_length=100),
),
]
| Python | 0 | |
0df48d7159841f66cc4e8dac8e9a52727c69091e | add testcase for settings() | test/model.py | test/model.py | # -*- coding: utf-8 -*-
import unittest
from cliez.conf import settings, Settings
class Model(object):
config_none = settings()
def __init__(self):
self.config = settings()
pass
pass
class ModelTestCase(unittest.TestCase):
def setUp(self):
Settings.bind('cliez.conf')
pass
def test_ok(self):
a = Model()
self.assertEqual(None, a.config_none)
self.assertEqual(None, a.config.PACKAGE_ROOT)
pass
pass
| Python | 0 | |
b1c405950ce5be535796d6d31a329b5083dc91a9 | adding test_hello | test_hello.py | test_hello.py | print "Hello All"
| Python | 0.999096 | |
bb0e8032d325d2fd015a53d4513d632d12e8afb3 | Create pset2part3.py | pset2part3.py | pset2part3.py | balance = 270472
annualInterestRate = 0.21
# test case 1
# balance = 320000
# annualInterestRate = 0.2
# Lowest Payment: 29157.09
# test case 2
# balance = 999999
# annualInterestRate = 0.18
# Lowest Payment: 90325.03
epsilon = 0.01
lower = balance / 12
upper = balance * ((1 + annualInterestRate / 12.0) ** 12) / 12.0
ans = (lower + upper) / 2.0
def yearEndBalance(monthlyPayment):
'''
Calculates year end balance given a monthly payment
as an argument. monthlyPayment can be int or float '''
myBalance = balance
for m in range(12):
interest = (myBalance - monthlyPayment) * annualInterestRate / 12.0
myBalance = myBalance + interest - monthlyPayment
return myBalance
while abs(yearEndBalance(ans)) >= epsilon:
# print("lower = " + str(lower) + " upper = " + str(upper) + " ans = " + str(ans))
if yearEndBalance(ans) < 0:
upper = ans
else:
lower = ans
ans = (lower + upper) / 2.0
print ("Lowest Payment: " + str(round(ans, 2)))
| Python | 0.000003 | |
3f64c94f51698ab69fc1996d73a04bde58da9052 | Update build script. | client/scripts/build-plugin.py | client/scripts/build-plugin.py | import os, sys
import ue4config
import ue4util, gitutil, shutil, uploadutil
plugin_version = gitutil.get_short_version('.')
plugin_output_folder = os.path.abspath('./unrealcv-%s' % plugin_version)
plugin_output_folder = ue4util.get_real_path(plugin_output_folder)
def build_plugin():
UAT_script = ue4config.conf['UATScript']
if not os.path.isfile(UAT_script):
print('Can not find Automation Script of UE4 %s' % UAT_script)
print('Please set UnrealEnginePath in ue4config.py correctly first')
return False
else:
if gitutil.is_dirty(os.path.abspath('.')):
print 'Error: uncommited changes of this repo exist'
return False
plugin_file = os.path.abspath('../../UnrealCV.uplugin')
plugin_file = ue4util.get_real_path(plugin_file)
UAT_script = UAT_script.replace(' ', '\ ')
cmd = '%s BuildPlugin -plugin=%s -package=%s -rocket -targetplatforms=Win64+Linux' % (UAT_script, plugin_file, plugin_output_folder)
print(cmd)
os.system(cmd)
# Clean up intermediate files
intermediate_folder = os.path.join(plugin_output_folder, 'Intermediate')
print 'Delete intermediate folder %s' % intermediate_folder
shutil.rmtree(intermediate_folder)
return True
def output_plugin(output_conf):
type = output_conf['Type']
upload_handlers = dict(
scp = uploadutil.upload_scp,
s3 = uploadutil.upload_s3,
)
upload_handlers[type](output_conf, [plugin_output_folder], '.')
if __name__ == '__main__':
if build_plugin():
output_confs = ue4config.conf['PluginOutput']
for conf in output_confs:
print conf['Type']
output_plugin(conf)
| import os, sys
import ue4config
import ue4util, gitutil, shutil, uploadutil
plugin_version = gitutil.get_short_version('.')
plugin_output_folder = os.path.abspath('./unrealcv-%s' % plugin_version)
def build_plugin():
UAT_script = ue4config.conf['UATScript']
if not os.path.isfile(UAT_script):
print('Can not find Automation Script of UE4 %s' % UAT_script)
print('Please set UnrealEnginePath in ue4config.py correctly first')
return False
else:
if gitutil.is_dirty(os.path.abspath('.')):
print 'Error: uncommited changes of this repo exist'
return False
plugin_file = os.path.abspath('../../UnrealCV.uplugin')
plugin_file = ue4util.get_real_path(plugin_file)
UAT_script = UAT_script.replace(' ', '\ ')
cmd = '%s BuildPlugin -plugin=%s -package=%s -rocket -targetplatforms=Win64+Linux' % (UAT_script, plugin_file, plugin_output_folder)
print(cmd)
os.system(cmd)
# Clean up intermediate files
intermediate_folder = os.path.join(plugin_output_folder, 'Intermediate')
print 'Delete intermediate folder %s' % intermediate_folder
shutil.rmtree(intermediate_folder)
return True
def output_plugin(output_conf):
type = output_conf['Type']
upload_handlers = dict(
scp = uploadutil.upload_scp,
s3 = uploadutil.upload_s3,
)
upload_handlers[type](output_conf, [plugin_output_folder], '.')
if __name__ == '__main__':
if build_plugin():
output_confs = ue4config.conf['PluginOutput']
for conf in output_confs:
print conf['Type']
output_plugin(conf)
| Python | 0 |
0bcd1ced7ddcaccd37a9504d542ba4eff68e479a | Create cefjs.py | cefjs.py | cefjs.py | # -*- coding: UTF-8 -*-
from cefpython3 import cefpython
import wx
application_settings = {
"cache_path": "/tmp/cef/cache/",
"debug": True,
"log_severity": cefpython.LOGSEVERITY_INFO,
"log_file": "/tmp/cef/debug.log",
"resources_dir_path": cefpython.GetModuleDirectory() + "/Resources",
"browser_subprocess_path": "%s/%s" % (cefpython.GetModuleDirectory(), "subprocess"),
"unique_request_context_per_browser": True,
"downloads_enabled": True,
"remote_debugging_port": 0,
"context_menu": {
"enabled": True,
"navigation": True,
"print": True,
"view_source": True,
"external_browser": True,
"devtools": True,
},
"ignore_certificate_errors": True,
}
browser_settings = {
"plugins_disabled": True,
"file_access_from_file_urls_allowed": True,
"universal_access_from_file_urls_allowed": True,
}
switch_settings = {
"locale_pak": cefpython.GetModuleDirectory() + "/Resources/en.lproj/locale.pak",
# "proxy-server": "socks5://127.0.0.1:8888",
# "no-proxy-server": "",
# "enable-media-stream": "",
# "remote-debugging-port": "12345",
# "disable-gpu": "",
# "--invalid-switch": "" -> Invalid switch name
}
def set_app_settings(settings={}):
global application_settings
application_settings.update(settings)
def set_browser_settings(settings={}):
global browser_settings
browser_settings.update(settings)
def set_switch_settings(settings={}):
global switch_settings
switch_settings.update(settings)
class CEF(object):
js_bindings = None
main_browser = None
def initialize(self):
self.js_bindings = cefpython.JavascriptBindings(bindToFrames=False, bindToPopups=True)
self.js_bindings.SetFunction('py_func', self.py_func) # in `html js` can call the function of js_func_name
self.js_bindings.SetFunction('__py_cb',
self.__py_sentinel) # in `html js` can call the function of js_func_name
self.main_browser.SetJavascriptBindings(self.js_bindings)
def py_func(self, *args):
pass
def __py_sentinel(self):
print '__py_sentinel'
def evaluate(self, js):
js += '\n__py_cb();'
self.main_browser.GetMainFrame().ExecuteJavascript(js)
def open(self, url):
self.main_browser.LoadUrl(url)
def on_init(self, browser, frame, status_code):
print 'cef init ok'
def on_load_end(self, browser, frame, status_code):
print '%s load ok' % frame.GetUrl()
class CEFHandler:
cef = None
def __init__(self):
pass
def OnBeforePopup(self, browser, frame, targetUrl, unknown1, unknown2, unknown3, unknown4, unknown5, unknown6):
# open only in one tab
browser.LoadUrl(targetUrl)
return True
def OnLoadStart(self, browser, frame):
pass
def OnLoadEnd(self, browser, frame, httpStatusCode):
url = frame.GetUrl()
if url == 'about:blank':
self.cef.on_init(browser, frame, httpStatusCode)
else:
self.cef.on_load_end(browser, frame, httpStatusCode)
class CEFJSFrame(wx.Frame):
window_count = 0
browser = None
mainPanel = None
def __init__(self, url, cef_cls):
wx.Frame.__init__(self, parent=None, id=wx.ID_ANY, title='cef wx', size=(800, 600))
self.window_count += 1
self.mainPanel = wx.Panel(self, style=wx.WANTS_CHARS)
window_info = cefpython.WindowInfo()
width, height = self.mainPanel.GetClientSizeTuple()
window_info.SetAsChild(self.mainPanel.GetHandle(), [0, 0, width, height])
self.browser = cefpython.CreateBrowserSync(
window_info,
browserSettings=browser_settings,
navigateUrl=url)
self.clientHandler = CEFHandler()
self.clientHandler.cef = cef_cls()
self.clientHandler.cef.main_browser = self.browser
self.clientHandler.cef.initialize()
self.browser.SetClientHandler(self.clientHandler)
menu = wx.Menu()
act_test = menu.Append(1, 'Test')
act_exit = menu.Append(10, 'Exit')
menu_bar = wx.MenuBar()
menu_bar.Append(menu, 'Action')
self.SetMenuBar(menu_bar)
self.Bind(wx.EVT_MENU, self.on_close, act_exit)
self.Bind(wx.EVT_MENU, self.on_test, act_test)
self.Bind(wx.EVT_CLOSE, self.on_close)
def on_test(self, event):
print 'on test'
def on_close(self, event):
del self.browser
self.Destroy()
self.window_count -= 1
if self.window_count <= 0:
cefpython.Shutdown()
wx.GetApp().Exit()
class CEFWXApp(wx.App):
timer = None
timerID = 1
timerCount = 0
def __init__(self, redirect, cef_cls):
self.cef_cls = cef_cls
wx.App.__init__(self, redirect)
def OnInit(self):
self._create_timer()
frame = CEFJSFrame('about:blank', self.cef_cls)
self.SetTopWindow(frame)
frame.Show()
return True
def _create_timer(self):
self.timer = wx.Timer(self, self.timerID)
self.timer.Start(10) # 10ms
wx.EVT_TIMER(self, self.timerID, self.on_timer)
def on_timer(self, event):
self.timerCount += 1
cefpython.MessageLoopWork()
def OnExit(self):
self.timer.Stop()
def loop(cef_cls):
cefpython.Initialize(application_settings, switch_settings)
app = CEFWXApp(False, cef_cls)
app.MainLoop()
del app
cefpython.Shutdown()
if __name__ == '__main__':
cef_cls = CEF
loop(cef_cls)
| Python | 0.000004 | |
1765ac3a12ea2a56b4e25e05cf1f1b531de5b2cf | Add External Temperature Probe from OpenWeather | pyexternal.py | pyexternal.py | #!/usr/bin/env python
# Get External Temperature from OpenWeatherMap
# External informations are :
# - temperature
# - humidity
# - pressure
# - precipitation volume (each 3h)
import urllib.request
import json
import pyowm
from datetime import datetime
from pyserial import pySerial
from imports.pyTemperature import pyTemperature
BASE_URL = "http://api.openweathermap.org/data/2.5/weather?q="
DEFAULT_CITY = "Meyreuil, France"
API_KEY = "4ca5e2bebb63f72d4cc5564300cf68d5"
class py_external(object):
def __init__(self):
super(py_external, self).__init__()
self.pyTemperature = None
def getDataAPI(self):
owm = pyowm.OWM(API_KEY)
#observation = owm.weather_at_place(DEFAULT_CITY,'accurate')
observation = owm.weather_at_id(2994068)
print(observation)
if observation is not None:
w = observation.get_weather()
w_temp = w.get_temperature(unit='celsius')
w_hum = w.get_humidity()
w_pres = w.get_pressure()
w_prec = w.get_rain()
#print(w_prec)
l = observation.get_location()
#print(l.get_ID())
#print(l.get_name())
#print(l.get_lat())
#print(l.get_lon())
#pyTemperature Constructor (self, date = datetime.now(), temp=None,pressure=None,humidity=None,precicipationVol=None):
dateNow = datetime.now()
self.pyTemperature = pyTemperature(dateNow,w_temp['temp'],w_pres['press'],w_hum)
#print("Temperature at pyExternal")
#self.pyTemperature.printTemperature()
def getPyTemperature(self):
return self.pyTemperature
def setDate(self,newDate):
self.date = newDate
def setPressure(self,newPressure):
self.pressure = newPressure
def setHumidity(self,newHumidity):
self.humidity = newHumidity
| Python | 0 | |
9a37f573aa985b3bd876c2df4c3f83670fddab42 | add script to set script permissions | chmod.py | chmod.py | import os, stat, sublime, sublime_plugin
# Configuration
pkg = 'NSIS'
script = sublime.packages_path() + '/' + pkg + '/nsis_build.sh'
def plugin_loaded():
from package_control import events
# chmod +x <script>
if (events.install(pkg) or events.post_upgrade(pkg)) and os.name is 'posix' or 'mac':
st = os.stat(script)
os.chmod(script, st.st_mode | stat.S_IEXEC) | Python | 0.000001 | |
7cd1c65b77eb474f67f1e194ceeb7bcde2d2bdb9 | Create wsgi.py | NGeO/NGeO/wsgi.py | NGeO/NGeO/wsgi.py | """
WSGI config for NGeO project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "NGeO.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| Python | 0.000001 | |
a7f1565efbdfa20d4d97d90a688b78da51533113 | Add new package: ycsb (#17788) | var/spack/repos/builtin/packages/ycsb/package.py | var/spack/repos/builtin/packages/ycsb/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Ycsb(Package):
"""Yahoo! Cloud Serving Benchmark."""
homepage = "https://research.yahoo.com/news/yahoo-cloud-serving-benchmark/"
url = "https://github.com/brianfrankcooper/YCSB/archive/0.17.0.tar.gz"
git = "https://github.com/brianfrankcooper/YCSB.git"
version('0.17.0', sha256='5dd1a3d4dd7ac336eadccc83b097c811e142cfe1b23fc278f247054a1892c0e0')
version('0.16.0', sha256='4296fd5e90d7d6d7dfcbad90039ddf16e785706a07f99c1c8a06e6ee06440f71')
version('0.15.0', sha256='50b83c11f1a2f19f45e3cc6781f952c69944d1221dfec72169c3587802fc7fbb')
version('0.14.0', sha256='456bcc9fa3d5d66d76fffa9cec34afd4528d9f02aa8a8d1135f511650516d5cb')
version('0.13.0', sha256='21cb8078a0fe2d8d909145744ca15848dbb6757e98a7fdc97fb4049f82f4afbc')
depends_on('maven', type='build')
depends_on('java@8', type=('build', 'run'))
depends_on('mongodb-async-driver', type='build')
def install(self, spec, prefix):
mvn = which('mvn')
jar_name = 'target/mongodb-async-driver-' + \
spec['mongodb-async-driver'].version.string + '.jar'
path = join_path(self.spec['mongodb-async-driver'].prefix, jar_name)
mvn('install:install-file', '-Dfile={0}'.format(path),
'-DgroupId=com.allanbank', '-DartifactId=mongodb-async-driver',
'-Dversion=2.0.1', '-Dpackaging=jar')
mvn('package', '-DskipTests')
install_tree('.', prefix)
| Python | 0 | |
b7baf1e53f24bb96a0b09e9305f5f1e562cf3547 | Create analog_tester.py | home/moz4r/analog_tester.py | home/moz4r/analog_tester.py | arduino = Runtime.createAndStart("arduino","Arduino")
arduino.setBoardNano()
arduino.connect("COM6")
arduino.setAref("DEFAULT")
def publishPin(pins):
for pin in range(0, len(pins)):print(pins[pin].value)
arduino.addListener("publishPinArray","python","publishPin")
#arduino.enablePin(pinAddress, rate)
#analog pin range are 14-18 on uno, 54-70 on mega
#rate is the number of polling / sec
arduino.enablePin(14, 1)
| Python | 0.000009 | |
17956f008eabcca80dbacdb20e92b819b87d0f57 | Create homework-1-cryptanalysis.py | homework-1-cryptanalysis.py | homework-1-cryptanalysis.py | #! usr/bin/env python3
ORIGINAL_CIPHERTEXT = "\
NAGQNXIIZAGBGIIYXQOMQUGQUZAXTNGMYXQGTTASNISQO\
AMFGZAGEZVOOGUZAGIGMTAMQUTZYMXQGUMCMYZDECMLWS\
RVQYVIEASVQUTXLMQQSZTZMYZZAGDMOMXQSQMPVMYYESR\
WQSNIGUOGZAGEAMZGZSAVQZXLMQAMVIZAGDMQUVYOGZAG\
DQSDSYGQSDSYGLMQXGQUVYGZSBGMYZAGBYVQZSRZAGBSS\
WTZAMZIXGSVZSQZAGUGTWTMRVIIZAYGGTLSYGSRTGFGYM\
IXQTVIZTSRBISZZGUCMOGTMQUTLYMNISRTISFGQIENSYW\
ZAMZZAGEAMFGSRRGYGUDGXMDTXLWMQUZXYGUDSYGZAMQM\
QEZAYMIIVCSQZAGNSSUTZMLWTNSYWXQONGMYXGUIE"
translate_dict = {
'A' : 'h',
'B' : 'b',
'C' : 'p',
'D' : 'm',
'E' : 'y',
'F' : 'v',
'G' : 'e',
'H' : ' ', # H is not present in the ciphertext
'I' : 'l',
'J' : ' ', # J is not present in the ciphertext
'K' : ' ', # H is not present in the ciphertext
'L' : 'c',
'M' : 'a',
'N' : 'w',
'O' : 'g',
'P' : 'q', # Only used once
'Q' : 'n',
'R' : 'f',
'S' : 'o',
'T' : 's',
'U' : 'd',
'V' : 'u',
'W' : 'k',
'X' : 'i',
'Y' : 'r',
'Z' : 't'
}
def print_letter_distribution(ciphertext):
"""
Prints the letter distribution of ciphertext from
highest frequency to lowest.
"""
distribution = dict()
for character in ciphertext:
# Keep a running total in 'distribution' of character count
if character not in distribution:
distribution[character] = 1
else:
distribution[character] += 1
# Print the number of unique letters in the ciphertext
print("Unique characters in ciphertext:", len(distribution), '\n')
# Print the highest frequency letters first
for char in sorted(distribution, key=distribution.get, reverse=True):
print(char,'has a frequency of',distribution[char]/4.01)
print()
def translate(ciphertext):
"""
Translate the ciphertext using the
translate_dict as a substitution table.
"""
plaintext = ''
for character in ciphertext:
plaintext += translate_dict[character]
return plaintext
def count_digraphs(ciphertext):
"""
Count and print the most frequent 2-letter
combinations in the ciphertext.
"""
# Zip the ciphertext with an offset to get 2char elements
two_letter_set = [x+y for x,y in zip(*[ciphertext[i:]
for i in range(2)])]
digraph_frequency = dict()
for digraph in two_letter_set:
if digraph not in digraph_frequency:
digraph_frequency[digraph] = 1
else:
digraph_frequency[digraph] += 1
print('2-letter sequences:')
for digraph in sorted(digraph_frequency,
key=digraph_frequency.get,
reverse=True):
if digraph_frequency[digraph] > 5:
print(digraph, digraph_frequency[digraph])
print()
def count_trigraphs(ciphertext):
"""
Count and print the most frequent
3-letter combinations in ciphertext.
"""
# Zip the ciphertext with an offset to get 3char elements
three_letter_set = [x+y+z for x,y,z in zip(*[ciphertext[i:]
for i in range(3)])]
trigraph_frequency = dict()
for trigraph in three_letter_set:
if trigraph not in trigraph_frequency:
trigraph_frequency[trigraph] = 1
else:
trigraph_frequency[trigraph] += 1
print('3 letter sequences:')
for trigraph in sorted(trigraph_frequency,
key=trigraph_frequency.get,
reverse=True):
if trigraph_frequency[trigraph] > 3:
print(trigraph, trigraph_frequency[trigraph])
print()
print('Original ciphertext:\n', ORIGINAL_CIPHERTEXT, '\n')
print("Total lengths of ciphertext:", len(ORIGINAL_CIPHERTEXT))
print_letter_distribution(ORIGINAL_CIPHERTEXT)
count_digraphs(ORIGINAL_CIPHERTEXT)
count_trigraphs(ORIGINAL_CIPHERTEXT)
print('Translation:\n', translate(ORIGINAL_CIPHERTEXT))
| Python | 0.000001 | |
5e6cfc84a4b34a292281ea466bf11facb680e72b | initialize radix sort file | radix_sort.py | radix_sort.py | def radix_sort(array):
"""
Sorts an array of numbers using the least signficant digit radix algorithm.
"""
if __name__ == '__main__':
print radix_sort.func_doc
| Python | 0.000001 | |
3e54e311a747b1e032384c7a74a8ed9aeafe1e8d | Fix constructor for Bernoulli node | bayespy/inference/vmp/nodes/bernoulli.py | bayespy/inference/vmp/nodes/bernoulli.py | ######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
A module for the Bernoulli distribution node
"""
import numpy as np
from .binomial import (BinomialMoments,
BinomialDistribution,
Binomial)
class BernoulliMoments(BinomialMoments):
"""
Class for the moments of Bernoulli variables.
"""
def __init__(self):
super().__init__(1)
class BernoulliDistribution(BinomialDistribution):
"""
Class for the VMP formulas of Bernoulli variables.
"""
def __init__(self):
super().__init__(1)
class Bernoulli(Binomial):
"""
Node for Bernoulli random variables.
"""
_moments = BernoulliMoments()
_distribution = BernoulliDistribution()
@classmethod
def _constructor(cls, p, **kwargs):
"""
Constructs distribution and moments objects.
"""
p = cls._ensure_moments(p, cls._parent_moments[0])
parents = [p]
return ( parents,
kwargs,
( (), ),
cls._total_plates(kwargs.get('plates'),
cls._distribution.plates_from_parent(0, p.plates)),
cls._distribution,
cls._moments,
cls._parent_moments)
def show(self):
"""
Print the distribution using standard parameterization.
"""
p = 1 / (1 + np.exp(-self.phi[0]))
print("%s ~ Bernoulli(p)" % self.name)
print(" p = ")
print(p)
| ######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
A module for the Bernoulli distribution node
"""
import numpy as np
from .binomial import (BinomialMoments,
BinomialDistribution,
Binomial)
class BernoulliMoments(BinomialMoments):
"""
Class for the moments of Bernoulli variables.
"""
def __init__(self):
super().__init__(1)
class BernoulliDistribution(BinomialDistribution):
"""
Class for the VMP formulas of Bernoulli variables.
"""
def __init__(self):
super().__init__(1)
class Bernoulli(Binomial):
"""
Node for Bernoulli random variables.
"""
_moments = BernoulliMoments()
_distribution = BernoulliDistribution()
def __init__(self, p, **kwargs):
super().__init__(1, p, **kwargs)
def show(self):
"""
Print the distribution using standard parameterization.
"""
p = 1 / (1 + np.exp(-self.phi[0]))
print("%s ~ Bernoulli(p)" % self.name)
print(" p = ")
print(p)
| Python | 0 |
0a16a2002e1247ad87a877de6aa85bb0844dc9c4 | tag tweaks | boards/NRF51TAG.py | boards/NRF51TAG.py | #!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# This file contains information for a specific board - the available pins, and where LEDs,
# Buttons, and other in-built peripherals are. It is used to build documentation as well
# as various source and header files for Espruino.
# ----------------------------------------------------------------------------------------
import pinutils;
info = {
'name' : "nRF51 Tag",
'link' : [ "" ],
'default_console' : "EV_BLUETOOTH",
# 'default_console_tx' : "D15",
# 'default_console_rx' : "D17",
# 'default_console_baudrate' : "9600",
'variables' : 150,
'binary_name' : 'espruino_%v_nrf51tag.bin',
'build' : {
'defines' : [
'USE_BLUETOOTH'
]
}
};
chip = {
'part' : "NRF51822",
'family' : "NRF51",
'package' : "QFN48",
'ram' : 16,
'flash' : 256,
'speed' : 16,
'usart' : 1,
'spi' : 1,
'i2c' : 1,
'adc' : 1,
'dac' : 0,
# If using DFU bootloader, it sits at 0x3C000 - 0x40000 (0x40000 is end of flash)
# Might want to change 256 -> 240 in the code below
'saved_code' : {
'address' : ((256 - 3 - 16) * 1024),
'page_size' : 1024,
'pages' : 3,
'flash_available' : (256 - 108 - 16 - 3) # total flash pages - softdevice - bootloader - saved code
}
};
devices = {
# 'LED1' : { 'pin' : 'D22' },
# 'LED2' : { 'pin' : 'D21' },
# 'LED3' : { 'pin' : 'D23' }
};
def get_pins():
pins = pinutils.generate_pins(0,31) # 32 General Purpose I/O Pins.
pinutils.findpin(pins, "PD27", True)["functions"]["XL1"]=0;
pinutils.findpin(pins, "PD26", True)["functions"]["XL2"]=0;
#The boot/reset button will function as a reset button in normal operation. Pin reset on PD21 needs to be enabled on the nRF52832 device for this to work.
return pins
| #!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# This file contains information for a specific board - the available pins, and where LEDs,
# Buttons, and other in-built peripherals are. It is used to build documentation as well
# as various source and header files for Espruino.
# ----------------------------------------------------------------------------------------
import pinutils;
info = {
'name' : "nRF51 Tag",
'link' : [ "" ],
'default_console' : "EV_SERIAL1",
'default_console_tx' : "D15",
'default_console_rx' : "D17",
'default_console_baudrate' : "9600",
'variables' : 310,
'binary_name' : 'espruino_%v_nrf51tag.bin',
'build' : {
'defines' : [
'USE_BLUETOOTH'
]
}
};
chip = {
'part' : "NRF51822",
'family' : "NRF51",
'package' : "QFN48",
'ram' : 16,
'flash' : 256,
'speed' : 16,
'usart' : 1,
'spi' : 1,
'i2c' : 1,
'adc' : 1,
'dac' : 0,
# If using DFU bootloader, it sits at 0x3C000 - 0x40000 (0x40000 is end of flash)
# Might want to change 256 -> 240 in the code below
'saved_code' : {
'address' : ((256 - 3) * 1024),
'page_size' : 1024,
'pages' : 3,
'flash_available' : (256 - 108 - 16) # total flash pages - softdevice - bootloader
}
};
devices = {
# 'LED1' : { 'pin' : 'D22' },
# 'LED2' : { 'pin' : 'D21' },
# 'LED3' : { 'pin' : 'D23' }
};
def get_pins():
pins = pinutils.generate_pins(0,31) # 32 General Purpose I/O Pins.
pinutils.findpin(pins, "PD27", True)["functions"]["XL1"]=0;
pinutils.findpin(pins, "PD26", True)["functions"]["XL2"]=0;
#The boot/reset button will function as a reset button in normal operation. Pin reset on PD21 needs to be enabled on the nRF52832 device for this to work.
return pins
| Python | 0.000001 |
e96832e16a6e5746faeaf647c6cd681f1d2f9bca | Create break_fracmorse.py | break_fracmorse.py | break_fracmorse.py | # usage: python break_fracmorse.py 'CIPHERTEXTMESSAGE'
# ideally you'll want 200 or so characters to reliably decrypt, shorter will often work but not as reliably.
import random
from ngram_score import ngram_score
import re
import sys
from pycipher import FracMorse
#ctext = FracMorse('PQRSTUVWXYZABCDEFGHIJKLMNO').encipher("He has not been returned to sea because of his affection for caregivers.The waitress pointed to the lunch menu, but the oldest living ex-major leaguer had no use for it")
fitness = ngram_score('fmorse_quadgrams.txt') # load our quadgram model
# helper function, converts an integer 0-25 into a character
def i2a(i): return 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[i%26]
# decipher a piece of text using the substitution cipher and a certain key
def sub_decipher(text,key):
invkey = [i2a(key.index(i)) for i in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
ret = ''
for c in text:
if c.isalpha(): ret += invkey[ord(c.upper())-ord('A')]
else: ret += c
return ret
# This code is just the simple substitution cipher cracking code, it works perfectly for fractionated morse as
# long as you use fractioned morse statistics instead of english statistics.
def break_simplesub(ctext,startkey=None):
''' perform hill-climbing with a single start. This function may have to be called many times
to break a substitution cipher. '''
# make sure ciphertext has all spacing/punc removed and is uppercase
ctext = re.sub('[^A-Z]','',ctext.upper())
parentkey,parentscore = startkey or list('ABCDEFGHIJKLMNOPQRSTUVWXYZ'),-99e99
if not startkey: random.shuffle(parentkey)
parentscore = fitness.score(sub_decipher(ctext,parentkey))
count = 0
while count < 1000:
a = random.randint(0,25)
b = random.randint(0,25)
child = parentkey[:]
# swap two characters in the child
child[a],child[b] = child[b],child[a]
score = fitness.score(sub_decipher(ctext,child))
# if the child was better, replace the parent with it
if score > parentscore:
parentscore, parentkey = score, child[:]
count = 0 # reset the counter
count += 1
return parentscore, parentkey
ctext = sys.argv[1]
ctext = re.sub(r'[^A-Z ]','',ctext.upper())
maxscore, maxkey = break_simplesub(ctext,list('ABCDEFGHIJKLMNOPQRSTUVWXYZ'))
print str(maxscore),'simplesub key:',''.join(maxkey), 'decrypt: ',sub_decipher(ctext,maxkey)
for i in range(1000):
score, key = break_simplesub(ctext)
if score > maxscore:
maxscore,maxkey = score,key[:]
print str(maxscore),'FractionatedMorse key:',''.join(maxkey), 'decrypt: ',FracMorse(maxkey).decipher(ctext)
| Python | 0.998165 | |
decf4b1916a421fe996a31feb131b7ed9e4e3c36 | Add a simple benchmark script | numpy-benchmark-one.py | numpy-benchmark-one.py | import timeit
normal_py_sec = timeit.timeit('sum (x*x for x in xrange(1000))',number = 10000)
naive_np_sec = timeit.timeit('sum(na*na)',setup='import numpy as np; na=np.arange(1000)', number = 10000)
good_np_sec = timeit.timeit('na.dot(na)',setup='import numpy as np; na=np.arange(1000)', number = 10000)
print("Normal Python: %f sec"%normal_py_sec)
print("Naive Numpy : %f sec"%naive_np_sec)
print("Good Numpy : %f sec"%good_np_sec)
| Python | 0.000001 | |
cb517a2cd1dea12fadf4f72147fecf0105cbd717 | include missing Message | palaverapi/message.py | palaverapi/message.py | # Adapted from https://github.com/kylef/irctk/blob/master/irctk/message.py
from typing import List, Optional
class Message:
@classmethod
def parse(cls, string: str) -> 'Message':
prefix = None
parameters = []
if string.startswith('@'):
_, string = string[1:].split(' ', 1)
if string.startswith(':'):
prefix, string = string.split(' ', 1)
prefix = prefix[1:]
if ' ' in string:
command, string = string.split(' ', 1)
else:
command = string
string = ''
while len(string) != 0:
if string[0] == ':':
parameters.append(string[1:])
string = ''
elif ' ' in string:
parameter, string = string.split(' ', 1)
parameters.append(parameter)
else:
parameters.append(string)
string = ''
return cls(prefix, command, parameters)
def __init__(
self,
prefix: str = None,
command: str = '',
parameters: List[str] = None,
):
self.prefix = prefix
self.command = command
self.parameters = parameters or []
def get(self, index: int) -> Optional[str]:
if index >= len(self.parameters):
return None
return self.parameters[index]
| Python | 0.000027 | |
7aaf42a7b129ba5b9548db0b2a71a095246aeac9 | Add Py3 compatibility support helpers | paramiko/py3compat.py | paramiko/py3compat.py | import sys
__all__ = ['PY3', 'string_types', 'integer_types', 'text_type', 'bytes_type', 'long', 'input', 'bytestring', 'byte_ord', 'byte_chr', 'byte_mask', 'b', 'u', 'StringIO', 'BytesIO', 'is_callable', 'MAXSIZE', 'next']
PY3 = sys.version_info[0] >= 3
if PY3:
import collections
import struct
string_types = str
integer_types = int
text_type = str
bytes_type = bytes
long = int
input = input
def bytestring(s):
return s
def byte_ord(c):
assert isinstance(c, int)
return c
def byte_chr(c):
assert isinstance(c, int)
return struct.pack('B', c)
def byte_mask(c, mask):
assert isinstance(c, int)
return struct.pack('B', c & mask)
def b(s, encoding='utf8'):
"""cast unicode or bytes to bytes"""
if isinstance(s, bytes):
return s
elif isinstance(s, str):
return s.encode(encoding)
else:
raise TypeError("Expected unicode or bytes, got %r" % s)
def u(s, encoding='utf8'):
"""cast bytes or unicode to unicode"""
if isinstance(s, bytes):
return s.decode(encoding)
elif isinstance(s, str):
return s
else:
raise TypeError("Expected unicode or bytes, got %r" % s)
import io
StringIO = io.StringIO # NOQA
BytesIO = io.BytesIO # NOQA
def is_callable(c):
return isinstance(c, collections.Callable)
def get_next(c):
return c.__next__
next = next
MAXSIZE = sys.maxsize # NOQA
else:
string_types = basestring
integer_types = (int, long)
text_type = unicode
bytes_type = str
long = long
input = raw_input
def bytestring(s): # NOQA
if isinstance(s, unicode):
return s.encode('utf-8')
return s
byte_ord = ord # NOQA
byte_chr = chr # NOQA
def byte_mask(c, mask):
return chr(ord(c) & mask)
def b(s, encoding='utf8'): # NOQA
"""cast unicode or bytes to bytes"""
if isinstance(s, str):
return s
elif isinstance(s, unicode):
return s.encode(encoding)
else:
raise TypeError("Expected unicode or bytes, got %r" % s)
def u(s, encoding='utf8'): # NOQA
"""cast bytes or unicode to unicode"""
if isinstance(s, str):
return s.decode(encoding)
elif isinstance(s, unicode):
return s
else:
raise TypeError("Expected unicode or bytes, got %r" % s)
try:
import cStringIO
StringIO = cStringIO.StringIO # NOQA
except ImportError:
import StringIO
StringIO = StringIO.StringIO # NOQA
BytesIO = StringIO
def is_callable(c): # NOQA
return callable(c)
def get_next(c): # NOQA
return c.next
def next(c):
return c.next()
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1) # NOQA
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1) # NOQA
del X
| Python | 0 | |
6db9688d7c078c8cf8d1b17305e89bb680a46e53 | Create lc1001.py | LeetCode/lc1001.py | LeetCode/lc1001.py | xs = {}
ys = {}
fs = {}
ss = {}
grid = {}
def checkdic(xs, x):
if x not in xs:
xs[x] = set()
def checkempty(xs, x):
if x not in xs:
return 1
if len(xs[x]) == 0:
return 1
return 0
def remove(x, y):
if x not in grid:
return
if y not in grid[x]:
return
grid[x].remove(y)
xs[x].remove((x,y))
ys[y].remove((x,y))
fs[x+y].remove((x,y))
ss[x-y].remove((x,y))
class Solution:
def gridIllumination(self, N: int, lamps: List[List[int]], queries: List[List[int]]) -> List[int]:
global xs, ys, fs, ss, grid
xs = {}
ys = {}
fs = {}
ss = {}
grid = {}
n = len(lamps)
for l in lamps:
x = l[0]
y = l[1]
if x not in grid:
grid[x] = {y}
else:
grid[x].add(y)
checkdic(xs, x)
checkdic(ys, y)
checkdic(fs, x + y)
checkdic(ss, x - y)
xs[x].add((x,y))
ys[y].add((x,y))
fs[x+y].add((x,y))
ss[x-y].add((x,y))
ans = []
for q in queries:
x = q[0]
y = q[1]
tmp = checkempty(xs, x) and checkempty(ys, y) and checkempty(fs, x+y) and checkempty(ss, x-y)
if tmp:
ans.append(0)
else:
ans.append(1)
remove(x, y-1)
remove(x, y)
remove(x, y+1)
remove(x-1, y-1)
remove(x-1, y)
remove(x-1, y+1)
remove(x+1, y-1)
remove(x+1, y)
remove(x+1, y+1)
return ans
| Python | 0.000003 | |
b7762c1b8bc987ed1b72ba0db0dbf47894c2e931 | add score | StuScore/Score.py | StuScore/Score.py | #!/usr/bin/env python
# coding:utf-8
from bs4 import BeautifulSoup
import requests
import re
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
def login(username, pswd='0'):
    '''
    Simulate a login to the academic-affairs web system.
    :param username: student account number
    :param pswd: account password (the site default is '0')
    :return: a logged-in requests session on success, False on failure
    '''
    login_url = 'http://219.242.68.33/Login.aspx'
    # Hidden ASP.NET view-state fields captured from the login page; the
    # server rejects the POST without them.
    # NOTE(review): the name is presumably a typo for "form_data".
    from_data = {
        "ToolkitScriptManager1_HiddenField": "",
        "__EVENTTARGET": "",
        "__EVENTARGUMENT": "",
        "__VIEWSTATE": "/wEPDwUKMTY0Njg4MjEwM2Rkj+Af8kaVOxsefGZECk5PM6rOOYgs0taVhQxQSxoC298=",
        "__VIEWSTATEGENERATOR": "C2EE9ABB",
        "__EVENTVALIDATION": "/wEWCQKK9JioBQLB2tiHDgK1qbSRCwLB9fLCCQKVwf3jAwL7jJeqDQK2yLNyAoyp3LQNAoLch4YM4/7Gzd6qXWcFlpTQVOKRLsJcEeZ1kj5lh7u9AQrHyms=",
        "txtUser": username,
        "txtPassword": pswd,
        "rbLx": "学生",
        "btnLogin": " 登 录 "
    }
    header = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.86 Safari/537.36"
    }
    s = requests.session()
    response = s.post(url=login_url, data=from_data, headers=header)
    response_text = response.text
    # Login outcome is detected by sniffing the returned page for marker
    # strings ("personal info" on success, "wrong password" on failure).
    if response_text.find('个人资料') > 0:
        print '登录成功!'
        return s
    elif response_text.find('密码不正确') > 0:
        print '密码错误...请重试...'
        return False
    else:
        print '登录失败...请重试...'
        return False
def get_ifo(sess):
    '''
    Fetch and print the student's profile page using the logged-in session.
    :param sess: requests session returned by login()
    :return: None (the profile is printed, not returned)
    '''
    ifo_url = 'http://219.242.68.33/xuesheng/xsxx.aspx'
    html = sess.get(ifo_url)
    soup = BeautifulSoup(html.text, 'lxml')
    data = {}
    # The 'a.'..'e.' key prefixes only exist so sorted() below prints the
    # fields in a fixed order.
    data['a.姓名'] = soup.find(id="ctl00_ContentPlaceHolder1_lblXm").text
    data['b.身份证号'] = soup.find(id="ctl00_ContentPlaceHolder1_lblSfz").text
    data['c.学号'] = soup.find(id="ctl00_ContentPlaceHolder1_lblXh").text
    data['d.班级'] = soup.find(id="ctl00_ContentPlaceHolder1_className").text
    data['e.院系'] = soup.find(id="ctl00_ContentPlaceHolder1_collegeName").text
    for item in sorted(data):
        print '{0}:{1}{2}'.format(item, '-'*5, data[item])
def get_score(username, pswd='0'):
    # TODO: not implemented.
    # NOTE(review): main() calls this as get_score(sess, username, pswd),
    # which does not match this signature -- confirm the intended
    # parameters before implementing.
    pass
def elective(username, pswd):
    # TODO: not implemented.
    # NOTE(review): main() calls this as elective(sess, username, pswd),
    # which does not match this signature -- confirm before implementing.
    pass
def Quit():
    '''
    Print a farewell message when leaving the interactive menu.
    :return: None
    '''
    print 'Quited...'
def main():
    """Interactive entry point: log in, then loop over the text menu."""
    prompt = '''
    +===========================+
    |       [1]查成绩           |
    |       [2]个人信息         |
    |       [3]选修课           |
    |       [4]登录其他账号     |
    |       [5]安全退出         |
    +===========================+
    >>> '''
    username = raw_input('学号: ')
    pswd = raw_input('密码: ')
    sess = login(username, pswd)
    if sess:
        choice = True
        while choice:
            # NOTE(review): .strip()[0] raises IndexError when the user
            # just presses Enter; consider guarding against empty input.
            usr_choice = raw_input('\r'+prompt).strip()[0]
            if usr_choice == '1':
                get_score(sess, username, pswd)
            elif usr_choice == '2':
                get_ifo(sess)
            elif usr_choice == '3':
                elective(sess, username, pswd)
            elif usr_choice == '4':
                # Re-login as another account via recursion, then leave
                # this (outer) menu loop.
                main()
                break
            elif usr_choice == '5':
                Quit()
                break
            else:
                print 'Input incorrect..again!'
    else:
        # Login failed: offer a retry. NOTE(review): "Cotinue" is a typo
        # in the user-facing prompt.
        cho = raw_input('Cotinue or not [n/y]: ').strip()[0]
        if cho == 'y':
            main()
        else:
            Quit()
if __name__ == '__main__':
main()
| Python | 0.998398 | |
98aee2af9aa3f7dcc75969f1ec3118c40539793e | Add clone of Haskell version | pandoc-include-code.py | pandoc-include-code.py | #! /usr/bin/env python3
from sys import stdout, stderr, exit
import json
def walktransform(tree):
    """Recursively walk a Pandoc JSON AST fragment, replacing every
    CodeBlock that carries an ``include`` attribute with the contents of
    the referenced file.

    Lists are rebuilt element-wise; CodeBlock dicts are rewritten; any
    other dict node is returned unchanged (code blocks nested inside such
    nodes are currently not transformed). Exits the process with a
    diagnostic message on malformed input.
    """
    if isinstance(tree, list):
        return [walktransform(subtree) for subtree in tree]
    if not isinstance(tree, dict):
        # sys.exit accepts a single status/message argument only, so the
        # diagnostic must be pre-formatted into one string.
        exit('Unsupported AST node %r' % type(tree))
    if tree.get('t') != 'CodeBlock':
        # Pass non-code nodes through untouched instead of dropping them
        # (the previous implicit "return None" erased them from the AST).
        return tree
    (_, _, meta, *_), code = tree.get('c', [[None, None, None], ''])
    if code.strip():
        # An include block must be empty; inline code is a user error.
        exit('Code in block:\n%s' % code)
    includes = [v for k, v in meta if k == 'include']
    if len(includes) > 1:
        exit('Too many includes: %s' % ', '.join(includes))
    if not includes:
        exit('No file to include: %r' % (meta,))
    with open(includes[0]) as fp:
        code = fp.read()
    # TODO: https://github.com/owickstrom/pandoc-include-code#snippets
    # TODO: https://github.com/owickstrom/pandoc-include-code#ranges
    # TODO: https://github.com/owickstrom/pandoc-include-code#dedent
    # TODO: https://github.com/owickstrom/pandoc-include-code#adding-base-url-for-all-codeblock-links # noqa
    return {
        't': 'CodeBlock',
        'c': [
            [
                '',
                [],
                [
                    # TODO: file type
                ],
            ],
            code,
        ],
    }
if __name__ == '__main__':
    from argparse import ArgumentParser, FileType

    argument_parser = ArgumentParser()
    # nargs='?' makes the positional truly optional so the '-' (stdin)
    # default can take effect; without it argparse requires the argument
    # and the default is never used.
    argument_parser.add_argument('ast', type=FileType('r'), nargs='?',
                                 default='-')
    args = argument_parser.parse_args()

    ast = json.load(args.ast)
    # json.load yields a list, so compare against a list -- a tuple would
    # never compare equal and the warning would fire on every run.
    if ast['pandoc-api-version'] != [1, 22]:
        print('Unsupported Pandoc API version',
              '.'.join(map(str, ast['pandoc-api-version'])) + '.',
              'Use at own risk.',
              file=stderr)

    json.dump(walktransform(ast['blocks']), stdout)
| Python | 0 | |
70a6553d9323b3522e492c414b67e76111519368 | Add file to create all files to school census. | scripts/data_download/school_census/create_all_files.py | scripts/data_download/school_census/create_all_files.py | import os
import commands
import time
import logging
import sys
if len(sys.argv) != 3 or (sys.argv[1:][0] not in ['pt', 'en']):
print "ERROR! Use:\n python scripts/data_download/school_census/create_files.py en/pt output_path\n"
exit()
logging.basicConfig(filename=os.path.abspath(os.path.join(sys.argv[2],str(sys.argv[0].split('/')[2]) + '-all-data-download.log' )),level=logging.DEBUG)
for year in range(2007, 2016):
logging.info("python scripts/data_download/higher_education/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year) + "\n")
ret = commands.getoutput("python scripts/data_download/school_census/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year))
logging.info(str(ret) + "\nYear: " + str(year) + " ok =D\n\n") | Python | 0 | |
e8d596ffc074dea152a2d714e9f68bb9e5c75eaa | change serviceClass from CharField to ForeignKey | planetstack/core/models/slice.py | planetstack/core/models/slice.py | import os
from django.db import models
from core.models import PlCoreBase
from core.models import Site
from core.models import User
from core.models import Role
from core.models import DeploymentNetwork
from core.models import ServiceClass
# Create your models here.
class Slice(PlCoreBase):
    # A slice: a named collection of resources owned by a Site, backed by
    # a Keystone tenant and Quantum network/router/subnet objects.
    tenant_id = models.CharField(max_length=200, help_text="Keystone tenant id")
    name = models.CharField(unique=True, help_text="The Name of the Slice", max_length=80)
    enabled = models.BooleanField(default=True, help_text="Status for this Slice")
    omf_friendly = models.BooleanField()
    description=models.TextField(blank=True,help_text="High level description of the slice and expected activities", max_length=1024)
    slice_url = models.URLField(blank=True, max_length=512)
    site = models.ForeignKey(Site, related_name='slices', help_text="The Site this Node belongs too")
    network_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum network")
    router_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum router id")
    subnet_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum subnet id")
    # null=True only so schema evolution can run; save() backfills the
    # default so a NULL serviceClass is never persisted.
    serviceClass = models.ForeignKey(ServiceClass, related_name = "slices", null=True, default=ServiceClass.get_default)

    def __unicode__(self):  return u'%s' % (self.name)

    def save(self, *args, **kwds):
        if self.serviceClass is None:
            # We allowed null=True for serviceClass because Django evolution
            # will fail unless it is allowed. But we really don't want it to
            # ever save None, so fix it up here.
            self.serviceClass = ServiceClass.get_default()
        # Lazily attach an OpenStackManager so importing the model does not
        # require an OpenStack connection.
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        # Mirror the change into OpenStack before persisting locally.
        self.os_manager.save_slice(self)
        super(Slice, self).save(*args, **kwds)

    def delete(self, *args, **kwds):
        # Same lazy-manager pattern as save(): tear down the OpenStack side
        # first, then remove the local row.
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        self.os_manager.delete_slice(self)
        super(Slice, self).delete(*args, **kwds)
class SliceMembership(PlCoreBase):
    # Join table binding a User to a Slice with a given Role; the
    # membership is mirrored into Keystone as a user/tenant role grant.
    user = models.ForeignKey('User', related_name='slice_memberships')
    slice = models.ForeignKey('Slice', related_name='slice_memberships')
    role = models.ForeignKey('Role')

    def __unicode__(self):  return u'%s %s %s' % (self.slice, self.user, self.role)

    def save(self, *args, **kwds):
        # Lazily attach an OpenStackManager (same pattern as Slice.save).
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        # Only push to Keystone when a driver connection is available.
        if self.os_manager.driver:
            self.os_manager.driver.add_user_role(self.user.kuser_id, self.slice.tenant_id, self.role.role_type)
        super(SliceMembership, self).save(*args, **kwds)

    def delete(self, *args, **kwds):
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        # Revoke the Keystone role before deleting the local membership.
        if self.os_manager.driver:
            self.os_manager.driver.delete_user_role(self.user.kuser_id, self.slice.tenant_id, self.role.role_type)
        super(SliceMembership, self).delete(*args, **kwds)
| import os
from django.db import models
from core.models import PlCoreBase
from core.models import Site
from core.models import User
from core.models import Role
from core.models import DeploymentNetwork
# Create your models here.
class Slice(PlCoreBase):
    # Pre-refactor version: serviceClass is a plain CharField with a fixed
    # choice set rather than a ForeignKey to a ServiceClass model.
    tenant_id = models.CharField(max_length=200, help_text="Keystone tenant id")
    name = models.CharField(unique=True, help_text="The Name of the Slice", max_length=80)
    enabled = models.BooleanField(default=True, help_text="Status for this Slice")
    omf_friendly = models.BooleanField()
    description=models.TextField(blank=True,help_text="High level description of the slice and expected activities", max_length=1024)
    slice_url = models.URLField(blank=True, max_length=512)
    site = models.ForeignKey(Site, related_name='slices', help_text="The Site this Node belongs too")
    network_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum network")
    router_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum router id")
    subnet_id = models.CharField(null=True, blank=True, max_length=256, help_text="Quantum subnet id")
    SVC_CLASS_CHOICES = (('besteffort', 'Best Effort'), ('silver', 'Silver'), ('gold','Gold'))
    serviceClass = models.CharField(verbose_name="Service Class",default="besteffort",help_text="The Service Class of this slice", max_length=30, choices=SVC_CLASS_CHOICES)

    def __unicode__(self):  return u'%s' % (self.name)

    def save(self, *args, **kwds):
        # Lazily attach an OpenStackManager and mirror the change into
        # OpenStack before persisting locally.
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        self.os_manager.save_slice(self)
        super(Slice, self).save(*args, **kwds)

    def delete(self, *args, **kwds):
        # Tear down the OpenStack side first, then remove the local row.
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        self.os_manager.delete_slice(self)
        super(Slice, self).delete(*args, **kwds)
class SliceMembership(PlCoreBase):
    # Join table binding a User to a Slice with a given Role; the
    # membership is mirrored into Keystone as a user/tenant role grant.
    user = models.ForeignKey('User', related_name='slice_memberships')
    slice = models.ForeignKey('Slice', related_name='slice_memberships')
    role = models.ForeignKey('Role')

    def __unicode__(self):  return u'%s %s %s' % (self.slice, self.user, self.role)

    def save(self, *args, **kwds):
        # Lazily attach an OpenStackManager; push the role grant to
        # Keystone only when a driver connection is available.
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        if self.os_manager.driver:
            self.os_manager.driver.add_user_role(self.user.kuser_id, self.slice.tenant_id, self.role.role_type)
        super(SliceMembership, self).save(*args, **kwds)

    def delete(self, *args, **kwds):
        if not hasattr(self, 'os_manager'):
            from openstack.manager import OpenStackManager
            setattr(self, 'os_manager', OpenStackManager())
        # Revoke the Keystone role before deleting the local membership.
        if self.os_manager.driver:
            self.os_manager.driver.delete_user_role(self.user.kuser_id, self.slice.tenant_id, self.role.role_type)
        super(SliceMembership, self).delete(*args, **kwds)
| Python | 0 |
91b58112f1c83048511fdab09f9aad58351eb991 | add new package (#23573) | var/spack/repos/builtin/packages/py-pycocotools/package.py | var/spack/repos/builtin/packages/py-pycocotools/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPycocotools(PythonPackage):
    """Official APIs for the MS-COCO dataset."""

    homepage = "https://github.com/cocodataset/cocoapi"
    pypi = "pycocotools/pycocotools-2.0.2.tar.gz"

    version('2.0.2', sha256='24717a12799b4471c2e54aa210d642e6cd4028826a1d49fcc2b0e3497e041f1a')

    depends_on('python', type=('build', 'link', 'run'))
    depends_on('py-setuptools@18.0:', type='build')
    # Cython is needed at build time to generate the C extension.
    depends_on('py-cython@0.27.3:', type='build')
    # numpy is a link-time dependency of the extension module.
    depends_on('py-numpy', type=('build', 'link', 'run'))
    depends_on('py-matplotlib@2.1.0:', type=('build', 'run'))
| Python | 0 | |
638ee09f0f2958a955fbad42368ffc6bb2a2688a | Add minimal REST API script based on flask | pipeline/scripts/bb_pipeline_api.py | pipeline/scripts/bb_pipeline_api.py | #!/usr/bin/env python3
from tempfile import NamedTemporaryFile
import json
from threading import Lock
import numpy as np
from flask import Flask, request
from scipy.misc import imread
from pipeline import Pipeline
from pipeline.objects import Image, Candidates, Saliencies, IDs
from pipeline.pipeline import get_auto_config
# Flask application exposing the detection pipeline over HTTP.
app = Flask(__name__)


def init_pipeline():
    """Build the pipeline: raw image in; candidates, saliencies and
    decoded IDs out. Configuration is resolved via get_auto_config()."""
    pipeline = Pipeline([Image],
                        [Candidates, Saliencies, IDs],
                        **get_auto_config())
    return pipeline


# Single shared pipeline instance. pipeline_lock serialises access --
# presumably the pipeline is not safe for concurrent requests (TODO
# confirm before allowing parallel processing).
pipeline = init_pipeline()
pipeline_lock = Lock()
def jsonify(instance):
    """Convert numpy arrays to plain lists for JSON serialisation; pass
    every other value through unchanged.

    NOTE(review): this shadows flask.jsonify -- consider renaming if
    Flask's helper is ever needed in this module.
    """
    return instance.tolist() if isinstance(instance, np.ndarray) else instance
def process_image(image):
    """Run one image through the shared pipeline (serialised by
    pipeline_lock) and return the results as a JSON string mapping each
    output type name to a JSON-safe value."""
    with pipeline_lock:
        results = pipeline([image])
    payload = {key.__name__: jsonify(value) for key, value in results.items()}
    return json.dumps(payload, ensure_ascii=False)
@app.route('/process', methods=['POST'])
def api_message():
    """POST /process: accept a raw image (application/octet-stream), run
    it through the pipeline and return the results as JSON."""
    print('Retrieving process request')

    if request.headers['Content-Type'] == 'application/octet-stream':
        try:
            with NamedTemporaryFile(delete=True) as f:
                f.write(request.data)
                # Flush and rewind before reading back: the write leaves
                # the file position at EOF, so imread() would otherwise
                # see an empty stream.
                f.flush()
                f.seek(0)
                image = imread(f)
            return process_image(image)
        except Exception as err:
            # Signal failure to the client instead of returning HTTP 200
            # with the error text as the body.
            return '{}'.format(err), 500
    else:
        return "415 Unsupported Media Type", 415
if __name__ == '__main__':
app.run(host='0.0.0.0', port=10000)
| Python | 0 | |
427caaa998ea03bf80a00aaf90833eb910cf909d | Add migration file | webapp/apps/taxbrain/migrations/0061_auto_20171220_1859.py | webapp/apps/taxbrain/migrations/0061_auto_20171220_1859.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import webapp.apps.taxbrain.models
class Migration(migrations.Migration):
    # Adds four nullable fields to TaxSaveInputs: ID_RealEstate_crt,
    # ID_StateLocalTax_crt, and their *_cpi boolean companions.

    dependencies = [
        ('taxbrain', '0060_auto_20171219_2153'),
    ]

    operations = [
        migrations.AddField(
            model_name='taxsaveinputs',
            name='ID_RealEstate_crt',
            field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
        ),
        migrations.AddField(
            model_name='taxsaveinputs',
            name='ID_RealEstate_crt_cpi',
            field=models.NullBooleanField(default=None),
        ),
        migrations.AddField(
            model_name='taxsaveinputs',
            name='ID_StateLocalTax_crt',
            field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),
        ),
        migrations.AddField(
            model_name='taxsaveinputs',
            name='ID_StateLocalTax_crt_cpi',
            field=models.NullBooleanField(default=None),
        ),
    ]
| Python | 0.000001 | |
80d75bad57c8be1b08fbb2129bb0511c633446e2 | Create CertPaIT.py | plugins/feeds/public/CertPaIT.py | plugins/feeds/public/CertPaIT.py | import logging
from datetime import datetime, timedelta
from core.observables import Hash
from core.feed import Feed
from core.errors import ObservableValidationError
class CertPaIt(Feed):
    """Yeti feed pulling malware-sample MD5 hashes from the CERT-PA
    (infosec.cert-pa.it) submission RSS feed."""

    default_values = {
        "frequency": timedelta(minutes=30),
        "name": "CertPaIT",
        "source": "https://infosec.cert-pa.it/analyze/submission.rss",
        "description": "This feed contains data from infosec.cert-pa.it",
    }

    def update(self):
        # Each RSS <item> carries the hash in <title> and the report URL
        # in <link>.
        for entry in self.update_xml('item', ["title", "link"]):
            self.analyze(entry)

    def analyze(self, item):
        """Extract the MD5 from one feed item and record it as a Hash
        observable tagged with this feed as the source."""
        md5 = item['title'].replace("MD5: ", "")
        context = {
            'date_added': datetime.now(),
            'source': self.name,
            'url': item['link'],
        }
        try:
            if not md5:
                return
            hash_data = Hash.get_or_create(value=md5)
            # Only annotate hashes this feed has not processed before.
            if hash_data.new is True or self.name not in hash_data.sources:
                hash_data.add_context(context)
                hash_data.add_source(self.name)
        except ObservableValidationError as e:
            logging.error(e)
| Python | 0 | |
90a467a849bb05cd0922ca0808279bf009657150 | Create reverse_words.py | Google_Code_Jam/2010_Africa/Qualification_Round/B/reverse_words.py | Google_Code_Jam/2010_Africa/Qualification_Round/B/reverse_words.py | #!/usr/bin/python -tt
"""Solves problem B from Google Code Jam Qualification Round Africa 2010
(https://code.google.com/codejam/contest/351101/dashboard#s=p1)
"Reverse Words"
"""
import sys
def main():
    """Reads problem data from stdin and prints answers to stdout.

    Args:
        None

    Returns:
        Nothing
    """
    # Input format: first line is the case count, then one line of
    # space-separated words per case.
    lines = sys.stdin.read().splitlines()
    num_test_cases = int(lines[0])
    test_cases = lines[1:]
    # NOTE(review): assert is stripped under "python -O"; raise an
    # exception instead if this validation must always run.
    assert len(test_cases) == num_test_cases
    i = 1
    for test_case in test_cases:
        # split() with no argument also collapses runs of whitespace.
        words = test_case.split()
        words.reverse()
        print 'Case #%d:' % (i,), ' '.join(words)
        i += 1
if __name__ == '__main__':
main()
| Python | 0.999848 | |
3155a8ab725c1b1535a99229f31008587ceb3e64 | Add continuous finite test | HARK/ConsumptionSaving/tests/test_ConsRiskyContribModel.py | HARK/ConsumptionSaving/tests/test_ConsRiskyContribModel.py | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 26 10:06:51 2021
@author: Mateo
"""
import unittest
from copy import copy
import numpy as np
from HARK.ConsumptionSaving.ConsRiskyAssetModel import (
RiskyContribConsumerType,
init_riskyContrib
)
class test_(unittest.TestCase):
    # Smoke/regression test for RiskyContribConsumerType on a small
    # finite-horizon problem with a continuous contribution share.

    def setUp(self):
        # A set of finite parameters
        self.par_finite = init_riskyContrib.copy()

        # Four period model
        self.par_finite['PermGroFac'] = [2.0, 1.0, 0.1, 1.0]
        self.par_finite['PermShkStd'] = [0.1, 0.1, 0.0, 0.0]
        self.par_finite['TranShkStd'] = [0.2, 0.2, 0.0, 0.0]
        self.par_finite['AdjustPrb'] = [0.5, 0.5, 1.0, 1.0]
        self.par_finite['tau'] = [0.1, 0.1, 0.0, 0.0]
        self.par_finite['LivPrb'] = [1.0, 1.0, 1.0, 1.0]
        self.par_finite['T_cycle'] = 4
        self.par_finite['T_retire'] = 0
        self.par_finite['T_age'] = 4

        # Adjust discounting and returns distribution so that they make
        # sense in a 4-period model (each period spans 15 years).
        self.par_finite['DiscFac'] = 0.95**15
        self.par_finite['Rfree'] = 1.03**15
        self.par_finite['RiskyAvg'] = 1.08**15  # Average return of the risky asset
        self.par_finite['RiskyStd'] = 0.20*np.sqrt(15)  # Standard deviation of (log) risky returns

    def test_finite_cont_share(self):
        # Solve with a continuous contribution share and no value function,
        # then pin a regression value from each solution stage (Reb/Sha/Cns).
        cont_params = copy(self.par_finite)
        cont_params['DiscreteShareBool'] = False
        cont_params['vFuncBool'] = False

        fin_cont_agent = RiskyContribConsumerType(**cont_params)
        fin_cont_agent.solve()

        self.assertAlmostEqual(
            fin_cont_agent.solution[0].stageSols['Reb'].DFuncAdj(3,4), -0.87757204
        )

        self.assertAlmostEqual(
            fin_cont_agent.solution[0].stageSols['Sha'].ShareFuncAdj(5,0.1), 0.10846904
        )

        self.assertAlmostEqual(
            fin_cont_agent.solution[0].stageSols['Cns'].cFunc(3,4,0.1), 2.46055802
        )
| Python | 0.000178 | |
0f6961c10def1f1343c6c31d117e5ca87cefd4b7 | add openvas_vulns migration | alembic/versions/506c8e35ba7c_create_openvas_vuln_table.py | alembic/versions/506c8e35ba7c_create_openvas_vuln_table.py | """create openvas_vuln table
Revision ID: 506c8e35ba7c
Revises: 13b7c3d4c802
Create Date: 2017-07-21 12:19:35.711173
"""
from alembic import op
import sqlalchemy as sa
import datetime
def _get_date():
    # Client-side default for the created_at column.
    # NOTE(review): returns a naive local-time datetime; if rows can be
    # written from hosts in different timezones, consider utcnow() or a
    # server_default -- confirm the intended semantics.
    return datetime.datetime.now()
# revision identifiers, used by Alembic.
revision = '506c8e35ba7c'
down_revision = '13b7c3d4c802'
branch_labels = None
depends_on = None
def upgrade():
    """Create the openvas_vulns table: integer id, unique name, and a
    created_at timestamp defaulted client-side via _get_date."""
    op.create_table('openvas_vulns',
                    sa.Column('id', sa.Integer, primary_key=True, nullable=False),
                    sa.Column('name', sa.Text, unique=True, nullable=False),
                    sa.Column('created_at', sa.TIMESTAMP(timezone=False), default=_get_date))
def downgrade():
    """Revert upgrade() by dropping the openvas_vulns table."""
    op.drop_table('openvas_vulns')
| Python | 0 | |
6ce0d934cfe8b9e93a833ff1d31915ffd14c643d | add new package (#25526) | var/spack/repos/builtin/packages/py-pydantic/package.py | var/spack/repos/builtin/packages/py-pydantic/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPydantic(PythonPackage):
    """Data validation and settings management using Python type hinting."""

    homepage = "https://github.com/samuelcolvin/pydantic"
    pypi = "pydantic/pydantic-1.8.2.tar.gz"

    version('1.8.2', sha256='26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b')

    depends_on('python@3.6.1:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    # dataclasses is a backport, only required on Python < 3.7.
    depends_on('py-dataclasses@0.6:', when='^python@:3.6', type=('build', 'run'))
    depends_on('py-typing-extensions@3.7.4.3:', type=('build', 'run'))
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.