| commit (string, length 40) | subject (string, length 1-3.25k) | old_file (string, length 4-311) | new_file (string, length 4-311) | old_contents (string, length 0-26.3k) | lang (string, 3 classes) | proba (float64, 0-1) | diff (string, length 0-7.82k) |
|---|---|---|---|---|---|---|---|
f13c86833b5fb4c873e1242ab760fa701e3eac7d
|
Update cs251tk/student/markdownify/process_file.py
|
cs251tk/student/markdownify/process_file.py
|
cs251tk/student/markdownify/process_file.py
|
import os
from collections import Iterable
from cs251tk.common import run, flatten
from .truncate import truncate
from .cat import cat
from .pipe import pipe
def get_file(filename, results, options):
file_status, file_contents = cat(filename)
if file_status == 'success':
_, last_edit, _ = run(['git', 'log', '-n', '1', '--pretty=format:%cd', '--', filename])
results['last modified'] = last_edit
if options['hide_contents']:
file_contents = ''
elif options['truncate_contents']:
file_contents = truncate(file_contents, options['truncate_contents'])
if file_status != 'success':
results['missing'] = True
results['other files'] = os.listdir('.')
results['optional'] = options['optional']
return False
results['contents'] = file_contents
return True
def compile_file(filename, steps, results, supporting_dir, basedir, web, student):
server_path = ' '.join([
'-o "{}/server/server_file"'.format(basedir),
'"{}/data/supporting/sd_fun.h"'.format(basedir),
'"{}/data/supporting/sd_fun.o" '.format(basedir),
'"{}/data/supporting/str_util.o"'.format(basedir)
])
for step in steps:
command = step \
.replace('$@', './' + filename) \
.replace('$SUPPORT', supporting_dir) \
.replace('$SERVER', server_path)
cmd, input_for_cmd = pipe(command)
status, compilation, _ = run(cmd, timeout=30, input_data=input_for_cmd)
results['compilation'].append({
'command': command,
'output': compilation,
'status': status,
})
if web:
if status == 'success':
input("{} - {}".format(student, filename))
else:
print("{} - {} COMPILE ERROR".format(student, filename))
if status != 'success':
return False
return True
def test_file(filename, *, spec, results, options, cwd, supporting_dir, interact):
tests = flatten([test_spec['commands']
for test_spec in spec.get('tests', {})
if test_spec['filename'] == filename])
for test_cmd in tests:
if not test_cmd:
continue
test_cmd = test_cmd \
.replace('$@', './' + filename) \
.replace('$SUPPORT', supporting_dir)
test_cmd, input_for_test = pipe(test_cmd)
if os.path.exists(os.path.join(cwd, filename)):
again = True
while again:
status, full_result, again = run(test_cmd,
input_data=input_for_test,
timeout=options['timeout'],
interact=interact)
result = truncate(full_result, options['truncate_output'])
was_truncated = (full_result != result)
results['result'].append({
'command': test_cmd,
'status': status,
'output': result,
'truncated': was_truncated,
'truncated after': options['truncate_output'],
})
else:
results['result'].append({
'command': test_cmd,
'error': True,
'output': '{} could not be found.'.format(filename),
})
return True
def process_file(filename, *, steps, options, spec, cwd, supporting_dir, interact, basedir, student):
steps = steps if isinstance(steps, Iterable) else [steps]
base_opts = {
'timeout': 4,
'truncate_output': 10000, # 10K
'truncate_contents': False,
'optional': False,
'hide_contents': False,
'web': False
}
base_opts.update(options)
options = base_opts
results = {
'filename': filename,
'missing': False,
'compilation': [],
'result': [],
}
should_continue = get_file(filename, results, options)
if not should_continue:
return results
should_continue = compile_file(filename, steps, results, supporting_dir,
basedir, options['web'], student)
if not should_continue or not steps or options['web']:
return results
should_continue = test_file(filename,
spec=spec,
results=results,
options=options,
cwd=cwd,
supporting_dir=supporting_dir,
interact=interact)
if not should_continue:
return results
return results
|
Python
| 0
|
@@ -1103,17 +1103,16 @@
d_fun.o%22
-
'.format
|
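The diff cells use a percent-escaped delta format: `%0A` is a newline, `%22` a double quote, `%7B`/`%7D` braces, `%5B`/`%5D` brackets, and each `@@ -start,len +start,len @@` hunk interleaves context with removed (`-`) and added (`+`) spans. A minimal sketch for decoding one span with the standard library (the sample span is taken from the Flask record below):

```python
from urllib.parse import unquote

# Percent-decode one span from a diff cell: %0A -> '\n', %22 -> '"'.
span = "+ as client:%0A"
print(unquote(span))  # prints '+ as client:' followed by a newline
```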
21bbf9ec71c2d63f5c826dfdc3641927692cb202
|
test test
|
test.py
|
test.py
|
from flask import Flask
import pytest
def test_app():
app = Flask(__name__)
app.testing = True
@app.route("/")
def hello():
return "Hello World!"
# app.run() # this actually works here...
client = app.test_client()
response = client.get("/")
assert response.status_code == 200
assert response.data == "Hello World!"
print response.headers
assert False
|
Python
| 0.000037
|
@@ -220,16 +220,12 @@
-client =
+with
app
@@ -238,17 +238,32 @@
client()
-%0A
+ as client:%0A
resp
@@ -281,24 +281,28 @@
nt.get(%22/%22)%0A
+
assert r
@@ -328,16 +328,20 @@
== 200%0A
+
asse
@@ -375,16 +375,20 @@
World!%22%0A
+
prin
@@ -410,17 +410,16 @@
ers%0A
-%0A
asse
|
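Applying those hunks to the old contents appears to wrap the test client in a context manager and re-indent the assertions under it; a rough reconstruction (the Python 2 print statement is kept as it appears in the record):

```python
from flask import Flask
import pytest

def test_app():
    app = Flask(__name__)
    app.testing = True

    @app.route("/")
    def hello():
        return "Hello World!"

    # app.run() # this actually works here...
    with app.test_client() as client:
        response = client.get("/")
        assert response.status_code == 200
        assert response.data == "Hello World!"
        print response.headers  # Python 2 print statement, as in the record
        assert False
```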
727078f0d7105138310f0870f8ab3a751e0f72da
|
Fix linting issues in test runner
|
test.py
|
test.py
|
# Run all tests in this project
import os
import sys
import unittest
if __name__=="__main__":
loader = unittest.TestLoader()
tests = loader.discover(".", pattern="test_*.py")
runner = unittest.TextTestRunner()
runner.run(tests)
|
Python
| 0.000001
|
@@ -1,6 +1,8 @@
-#
+%22%22%22%0A
Run
@@ -30,30 +30,14 @@
ject
-%0A%0Aimport os%0Aimport sys
+.%0A%22%22%22%0A
%0Aimp
@@ -65,10 +65,12 @@
me__
+
==
+
%22__m
|
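Decoded, the linting fixes replace the leading comment with a module docstring, drop the unused os/sys imports, and space out the `__name__` comparison:

```python
"""
Run all tests in this project.
"""

import unittest

if __name__ == "__main__":
    loader = unittest.TestLoader()
    tests = loader.discover(".", pattern="test_*.py")
    runner = unittest.TextTestRunner()
    runner.run(tests)
```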
31e75472c9a4884f2b2e2c8d634a08d202d1e425
|
make network configuration optional
|
see/context/resources/network.py
|
see/context/resources/network.py
|
# Copyright 2015-2017 F-Secure
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You may
# obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""SEE Network module.
This module provides an API for creating virNetwork objects through libvirt.
Configuration::
{
"configuration": "/path/of/network/configuration.xml",
"dynamic_address":
{
"ipv4": "192.168.0.0",
"prefix": 16,
"subnet_prefix": 24
}
}
The User must specify the path of the libvirt XML configuration.
The following fields in the configuration file are added or replaced.
::
* name
* uuid
* bridge
The user can delegate to SEE the generation of a valid address
for the newly created sub-network.
This is useful for running multiple isolated Environments in the same network.
To do so, the dynamic_address field must be provided specifying the network
address and prefix as well as the prefix for the created sub-network.
SEE will generate a libvirt network with a random IP address in the range
specified by the prefix and sub-prefix. The network will have DHCP server
enabled for the guest virtual machines.
In the given example, SEE will provide a random network in the range
192.168.[0-225].0/24 with DHCP server assigning addresses in the range
192.168.[0-225].[2-255].
Setting dynamic_address and providing a <ip> field in the libvirt XML
configuration will cause RuntimeError to be raised.
"""
import random
import ipaddress
from itertools import count
import xml.etree.ElementTree as etree
import libvirt
from see.context.resources.helpers import subelement
def create(hypervisor, identifier, configuration):
"""Creates a virtual network according to the given configuration.
@param hypervisor: (libvirt.virConnect) connection to libvirt hypervisor.
@param identifier: (str) UUID for the virtual network.
@param configuration: (dict) network configuration.
@return: (libvirt.virNetwork) virtual network.
"""
counter = count()
with open(configuration['configuration']) as xml_file:
xml_config = xml_file.read()
while True:
if 'dynamic_address' in configuration:
address = generate_address(hypervisor,
configuration['dynamic_address'])
xml_string = network_xml(identifier, xml_config, address=address)
else:
xml_string = network_xml(identifier, xml_config)
try:
return hypervisor.networkCreateXML(xml_string)
except libvirt.libvirtError as error:
if next(counter) > MAX_ATTEMPTS:
raise RuntimeError(
"Exceeded failed attempts ({}) to get IP address.".format(
MAX_ATTEMPTS),
"Last error: {}".format(error))
def lookup(domain):
"""Find the virNetwork object associated to the domain.
If the domain has more than one network interface,
the first one is returned.
None is returned if the domain is not attached to any network.
"""
xml = domain.XMLDesc(0)
element = etree.fromstring(xml)
subelm = element.find('.//interface[@type="network"]')
if subelm is not None:
network = subelm.find('.//source').get('network')
hypervisor = domain.connect()
return hypervisor.networkLookupByName(network)
return None
def delete(network):
"""libvirt network cleanup.
@raise: libvirt.libvirtError.
"""
try:
network.destroy()
except libvirt.libvirtError as error:
raise RuntimeError("Unable to destroy network: {}".format(error))
def network_xml(identifier, xml, address=None):
"""Fills the XML file with the required fields.
* name
* uuid
* bridge
* ip
** dhcp
"""
netname = identifier[:8]
network = etree.fromstring(xml)
subelement(network, './/name', 'name', identifier)
subelement(network, './/uuid', 'uuid', identifier)
subelement(network, './/bridge', 'bridge', None, name='virbr-%s' % netname)
if address is not None:
set_address(network, address)
return etree.tostring(network).decode('utf-8')
def set_address(network, address):
"""Sets the given address to the network XML element.
Libvirt bridge will have address and DHCP server configured
according to the subnet prefix length.
"""
if network.find('.//ip') is not None:
raise RuntimeError("Address already specified in XML configuration.")
netmask = str(address.netmask)
ipv4 = str(address[1])
dhcp_start = str(address[2])
dhcp_end = str(address[-2])
ip = etree.SubElement(network, 'ip', address=ipv4, netmask=netmask)
dhcp = etree.SubElement(ip, 'dhcp')
etree.SubElement(dhcp, 'range', start=dhcp_start, end=dhcp_end)
def generate_address(hypervisor, configuration):
"""Generate a valid IP address according to the configuration."""
ipv4 = configuration['ipv4']
prefix = configuration['prefix']
subnet_prefix = configuration['subnet_prefix']
subnet_address = ipaddress.IPv4Network(u'/'.join((str(ipv4), str(prefix))))
net_address_pool = subnet_address.subnets(new_prefix=subnet_prefix)
return address_lookup(hypervisor, net_address_pool)
def address_lookup(hypervisor, address_pool):
"""Retrieves a valid and available network IP address."""
address_pool = set(address_pool)
active_addresses = set(active_network_addresses(hypervisor))
try:
return random.choice(tuple(address_pool - active_addresses))
except IndexError:
raise RuntimeError("All IP addresses are in use")
def active_network_addresses(hypervisor):
"""Query libvirt for the already reserved addresses."""
active = []
for network in hypervisor.listNetworks():
try:
xml = hypervisor.networkLookupByName(network).XMLDesc(0)
except libvirt.libvirtError: # network has been destroyed meanwhile
continue
else:
ip_element = etree.fromstring(xml).find('.//ip')
address = ip_element.get('address')
netmask = ip_element.get('netmask')
active.append(ipaddress.IPv4Network(u'/'.join((address, netmask)),
strict=False))
return active
MAX_ATTEMPTS = 10
|
Python
| 0
|
@@ -878,20 +878,30 @@
he User
-must
+can optionally
specify
@@ -2420,25 +2420,286 @@
r = count()%0A
-%0A
+ xml_config = DEFAULT_NETWORK_XML%0A%0A if not %7B'configuration', 'dynamic_address'%7D & set(configuration.keys()):%0A raise RuntimeError(%0A %22Either configuration or dynamic_address must be specified%22)%0A%0A if 'configuration' in configuration:%0A
with ope
@@ -2745,16 +2745,20 @@
l_file:%0A
+
@@ -7024,8 +7024,83 @@
TS = 10%0A
+DEFAULT_NETWORK_XML = %22%22%22%0A%3Cnetwork%3E%0A %3Cforward mode=%22nat%22/%3E%0A%3C/network%3E%0A%22%22%22%0A
|
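Decoded, the change gives create() a fallback NAT-forwarding template and validates that at least one of the two configuration keys is present; the XML file is then read only when 'configuration' is given. Roughly:

```python
    counter = count()
    xml_config = DEFAULT_NETWORK_XML

    if not {'configuration', 'dynamic_address'} & set(configuration.keys()):
        raise RuntimeError(
            "Either configuration or dynamic_address must be specified")

    if 'configuration' in configuration:
        with open(configuration['configuration']) as xml_file:
            xml_config = xml_file.read()
```

with the default template appended at module level:

```python
DEFAULT_NETWORK_XML = """
<network>
  <forward mode="nat"/>
</network>
"""
```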
9663012fd10b83d30d25b20c22ffafecc4cfaec0
|
Test that invalid parameters raise errors
|
test.py
|
test.py
|
import unittest
from enigma import Enigma, Steckerbrett, Umkehrwalze, Walzen
class RotorTestCase(unittest.TestCase):
def test_rotor_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('E', rotor.encode('A'))
def test_rotor_reverse_encoding(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
self.assertEqual('U', rotor.encode_reverse('A'))
def test_rotor_different_setting(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
ringstellung='B')
self.assertEqual('J', rotor.encode('A'))
self.assertEqual('V', rotor.encode_reverse('A'))
def test_rotor_different_offset(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
offset='B')
self.assertEqual('D', rotor.encode('A'))
self.assertEqual('W', rotor.encode_reverse('A'))
def test_rotor_different_setting_and_offset(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
ringstellung='B', offset='B')
self.assertEqual('I', rotor.encode('A'))
self.assertEqual('X', rotor.encode_reverse('A'))
def test_setting_integer(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
ringstellung=1)
self.assertEqual('J', rotor.encode('A'))
def test_offset_integer(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
offset=1)
self.assertEqual('D', rotor.encode('A'))
def test_rotor_turnover(self):
rotor = Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q')
rotor.advance()
self.assertEqual('J', rotor.encode('A'))
self.assertEqual('V', rotor.encode_reverse('A'))
class ReflectorTestCase(unittest.TestCase):
def test_reflector(self):
reflector = Umkehrwalze(wiring='YRUHQSLDPXNGOKMIEBFZCWVJAT')
self.assertEqual('Y', reflector.encode('A'))
def test_reflector_fails_on_invalid_wiring(self):
self.assertRaises(KeyError, Umkehrwalze,
wiring='YRUHQSLDPXNGOKMIEBFZCWVJA')
self.assertRaises(KeyError, Umkehrwalze,
wiring='YRYHQSLDPXNGOKMIEBFZCWVJAT')
class PlugboardTestCase(unittest.TestCase):
def test_plugboard_swapping(self):
plugboard = Steckerbrett('PO', 'ML', 'IU', 'KJ', 'NH', 'YT', 'GB',
'VF', 'RE', 'DC')
self.assertEqual('O', plugboard.swap('P'))
self.assertEqual('M', plugboard.swap('L'))
def test_plugboard_fails_on_repeated_letter(self):
self.assertRaises(KeyError, Steckerbrett, 'PO', 'PL')
class EnigmaTestCase(unittest.TestCase):
def setUp(self):
self.rotors = (
Walzen(wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q'),
Walzen(wiring='AJDKSIRUXBLHWTMCQGZNPYFVOE', notch='E'),
Walzen(wiring='BDFHJLCPRTXVZNYEIWGAKMUSQO', notch='V'),
)
self.reflector = Umkehrwalze(wiring='YRUHQSLDPXNGOKMIEBFZCWVJAT')
self.plugboard = Steckerbrett('PO', 'ML', 'IU', 'KJ', 'NH', 'YT', 'GB',
'VF', 'RE', 'DC')
def test_enigma_cipher(self):
machine = Enigma(rotors=self.rotors[::-1], reflector=self.reflector)
self.assertEqual('BDZGO', machine.cipher('AAAAA'))
def test_enigma_decipher(self):
machine = Enigma(rotors=self.rotors[::-1], reflector=self.reflector)
self.assertEqual('AAAAA', machine.cipher('BDZGO'))
def test_enigma_full_cycle(self):
machine = Enigma(rotors=self.rotors[::-1], reflector=self.reflector)
machine.cipher('A' * 16900) # this should do a full cycle on rotors
self.assertEqual('BDZGO', machine.cipher('AAAAA'))
def run_tests():
unittest.main()
if __name__ == '__main__': # pragma: no cover
run_tests()
|
Python
| 0.000005
|
@@ -1623,32 +1623,866 @@
r.encode('A'))%0A%0A
+ def test_invalid_parameters(self):%0A # I prefered to use a single test since both parameters hare equal%0A # restrictions, so probably both succeed (or fail) simultaneously%0A self.assertRaises(ValueError, Walzen,%0A wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',%0A ringstellung='AA')%0A self.assertRaises(ValueError, Walzen,%0A wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',%0A ringstellung=26)%0A self.assertRaises(ValueError, Walzen,%0A wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',%0A offset='AA')%0A self.assertRaises(ValueError, Walzen,%0A wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',%0A offset=26)%0A%0A%0A
def test_rot
|
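The single added hunk decodes to one new test method (comments reproduced verbatim from the record):

```python
    def test_invalid_parameters(self):
        # I prefered to use a single test since both parameters hare equal
        # restrictions, so probably both succeed (or fail) simultaneously
        self.assertRaises(ValueError, Walzen,
                          wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
                          ringstellung='AA')
        self.assertRaises(ValueError, Walzen,
                          wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
                          ringstellung=26)
        self.assertRaises(ValueError, Walzen,
                          wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
                          offset='AA')
        self.assertRaises(ValueError, Walzen,
                          wiring='EKMFLGDQVZNTOWYHXUSPAIBRCJ', notch='Q',
                          offset=26)
```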
6761dc2da66ce1ac32d19f0d2395426aba7a7129
|
fix typo
|
test.py
|
test.py
|
#!/usr/bin/env python
# coding: utf-8
#
from wxbot import *
import datetime
superAdmin = {
u'admin' # name of super users who want to get notify
};
notifyGroupName = u"test群"
cmdGroupName = u"test群"
fowordMsg = u"%s 说:\n---------\n%s"
fowordPic = u"%s 发来图片:"
fowordVoice = u"%s 发来语音:"
fowordFile = u"%s 发来文件:"
fowordOther = u"%s 发来:%s"
class MyWXBot(WXBot):
def got_contact(self):
self.superUser = []
for name in superAdmin:
uid = self.get_user_id(name)
if uid:
self.superUser.append(uid);
print "Super user:", name.encode("utf8"), uid
else :
print name.encode("utf8"), "NOT FOUND";
pass
pass
self.notifyToGroup = self.get_user_id(notifyGroupName)
if self.notifyToGroup:
print "Our Group:", notifyGroupName.encode("utf8"), self.notifyToGroup
self.cmdFromGroup = self.get_user_id(cmdGroupName)
if self.cmdFromGroup:
print "CMD Group:", notifyGroupName.encode("utf8"), self.cmdFromGroup
def handle_msg_all(self, msg):
# print "Content", msg['content']
# print "User:", msg['user']
# print "Msg", msg
if not self.handleMoneyInfo(msg):
if msg['msg_type_id'] == 4:
# if msg['user']['id'] in self.superUser :
# print "!!!!! from ADMIN !!!!!";
if not self.handleCommad(msg):
self.handleFoword(msg);
elif msg['msg_type_id'] == 3:
fromGroupId = msg['user']['id'];
content = msg['content']['data'];
contentType = msg['content']['type'];
fromUid = msg['content']['user']['id'];
fromUname = msg['content']['user']['name'];
if fromGroupId == self.cmdFromGroup and contentType == 0:
print fromUname , "CMD"
# print content
# print "================="
self.handleCommadContent(fromGroupId, content);
pass
# if msg['msg_type_id'] == 4 and msg['content']['type'] == 0:
# self.send_msg_by_uid(u'hi', msg['user']['id'])
# self.send_img_msg_by_uid("img/1.png", msg['user']['id'])
# self.send_file_msg_by_uid("img/1.png", msg['user']['id'])
def handleCommad(self, msg):
fromUid = msg['user']['id'];
content = msg['content']['data'];
return False
return self.handleCommadContent(fromUid, content);
def handleCommadContent(self, uid, content):
if content == "auto" :
t = datetime.datetime.now().strftime("%m-%d %H:%M")
self.send_msg_by_uid("我在线哦。\n" + t, uid)
else :
self.send_msg_by_uid("我在线。", uid)
return True;
def handleFoword(self, msg):
fromUid = msg['user']['id'];
fromName = msg['user']['name']
content = msg['content']['data'];
contentType = msg['content']['type'];
file_name = msg['content']['file_name'] if "file_name" in msg['content'] else None;
if contentType == 0:
self.sendToAdmin(fromUid, fowordMsg % (fromName, content))
elif contentType in [3, 4, 6, 8, 13]:
if (contentType == 3 or contentType == 6):
if file_name:
self.sendToAdmin(fromUid, fowordPic % (fromName), file_name, True)
else:
self.sendToAdmin(fromUid, fowordPic % (fromName) + "动画表情")
elif contentType == 4 :
self.sendToAdmin(fromUid, fowordVoice % (fromName), file_name)
pass
elif file_name:
self.sendToAdmin(fromUid, fowordFile % (fromName), file_name)
else :
self.sendToAdmin(fromUid, fowordOther % (fromName, msg['content']))
pass
elif contentType == 7 :
msg = u"%s分享了一个%s链接(来源:%s)\n%s\n%s" % \
(fromName, content['type'], content['from'], content['title'], content['desc'])
self.sendToAdmin(fromUid, msg)
self.sendToAdmin(fromUid, content['url'])
pass
elif contentType == 5 :
msg = u"%s分享了一张名片\n昵称:%s 性别:%s" % \
(fromName, content['nickname'], content['genderCN'])
self.sendToAdmin(fromUid, msg)
pass
# self.send_img_msg_by_uid("img/1.png", msg['user']['id'])
# self.send_file_msg_by_uid("img/1.png", msg['user']['id'])
pass
return False;
def sendToAdmin(self, fromUid, text=None, file_name = None, is_pic = False, to_group = False):
# fromUid = None
if text:
if to_group:
self.send_msg_by_uid(text, self.notifyToGroup)
else :
for au in self.superUser:
if (fromUid != au):
self.send_msg_by_uid(text, au)
pass
if file_name:
file_name = os.path.join(self.temp_pwd,file_name)
if to_group:
if is_pic:
self.send_img_msg_by_uid(file_name, self.notifyToGroup)
else :
self.send_file_msg_by_uid(file_name, self.notifyToGroup)
else :
for au in self.superUser:
if (fromUid != au):
if is_pic:
self.send_img_msg_by_uid(file_name, au)
else:
self.send_file_msg_by_uid(file_name, au)
pass
pass
def handleMoneyInfo(self, msg):
fromUid = msg['user']['id'];
fromName = msg['user']['name'];
say = None
if 'sub_type' in msg['content']:
if msg['content']['sub_type'] == 'payf2f':
say = "\n".join(msg['content']['sub_data'])
if msg['content']['sub_type'] == 'transfer_money':
say = "\n".join(msg['content']['sub_data'])
say = u'%s发起%s\n需要在手机上点击的确认。' %(fromName, say);
if msg['content']['type'] == 12 and u'红包' in msg['content']['data']:
say = u'%s发来红包,需要在手机上点击领取。' %(fromName);
pass
if say :
self.sendToAdmin(fromUid, say, to_group=True)
return True;
return False;
def schedule(self):
print "schedule"
self.handleCommadContent(self.cmdFromGroup, "auto");
return 60 * 60 * 1;
def main():
bot = MyWXBot()
bot.DEBUG = True
bot.conf['qr'] = 'png'
bot.is_big_contact = False #如果确定通讯录过大,无法获取,可以直接配置,跳过检查。假如不是过大的话,这个方法可能无法获取所有的联系人
bot.run()
if __name__ == '__main__':
main()
|
Python
| 0.999991
|
@@ -1017,38 +1017,35 @@
t %22CMD Group:%22,
-notify
+cmd
GroupName.encode
|
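The typo fix decodes to a one-word change in got_contact(): the CMD-group log line was printing the notify group's name:

```python
# before
print "CMD Group:", notifyGroupName.encode("utf8"), self.cmdFromGroup
# after
print "CMD Group:", cmdGroupName.encode("utf8"), self.cmdFromGroup
```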
26fc8789445c22f85467387bec7eeb6eccedc2c5
|
Stop before starting when restarting
|
synapse/app/synctl.py
|
synapse/app/synctl.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import subprocess
import signal
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
CONFIGFILE="homeserver.yaml"
PIDFILE="homeserver.pid"
GREEN="\x1b[1;32m"
NORMAL="\x1b[m"
def start():
if not os.path.exists(CONFIGFILE):
sys.stderr.write(
"No config file found\n"
"To generate a config file, run '%s -c %s --generate-config"
" --server-name=<server name>'\n" % (
" ".join(SYNAPSE), CONFIGFILE
)
)
sys.exit(1)
print "Starting ...",
args = SYNAPSE
args.extend(["--daemonize", "-c", CONFIGFILE, "--pid-file", PIDFILE])
subprocess.check_call(args)
print GREEN + "started" + NORMAL
def stop():
if os.path.exists(PIDFILE):
pid = int(open(PIDFILE).read())
os.kill(pid, signal.SIGTERM)
print GREEN + "stopped" + NORMAL
def main():
action = sys.argv[1] if sys.argv[1:] else "usage"
if action == "start":
start()
elif action == "stop":
stop()
elif action == "restart":
start()
stop()
else:
sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
sys.exit(1)
if __name__=='__main__':
main()
|
Python
| 0.00005
|
@@ -1683,27 +1683,26 @@
:%0A st
-art
+op
()%0A s
@@ -1694,34 +1694,35 @@
top()%0A st
-op
+art
()%0A else:%0A
|
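Decoded, the fix simply swaps the two calls in the restart branch so the old process is stopped before a new one is started:

```python
    elif action == "restart":
        stop()
        start()
```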
57362721b5e01ef7ad7a1ca84e493a24d577ab33
|
Increase VLC intf min ver to 0.1.9
|
syncplay/constants.py
|
syncplay/constants.py
|
#You might want to change these
DEFAULT_PORT = 8999
OSD_DURATION = 3
OSD_WARNING_MESSAGE_DURATION = 15
MPC_OSD_POSITION = 2 #Right corner, 1 for left
MPLAYER_OSD_LEVEL = 1
UI_TIME_FORMAT = "[%X] "
CONFIG_NAMES = [".syncplay", "syncplay.ini"] #Syncplay searches first to last
DEFAULT_CONFIG_NAME_WINDOWS = "syncplay.ini"
DEFAULT_CONFIG_NAME_LINUX = ".syncplay"
RECENT_CLIENT_THRESHOLD = "1.2.7" #This and higher considered 'recent' clients (no warnings)
WARN_OLD_CLIENTS = True #Use MOTD to inform old clients to upgrade
SHOW_OSD = True # Sends Syncplay messages to media player OSD
SHOW_OSD_WARNINGS = True # Show warnings if playing different file, alone in room
SHOW_SLOWDOWN_OSD = True # Show notifications of slowing down / reverting on time difference
SHOW_SAME_ROOM_OSD = True # Show OSD notifications for events relating to room user is in
SHOW_DIFFERENT_ROOM_OSD = False # Show OSD notifications for events relating to room user is not in
LIST_RELATIVE_CONFIGS = True # Print list of relative configs loaded
SHOW_CONTACT_INFO = True # Displays dev contact details below list in GUI
#Changing these might be ok
REWIND_THRESHOLD = 4
SEEK_THRESHOLD = 1
SLOWDOWN_RATE = 0.95
SLOWDOWN_KICKIN_THRESHOLD = 1.5
SLOWDOWN_RESET_THRESHOLD = 0.1
DIFFFERENT_DURATION_THRESHOLD = 2.5
PROTOCOL_TIMEOUT = 12.5
RECONNECT_RETRIES = 10
SERVER_STATE_INTERVAL = 1
WARNING_OSD_MESSAGES_LOOP_INTERVAL = 1
SHOW_REWIND_ON_DESYNC_CHECKBOX = False
SHOW_TOOLTIPS = True
MERGE_PLAYPAUSE_BUTTONS = False
SYNC_ON_PAUSE = True # Client seek to global position - subtitles may disappear on some media players
#Usually there's no need to adjust these
FILENAME_STRIP_REGEX = u"[-~_\.\[\](): ]"
COMMANDS_UNDO = ["u", "undo", "revert"]
COMMANDS_LIST = ["l", "list", "users"]
COMMANDS_PAUSE = ["p", "play", "pause"]
COMMANDS_ROOM = ["r", "room"]
COMMANDS_HELP = ['help', 'h', '?', '/?', r'\?']
MPC_MIN_VER = "1.6.4"
VLC_MIN_VERSION = "2.0.0"
VLC_INTERFACE_MIN_VERSION = "0.1.7"
MPC_PATHS = [
r"C:\Program Files (x86)\MPC-HC\mpc-hc.exe",
r"C:\Program Files\MPC-HC\mpc-hc.exe",
r"C:\Program Files\MPC-HC\mpc-hc64.exe",
r"C:\Program Files\Media Player Classic - Home Cinema\mpc-hc.exe",
r"C:\Program Files\Media Player Classic - Home Cinema\mpc-hc64.exe",
r"C:\Program Files (x86)\Media Player Classic - Home Cinema\mpc-hc.exe",
r"C:\Program Files (x86)\K-Lite Codec Pack\Media Player Classic\mpc-hc.exe",
r"C:\Program Files\K-Lite Codec Pack\Media Player Classic\mpc-hc.exe",
r"C:\Program Files (x86)\Combined Community Codec Pack\MPC\mpc-hc.exe",
r"C:\Program Files\Combined Community Codec Pack\MPC\mpc-hc.exe",
r"C:\Program Files\MPC HomeCinema (x64)\mpc-hc64.exe",
]
MPLAYER_PATHS = ["mplayer2", "mplayer"]
MPV_PATHS = ["mpv", "/opt/mpv/mpv", r"C:\Program Files\mpv\mpv.exe", r"C:\Program Files\mpv-player\mpv.exe", r"C:\Program Files (x86)\mpv\mpv.exe", r"C:\Program Files (x86)\mpv-player\mpv.exe","/Applications/mpv.app/Contents/MacOS/mpv"]
VLC_PATHS = [
r"C:\Program Files (x86)\VideoLAN\VLC\vlc.exe",
r"C:\Program Files\VideoLAN\VLC\vlc.exe",
"/Applications/VLC.app/Contents/MacOS/VLC"
]
VLC_ICONPATH = "vlc.png"
MPLAYER_ICONPATH = "mplayer.png"
MPV_ICONPATH = "mpv.png"
MPC_ICONPATH = "mpc-hc.png"
MPC64_ICONPATH = "mpc-hc64.png"
#Changing these is usually not something you're looking for
PLAYER_ASK_DELAY = 0.1
PING_MOVING_AVERAGE_WEIGHT = 0.85
MPC_OPEN_MAX_WAIT_TIME = 10
MPC_LOCK_WAIT_TIME = 0.2
MPC_RETRY_WAIT_TIME = 0.01
MPC_MAX_RETRIES = 30
MPC_PAUSE_TOGGLE_DELAY = 0.05
VLC_OPEN_MAX_WAIT_TIME = 15
VLC_MIN_PORT = 10000
VLC_MAX_PORT = 55000
#These are not changes you're looking for
MPLAYER_SLAVE_ARGS = [ '-slave', '--hr-seek=always', '-nomsgcolor', '-msglevel', 'all=1:global=4:cplayer=4']
# --quiet works with both mpv 0.2 and 0.3
MPV_SLAVE_ARGS = ['--slave-broken', '--hr-seek=always', '--no-msgcolor', '--quiet']
VLC_SLAVE_ARGS = ['--extraintf=luaintf','--lua-intf=syncplay','--no-quiet','--no-input-fast-seek']
VLC_SLAVE_NONOSX_ARGS = ['--no-one-instance','--no-one-instance-when-started-from-file']
MPLAYER_ANSWER_REGEX = "^ANS_([a-zA-Z_]+)=(.+)$"
VLC_ANSWER_REGEX = r"(?:^(?P<command>[a-zA-Z_]+)(?:\: )?(?P<argument>.*))"
UI_COMMAND_REGEX = r"^(?P<command>[^\ ]+)(?:\ (?P<parameter>.+))?"
UI_OFFSET_REGEX = r"^(?:o|offset)\ ?(?P<sign>[/+-])?(?P<time>\d{1,4}(?:[^\d\.](?:\d{1,6})){0,2}(?:\.(?:\d{1,3}))?)$"
UI_SEEK_REGEX = r"^(?:s|seek)?\ ?(?P<sign>[+-])?(?P<time>\d{1,4}(?:[^\d\.](?:\d{1,6})){0,2}(?:\.(?:\d{1,3}))?)$"
PARSE_TIME_REGEX = r'(:?(?:(?P<hours>\d+?)[^\d\.])?(?:(?P<minutes>\d+?))?[^\d\.])?(?P<seconds>\d+?)(?:\.(?P<miliseconds>\d+?))?$'
SERVER_MAX_TEMPLATE_LENGTH = 10000
PRIVACY_SENDRAW_MODE = "SendRaw"
PRIVACY_SENDHASHED_MODE = "SendHashed"
PRIVACY_DONTSEND_MODE = "DoNotSend"
PRIVACY_HIDDENFILENAME = "**Hidden filename**"
|
Python
| 0
|
@@ -1988,17 +1988,17 @@
= %220.1.
-7
+9
%22%0D%0AMPC_P
|
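The delta decodes to a one-character version bump:

```python
VLC_INTERFACE_MIN_VERSION = "0.1.9"
```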
706ecc0d30217c2dd125def1cc5cb5445b010dfa
|
add a test
|
test.py
|
test.py
|
# -*- coding: utf-8 -*-
import unittest
from slugify import slugify
class TestSlugification(unittest.TestCase):
def test_extraneous_seperators(self):
txt = "This is a test ---"
r = slugify(txt)
self.assertEqual(r, "this-is-a-test")
txt = "___This is a test ---"
r = slugify(txt)
self.assertEqual(r, "this-is-a-test")
txt = "___This is a test___"
r = slugify(txt)
self.assertEqual(r, "this-is-a-test")
def test_non_word_characters(self):
txt = "This -- is a ## test ---"
r = slugify(txt)
self.assertEqual(r, "this-is-a-test")
def test_phonetic_conversion_of_eastern_scripts(self):
txt = '影師嗎'
r = slugify(txt)
self.assertEqual(r, "ying-shi-ma")
def test_accented_text(self):
txt = 'C\'est déjà l\'été.'
r = slugify(txt)
self.assertEqual(r, "cest-deja-lete")
txt = 'Nín hǎo. Wǒ shì zhōng guó rén'
r = slugify(txt)
self.assertEqual(r, "nin-hao-wo-shi-zhong-guo-ren")
def test_accented_text_with_non_word_characters(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt)
self.assertEqual(r, "jaja-lol-mememeoo-a")
def test_cyrillic_text(self):
txt = 'Компьютер'
r = slugify(txt)
self.assertEqual(r, "kompiuter")
def test_max_length(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=9)
self.assertEqual(r, "jaja-lol")
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=15)
self.assertEqual(r, "jaja-lol-mememe")
def test_max_length_cutoff_not_required(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=50)
self.assertEqual(r, "jaja-lol-mememeoo-a")
def test_word_boundary(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=15, word_boundary=True)
self.assertEqual(r, "jaja-lol-a")
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=17, word_boundary=True)
self.assertEqual(r, "jaja-lol-mememeoo")
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=18, word_boundary=True)
self.assertEqual(r, "jaja-lol-mememeoo")
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=19, word_boundary=True)
self.assertEqual(r, "jaja-lol-mememeoo-a")
def test_custom_separator(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=20, word_boundary=True, separator=".")
self.assertEqual(r, "jaja.lol.mememeoo.a")
def test_multi_character_separator(self):
txt = 'jaja---lol-méméméoo--a'
r = slugify(txt, max_length=20, word_boundary=True, separator="ZZZZZZ")
self.assertEqual(r, "jajaZZZZZZlolZZZZZZmememeooZZZZZZa")
def test_save_order(self):
txt = 'one two three four five'
r = slugify(txt, max_length=13, word_boundary=True, save_order=True)
self.assertEqual(r, "one-two-three")
txt = 'one two three four five'
r = slugify(txt, max_length=13, word_boundary=True, save_order=False)
self.assertEqual(r, "one-two-three")
txt = 'one two three four five'
r = slugify(txt, max_length=12, word_boundary=True, save_order=False)
self.assertEqual(r, "one-two-four")
txt = 'one two three four five'
r = slugify(txt, max_length=12, word_boundary=True, save_order=True)
self.assertEqual(r, "one-two")
def test_stopword_removal(self):
txt = 'this has a stopword'
r = slugify(txt, stopwords=['stopword'])
self.assertEqual(r, 'this-has-a')
def test_multiple_stopword_occurances(self):
txt = 'the quick brown fox jumps over the lazy dog'
r = slugify(txt, stopwords=['the'])
self.assertEqual(r, 'quick-brown-fox-jumps-over-lazy-dog')
def test_differently_cased_stopword_match(self):
txt = 'Foo A FOO B foo C'
r = slugify(txt, stopwords=['foo'])
self.assertEqual(r, 'a-b-c')
txt = 'Foo A FOO B foo C'
r = slugify(txt, stopwords=['FOO'])
self.assertEqual(r, 'a-b-c')
def test_multiple_stopwords(self):
txt = 'the quick brown fox jumps over the lazy dog in a hurry'
r = slugify(txt, stopwords=['the', 'in', 'a', 'hurry'])
self.assertEqual(r, 'quick-brown-fox-jumps-over-lazy-dog')
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000109
|
@@ -4465,16 +4465,145 @@
-dog')%0A%0A
+ def test_html_entities(self):%0A txt = 'foo & bar'%0A r = slugify(txt)%0A self.assertEqual(r, 'foo-bar')%0A%0A
%0Aif __na
|
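The added hunk decodes to one new test method, inserted just before the `__main__` guard:

```python
    def test_html_entities(self):
        txt = 'foo & bar'
        r = slugify(txt)
        self.assertEqual(r, 'foo-bar')
```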
58bfae71a37aed9be9e5139ba3a898c394613616
|
Remove non-unittest crap from ladder.py's __main__
|
ladder.py
|
ladder.py
|
#! /usr/bin/env python3
import unittest
class Rank:
def __init__(self, value):
if int(value) == 0:
raise ValueError('Rank values must be a nonzero integer')
self.value = int(value)
def __add__(self, other):
# adding ranks is nonsensical
return NotImplemented
def __sub__(self, other):
# note that this returns an nonnegative integer, NOT a rank (intentionally)
# check whether both values have the same sign
if abs(self.value + other.value) == abs(self.value) + abs(other.value):
return abs(self.value - other.value)
else:
return abs(self.value) + abs(other.value) - 1
# there's gotta be a better way to do this
def __iadd__(self, other):
if other < 0:
self -= abs(other)
elif self.value < 0:
if abs(self.value) > other:
self.value += other
else:
self.value += other + 1
else:
self.value += other
return self
def __isub__(self, other):
if other < 0:
self += abs(other)
elif self.value < 0:
self.value -= other
else:
if self.value > other:
self.value -= other
else:
self.value -= other + 1
return self
def __int__(self, other):
return self.value
def __str__(self):
rank_str = ''
if self.value < 0:
rank_str = '{:d}K'.format(-self.value)
else:
rank_str = '{:d}D'.format(self.value)
return rank_str
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = Rank(rank)
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
return '{:s} {:s}'.format(self.name, self.rank)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} <= self.players():
return False
return True
class RankTestCase(unittest.TestCase):
def test_init(self):
self.assertRaises(ValueError, Rank, 0)
def test_add(self):
with self.assertRaises(TypeError):
value = Rank(1) + Rank(2)
with self.assertRaises(TypeError):
value = Rank(1) + 2
def test_sub(self):
self.assertEqual(Rank(5) - Rank(1), 4)
self.assertEqual(Rank(5) - Rank(5), 0)
self.assertEqual(Rank(5) - Rank(6), 1)
self.assertEqual(Rank(5) - Rank(-3), 7)
self.assertEqual(Rank(1) - Rank(-1), 1)
def test_inc(self):
rank = Rank(1)
rank += 1
self.assertEqual(rank.value, Rank(2).value)
rank += 5
self.assertEqual(rank.value, Rank(7).value)
rank += 0
self.assertEqual(rank.value, Rank(7).value)
rank += -1
self.assertEqual(rank.value, Rank(6).value)
rank += -6
self.assertEqual(rank.value, Rank(-1).value)
rank += -10
self.assertEqual(rank.value, Rank(-11).value)
rank += 15
self.assertEqual(rank.value, Rank(5).value)
def test_dec(self):
rank = Rank(-1)
rank -= 1
self.assertEqual(rank.value, Rank(-2).value)
rank -= 5
self.assertEqual(rank.value, Rank(-7).value)
rank -= 0
self.assertEqual(rank.value, Rank(-7).value)
rank -= -1
self.assertEqual(rank.value, Rank(-6).value)
rank -= -6
self.assertEqual(rank.value, Rank(1).value)
rank -= -10
self.assertEqual(rank.value, Rank(11).value)
rank -= 15
self.assertEqual(rank.value, Rank(-5).value)
def test_str(self):
self.assertEqual(str(Rank(5)), '5D')
self.assertEqual(str(Rank(1)), '1D')
self.assertEqual(str(Rank(-5)), '5K')
self.assertEqual(str(Rank(-1)), '1K')
class LadderTestCase(unittest.TestCase):
def setUp(self):
self.player_one = Player('Andrew', -1)
self.player_two = Player('Walther', 5)
self.player_three = Player('Milan', -6)
self.ladder = Ladder([self.player_one, self.player_two])
def test_match_valid(self):
self.assertTrue(self.ladder.match_valid(self.player_one, self.player_two))
self.assertFalse(self.ladder.match_valid(self.player_one, self.player_three))
self.assertFalse(self.ladder.match_valid(self.player_two, self.player_three))
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
unittest.main()
|
Python
| 0
|
@@ -4940,113 +4940,8 @@
_':%0A
- ladder = Ladder(%5BPlayer('Andrew', -1), Player('Walther', 5), Player('Milan', -6)%5D)%0A print(ladder)%0A
|
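Decoded, the removal leaves the entry point running only the unit tests:

```python
if __name__ == '__main__':
    unittest.main()
```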
56476902b36ec8b9d7bfcaa3b8442eb51745d044
|
Set DISPLAY variable on prelaunched processes so the search UI pops up in the right place.
|
src/prelaunchd.py
|
src/prelaunchd.py
|
# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The prelaunchd's job is to keep a quickopen instance warmed up in the
# background, and service "give me a prelauncher" requests from quickopend
# clients.
import os
import subprocess
import logging
def _is_port_bindable(host, port):
import socket
s = socket.socket()
try:
s.bind((host, port))
except socket.error:
return False
s.close()
return True
class PrelaunchedProcess(object):
def __init__(self, proc, port):
if not isinstance(proc, subprocess.Popen):
raise "Expected subprocess"
self.proc = proc
self.port = port
@property
def pid(self):
return self.proc.pid
def poll(self):
return self.proc.poll()
def kill(self):
self.proc.kill()
class PrelaunchDaemon(object):
def __init__(self, server):
server.add_json_route('/existing_quickopen/(.+)', self.get_existing_quickopen, ['GET'])
server.exit.add_listener(self._on_exit)
server.lo_idle.add_listener(self._join_in_use_processes)
self._quickopen = {}
self._in_use_processes = []
self._next_control_port = 27412
def _get_another_control_port(self):
self._next_control_port += 1
for i in range(100):
self._next_control_port += 1
if not _is_port_bindable("", self._next_control_port):
continue
return self._next_control_port
raise Exception("Could not find open control port")
def _launch_new_quickopen(self, display):
assert display not in self._quickopen
quickopen_script = os.path.join(os.path.dirname(__file__), "../quickopen")
assert os.path.exists(quickopen_script)
control_port = self._get_another_control_port()
proc = subprocess.Popen([quickopen_script,
"prelaunch",
"--wait",
"--control-port",
str(control_port)])
self._quickopen[display] = PrelaunchedProcess(proc, control_port)
def get_existing_quickopen(self, m, verb, data):
display = m.group(1)
if display not in self._quickopen:
self._launch_new_quickopen(display)
try:
proc = self._quickopen[display]
del self._quickopen[display]
self._in_use_processes.append(proc)
return proc.port
finally:
# todo, move this to another place... ideally, when the previous prelaunch quits
self._launch_new_quickopen(display)
pass
def _on_exit(self):
self.stop()
def _join_in_use_processes(self):
procs = list(self._in_use_processes)
del self._in_use_processes[:]
for p in procs:
if not p.poll():
self._in_use_processes.append(p)
else:
logging.debug("prelaunched pid=%i is gone" % p.pid)
def stop(self):
logging.debug("closing prelaunched quickopen")
for proc in self._quickopen.values():
proc.kill()
self._quickopen = {}
self._join_in_use_processes()
for p in self._in_use_processes:
if not p.poll():
logging.debug("killing %i" % p.pid)
try:
p.kill()
except:
pass
|
Python
| 0
|
@@ -2197,16 +2197,113 @@
_port()%0A
+ env = %7B%7D%0A if display != 'cocoa' and display != 'terminal':%0A env%5B%22DISPLAY%22%5D = display%0A
proc
@@ -2516,16 +2516,54 @@
l_port)%5D
+,%0A env=env
)%0A se
|
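Decoded and applied to _launch_new_quickopen(), the change builds an environment dict and passes it to Popen so the prelaunched process inherits the requested DISPLAY (the 'cocoa' and 'terminal' pseudo-displays are skipped); indentation follows the delta's two-space style:

```python
    env = {}
    if display != 'cocoa' and display != 'terminal':
      env["DISPLAY"] = display
    proc = subprocess.Popen([quickopen_script,
                             "prelaunch",
                             "--wait",
                             "--control-port",
                             str(control_port)],
                            env=env)
```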
eda693b2e248df3d8e999e9c22cd771d73523380
|
Add Indicator Reference to results framework lite API data
|
akvo/rest/serializers/indicator.py
|
akvo/rest/serializers/indicator.py
|
# -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from akvo.rest.serializers.indicator_period import (
IndicatorPeriodFrameworkSerializer, IndicatorPeriodFrameworkLiteSerializer,
IndicatorPeriodFrameworkNotSoLiteSerializer, create_or_update_disaggregation_targets)
from akvo.rest.serializers.indicator_dimension_name import IndicatorDimensionNameSerializer
from akvo.rest.serializers.indicator_custom_field import IndicatorCustomValueSerializer
from akvo.rest.serializers.rsr_serializer import BaseRSRSerializer
from akvo.rsr.models import (
Indicator, IndicatorDimensionName, IndicatorLabel, IndicatorDisaggregationTarget)
from rest_framework import serializers
def serialize_disaggregation_targets(indicator):
return [
{
'id': t.id,
'value': t.value,
'dimension_value': t.dimension_value_id,
'indicator': indicator.id,
}
for t in indicator.disaggregation_targets.all()
]
class IndicatorDisaggregationTargetNestedSerializer(BaseRSRSerializer):
id = serializers.IntegerField()
class Meta:
model = IndicatorDisaggregationTarget
fields = ('id', 'value', 'dimension_value', 'indicator')
read_only_fields = ('id', 'indicator')
def to_internal_value(self, data):
if 'value' in data:
data['value'] = str(data['value']).replace(',', '.')
return super().to_internal_value(data)
class LabelListingField(serializers.RelatedField):
def to_representation(self, labels):
if isinstance(labels, IndicatorLabel):
value = labels.label_id
else:
value = list(labels.values_list('label_id', flat=True))
return value
def to_internal_value(self, org_label_ids):
indicator = self.root.instance
existing_labels = set(indicator.labels.values_list('label_id', flat=True))
new_labels = set(org_label_ids) - existing_labels
deleted_labels = existing_labels - set(org_label_ids)
labels = [IndicatorLabel(indicator=indicator, label_id=org_label_id) for org_label_id in new_labels]
IndicatorLabel.objects.bulk_create(labels)
if deleted_labels:
IndicatorLabel.objects.filter(label_id__in=deleted_labels).delete()
return indicator.labels.all()
class IndicatorSerializer(BaseRSRSerializer):
result_unicode = serializers.ReadOnlyField(source='result.__str__')
measure_label = serializers.ReadOnlyField(source='iati_measure_unicode')
children_aggregate_percentage = serializers.ReadOnlyField()
dimension_names = serializers.PrimaryKeyRelatedField(
many=True, queryset=IndicatorDimensionName.objects.all())
disaggregation_targets = serializers.SerializerMethodField()
def get_disaggregation_targets(self, obj):
return serialize_disaggregation_targets(obj)
class Meta:
model = Indicator
exclude = ['enumerators']
# TODO: add validation for parent_indicator
class IndicatorFrameworkSerializer(BaseRSRSerializer):
periods = IndicatorPeriodFrameworkSerializer(many=True, required=False, read_only=True)
parent_indicator = serializers.ReadOnlyField(source='parent_indicator_id')
children_aggregate_percentage = serializers.ReadOnlyField()
dimension_names = IndicatorDimensionNameSerializer(many=True, required=False, read_only=True)
labels = LabelListingField(queryset=IndicatorLabel.objects.all(), required=False)
disaggregation_targets = IndicatorDisaggregationTargetNestedSerializer(many=True, required=False)
class Meta:
model = Indicator
exclude = ['enumerators']
def update(self, instance, validated_data):
disaggregation_targets = validated_data.pop('disaggregation_targets', [])
instance = super().update(instance, validated_data)
create_or_update_disaggregation_targets(instance, disaggregation_targets)
return instance
def validate_disaggregation_targets(self, data):
for target in data:
if 'value' not in target:
raise serializers.ValidationError('Disaggregation targets should have a value')
if 'dimension_value' not in target:
raise serializers.ValidationError(
'Disaggregation targets should have "dimension_value"')
return data
def to_internal_value(self, data):
if 'target_value' in data:
data['target_value'] = str(data['target_value']).replace(',', '.')
return super().to_internal_value(data)
class IndicatorFrameworkLiteSerializer(BaseRSRSerializer):
periods = IndicatorPeriodFrameworkLiteSerializer(many=True, required=False, read_only=True)
parent_indicator = serializers.ReadOnlyField(source='parent_indicator_id')
children_aggregate_percentage = serializers.ReadOnlyField()
dimension_names = IndicatorDimensionNameSerializer(many=True, required=False, read_only=True)
labels = LabelListingField(read_only=True)
disaggregation_targets = serializers.SerializerMethodField()
custom_values = IndicatorCustomValueSerializer(many=True, required=False)
def get_disaggregation_targets(self, obj):
return serialize_disaggregation_targets(obj)
class Meta:
model = Indicator
exclude = ['enumerators']
class IndicatorFrameworkNotSoLiteSerializer(BaseRSRSerializer):
periods = IndicatorPeriodFrameworkNotSoLiteSerializer(many=True, required=False, read_only=True)
parent_indicator = serializers.ReadOnlyField(source='parent_indicator_id')
children_aggregate_percentage = serializers.ReadOnlyField()
labels = LabelListingField(read_only=True)
disaggregation_targets = serializers.SerializerMethodField()
dimension_names = serializers.SerializerMethodField()
def get_disaggregation_targets(self, obj):
return serialize_disaggregation_targets(obj)
def get_dimension_names(self, obj):
return [
{
'id': n.id,
'name': n.name,
'dimension_values': [{'id': v.id, 'value': v.value} for v in n.dimension_values.all()]
}
for n in obj.dimension_names.all()
]
class Meta:
model = Indicator
fields = (
'id',
'periods',
'parent_indicator',
'children_aggregate_percentage',
'labels',
'title',
'type',
'measure',
'ascending',
'description',
'baseline_year',
'baseline_value',
'baseline_comment',
'order',
'export_to_iati',
'result',
'disaggregation_targets',
'dimension_names',
'scores',
)
|
Python
| 0
|
@@ -670,32 +670,115 @@
ValueSerializer%0A
+from akvo.rest.serializers.indicator_reference import IndicatorReferenceSerializer%0A
from akvo.rest.s
@@ -5012,32 +5012,121 @@
read_only=True)%0A
+ references = IndicatorReferenceSerializer(many=True, required=False, read_only=True)%0A
parent_indic
|
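Decoded, the change adds one import:

```python
from akvo.rest.serializers.indicator_reference import IndicatorReferenceSerializer
```

and one field that, per the commit subject, appears to land in IndicatorFrameworkLiteSerializer:

```python
    references = IndicatorReferenceSerializer(many=True, required=False, read_only=True)
```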
6c02b743ad3859e05eeb980298e54acf3fbd9788
|
Add __len__ to FlagField (#3981)
|
allennlp/data/fields/flag_field.py
|
allennlp/data/fields/flag_field.py
|
from typing import Any, Dict, List
from overrides import overrides
from allennlp.data.fields.field import Field
class FlagField(Field[Any]):
"""
A class representing a flag, which must be constant across all instances in a batch.
This will be passed to a `forward` method as a single value of whatever type you pass in.
"""
def __init__(self, flag_value: Any) -> None:
self.flag_value = flag_value
@overrides
def get_padding_lengths(self) -> Dict[str, int]:
return {}
@overrides
def as_tensor(self, padding_lengths: Dict[str, int]) -> Any:
return self.flag_value
@overrides
def empty_field(self):
# Because this has to be constant across all instances in a batch, we need to keep the same
# value.
return FlagField(self.flag_value)
def __str__(self) -> str:
return f"FlagField({self.flag_value})"
@overrides
def batch_tensors(self, tensor_list: List[Any]) -> Any:
if len(set(tensor_list)) != 1:
raise ValueError(
f"Got different values in a FlagField when trying to batch them: {tensor_list}"
)
return tensor_list[0]
|
Python
| 0.000013
|
@@ -904,16 +904,64 @@
lue%7D)%22%0A%0A
+ def __len__(self) -%3E int:%0A return 1%0A%0A
@ove
|
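The hunk decodes to a two-line method added after `__str__`, giving the flag a constant length of 1 for batching purposes:

```python
    def __len__(self) -> int:
        return 1
```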
3b83d97d507aa897c439e1c249d7919e203d2d0f
|
remove cruft
|
awsbigbrother/credential_report.py
|
awsbigbrother/credential_report.py
|
import configparser
from .credential_client import CredentialClient
import arrow
from datetime import timedelta
class CredentialReportRow(object):
user = "unknown"
arn = "unknown"
password_active = "unknown"
password_last_used = "unknown"
password_last_rotated = "unknown"
password_next_rotation = "unknown"
mfa_active = "unknown"
def __init__(self, row):
self.user = row[0]
self.arn = row[1]
self.password_active = row[3]
self.password_last_used = row[4]
self.password_last_rotated = row[5]
self.password_next_rotation = row[6]
self.mfa_active = row[7]
self.access_key_1_active = row[8]
self.access_key_1_last_rotated = row[9]
self.access_key_1_last_used = row[10]
self.access_key_2_active = row[13]
self.access_key_2_last_rotated = row[14]
self.access_key_2_last_used = row[15]
def mfa(self):
return CredentialCheckResponse('mfa', self.mfa_active == 'true', self.user).get_response()
class CredentialReportActionRunner(object):
def __init__(self, row, config):
self.__row = row
self.__config = config
def mfa(self):
return CredentialCheckResponse('mfa', self.__row.mfa_active == 'true', self.__row.user).get_response()
def password_max_age(self):
# if self.__row.password_last_rotated != 'N/A':
# return CredentialCheckResponse('password_max_age', self._is_older_than_days(
# self.__row.password_last_rotated,
# self.__config.password_max_age
# ), self.__row.user).get_response()
# return None
password_older_than_max_age = self._no_activity_max_age(self.__config.password_max_age,['password'])
return CredentialCheckResponse('password_max_age', not password_older_than_max_age,self.__row.user).get_response()
def access_keys_max_age(self):
check_list = ['access_key_1','access_key_2']
if self._no_activity_max_age(self.__config.access_keys_max_age, check_list):
return CredentialCheckResponse("access_key_max_age", False, self.__row.user).get_response()
def _no_activity_max_age(self, max_age, check_list):
row = self.__row
for attribute_name in check_list:
row_is_active = getattr(row, "{0}_active".format(attribute_name))
if not (row_is_active == 'false' or row_is_active == 'N/A'):
timestamp = getattr(row,"{0}_last_rotated".format(attribute_name))
return self._is_older_than_days(timestamp,max_age)
return False
def _is_older_than_days(self, timestamp, max_age):
current_time = arrow.utcnow()
utc_timestamp = arrow.get(timestamp)
renewal_date = utc_timestamp + max_age
return renewal_date < current_time
class CredentialCheckResponse(object):
def __init__(self, check_name, check_passed, user):
self.__check_name = check_name
self.__check_passed = check_passed
self.__user = user
def get_response(self):
if self.__check_passed == True:
return None
return "Check: {check_name} failed for user: {user}".format(check_name=self.__check_name,
user=self.__user)
class CredentialReportConfig(object):
noout = False
def __init__(self):
self.actions = []
self.timeout = 60
self.excluded_users = []
self.password_max_age = timedelta(days=99999999)
self.access_keys_max_age = timedelta(days=99999999)
def load_from_file(self, path):
config = configparser.RawConfigParser()
config.read(path)
# Need to rescue here in case not defined
self.timeout = int(config.get('global', 'timeout'))
if config.get('global', 'mfa') == 'true':
self.actions.append('mfa')
# Not setting actions here :( We should be.
self.excluded_users = config.get('global', 'excluded_users').replace(' ', '').split(',')
self.password_max_age = timedelta(days=int(config.get('passwords', 'max_age_days')))
self.access_keys_max_age = timedelta(days=int(config.get('access_keys', 'max_age_days')))
def set_password_max_age(self, age):
self.password_max_age = timedelta(days=age)
def set_access_keys_max_age(self, age):
self.access_keys_max_age = timedelta(days=age)
def clear(self):
del self.actions[:]
|
Python
| 0
|
@@ -1337,322 +1337,8 @@
f):%0A
-# if self.__row.password_last_rotated != 'N/A':%0A# return CredentialCheckResponse('password_max_age', self._is_older_than_days(%0A# self.__row.password_last_rotated,%0A# self.__config.password_max_age%0A# ), self.__row.user).get_response()%0A# return None%0A%0A
|
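Decoded, the removal deletes the commented-out block, leaving password_max_age() as (spacing preserved from the record):

```python
    def password_max_age(self):
        password_older_than_max_age = self._no_activity_max_age(self.__config.password_max_age,['password'])
        return CredentialCheckResponse('password_max_age', not password_older_than_max_age,self.__row.user).get_response()
```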
4c66010cf0cd4f763b362b6e84eb67d7ef1278b8
|
Make "near" group optional in regex
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Python
| 0.999796
|
@@ -713,16 +713,17 @@
ne%3E%5Cd+):
+?
(?P%3Cnear
@@ -727,16 +727,17 @@
ear%3E%5Cd+)
+?
%5Cs*%5Cw+%5Cs
|
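Decoded, the two inserted `?` characters make the colon and the near group optional inside the verbose regex, so warnings that lack a column number still match:

```python
    ^(?P<line>\d+):?(?P<near>\d+)?\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
```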
7c34a4815f7b78f801f83928b1f6dd56ef842fdc
|
Fix a bug in adjusting the configuration when exploring
|
prediction/explorer.py
|
prediction/explorer.py
|
from . import support
from . import tuner
from .learner import Learner
from .random import Random
from .session import Session
import json
import numpy as np
import os
import threading
class Agent:
def __init__(self, session, semaphore, config):
self.session = session
self.semaphore = semaphore
self.scores = Agent._restore(config.output.path)
self.output_path = config.output.path
self.lock = threading.Lock()
self.done = threading.Lock()
def collect(self, step_count):
with self.done:
return self.scores[step_count]
def submit(self, step_count):
with self.lock:
if step_count in self.scores:
return
self.scores[step_count] = None
self.done.acquire()
worker = threading.Thread(target=self._run, args=(step_count,),
daemon=True)
worker.start()
def _restore(path):
scores = {}
for path in support.scan(path, 'meta-*.json'):
meta = json.loads(open(path).read())
scores[meta['step_count']] = meta['score']
support.log(Agent, 'Score: {}', path)
return scores
def _run(self, step_count):
with self.semaphore:
with self.lock:
last_step_count = 0
for key in self.scores:
if self.scores[key] is None:
continue
if key > last_step_count:
last_step_count = key
assert(last_step_count < step_count)
support.log(self, 'Learning start: {}, stop: {}',
last_step_count, step_count)
self.session.run_training(step_count - last_step_count,
summarize=False)
error = self.session.run_validation()['MSE']
decay = np.reshape(np.exp(-np.arange(len(error))), error.shape)
score = np.sum(error * decay)
Agent._save(self.output_path, step_count, score)
self.session.run_saving()
with self.lock:
self.scores[step_count] = score
support.log(self, 'Learning stop: {}, score: {}',
step_count, score)
self.done.release()
def _save(path, step_count, score):
path = os.path.join(path, 'meta-{}.json'.format(step_count))
with open(path, 'w') as file:
file.write(json.dumps({
'step_count': step_count,
'score': score,
}))
class Explorer:
def __init__(self, input, config):
self.input = input
self.config = config
self.tuner = getattr(tuner, config.tuner.name)
self.tuner = self.tuner(**config.tuner.options)
self.resource_scale = config.max_step_count / self.tuner.resource
self.sampler = Sampler(config.sampler)
self.semaphore = threading.BoundedSemaphore(config.concurrent_count)
self.agents = {}
def configure(self, case, restore=True):
key = support.tokenize(case)
config = self.config.copy()
config.output.restore = restore
config.output.path = os.path.join(config.output.path, key)
for key in case:
_adjust(config, key, case[key])
return config
def run(self):
case, resource, score = self.tuner.run(self._generate, self._assess)
step_count = int(self.resource_scale * resource)
support.log(self, 'Best case: {}, step: {}, score: {}',
case, step_count, score)
return (case, step_count)
def _assess(self, resource, cases):
step_count = int(self.resource_scale * resource)
support.log(self, 'Assess cases: {}, stop: {}',
len(cases), step_count)
agents = []
for case in cases:
key = support.tokenize(case)
agent = self.agents.get(key)
if agent is None:
config = self.configure(case)
learner = Learner(config.learner.candidate)
session = Session(self.input, learner, config)
agent = Agent(session, self.semaphore, config)
self.agents[key] = agent
agent.submit(step_count)
agents.append(agent)
return [agent.collect(step_count) for agent in agents]
def _generate(self, count):
support.log(self, 'Generate cases: {}', count)
return [self.sampler.get() for _ in range(count)]
class Sampler:
def __init__(self, config):
self.parameters = config
support.log(self, 'Cases: {}', self.case_count)
@property
def case_count(self):
return np.prod([len(self.parameters[n]) for n in self.parameters])
def get(self):
case = {}
for key in sorted(self.parameters.keys()):
chosen = Random.get().randint(len(self.parameters[key]))
case[key] = self.parameters[key][chosen]
return case
def _adjust(config, key, value):
if key == 'dropout_rate':
value = 1 - value
config.learner.candidate.dropout.options.input_keep_prob = value[0]
config.learner.candidate.dropout.options.output_keep_prob = value[1]
elif key == 'layer_count':
config.learner.candidate.layer_count = value
elif key == 'learning_rate':
config.teacher.trainer.optimizer.options.learning_rate = value
elif key == 'unit_count':
config.learner.candidate.unit_count = value
elif key == 'use_peepholes':
config.learner.candidate.cell.options.use_peepholes = value
else:
        raise ValueError('unsupported case key: {}'.format(key))
|
Python
| 0.000003
|
@@ -5116,34 +5116,8 @@
e':%0A
- value = 1 - value%0A
@@ -5161,16 +5161,38 @@
options.
+update(%7B%0A '
input_ke
@@ -5194,26 +5194,30 @@
ut_keep_prob
- =
+': 1 -
value%5B0%5D%0A
@@ -5213,25 +5213,28 @@
value%5B0%5D
-%0A
+,%0A
config.l
@@ -5229,49 +5229,11 @@
-config.learner.candidate.dropout.options.
+ '
outp
@@ -5244,18 +5244,22 @@
eep_prob
- =
+': 1 -
value%5B1
@@ -5259,16 +5259,28 @@
value%5B1%5D
+,%0A %7D)
%0A eli
|
ba2db7713d4fbb929c26bf9ce848b0f7b420809d
|
fix typo
|
bootmachine/settings_tests.py
|
bootmachine/settings_tests.py
|
import os
"""
CONFIGURATION MANAGEMENT
"""
# salt
LOCAL_SALTSTATES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "states/")
LOCAL_PILLARS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "pillars/")
REMOTE_STATES_DIR = "/srv/salt/states/"
REMOTE_PILLARS_DIR = "/srv/salt/pillars/"
SALT_INSTALLER_ARCH_201208 = "aur"
SALT_INSTALLER_DEBIAN_6 = "backports"
SALT_INSTALLER_FEDORA_16 = "rpm-stable"
SALT_INSTALLER_FEDORA_17 = "rpm-stable"
SALT_INSTALLER_UBUNTU_1204LTS = "ppa"
# puppet (not yet implemented)
PUPPET_VERSION = NotImplementedError()
PUPPET_RECIPES_DIR = NotImplementedError()
# chef (not yet implemented)
CHEF_VERSION = NotImplementedError()
CHEF_RECIPES_DIR = NotImplementedError()
"""
PROVIDERS AND SERVER STACK
"""
# Rackspace authentication via openstack-compute
OPENSTACK_USERNAME = os.environ.get("OPENSTACK_COMPUTE_USERNAME")
OPENSTACK_APIKEY = os.environ.get("OPENSTACK_COMPUTE_APIKEY")
# Rackspace authentication via python-novaclient api v2
OS_USERNAME = os.environ.get("OS_USERNAME")
OS_PASSWORD = os.environ.get("OS_PASSWORD")
OS_TENANT_NAME = os.environ.get("OS_TENANT_NAME")
OS_AUTH_URL = os.environ.get("OS_AUTH_URL")
OS_REGION_NAME = os.environ.get("OS_REGION_NAME")
OS_COMPUTE_API_VERSION = os.environ.get("OS_COMPUTE_API_VERSION")
# Amazon authentication via boto
AWS_ACCESS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
"""
SECURITY
"""
# Change the default SSH port of 22, suggestion is between 20000 and 65535.
SSH_PORT = "30000"
|
Python
| 0.999991
|
@@ -51,20 +51,16 @@
t%0ALOCAL_
-SALT
STATES_D
|
d776090df9a4525d2729aa086867de1cd67926fc
|
Patch up Holzworth driver.
|
src/auspex/instruments/holzworth.py
|
src/auspex/instruments/holzworth.py
|
# Copyright 2016 Raytheon BBN Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
from auspex.instruments.instrument import Instrument, MetaInstrument
from auspex.log import logger
from unittest.mock import MagicMock
import ctypes
class MakeSettersGetters(MetaInstrument):
def __init__(self, name, bases, dct):
super(MakeSettersGetters, self).__init__(name, bases, dct)
for k,v in dct.items():
if isinstance(v, property):
logger.debug("Adding '%s' command to Holzworth", k)
setattr(self, 'set_'+k, v.fset)
setattr(self, 'get_'+k, v.fget)
class HS9000(Instrument, metaclass=MakeSettersGetters):
"""Holzworth HS9000 microwave source"""
instrument_type = "Microwave Source"
def __init__(self, resource_name, name="Unlabeled Holzworth HS9000"):
self.name = name
self.resource_name = resource_name
try:
self._lib = ctypes.CDLL("HolzworthMulti64.dll")
except:
logger.warning("Could not find the Holzworth driver.")
self._lib = MagicMock()
# parse resource_name: expecting something like "HS9004A-009-1"
self.model, self.serial, self.chan = resource_name.split("-")
self._lib.usbCommWrite.restype = ctypes.c_char_p
def connect(self, resource_name=None):
if resource_name is not None:
self.resource_name = resource_name
self.model, self.serial, self.chan = resource_name.split("-")
def query(self, scpi_string):
return self._lib.usbCommWrite(self.resource_name.encode('ascii'), scpi_string.encode('ascii')).decode('ascii')
def ch_query(self, scpi_string):
chan_string = ":CH{}".format(self.chan)
scpi_string = chan_string + scpi_string
return self._lib.usbCommWrite(self.resource_name.encode('ascii'), scpi_string.encode('ascii')).decode('ascii')
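    # Editorial note: ch_query only prepends the channel, e.g. on channel 2
    # ch_query(":FREQ?") writes ":CH2:FREQ?" to the device, so the properties
    # below never spell out the channel themselves.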
@property
def frequency(self):
v = self.ch_query(":FREQ?")
return float(v.split()[0])*1e6
@frequency.setter
def frequency(self, value):
self.ch_query(":FREQ:{} GHz".format(value*1e-9))
@property
def power(self):
v = self.ch_query(":PWR?")
return float(v.split()[0])
@power.setter
def power(self, value):
self.ch_query(":PWR:{} dBm".format(value))
@property
def phase(self):
v = self.ch_query(":PHASE?")
return float(v.split()[0])
@phase.setter
def phase(self, value):
self.ch_query(":PHASE:{} deg".format(value))
@property
def output(self):
v = self.ch_query(":PWR:RF?")
return bool(v.split()[0])
@output.setter
def output(self, value):
if value:
self.ch_query(":PWR:RF:ON")
else:
self.ch_query(":PWR:RF:OFF")
@property
def reference(self):
v = self.query(":REF:STATUS?")
return float(v.split()[0])*1e6
@reference.setter
def reference(self, value):
ref_opts = ["INT", "10MHz", "100MHz"]
if value in ref_opts:
if value == "INT":
self.query(":REF:INT:100MHz")
else:
self.query(":REF:EXT:{}".format(value))
else:
raise ValueError("Reference must be one of {}.".format(ref_opts))
def __del__(self):
self._lib.close_all()
|
Python
| 0
|
@@ -809,16 +809,25 @@
%0A%0Aclass
+Holzworth
HS9000(I
@@ -990,16 +990,21 @@
rce_name
+=None
, name=%22
@@ -1169,10 +1169,8 @@
ulti
-64
.dll
@@ -1304,199 +1304,465 @@
-# parse resource_name: expecting something like %22HS9004A-009-1%22%0A self.model, self.serial, self.chan = resource_name.split(%22-%22)%0A%0A self._lib.usbCommWrite.restype = ctypes.c_char_p
+self._lib.usbCommWrite.restype = ctypes.c_char_p%0A self._lib.openDevice.restype = ctypes.c_int%0A%0A @classmethod%0A def enumerate(cls):%0A try:%0A lib = ctypes.CDLL(%22HolzworthMulti.dll%22)%0A except:%0A logger.error(%22Could not find the Holzworth driver.%22)%0A return%0A lib.getAttachedDevices.restype = ctypes.c_char_p%0A devices = lib.getAttachedDevices()%0A return devices.decode('ascii').split(',')
%0A%0A
@@ -1899,29 +1899,87 @@
- self.
+# parse resource_name: expecting something like %22HS9004A-009-1%22%0A
model,
-self.
seri
@@ -2023,42 +2023,348 @@
-%22)%0A
-%0A def query(self, scpi_string):
+ self.serial = model + '-' + serial%0A success = self._lib.openDevice(self.serial.encode('ascii'))%0A if success != 0:%0A logger.info(%22Could not open Holzworth at address: %7B%7D, might already be open on another channel.%22.format(self.serial))%0A%0A def ref_query(self, scpi_string):%0A serial = self.serial + '-R'
%0A
@@ -2400,32 +2400,20 @@
Write(se
-lf.resource_name
+rial
.encode(
@@ -3443,33 +3443,84 @@
-return bool(v.split()%5B0%5D)
+if v == 'ON':%0A return True%0A else:%0A return False
%0A
@@ -3730,24 +3730,28 @@
v = self.
+ref_
query(%22:REF:
@@ -3775,39 +3775,17 @@
return
-float(v.split()%5B0%5D)*1e6
+v
%0A @re
@@ -3951,32 +3951,36 @@
self.
+ref_
query(%22:REF:INT:
@@ -4027,16 +4027,20 @@
self.
+ref_
query(%22:
|
6d4eb6ebfb03f974c2f6fb04992fc25e5a53ece9
|
Change docstring
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
# -*- coding: utf-8 -*-
"""
Batik-based rasterizer module.
Download the latest batik rasterizer to use the module. Note Ubuntu 16.04LTS
package is broken and does not work.
Prerequisite:
wget http://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&\
filename=xmlgraphics/batik/binaries/batik-bin-1.9.tar.gz
export BATIK_PATH=./batik-bin-1.9.tar.gz
Deb package:
sudo apt-get install -y libbatik-java
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar"
)
class BatikRasterizer(BaseRasterizer):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
basename, ext = os.path.splitext(os.path.basename(url))
output_file = os.path.join(d, "{}.{}".format(basename, format))
cmd = [
"java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0.255.255.255",
"-m", "image/{}".format(format),
"-d", d,
"{}".format(url),
]
            if size:
                cmd += ["-w", str(size[0]), "-h", str(size[1])]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
try:
assert os.path.exists(output_file)
rasterized = Image.open(output_file)
except:
logger.error("{}\n{}{}".format(" ".join(cmd), stdout, stderr))
raise
return self.composite_background(rasterized)
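# Usage sketch (editorial; the paths are hypothetical):
#   raster = BatikRasterizer(jar_path='/opt/batik/batik-rasterizer.jar')
#   image = raster.rasterize('file:///tmp/figure.svg')  # returns a PIL Image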
|
Python
| 0.000002
|
@@ -317,16 +317,49 @@
.tar.gz%0A
+ tar xzf batik-bin-1.9.tar.gz%0A
expo
@@ -387,23 +387,16 @@
-bin-1.9
-.tar.gz
%0A%0ADeb pa
|
b8ac65a810a08e11a2f429db08e8b0d4d00651d6
|
Add ALLOW_HOSTS in production settings
|
src/biocloud/settings/production.py
|
src/biocloud/settings/production.py
|
# In production set the environment variable like this:
# DJANGO_SETTINGS_MODULE=my_proj.settings.production
from .base import * # NOQA
import logging.config
# For security and performance reasons, DEBUG is turned off
DEBUG = False
# Must mention ALLOWED_HOSTS in production!
# ALLOWED_HOSTS = []
# Cache the templates in memory for speed-up
loaders = [
(
'django.template.loaders.cached.Loader',
[
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]
),
]
TEMPLATES[0]['OPTIONS'].update({"loaders": loaders})
TEMPLATES[0]['OPTIONS'].update({"debug": False})
TEMPLATES[0]['APP_DIRS'] = False
# Email settings
EMAIL_BACKEND = env.str('EMAIL_BACKEND')
EMAIL_HOST = env.str('EMAIL_HOST')
EMAIL_HOST_USER = env.str('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env.str('EMAIL_HOST_PASSWORD')
EMAIL_PORT = env.int('EMAIL_PORT')
EMAIL_USE_SSL = env.bool('EMAIL_USE_SSL')
EMAIL_USE_TLS = env.bool('EMAIL_USE_TLS')
DEFAULT_FROM_EMAIL = SERVER_EMAIL = '{name} <{addr}>'.format(
name='BioCloud Dev',
addr='biocloud@liang2.io',
)
# Security-related settings
# SECURE_HSTS_SECONDS = 2592000
# SECURE_BROWSER_XSS_FILTER = True
# SECURE_CONTENT_TYPE_NOSNIFF=True
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = True
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# X_FRAME_OPTIONS = 'DENY'
# Log everything to the logs directory at the top
LOGFILE_ROOT = join(BASE_DIR, 'logs')
# Reset logging
LOGGING_CONFIG = None
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': (
'[%(asctime)s] %(levelname)s '
'[%(pathname)s:%(lineno)s] %(message)s'
),
'datefmt': "%d/%b/%Y %H:%M:%S"
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'django_log_file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, 'django.log'),
'formatter': 'verbose'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django': {
'handlers': ['django_log_file', ],
'propagate': True,
'level': 'DEBUG',
},
}
}
for app in LOCAL_APPS:
app_handler = '%s_log_file' % app
app_log_filepath = '%s.log' % app
LOGGING['loggers'][app] = {
'handlers': [app_handler, 'console', ],
'level': 'DEBUG',
}
LOGGING['handlers'][app_handler] = {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': join(LOGFILE_ROOT, app_log_filepath),
'formatter': 'verbose',
}
logging.config.dictConfig(LOGGING)
|
Python
| 0
|
@@ -286,18 +286,16 @@
uction!%0A
-#
ALLOWED_
@@ -303,16 +303,29 @@
OSTS = %5B
+'172.16.0.66'
%5D%0A%0A%0A# Ca
|
73f49b5603802ccce3a9c4db0ee0b2eaa4bf0e7f
|
Update startup script (lyli.py)
|
lyli.py
|
lyli.py
|
#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
#app.run(port=3003, debug=True, use_reloader=True)
|
Python
| 0
|
@@ -30,28 +30,8 @@
ging
-%0Afrom os import fork
%0A%0Aim
@@ -77,136 +77,21 @@
app%0A
-%0Apid = fork()%0Aif pid %3E 0:%0A print('PID: %25d' %25 pid)%0A exit(0)%0Aelif pid %3C 0:%0A print('Could not fork: %25d' %25 pid)%0A exit(1)
+import config
%0A%0A#
@@ -445,16 +445,37 @@
ge)s')%0A%0A
+if config.debug:%0A
app.run(
@@ -486,17 +486,17 @@
=300
-4
+3
, debug=
Fals
@@ -491,20 +491,19 @@
, debug=
-Fals
+Tru
e, use_r
@@ -514,16 +514,24 @@
der=
-False)%0A#
+True)%0Aelse:%0A
app.
@@ -538,33 +538,33 @@
run(port=300
-3
+4
, debug=
True, use_re
@@ -543,35 +543,36 @@
ort=3004, debug=
-Tru
+Fals
e, use_reloader=
@@ -555,30 +555,31 @@
bug=False, use_reloader=
-Tru
+Fals
e)%0A
|
531ada2164f4c184d298110e518415233419bd9f
|
Update poisson_2d_square_0.py
|
demo/poisson_2d_square_0.py
|
demo/poisson_2d_square_0.py
|
#
# Solve -laplace(u) = f in (-1, 1)^2 with T(u) = 0 [1]
#
from sympy import symbols, integrate
from lega.shen_basis import mass_matrix, stiffness_matrix, load_vector
from lega.legendre_basis import ForwardLegendreTransformation as FLT
import scipy.linalg as la
import numpy as np
def get_rhs(u):
'''
Verify that u satisfies boundary conditions and compute the right hand
side f.
'''
x, y = symbols('x, y')
assert integrate(abs(u.subs(x, -1)), (y, -1, 1)) < 1E-15
assert integrate(abs(u.subs(x, 1)), (y, -1, 1)) < 1E-15
assert integrate(abs(u.subs(y, -1)), (x, -1, 1)) < 1E-15
assert integrate(abs(u.subs(y, 1)), (x, -1, 1)) < 1E-15
# Right hand side if u is to be the solution
f = -u.diff(x, 2) - u.diff(y, 2)
return f
def solve_poisson_2d(f, n):
'''Solve the Poisson problem by nxn Shen polynomials.'''
A = stiffness_matrix(n)
M = mass_matrix(n)
F = FLT([n+2, n+2])(f)
b = load_vector(F) # nxn matrix
# Solve the problem by tensor product solver
lmbda, V = la.eigh(A.toarray(), M.toarray())
# Map the right hand side to eigen space
bb = (V.T).dot(b.dot(V))
# Apply the inverse in eigen space
U_ = np.array([[bb[i, j]/(lmbda[i] + lmbda[j])
for j in range(n)]
for i in range(n)])
# Map back to physical space
U = (V).dot(U_.dot(V.T))
return U
# -----------------------------------------------------------------------------
if __name__ == '__main__':
from sympy import sin, pi, lambdify
from lega.shen_basis import shen_function, legendre_to_shen_matrix
from lega.legendre_basis import mass_matrix as L_mass_matrix
from sympy.plotting import plot3d
from sympy.mpmath import quad
from math import sqrt
# Setup
x, y = symbols('x, y')
u = (x**2-1)*sin(2*pi*y)
f = get_rhs(u)
n_max = 30
# Representation of exact solution in the Legendre basis
u_leg = FLT([n_max+2, n_max+2])(u)
n = 2
tol = 1E-14
converged = False
while not converged:
U = solve_poisson_2d(f, n) # w.r.t to shen
        # Error using representation w.r.t. the Shen basis and the mass matrix
# Turn U from shen to Legendre
Tmat = legendre_to_shen_matrix(n+2)
U_leg = Tmat.T.dot(U.dot(Tmat.toarray())) # n+2 x n . n x n . n x n+2
        # Subtract on the subspace
E = u_leg[:n+2, :n+2] - U_leg
# Legendre mass matrix computes the L2 error
M = L_mass_matrix(n+2)
error = sqrt(np.trace((M.dot(E)).dot(M.dot(E.T))))
print 'n=%d {e}_2=%.4E' % (n, error)
converged = error < tol or n > n_max-1
n += 1
# Plot the symbolic error
uh = shen_function(U)
e = u - uh
plot3d(e, (x, -1, 1), (y, -1, 1))
|
Python
| 0.000003
|
@@ -2126,16 +2126,95 @@
o shen%0A%0A
+ #TODO: should add symbolic as well, just here and only for comparison!%0A
|
897f6962bf595ea9862c5f0bf46ced926e5f4dd6
|
Fix continuous function graphing for scalar functions
|
demo_continuous_function.py
|
demo_continuous_function.py
|
# Learn a continuous function
from inspect import signature
# Use custom implementation:
# from Jacobian_Chain import *
from Jacobian_Chain import *
# Use Tensorflow wrapper:
# from Tensorflow_Wrapper import *
import numpy as np
np.set_printoptions(suppress=True)
class Continuous:
def __init__(self, funct, domain, range=(-1, 1)):
self._size_input = len(signature(funct[0]).parameters)
self._size_output = len(funct)
self._funct = funct
self._domain = domain
if range is None:
self._range = [[-1, 1]] * len(funct)
else:
self._range = range
def sample(self, quantity=1):
# Generate random values for each input stimulus
stimulus = []
for idx in range(self._size_input):
stimulus.append(np.random.uniform(low=self._domain[idx][0], high=self._domain[idx][1], size=quantity))
# Evaluate each function with the stimuli
expectation = []
for idx, f in enumerate(self._funct):
expectation.append(f(*stimulus))
return [np.array(stimulus), np.array(expectation)]
def survey(self, quantity=100):
# Generate random values for each input stimulus
stimulus = []
for idx in range(self._size_input):
stimulus.append(np.linspace(start=self._domain[idx][0], stop=self._domain[idx][1], num=quantity))
# Evaluate each function with the stimuli
expectation = []
for idx, f in enumerate(self._funct):
expectation.append(f(*stimulus))
return [np.array(stimulus), np.array(expectation)]
def size_input(self):
return self._size_input
def size_output(self):
return self._size_output
def plot(self, plt, predict):
plt.ylim(self._range)
x, y = self.survey()
# plt.plot(x, y, marker='.', color=(0.3559, 0.7196, 0.8637))
# plt.plot(x, predict.T[0], marker='.', color=(.9148, .604, .0945))
@staticmethod
def error(expect, predict):
return np.linalg.norm(expect - predict)
# environment = Continuous([lambda a, b: (24 * a**4 - 2 * b**2 + a),
# lambda a, b: (-5 * a**3 + 2 * b**2 + b),
# lambda a, b: (12 * a**2 + 8 * b**3 + b)], domain=[[-1, 1]] * 2, range=[[-1, 1]] * 3)
environment = Continuous([lambda v: (24 * v**4 - 2 * v**2 + v)], domain=[[-1, 1]])
# ~~~ Create the network ~~~
init_params = {
# Shape of network
"units": [environment.size_input(), 15, 10, environment.size_output()],
# Basis function(s) from Function.py
"basis": basis_bent,
# Weight initialization distribution
"distribute": dist_uniform
}
network = Neural_Network(**init_params)
# ~~~ Train the network ~~~
train_params = {
# Source of stimuli
"environment": environment,
"batch_size": 1,
# Error function from Function.py
"cost": cost_sum_squared,
# Learning rate function
"learn_step": .0001,
"learn": learn_power,
# Weight decay regularization function
"decay_step": 0.0001,
"decay": decay_NONE,
# Momentum preservation
"moment_step": 0.1,
# Percent of weights to drop each training iteration
"dropout": 0.2,
"epsilon": .04, # error allowance
"iteration_limit": 500000, # limit on number of iterations to run
"debug": True,
"graph": True
}
network.train(**train_params)
# ~~~ Test the network ~~~
[stimuli, expectation] = environment.survey()
print(network.predict(stimuli))
|
Python
| 0.00001
|
@@ -330,15 +330,12 @@
nge=
-(-1, 1)
+None
):%0A
@@ -569,16 +569,233 @@
n(funct)
+%0A%0A if self._size_input == 1 and self._size_output == 1:%0A candidates = self._funct%5B0%5D(np.linspace(*self._domain%5B0%5D, num=100))%0A self._range = %5B%5Bmin(candidates), max(candidates)%5D%5D
%0A
@@ -1997,67 +1997,126 @@
-plt.ylim(self._range)%0A x, y = self.survey()%0A
+x, y = self.survey()%0A%0A if x.shape%5B0%5D == 1 and y.shape%5B0%5D == 1:%0A plt.ylim(self._range%5B0%5D)%0A
# pl
@@ -2103,33 +2103,32 @@
%5B0%5D)%0A
-#
plt.plot(x, y,
@@ -2122,19 +2122,25 @@
t.plot(x
-, y
+%5B0%5D, y%5B0%5D
, marker
@@ -2185,17 +2185,19 @@
-#
+
plt.plo
@@ -2199,16 +2199,19 @@
t.plot(x
+%5B0%5D
, predic
@@ -2215,10 +2215,8 @@
dict
-.T
%5B0%5D,
@@ -2969,23 +2969,22 @@
%22: dist_
-unif
+n
orm
+al
%0A %7D%0A%0A
|
8a74b2f49314f780864f39d04ddaea4695633c21
|
Add support for feed deltas
|
src/crawler/lib/headers_handling.py
|
src/crawler/lib/headers_handling.py
|
from datetime import timedelta, timezone
import dateutil
import logging
import re
from bootstrap import conf
from lib.utils import to_hash, utc_now
logger = logging.getLogger(__name__)
MAX_AGE_RE = re.compile('max-age=([0-9]+)')
RFC_1123_FORMAT = '%a, %d %b %Y %X %Z'
def rfc_1123_utc(time_obj=None, delta=None):
"""return time obj or now formated in the RFC1123 style. Add time delta if
present.
"""
if time_obj is None:
time_obj = utc_now()
if delta is not None:
time_obj += delta
return time_obj.strftime(RFC_1123_FORMAT)
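# Example (editorial): rfc_1123_utc(delta=timedelta(hours=1)) returns a string
# such as 'Wed, 21 Oct 2015 08:28:00 UTC', i.e. one hour from now rendered in
# the RFC 1123 layout used by HTTP date headers.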
def _extract_max_age(headers, feed_info, now):
if 'max-age' in headers.get('cache-control', ''):
try:
max_age = int(MAX_AGE_RE.search(headers['cache-control']).group(1))
feed_info['expires'] = now + timedelta(seconds=max_age)
except Exception:
pass
def _extract_expires(headers, feed_info):
if headers.get('expires'):
try:
expires = dateutil.parser.parse(headers['expires'])
if expires.tzinfo:
expires = expires.astimezone(timezone.utc)
else:
expires = expires.replace(tzinfo=timezone.utc)
feed_info['expires'] = expires
except Exception:
pass
def extract_feed_info(headers):
"""providing the headers of a feed response, will calculate the headers
needed for basic cache control.
will extract etag and last modified.
will calculate expires, with limit define in configuration file by
FEED_MIN_EXPIRES and FEED_MAX_EXPIRES.
"""
now = utc_now()
min_expires = now + timedelta(seconds=conf.FEED_MIN_EXPIRES)
max_expires = now + timedelta(seconds=conf.FEED_MAX_EXPIRES)
feed_info = {'etag': headers.get('etag', ''),
'last_modified': headers.get('last-modified', rfc_1123_utc())}
_extract_max_age(headers, feed_info, now)
if 'expires' not in feed_info:
_extract_expires(headers, feed_info)
if not feed_info.get('expires'):
feed_info['expires'] = None
elif max_expires < feed_info['expires']:
logger.info("expiring too late, forcing expiring at %r",
max_expires.isoformat())
feed_info['expires'] = max_expires
elif feed_info['expires'] < min_expires:
logger.info("expiring too early, forcing expiring at %r",
min_expires.isoformat())
feed_info['expires'] = min_expires + timedelta(minutes=5)
return feed_info
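# Example (editorial, with hypothetical FEED_MIN_EXPIRES=600 and
# FEED_MAX_EXPIRES=86400): a feed sending 'max-age=30' is raised to
# now + 600s + 5min, while 'max-age=604800' is capped at now + 86400s.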
def prepare_headers(feed):
"""For a known feed, will construct some header dictionnary"""
headers = {'User-Agent': conf.CRAWLER_USER_AGENT}
if feed.get('last_modified'):
headers['If-Modified-Since'] = feed['last_modified']
if feed.get('etag') and 'jarr' not in feed['etag']:
headers['If-None-Match'] = feed['etag']
logger.debug('%r %r - calculated headers %r',
feed['id'], feed['title'], headers)
return headers
def response_match_cache(response, feed):
if 'etag' not in response.headers:
logger.debug('%r %r - manually generating etag',
feed['id'], feed['title'])
response.headers['etag'] = 'jarr/"%s"' % to_hash(response.text)
if response.headers['etag'] and feed['etag'] \
and response.headers['etag'] == feed['etag']:
if 'jarr' in feed['etag']:
logger.info("%r %r - calculated hash matches (%d)",
feed['id'], feed['title'], response.status_code)
else:
logger.info("%r %r - feed responded with same etag (%d)",
feed['id'], feed['title'], response.status_code)
return True
return False
|
Python
| 0
|
@@ -2860,16 +2860,118 @@
'etag'%5D%0A
+ if 'If-Modified-Since' in headers or 'If-None-Match' in headers:%0A headers%5B'A-IM'%5D = 'feed'%0A
logg
|
1b996bf797b5e1a0203054f11001771ede309b23
|
remove dead code
|
scrapi/harvesters/smithsonian.py
|
scrapi/harvesters/smithsonian.py
|
'''
Harvester for the Smithsonian Digital Repository for the SHARE project
Example API call: http://repository.si.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
import re
from scrapi.base import helpers
from scrapi.base import OAIHarvester
class SiHarvester(OAIHarvester):
short_name = 'smithsonian'
long_name = 'Smithsonian Digital Repository'
url = 'http://repository.si.edu'
@property
def schema(self):
return helpers.updated_schema(self._schema, {
"uris": ('//dc:identifier/node()', helpers.oai_process_uris)
})
base_url = 'http://repository.si.edu/oai/request'
property_list = ['date', 'identifier', 'type', 'format', 'setSpec']
timezone_granularity = True
def get_doi_from_identifier(identifiers):
doi_re = re.compile(r'10\.\S*\/\S*')
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for identifier in identifiers:
try:
found_doi = doi_re.search(identifier).group()
return 'http://dx.doi.org/{}'.format(found_doi)
except AttributeError:
continue
|
Python
| 0.999454
|
@@ -778,393 +778,4 @@
rue%0A
-%0A%0Adef get_doi_from_identifier(identifiers):%0A doi_re = re.compile(r'10%5C.%5CS*%5C/%5CS*')%0A identifiers = %5Bidentifiers%5D if not isinstance(identifiers, list) else identifiers%0A for identifier in identifiers:%0A try:%0A found_doi = doi_re.search(identifier).group()%0A return 'http://dx.doi.org/%7B%7D'.format(found_doi)%0A except AttributeError:%0A continue%0A
|
36070fdc617875527221b756a27bcee08220633e
|
add negate op to CoefficientArray class
|
python_module/sirius/coefficient_array.py
|
python_module/sirius/coefficient_array.py
|
import numpy as np
class CoefficientArray:
def __init__(self, dtype=np.complex, ctype=np.matrix):
"""
dtype -- number type
ctype -- container type (default np.matrix)
"""
self.dtype = dtype
self.ctype = ctype
self._data = {}
def __getitem__(self, key):
"""
key -- (k, ispn)
"""
# return as view
return self._data[key][:]
def __setitem__(self, key, item):
"""
"""
if key in self._data:
x = self._data[key]
# make sure shapes don't change
x[:] = self.ctype(item, copy=False)
else:
self._data[key] = self.ctype(item, dtype=self.dtype, copy=True)
def sum(self, **kwargs):
"""
"""
return sum([np.sum(v) for _, v in self.items()])
def __mul__(self, other):
"""
Returns a new object of type type(self)
"""
out = type(self)(dtype=self.dtype)
if isinstance(other, CoefficientArray):
for key in other._data.keys():
out[key] = np.einsum('ij,ij->ij', self._data[key], other._data[key])
elif np.isscalar(other):
for key in self._data.keys():
out[key] = self._data[key] * other
else:
raise TypeError('wrong type')
return out
def __add__(self, other):
"""
"""
out = type(self)(dtype=self.dtype)
if isinstance(other, CoefficientArray):
for key in other._data.keys():
out[key] = self._data[key] + other._data[key]
elif np.isscalar(other):
for key in self._data.keys():
out[key] = self._data[key] + other
return out
def abs(self):
"""
"""
out = type(self)(dtype=self.dtype)
for key in self._data.keys():
out[key] = np.abs(self._data[key])
return out
def keys(self):
return self._data.keys()
def __sub__(self, other):
"""
"""
return self.__add__(-1*other)
def conjugate(self):
"""
"""
out = type(self)(dtype=self.dtype)
for key, val in self._data.items():
out[key] = np.conj(val)
return out
def conj(self):
"""
"""
return self.conjugate()
__lmul__ = __mul__
__rmul__ = __mul__
__radd__ = __add__
__ladd__ = __add__
__lsub__ = __sub__
__rsub__ = __sub__
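    # Editorial note: only the __r*__ names above are hooks Python consults;
    # __lmul__, __ladd__ and __lsub__ are not special methods, so those
    # assignments are inert aliases kept for symmetry.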
class PwCoeffs(CoefficientArray):
def __init__(self, kpointset=None, dtype=np.complex):
super().__init__(dtype)
# load plane wave-coefficients from kpointset
if kpointset is not None:
num_sc = kpointset.ctx().num_spins()
for ki in range(len(kpointset)):
k = kpointset[ki]
for ispn in range(num_sc):
key = ki, ispn
val = np.matrix(k.spinor_wave_functions().pw_coeffs(ispn))
self.__setitem__(key, val)
def __setitem__(self, key, item):
"""
key -- (k, ispn)
"""
# return as view
assert (len(key) == 2)
return super(PwCoeffs, self).__setitem__(key, item)
def kview(self, k):
"""
"""
out = PwCoeffs(dtype=self.dtype)
out._data = {(ki, ispn): self._data[(ki, ispn)]
for ki, ispn in self._data if ki == k}
return out
def kvalues(self):
"""
TODO: make an iterator
"""
ks, _ = zip(*self._data.keys())
return ks
def by_k(self):
"""
returns a dictionary, where each element is a list of tuples:
{k: [(ispn, cn), ...]}
"""
sdict = {k: [] for k in self.kvalues()}
for k, ispn in self._data:
sdict[k].append((ispn, self._data[(k, ispn)]))
return sdict
def __len__(self):
"""
"""
return len(self._data)
def items(self):
"""
"""
return self._data.items()
def __contains__(self, key):
"""
"""
        return key in self._data
if __name__ == '__main__':
shapes = [(10, 12), (3, 100), (4, 80), (5, 60)]
keys = [(1, 0), (1, 1), (2, 0), (2, 1)]
CC = PwCoeffs()
for k, sh in zip(keys, shapes):
print('k:', k)
print('sh:', sh)
CC[k] = np.random.rand(*sh)
# scale
CC = 4 * CC
CC = CC + 1
CC = 2 + CC + 1
CC = CC - 1j
CC = np.conj(CC)
print('np.conj(CC) has type: ', type(CC))
# not working
# CC = abs(CC)
CCv = CC.kview(1)
for k, item in CC.by_k().items():
print('list of k:', k, 'has ', len(item), ' entries')
|
Python
| 0.000009
|
@@ -1751,32 +1751,215 @@
return out%0A%0A
+ def __neg__(self):%0A %22%22%22%0A%0A %22%22%22%0A out = type(self)(dtype=self.dtype)%0A for key, val in self._data.items():%0A out%5Bkey%5D = -val%0A return out%0A%0A
def abs(self
@@ -2271,17 +2271,19 @@
add__(-1
-*
+ *
other)%0A%0A
|
554bb0319fb6af5e1a989598adb187148bac362d
|
Fix extra argument for checking completion status
|
lava-submit-tests.py
|
lava-submit-tests.py
|
#!/usr/bin/python
import json
import os
import sys
import time
import xmlrpclib
# Parse the results bundle to see the run-tests testcase
# of the lttng-kernel-tests passed successfully
def check_job_test_case_status(server, job):
bundle_sha = server.scheduler.job_status(str(job))['bundle_sha1']
bundle = server.dashboard.get(bundle_sha)
content = json.loads(bundle['content'])
for run in content['test_runs']:
if run['test_id'] in 'lttng-kernel-test':
for result in run['test_results']:
if 'test_case_id' in result and result['test_case_id'] in 'run-tests':
if result['result'] in 'pass':
return True
else:
return False
if len(sys.argv) != 8:
    print("Must provide 7 arguments: {} {} {} {} {} {} {} {}".format(sys.argv[0],
        "job_name", "LAVA_KEY", "kernel_image", "kernel_modules_archive",
        "lttng_modules_archive", "tools_commit", "ust_commit"))
    sys.exit(1)
job_name=sys.argv[1]
token=sys.argv[2]
kernel=sys.argv[3]
linux_modules=sys.argv[4]
lttng_modules=sys.argv[5]
tools_commit=sys.argv[6]
ust_commit=sys.argv[7]
job ="""{
"health_check": false,
"job_name": "LTTng kernel tests",
"device_type": "x86",
"tags": [ "dev-sda1" ],
"timeout": 18000,
"actions": [
{
"command": "boot_image"
},
{
"command": "lava_command_run",
"parameters": {
"commands": [
"ifup eth0",
"route -n",
"cat /etc/resolv.conf",
"echo nameserver 172.18.0.12 > /etc/resolv.conf",
"mount /dev/sda1 /tmp",
"rm -rf /tmp/*"
]
}
},
{
"command": "lava_command_run",
"parameters": {
"commands": [
"locale-gen en_US.UTF-8",
"apt-get update",
"apt-get install -y bsdtar psmisc wget python3 python3-pip libglib2.0-dev libffi-dev elfutils",
"apt-get install -y libelf-dev libmount-dev libxml2 python3-pandas python3-numpy libdw-dev ccache"
],
"timeout": 18000
}
},
{
"command": "lava_test_shell",
"parameters": {
"testdef_repos": [
{
"git-repo": "https://github.com/frdeso/syscall-bench-it.git",
"revision": "master",
"testdef": "lava/testcases/kernel-tests.yml"
}
],
"timeout": 18000
}
},
{
"command": "submit_results",
"parameters": {
"server": "http://lava-master.internal.efficios.com/RPC2/",
"stream": "/anonymous/tests-kernel/"
}
}
]
}"""
# We use the kernel image and modules archive received as argument
deploy_action={"command": "deploy_kernel",
"metadata": {
"jenkins_jobname": job_name,
},
"parameters": {
"overlays": [
"scp://jenkins-lava@storage.internal.efficios.com"+linux_modules,
"scp://jenkins-lava@storage.internal.efficios.com"+lttng_modules
],
"kernel":
"scp://jenkins-lava@storage.internal.efficios.com"+kernel,
"nfsrootfs": "scp://jenkins-lava@storage.internal.efficios.com/storage/jenkins-lava/rootfs/rootfs_amd64_trusty_2016-02-23-1134.tar.gz",
"target_type": "ubuntu"
}
}
# We checkout the commit id for tools
setup_action = {
"command": "lava_command_run",
"parameters": {
"commands": [
"git clone https://github.com/frdeso/syscall-bench-it.git bm",
"pip3 install vlttng",
"vlttng --jobs=16 --profile babeltrace-stable-1.4 --profile use-ccache-gcc --profile urcu-master \
--profile lttng-tools-master -o \
projects.lttng-tools.checkout="+tools_commit+ \
" --profile lttng-ust-master -o \
projects.lttng-ust.checkout="+ust_commit+ \
" /tmp/virtenv"
],
"timeout": 18000
}
}
job_dict= json.loads(job)
for t in [i for i in job_dict['actions'] if i['command'] == 'lava_test_shell']:
for a in t['parameters']['testdef_repos']:
a['parameters'] = {}
a['parameters']['JENKINS_JOBNAME'] = job_name
job_dict['job_name']=job_name
job_dict['actions'].insert(0, deploy_action)
job_dict['actions'].insert(4, setup_action)
username = 'frdeso'
hostname = 'lava-master.internal.efficios.com'
server = xmlrpclib.ServerProxy('http://%s:%s@%s/RPC2' % (username, token, hostname))
jobid = server.scheduler.submit_job(json.dumps(job_dict))
jobstatus = server.scheduler.job_status(jobid)['job_status']
while jobstatus in 'Submitted' or jobstatus in 'Running':
time.sleep(30)
jobstatus = server.scheduler.job_status(jobid)['job_status']
if jobstatus not in 'Complete':
print(jobstatus)
if check_job_test_case_status(server, jobid, 'run-tests'):
sys.exit(-1)
else:
sys.exit(0)
|
Python
| 0.000023
|
@@ -5401,21 +5401,8 @@
obid
-, 'run-tests'
):%0A
|
3c28a101b9c353973a7516c710134405ba55baca
|
Use an alternative for the locale.RADIXCHAR if this one doesn't exist
|
bin/report/render/rml2pdf/utils.py
|
bin/report/render/rml2pdf/utils.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# trml2pdf - An RML to PDF converter
# Copyright (C) 2003, Fabien Pinckaers, UCL, FSA
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import re
import reportlab
from lxml import etree
import copy
import tools
_regex = re.compile(r'\[\[(.+?)\]\]')
def str2xml(s):
    return s.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
def xml2str(s):
    return s.replace('&amp;', '&').replace('&lt;', '<').replace('&gt;', '>')
def _child_get(node, self=None, tagname=None):
for n in node:
if self and self.localcontext and n.get('rml_loop', False):
oldctx = self.localcontext
for ctx in eval(n.get('rml_loop'),{}, self.localcontext):
self.localcontext.update(ctx)
if (tagname is None) or (n.tag==tagname):
if n.get('rml_except', False):
try:
eval(n.get('rml_except'), {}, self.localcontext)
except:
continue
if n.get('rml_tag'):
try:
(tag,attr) = eval(n.get('rml_tag'),{}, self.localcontext)
n2 = copy.deepcopy(n)
n2.tag = tag
n2.attrib.update(attr)
yield n2
except:
yield n
else:
yield n
self.localcontext = oldctx
continue
if self and self.localcontext and n.get('rml_except', False):
try:
eval(n.get('rml_except'), {}, self.localcontext)
except:
continue
if self and self.localcontext and n.get('rml_tag', False):
try:
(tag,attr) = eval(n.get('rml_tag'),{}, self.localcontext)
n2 = copy.deepcopy(n)
n2.tag = tag
n2.attrib.update(attr or {})
yield n2
tagname = ''
except:
pass
if (tagname is None) or (n.tag==tagname):
yield n
def _process_text(self, txt):
if not self.localcontext:
return str2xml(txt)
if not txt:
return ''
result = ''
sps = _regex.split(txt)
while sps:
# This is a simple text to translate
result += self.localcontext.get('translate', lambda x:x)(sps.pop(0))
if sps:
try:
txt = eval(sps.pop(0),self.localcontext)
except:
pass
if type(txt)==type('') or type(txt)==type(u''):
txt2 = str2xml(txt)
result += tools.ustr(txt2)
elif (txt is not None) and (txt is not False):
result += str(txt)
return result
def text_get(node):
rc = ''
for node in node.getchildren():
rc = rc + tools.ustr(node.text)
return rc
units = [
    (re.compile(r'^(-?[0-9\.]+)\s*in$'), reportlab.lib.units.inch),
    (re.compile(r'^(-?[0-9\.]+)\s*cm$'), reportlab.lib.units.cm),
    (re.compile(r'^(-?[0-9\.]+)\s*mm$'), reportlab.lib.units.mm),
    (re.compile(r'^(-?[0-9\.]+)\s*$'), 1)
]
def unit_get(size):
global units
if size:
for unit in units:
res = unit[0].search(size, 0)
if res:
return unit[1]*float(res.group(1))
return False
def tuple_int_get(node, attr_name, default=None):
if not node.get(attr_name):
return default
res = [int(x) for x in node.get(attr_name).split(',')]
return res
def bool_get(value):
return (str(value)=="1") or (value.lower()=='yes')
def attr_get(node, attrs, dict={}):
res = {}
for name in attrs:
if node.get(name):
res[name] = unit_get(node.get(name))
for key in dict:
if node.get(key):
if dict[key]=='str':
res[key] = str(node.get(key))
elif dict[key]=='bool':
res[key] = bool_get(node.get(key))
elif dict[key]=='int':
res[key] = int(node.get(key))
elif dict[key]=='unit':
res[key] = unit_get(node.get(key))
elif dict[key] == 'float' :
res[key] = float(node.get(key))
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000003
|
@@ -1869,16 +1869,30 @@
rt tools
+%0Aimport locale
%0A%0A_regex
@@ -5030,16 +5030,290 @@
f size:%0A
+ if size.find('.') == -1:%0A decimal_point = '.'%0A try:%0A decimal_point = locale.RADIXCHAR%0A except:%0A decimal_point = locale.localeconv()%5B'decimal_point'%5D%0A%0A size = size.replace(decimal_point, '.')%0A%0A
|
1b7e68c3bdfc2f43f754cc39e1f2f80bfa5bee80
|
Add validate_log_translations flake8 check
|
designate/hacking/checks.py
|
designate/hacking/checks.py
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
mutable_default_argument_check = re.compile(
r"^\s*def .+\((.+=\{\}|.+=\[\])")
def mutable_default_arguments(logical_line, filename):
if mutable_default_argument_check.match(logical_line):
yield (0, "D701: Default paramater value is a mutable type")
def factory(register):
register(mutable_default_arguments)
|
Python
| 0.000002
|
@@ -653,16 +653,29 @@
ort re%0A%0A
+import pep8%0A%0A
%0Amutable
@@ -747,24 +747,134 @@
.+=%5C%5B%5C%5D)%22)%0A%0A
+log_translation = re.compile(%0A r%22(.)*LOG%5C.(audit%7Cerror%7Cinfo%7Cwarn%7Cwarning%7Ccritical%7Cexception)%5C(%5Cs*('%7C%5C%22)%22)%0A%0A
%0Adef mutable
@@ -905,16 +905,31 @@
al_line,
+ physical_line,
filenam
@@ -928,24 +928,73 @@
filename):%0A
+ if pep8.noqa(physical_line):%0A return%0A%0A
if mutab
@@ -1107,24 +1107,373 @@
le type%22)%0A%0A%0A
+def validate_log_translations(logical_line, physical_line, filename):%0A # Translations are not required in the test directory%0A if %22designate/tests%22 in filename:%0A return%0A if pep8.noqa(physical_line):%0A return%0A msg = %22D702: Log messages require translation%22%0A if log_translation.match(logical_line):%0A yield (0, msg)%0A%0A%0A
def factory(
@@ -1523,8 +1523,48 @@
uments)%0A
+ register(validate_log_translations)%0A
|
ec9bd84c7487ef0d3fead1641c5132f2f269b5bc
|
Use absolute path for the result of glob.
|
lbuild/repository.py
|
lbuild/repository.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Fabian Greif
# All Rights Reserved.
#
# The file is part of the lbuild project and is released under the
# 2-clause BSD license. See the file `LICENSE.txt` for the full license
# governing this code.
import os
import glob
from .exception import BlobException
from . import utils
from . import environment
class OptionNameResolver:
def __init__(self, repository, options):
self.repository = repository
self.options = options
def __getitem__(self, key):
o = key.split(":")
if len(o) != 2:
raise BlobException("Option name '%s' must contain exactly one colon " \
"to separate repository and option name.")
repo, option = o
if repo == "":
key = "%s:%s" % (self.repository.name, option)
try:
return self.options[key].value
except KeyError:
raise BlobException("Unknown option name '%s'" % key)
def __repr__(self):
return repr(self.options)
def __len__(self):
return len(self.options)
class Repository:
def __init__(self, path):
# Path to the repository file. All relative paths refer to this path.
self.path = path
self.name = None
# Dict of modules, using the filename as the key
self.modules = {}
# Name -> Option()
self.options = {}
def set_name(self, name):
"""Set name of the repository."""
self.name = name
def _relocate(self, path):
"""
Relocate relative paths to the path of the repository
configuration file.
"""
if not os.path.isabs(path):
path = os.path.join(self.path, path)
return os.path.normpath(path)
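    # Example (editorial): with self.path == '/repo', _relocate('module.lb')
    # returns '/repo/module.lb'; absolute inputs are only normpath-ed.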
def glob(self, pattern):
pattern = self._relocate(pattern)
return glob.glob(pattern)
def add_modules(self, modules):
"""
Add one or more module files.
Args:
modules: List of filenames
"""
module_files = utils.listify(modules)
for file in module_files:
file = self._relocate(file)
if not os.path.isfile(file):
raise BlobException("Module file not found '%s'" % file)
self.modules[file] = None
def find_modules(self, basepath="", modulefile="module.lb"):
"""
Find all module files following a specific pattern.
Args:
basepath : Rootpath for the search.
modulefile : Filename of the module files to search
for (default: "module.lb").
"""
basepath = self._relocate(basepath)
for path, _, files in os.walk(basepath):
if modulefile in files:
self.modules[os.path.normpath(os.path.join(path, modulefile))] = None
def add_option(self, name, description, default=None):
"""
Define new repository wide option.
These options can be used by modules to decide whether they are
available and what options they provide for a specific set of
repository options.
"""
self._check_for_duplicates(name)
self.options[name] = environment.Option(name, description, default)
def add_boolean_option(self, name, description, default=None):
self._check_for_duplicates(name)
self.options[name] = environment.BooleanOption(name, description, default)
def add_numeric_option(self, name, description, default=None):
self._check_for_duplicates(name)
self.options[name] = environment.NumericOption(name, description, default)
def _check_for_duplicates(self, name):
if name in self.options:
raise BlobException("Option name '%s' is already defined" % name)
|
Python
| 0
|
@@ -1336,24 +1336,16 @@
= None%0A
-
%0A
@@ -1909,16 +1909,32 @@
ttern =
+os.path.abspath(
self._re
@@ -1944,24 +1944,25 @@
ate(pattern)
+)
%0A ret
@@ -2195,24 +2195,16 @@
odules)%0A
-
%0A
@@ -3967,12 +3967,13 @@
ned%22 %25 name)
+%0A
|
85d69a7dd1c29f9b9bca7b5a9e6b1319caf07c6b
|
Fix missing tpu variable in benchmark_args_tf.py (#13968)
|
src/transformers/benchmark/benchmark_args_tf.py
|
src/transformers/benchmark/benchmark_args_tf.py
|
# coding=utf-8
# Copyright 2018 The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass, field
from typing import Tuple
from ..file_utils import cached_property, is_tf_available, tf_required
from ..utils import logging
from .benchmark_args_utils import BenchmarkArguments
if is_tf_available():
import tensorflow as tf
logger = logging.get_logger(__name__)
@dataclass
class TensorFlowBenchmarkArguments(BenchmarkArguments):
deprecated_args = [
"no_inference",
"no_cuda",
"no_tpu",
"no_speed",
"no_memory",
"no_env_print",
"no_multi_process",
]
def __init__(self, **kwargs):
"""
This __init__ is there for legacy code. When removing deprecated args completely, the class can simply be
deleted
"""
for deprecated_arg in self.deprecated_args:
if deprecated_arg in kwargs:
positive_arg = deprecated_arg[3:]
kwargs[positive_arg] = not kwargs.pop(deprecated_arg)
logger.warning(
f"{deprecated_arg} is depreciated. Please use --no-{positive_arg} or {positive_arg}={kwargs[positive_arg]}"
)
self.tpu_name = kwargs.pop("tpu_name", self.tpu_name)
self.device_idx = kwargs.pop("device_idx", self.device_idx)
self.eager_mode = kwargs.pop("eager_mode", self.eager_mode)
self.use_xla = kwargs.pop("use_xla", self.use_xla)
super().__init__(**kwargs)
tpu_name: str = field(
default=None,
metadata={"help": "Name of TPU"},
)
device_idx: int = field(
default=0,
metadata={"help": "CPU / GPU device index. Defaults to 0."},
)
eager_mode: bool = field(default=False, metadata={"help": "Benchmark models in eager model."})
use_xla: bool = field(
default=False,
metadata={
"help": "Benchmark models using XLA JIT compilation. Note that `eager_model` has to be set to `False`."
},
)
@cached_property
@tf_required
def _setup_tpu(self) -> Tuple["tf.distribute.cluster_resolver.TPUClusterResolver"]:
if self.tpu:
try:
if self.tpu_name:
tpu = tf.distribute.cluster_resolver.TPUClusterResolver(self.tpu_name)
else:
tpu = tf.distribute.cluster_resolver.TPUClusterResolver()
except ValueError:
tpu = None
return tpu
@cached_property
@tf_required
def _setup_strategy(self) -> Tuple["tf.distribute.Strategy", "tf.distribute.cluster_resolver.TPUClusterResolver"]:
if self.is_tpu:
tf.config.experimental_connect_to_cluster(self._setup_tpu)
tf.tpu.experimental.initialize_tpu_system(self._setup_tpu)
strategy = tf.distribute.TPUStrategy(self._setup_tpu)
else:
# currently no multi gpu is allowed
if self.is_gpu:
# TODO: Currently only single GPU is supported
tf.config.set_visible_devices(self.gpu_list[self.device_idx], "GPU")
strategy = tf.distribute.OneDeviceStrategy(device=f"/gpu:{self.device_idx}")
else:
tf.config.set_visible_devices([], "GPU") # disable GPU
strategy = tf.distribute.OneDeviceStrategy(device=f"/cpu:{self.device_idx}")
return strategy
@property
@tf_required
def is_tpu(self) -> bool:
return self._setup_tpu is not None
@property
@tf_required
def strategy(self) -> "tf.distribute.Strategy":
return self._setup_strategy
@property
@tf_required
def gpu_list(self):
return tf.config.list_physical_devices("GPU")
@property
@tf_required
def n_gpu(self) -> int:
if self.cuda:
return len(self.gpu_list)
return 0
@property
def is_gpu(self) -> bool:
return self.n_gpu > 0
|
Python
| 0.000024
|
@@ -2741,32 +2741,51 @@
sterResolver%22%5D:%0A
+ tpu = None%0A
if self.
|
1aba70b4b035637662625e42cada702a00f73e17
|
Add support for getitem and contains in listing
|
lendingclub2/loan.py
|
lendingclub2/loan.py
|
# Filename: loan.py
"""
LendingClub2 Loan Module
"""
# Standard libraries
import json
# lendingclub2
from lendingclub2 import requests
from lendingclub2.config import API_VERSION, DNS, ENDPOINTS
from lendingclub2.error import LCError
from lendingclub2.response import Response
# Constants
LISTING_VERSION = '1.2'
# Interface classes
class Loan(object):
"""
Information of each loan
"""
def __init__(self, response):
"""
Constructor
:param response: dict
"""
self._response = response
self.id = response['id']
self.amount = response['loanAmount']
self.funded_amount = response['fundedAmount']
self.term = response['term']
self.subgrade = response['subGrade']
def __repr__(self):
"""
Get the string representation of a loan
:returns: string
"""
template = "Loan(id={}, amount={:.2f}, funded={:.2f}%, term={}," \
" grade={})".format(
self.id, self.amount, self.percent_funded * 100,
self.term, self.subgrade)
return template
@property
def approved(self):
"""
Check if the loan has been approved by LendingClub
:returns: boolean
"""
return self._response['reviewStatus'] == 'APPROVED'
@property
def grade(self):
"""
Get the grade of the loan
:returns: string
"""
return self._response['grade']
@property
def percent_funded(self):
"""
Find percentage of amount funded
:returns: float
"""
return self.funded_amount / self.amount
class Listing(object):
"""
Loan listing, which can be used for filtering, and order submission later
"""
def __init__(self):
"""
Constructor
"""
self.loans = list()
def __copy__(self):
"""
Shallow copy of the listing
:returns: instance of lendingclub2.loan.Listing
"""
new_listing = Listing()
new_listing.loans = list(self.loans)
return new_listing
def __eq__(self, other):
"""
Check if two listings are equal
:param other: instance of lendingclub2.loan.Listing
:returns: boolean
"""
return self.loans == other.loans
def __iter__(self):
"""
Get an iterable version of the listing
:returns: an iterable
"""
return self.loans.__iter__()
def __len__(self):
"""
Get the length of loans in the listing
:returns: int
"""
return len(self.loans)
def copy(self):
"""
Get a shallow copy of the listing
:returns: instance of lendingclub2.loan.Listing
"""
return self.__copy__()
def filter(self, *filters):
"""
        Apply all filters to the loans found by the previous search.
If multiple filters are specified, the loan has to meet all the
criteria to be included in the result.
:param filters: iterable of lendingclub2.filter.Filter
:returns: an instance of lendingclub2.loan.Listing
"""
if not filters:
return self.copy()
filtered = list()
for loan in self.loans:
meet_spec = True
for filter_spec in filters:
if not filter_spec.meet_requirement(loan):
meet_spec = False
break
if meet_spec:
filtered.append(loan)
new_listing = Listing()
new_listing.loans = filtered
return new_listing
def search(self, filter_id=None, show_all=None):
"""
Apply filters and search for loans matching the specifications
"""
url = DNS + ENDPOINTS['loans'].format(version=API_VERSION)
criteria = list()
if filter_id is not None:
criteria.append('filterId={}'.format(filter_id))
if show_all is not None:
if show_all:
criteria.append('showAll=true')
else:
criteria.append('showAll=false')
if criteria:
url += '?' + '&'.join(criteria)
headers = {'X-LC-LISTING-VERSION': LISTING_VERSION}
response = Response(requests.get(url, headers=headers))
if not response.successful:
fstr = "cannot search for any loans"
raise LCError(fstr, details=json.dumps(response.json, indent=2))
# Reset the stored loans whenever we search again as long as the
# latest request was successful
self.loans = list()
for loan_json in response.json['loans']:
loan = Loan(loan_json)
self.loans.append(loan)
|
Python
| 0
|
@@ -1902,24 +1902,633 @@
s = list()%0A%0A
+ def __add__(self, other):%0A %22%22%22%0A Add two listings together%0A%0A :param other: instance of lendingclub2.loan.Listing%0A :returns: instance of lendingclub2.loan.Listing%0A %22%22%22%0A new_listing = Listing()%0A new_listing.loans = list(self.loans) + list(other.loans)%0A return new_listing%0A%0A def __contains__(self, loan_id):%0A %22%22%22%0A Check if the items are in the listing%0A%0A :param loan_id: int%0A :returns: boolean%0A %22%22%22%0A for loan in self.loans:%0A if loan.id == loan_id:%0A return True%0A return False%0A%0A
def __co
@@ -2983,16 +2983,453 @@
.loans%0A%0A
+ def __getitem__(self, loan_id):%0A %22%22%22%0A Get the loan instance based on the ID%0A%0A :param loan_id: int - loan ID%0A :raises IndexError: if the loan ID is not in the listing%0A :returns: instance of lendingclub2.loan.Loan%0A %22%22%22%0A for loan in self.loans:%0A if loan.id == loan_id:%0A return loan%0A raise IndexError(%22loan with ID %7B%7D is not in listing%22.format(loan_id))%0A%0A
def
|
54115d8ecd90da614a24bb910939001b37acd246
|
Test pairwise combinations
|
transmutagen/tests/test_origen.py
|
transmutagen/tests/test_origen.py
|
import os
import numpy as np
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
assert 'table_4' in data
|
Python
| 0.000009
|
@@ -7,27 +7,103 @@
os%0A
-%0Aimport numpy as np
+from itertools import combinations%0A%0Aimport numpy as np%0A%0Afrom ..tape9utils import origen_to_name
%0A%0ADA
@@ -460,29 +460,365 @@
-assert 'table_4' in data
+tape9, time, nuc, phi = datafile.split()%5B0%5D%0A%0A assert 'table_4' in data%0A assert 'nuclide' in data%5B'table_4'%5D%0A%0A nuclides = data%5B'table_4'%5D%5B'nuclides'%5D%0A%0A keys = %5B'activation_products', 'actinides', 'fission_products'%5D%0A # Sanity check%0A for comb in combinations(keys, 2):%0A assert set.intersection(*comb) == set()
%0A
|
c809f4f286bbec3b4cb1ebbff96c23256dd176e8
|
Change PowerVM version to an int
|
nova_powervm/virt/powervm/host.py
|
nova_powervm/virt/powervm/host.py
|
# Copyright 2014 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from nova.compute import arch
from nova.compute import hv_type
from nova.compute import vm_mode
from oslo_log import log as logging
from oslo_serialization import jsonutils
LOG = logging.getLogger(__name__)
# Power VM hypervisor info
IBM_POWERVM_HYPERVISOR_VERSION = '7.1'
# The types of LPARS that are supported.
POWERVM_SUPPORTED_INSTANCES = jsonutils.dumps([(arch.PPC64,
hv_type.PHYP,
vm_mode.HVM),
(arch.PPC64LE,
hv_type.PHYP,
vm_mode.HVM)])
# cpu_info that will be returned by build_host_stats_from_entry()
HOST_STATS_CPU_INFO = jsonutils.dumps({'vendor': 'ibm', 'arch': 'ppc64'})
def build_host_resource_from_ms(ms_wrapper):
"""Build the host resource dict from an MS adapter wrapper
This method builds the host resource dictionary from the
ManagedSystem Entry wrapper
:param ms_wrapper: ManagedSystem Entry Wrapper.
"""
data = {}
# Calculate the vcpus
proc_units = ms_wrapper.proc_units_configurable
proc_units_avail = ms_wrapper.proc_units_avail
pu_used = float(proc_units) - float(proc_units_avail)
data['vcpus'] = int(math.ceil(float(proc_units)))
data['vcpus_used'] = int(math.ceil(pu_used))
data['memory_mb'] = ms_wrapper.memory_configurable
data['memory_mb_used'] = (ms_wrapper.memory_configurable -
ms_wrapper.memory_free)
data["hypervisor_type"] = hv_type.PHYP
data["hypervisor_version"] = IBM_POWERVM_HYPERVISOR_VERSION
data["hypervisor_hostname"] = ms_wrapper.mtms.mtms_str
data["cpu_info"] = HOST_STATS_CPU_INFO
data["numa_topology"] = None
data["supported_instances"] = POWERVM_SUPPORTED_INSTANCES
stats = {'proc_units': '%.2f' % float(proc_units),
'proc_units_used': '%.2f' % pu_used
}
data["stats"] = stats
return data
|
Python
| 0.000001
|
@@ -908,10 +908,10 @@
= '7
-.
1
+0
'%0A%0A#
|
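Despite the subject line, the hunk leaves the quotes in place, so the constant becomes the string '710' rather than a true int. Reconstructed, the change is:

# Before:
IBM_POWERVM_HYPERVISOR_VERSION = '7.1'
# After (still a str -- only the digits changed):
IBM_POWERVM_HYPERVISOR_VERSION = '710'

The packing scheme can only be inferred from this one data point; a hypothetical helper matching it would be:

def version_to_int(version):
    # '7.1' -> 710, assuming major*100 + minor*10; this scheme is an
    # assumption on the editor's part, not taken from nova_powervm.
    major, minor = (int(p) for p in version.split('.'))
    return major * 100 + minor * 10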
aa262ba141290ba04beb2ec4866b1bad1ea85db2
|
Fix applying nan mask to specified mask.
|
turbustat/cube_tools/sim_cubes.py
|
turbustat/cube_tools/sim_cubes.py
|
'''
Wrapper on spectral_cube for simulated datasets
'''
import numpy as np
import spectral_cube as SpectralCube
try:
from signal_id import Noise
except ImportError:
prefix = "/srv/astro/erickoch/" # Adjust if you're not me!
execfile(prefix + "Dropbox/code_development/signal-id/noise.py")
class SimCube(object):
def __init__(self, cube, beam=None, method="MAD", compute=True):
# Initialize cube object
self.cube = SpectralCube.read(cube)
# Initialize noise object
self.noise = Noise(self.cube, beam=beam, method=method)
def add_noise(self):
# Create the noisy cube
self.noise.get_noise_cube()
self._noise_cube = self.noise.noise_cube +\
self.cube.filled_data[:]
# Update SpectralCube object
self._update(data=self.noise_cube)
return self
def apply_mask(self, mask=None):
# Create the mask, auto masking nan values
mask = np.isfinite(self.cube)
if mask is not None:
mask *= mask
# Apply mask to spectral cube object
self.cube = self.cube.with_mask(mask)
return self
def _update(self, data=None, wcs=None, beam=None, method="MAD"):
'''
Helper function to update classes.
'''
# Check if we need a new SpectralCube
if data is None & wcs is None:
pass
else:
if data is None:
data = self.cube.unmasked_data[:]
if wcs is None:
wcs = self.cube.wcs
# Make new SpectralCube object
self.cube = SpectralCube(data=data, wcs=wcs)
if beam is not None:
self.noise = Noise(self.cube, beam=beam, method=method)
def compute_properties(self):
self._moment0 = self.cube.moment0().value
self._moment1 = self.cube.moment1().value
self._moment2 = self.cube.moment2().value
self.get_int_intensity()
return self
@property
def noise_cube(self):
return self._noise_cube
@property
def moment0(self):
return self._moment0
@property
def moment1(self):
return self._moment1
@property
def moment2(self):
return self._moment2
@property
def intint(self):
return self._intint
def get_int_intensity(self):
'''
Get an integrated intensity image of the cube.
'''
good_channels = self.noise.spectral_norm > self.noise.scale
channel_range = self.cube.spectral_axis[good_channels][[0, -1]]
channel_size = np.abs(self.cube.spectral_axis[1] -
self.cube.spectral_axis[0])
slab = self.cube.spectral_slab(*channel_range).filled_data[:]
self._intint = np.nansum(slab, axis=0) * channel_size
return self
|
Python
| 0
|
@@ -356,32 +356,43 @@
cube, beam=None,
+ mask=None,
method=%22MAD%22, c
@@ -575,32 +575,58 @@
method=method)%0A%0A
+ self.mask = mask%0A%0A
def add_nois
@@ -929,24 +929,105 @@
ask=None):%0A%0A
+ # Update mask%0A if mask is not None:%0A self.mask = mask%0A%0A
# Cr
@@ -1069,24 +1069,32 @@
ues%0A
+default_
mask = np.is
@@ -1114,32 +1114,37 @@
ube)%0A if
+self.
mask is not None
@@ -1161,16 +1161,21 @@
+self.
mask *=
mask
@@ -1170,16 +1170,75 @@
mask *=
+default_mask%0A else:%0A self.mask = default_
mask%0A%0A
|
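The bug being fixed is the self-assignment no-op mask *= mask, which silently discarded the caller's mask. The patched logic keeps a default finite-value mask and ANDs a user mask into it (on boolean arrays *= is elementwise AND; & is the idiomatic spelling). A minimal NumPy sketch of the same idea, with a plain ndarray standing in for the SpectralCube:

import numpy as np

def combine_masks(data, user_mask=None):
    # Always mask non-finite values; AND in a user mask if one is given.
    default_mask = np.isfinite(data)
    if user_mask is not None:
        return default_mask & user_mask
    return default_mask

cube = np.array([1.0, np.nan, 3.0, np.inf])
print(combine_masks(cube))            # [ True False  True False]
print(combine_masks(cube, cube > 2))  # [False False  True False]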
89560fd773d833a049824bfa8a7ccf4ce301bed4
|
remove utils.push_dir
|
build/fbcode_builder/utils.py
|
build/fbcode_builder/utils.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
'Miscellaneous utility functions.'
import itertools
import logging
import os
import shutil
import subprocess
import sys
from contextlib import contextmanager
def recursively_flatten_list(l):
return itertools.chain.from_iterable(
(recursively_flatten_list(i) if type(i) is list else (i,))
for i in l
)
def run_command(*cmd, **kwargs):
'The stdout of most fbcode_builder utilities is meant to be parsed.'
logging.debug('Running: {0} with {1}'.format(cmd, kwargs))
kwargs['stdout'] = sys.stderr
subprocess.check_call(cmd, **kwargs)
@contextmanager
def make_temp_dir(d):
os.mkdir(d)
try:
yield d
finally:
shutil.rmtree(d, ignore_errors=True)
@contextmanager
def push_dir(d):
old_dir = os.getcwd()
os.chdir(d)
try:
yield d
finally:
os.chdir(old_dir)
def read_fbcode_builder_config(filename):
# Allow one spec to read another
# When doing so, treat paths as relative to the config's project directory.
project_dir = os.path.dirname(filename)
def inner_read_config(path):
full_path = os.path.join(project_dir, path)
return read_fbcode_builder_config(full_path)
scope = {'read_fbcode_builder_config': inner_read_config}
with open(filename) as config_file:
# Note that this will need to be changed to an exec() function call for
# python 3 compatibility. Unfortunately python 2.7 does not seem to
# treat the scope correctly when using exec() function syntax here.
exec config_file.read() in scope
return scope['config']
def steps_for_spec(builder, spec, processed_modules=None):
'''
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
'''
if processed_modules is None:
processed_modules = set()
steps = []
for module in spec.get('depends_on', []):
if module not in processed_modules:
processed_modules.add(module)
steps.extend(steps_for_spec(
builder,
module.fbcode_builder_spec(builder),
processed_modules
))
steps.extend(spec.get('steps', []))
return steps
def build_fbcode_builder_config(config):
return lambda builder: builder.build(
steps_for_spec(builder, config['fbcode_builder_spec'](builder))
)
|
Python
| 0
|
@@ -888,149 +888,8 @@
)%0A%0A%0A
-@contextmanager%0Adef push_dir(d):%0A old_dir = os.getcwd()%0A os.chdir(d)%0A try:%0A yield d%0A finally:%0A os.chdir(old_dir)%0A%0A%0A
def
|
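The deleted helper is the classic pushd context manager: chdir in, and always chdir back even if the body raises. For reference, the removed code, plus a note on the modern stdlib equivalent:

import os
from contextlib import contextmanager

@contextmanager
def push_dir(d):
    old_dir = os.getcwd()
    os.chdir(d)
    try:
        yield d
    finally:
        os.chdir(old_dir)  # restore on success *and* on exception

# On Python 3.11+, contextlib.chdir(d) provides the same behaviour out
# of the box, which is one reason such local helpers get deleted.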
287757680b96957ba3e7f9db179896f85790ea69
|
use cleditor instead of cleditor.min.
|
addons/web/__openerp__.py
|
addons/web/__openerp__.py
|
{
"name" : "web",
"category": "Hidden",
"description":
"""
OpenERP Web core module.
This module provides the core of the OpenERP web client.
""",
"depends" : [],
'auto_install': True,
'post_load' : 'wsgi_postload',
'js' : [
"static/lib/datejs/globalization/en-US.js",
"static/lib/datejs/core.js",
"static/lib/datejs/parser.js",
"static/lib/datejs/sugarpak.js",
"static/lib/datejs/extras.js",
"static/lib/jquery/jquery-1.7.2.js",
"static/lib/jquery.MD5/jquery.md5.js",
"static/lib/jquery.form/jquery.form.js",
"static/lib/jquery.validate/jquery.validate.js",
"static/lib/jquery.ba-bbq/jquery.ba-bbq.js",
"static/lib/jquery.blockUI/jquery.blockUI.js",
"static/lib/jquery.ui/js/jquery-ui-1.8.17.custom.min.js",
"static/lib/jquery.ui.timepicker/js/jquery-ui-timepicker-addon.js",
"static/lib/jquery.ui.notify/js/jquery.notify.js",
"static/lib/jquery.deferred-queue/jquery.deferred-queue.js",
"static/lib/jquery.scrollTo/jquery.scrollTo-min.js",
"static/lib/jquery.tipsy/jquery.tipsy.js",
"static/lib/jquery.textext/jquery.textext.js",
"static/lib/jquery.timeago/jquery.timeago.js",
"static/lib/qweb/qweb2.js",
"static/lib/underscore/underscore.js",
"static/lib/underscore/underscore.string.js",
"static/lib/backbone/backbone.js",
"static/lib/cleditor/jquery.cleditor.min.js",
"static/lib/py.js/lib/py.js",
"static/src/js/boot.js",
"static/src/js/corelib.js",
"static/src/js/coresetup.js",
"static/src/js/dates.js",
"static/src/js/formats.js",
"static/src/js/chrome.js",
"static/src/js/views.js",
"static/src/js/data.js",
"static/src/js/data_export.js",
"static/src/js/data_import.js",
"static/src/js/search.js",
"static/src/js/view_form.js",
"static/src/js/view_list.js",
"static/src/js/view_list_editable.js",
"static/src/js/view_tree.js",
"static/src/js/view_editor.js"
],
'css' : [
"static/lib/jquery.ui.bootstrap/css/custom-theme/jquery-ui-1.8.16.custom.css",
"static/lib/jquery.ui.timepicker/css/jquery-ui-timepicker-addon.css",
"static/lib/jquery.ui.notify/css/ui.notify.css",
"static/lib/jquery.tipsy/tipsy.css",
"static/lib/jquery.textext/jquery.textext.css",
"static/src/css/base.css",
"static/src/css/data_export.css",
"static/src/css/data_import.css",
"static/lib/cleditor/jquery.cleditor.css",
],
'qweb' : [
"static/src/xml/*.xml",
],
}
|
Python
| 0
|
@@ -1508,20 +1508,16 @@
leditor.
-min.
js%22,%0A
|
96afeb6bdbefe1dceee9913f02cbd0c963764769
|
fix style
|
lexos/application.py
|
lexos/application.py
|
import json
import os
import re
import time
from flask import Flask, request, render_template
from jinja2 import evalcontextfilter
from markupsafe import Markup, escape
import lexos.helpers.constants
from lexos.helpers.exceptions import LexosException
from lexos.interfaces.base_interface import base_view
from lexos.interfaces.bubble_viz_interface import viz_view
from lexos.interfaces.clustering_interface import cluster_view
from lexos.interfaces.cut_interface import cutter_view
from lexos.interfaces.manage_interface import manage_view
from lexos.interfaces.multi_cloud_interface import multi_cloud_view
from lexos.interfaces.rolling_window_interface import rwa_view
from lexos.interfaces.scrub_interface import scrubber_view
from lexos.interfaces.similarity_query_interface import sim_view
from lexos.interfaces.statistics_interface import stats_view
from lexos.interfaces.tokenizer_interface import tokenizer_view
from lexos.interfaces.top_words_interface import top_words_view
from lexos.interfaces.upload_interface import upload_view
from lexos.interfaces.word_cloud_interface import word_cloud_view
def get_secret_key(file_name: str = 'secret_key') -> bytes:
"""Creates an encryption key for a secure session.
:param: file_name: A string representing the secret key.
:return: the bytes of the secret key
"""
file_full_name = os.path.join(app.static_folder, file_name)
if os.path.isfile(file_full_name):
return open(file_full_name, 'rb').read()
else:
print('secret key not found, creating secret key')
# create secrete key
open(file_full_name, 'wb').write(os.urandom(24))
return open(file_full_name, 'rb').read()
app = Flask(__name__)
app.config.from_pyfile('config.cfg')
app.config['MAX_CONTENT_LENGTH'] = lexos.helpers.constants.MAX_FILE_SIZE
app.config['SECRET_KEY'] = get_secret_key()
# open debugger when we are not on the server
app.debug = not lexos.helpers.constants.IS_SERVER
app.jinja_env.filters['type'] = type
app.jinja_env.filters['str'] = str
app.jinja_env.filters['tuple'] = tuple
app.jinja_env.filters['len'] = len
app.jinja_env.filters['unicode'] = str
app.jinja_env.filters['time'] = time.time()
# register all the blue prints
# they helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
app.register_blueprint(base_view)
app.register_blueprint(upload_view)
app.register_blueprint(manage_view)
app.register_blueprint(viz_view)
app.register_blueprint(cluster_view)
app.register_blueprint(cutter_view)
app.register_blueprint(multi_cloud_view)
app.register_blueprint(rwa_view)
app.register_blueprint(scrubber_view)
app.register_blueprint(sim_view)
app.register_blueprint(stats_view)
app.register_blueprint(tokenizer_view)
app.register_blueprint(top_words_view)
app.register_blueprint(word_cloud_view)
# http://flask.pocoo.org/snippets/28/
# http://stackoverflow.com/questions/12523725/
# why-is-this-jinja-nl2br-filter-escaping-brs-but-not-ps
@app.template_filter() # Register template filter
@evalcontextfilter # Add attribute to the evaluation time context filter
def nl2br(eval_ctx, value):
"""
Wraps a string value in HTML <p> tags and replaces internal new line
esacapes with <br/>. Since the result is a markup tag, the Markup()
function temporarily disables Jinja2's autoescaping in the evaluation time
context when it is returned to the template.
"""
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
result = '\n\n'.join('<p>%s</p>' % p.replace('\n', Markup('<br/>\n'))
for p in _paragraph_re.split(escape(value)))
if eval_ctx.autoescape:
result = Markup(result)
return result
# ==== add error handlers ====
@app.errorhandler(404)
def page_not_found(_):
"""Custom 404 Page"""
app.logger.error('Page not found: %s', request.path)
return render_template('404.html'), 404
@app.error_handlers(Exception)
def unhandled_exception(error):
"""handles internal server errors
Send all the LexosException to the frontend
for all the other Exceptions,
we will just render the internal server error (500) page
"""
# if we want to send this backend error to the front end
if isinstance(error, LexosException):
ret_data = {"lexosException": str(error)}
return json.dumps(ret_data)
# if flask raises this error
else:
render_template("500.html")
if __name__ == '__main__':
app.run()
|
Python
| 0.000001
|
@@ -4125,22 +4125,23 @@
frontend
+.
%0A
-f
+F
or all t
@@ -4221,16 +4221,17 @@
00) page
+.
%0A %22%22%22
@@ -4501,16 +4501,17 @@
html%22)%0A%0A
+%0A
if __nam
|
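Beyond the style fixes in this hunk, the stored file registers its handler with @app.error_handlers(Exception); Flask's decorator is errorhandler (singular), and the else branch builds the 500 page without returning it. A corrected sketch of that handler, with LexosException stubbed in as a stand-in for the project's class:

import json
from flask import Flask, render_template

app = Flask(__name__)

class LexosException(Exception):  # stand-in for lexos.helpers.exceptions
    pass

@app.errorhandler(Exception)
def unhandled_exception(error):
    # Forward project-specific errors to the frontend as JSON...
    if isinstance(error, LexosException):
        return json.dumps({"lexosException": str(error)})
    # ...and *return* the rendered 500 page for everything else.
    return render_template("500.html"), 500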
78ff5c0968e4867b550b4cb6dab70885e7119d11
|
Use revert instead of reset in bloom-patch remove
|
bloom/commands/patch/remove_cmd.py
|
bloom/commands/patch/remove_cmd.py
|
from __future__ import print_function
import sys
from argparse import ArgumentParser
from bloom.util import add_global_arguments
from bloom.util import execute_command
from bloom.util import handle_global_arguments
from bloom.logging import log_prefix
from bloom.logging import error
from bloom.logging import debug
from bloom.git import branch_exists
from bloom.git import checkout
from bloom.git import get_current_branch
from bloom.git import track_branches
from bloom.commands.patch.common import get_patch_config
@log_prefix('[git-bloom-patch remove]: ')
def remove_patches(directory=None):
# Get the current branch
current_branch = get_current_branch(directory)
# Ensure the current branch is valid
if current_branch is None:
error("Could not determine current branch, are you in a git repo?")
return 1
# Construct the patches branch
patches_branch = 'patches/' + current_branch
try:
# See if the patches branch exists
if branch_exists(patches_branch, False, directory=directory):
if not branch_exists(patches_branch, True, directory=directory):
track_branches(patches_branch, directory)
else:
error("No patches branch (" + patches_branch + ") found, cannot "
"remove patches.")
return 1
# Get the parent branch from the patches branch
config = get_patch_config(patches_branch, directory=directory)
parent, spec = config['parent'], config['base']
if None in [parent, spec]:
error("Could not retrieve patches info.")
return 1
debug("Removing patches from " + current_branch + " back to base "
"commit " + spec)
# Reset this branch using git reset --hard spec
execute_command('git reset --hard ' + spec, cwd=directory)
finally:
if current_branch:
checkout(current_branch, directory=directory)
return 0
def get_parser():
"""Returns a parser.ArgumentParser with all arguments defined"""
parser = ArgumentParser(description="""
Removes any applied patches from the working branch, including any un-exported
patches, so use with caution.
""")
return parser
def main():
# Assumptions: in a git repo, this command verb was passed, argv has enough
sysargs = sys.argv[2:]
parser = get_parser()
parser = add_global_arguments(parser)
args = parser.parse_args(sysargs)
handle_global_arguments(args)
return remove_patches()
|
Python
| 0
|
@@ -47,43 +47,23 @@
sys%0A
-from argparse
import
-ArgumentP
+argp
arse
-r
%0A%0Afr
@@ -75,163 +75,53 @@
oom.
-util import add_global_arguments%0Afrom bloom.util import execute_command%0Afrom bloom.util import handle_global_arguments%0Afrom bloom.logging import log_prefix
+commands.patch.common import get_patch_config
%0Afro
@@ -132,60 +132,54 @@
oom.
-logging import error%0Afrom bloom.logging import debug
+commands.patch.common import set_patch_config%0A
%0Afro
@@ -242,16 +242,54 @@
heckout%0A
+from bloom.git import get_commit_hash%0A
from blo
@@ -374,53 +374,228 @@
oom.
-commands.patch.common import get_patch_config
+logging import log_prefix%0Afrom bloom.logging import error%0Afrom bloom.logging import debug%0A%0Afrom bloom.util import add_global_arguments%0Afrom bloom.util import execute_command%0Afrom bloom.util import handle_global_arguments
%0A%0A%0A@
@@ -1847,26 +1847,30 @@
g git re
-se
+ver
t --
-hard
+no-edit
spec%0A
@@ -1902,18 +1902,22 @@
t re
-se
+ver
t --
-hard
+no-edit
' +
@@ -1934,24 +1934,188 @@
=directory)%0A
+ # Update the base%0A config%5B'base'%5D = get_commit_hash(current_branch, directory)%0A set_patch_config(patches_branch, config, directory=directory)%0A
finally:
@@ -2302,16 +2302,16 @@
ined%22%22%22%0A
-
pars
@@ -2315,16 +2315,25 @@
arser =
+argparse.
Argument
|
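The substantive change here swaps git reset --hard for git revert --no-edit: reset rewrites history by moving the branch pointer back to spec, while revert records new inverse commits on top, leaving published history intact. That is also why the patch must re-save the base afterwards with get_commit_hash -- HEAD has moved forward, not back. A minimal sketch of the two behaviours (spec is a placeholder commit-ish):

from subprocess import check_call

def undo_patches(spec, destructive=False):
    if destructive:
        # Old behaviour: discard commits after spec; unsafe once pushed.
        check_call(['git', 'reset', '--hard', spec])
    else:
        # New behaviour: add commits that undo the patches.
        check_call(['git', 'revert', '--no-edit', spec])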
5ad4191fae2b7e84e24d9646f21e3af7057d32e9
|
Replace deprecated `DataFrame.from_csv` with `pandas.read_csv`
|
scripts/extract_subjects.py
|
scripts/extract_subjects.py
|
from __future__ import print_function
import argparse
import yaml
from mimic3benchmark.mimic3csv import *
from mimic3benchmark.preprocessing import add_hcup_ccs_2015_groups, make_phenotype_label_matrix
import mimic3benchmark.util as util
parser = argparse.ArgumentParser(description='Extract per-subject data from MIMIC-III CSV files.')
parser.add_argument('mimic3_path', type=str, help='Directory containing MIMIC-III CSV files.')
parser.add_argument('output_path', type=str, help='Directory where per-subject data should be written.')
parser.add_argument('--event_tables', '-e', type=str, nargs='+', help='Tables from which to read events.',
default=['CHARTEVENTS', 'LABEVENTS', 'OUTPUTEVENTS'])
parser.add_argument('--phenotype_definitions', '-p', type=str, default='resources/hcup_ccs_2015_definitions.yaml',
help='YAML file with phenotype definitions.')
parser.add_argument('--itemids_file', '-i', type=str, help='CSV containing list of ITEMIDs to keep.')
parser.add_argument('--verbose', '-v', type=int, help='Level of verbosity in output.', default=1)
parser.add_argument('--test', action='store_true', help='TEST MODE: process only 1000 subjects, 1000000 events.')
args, _ = parser.parse_known_args()
try:
os.makedirs(args.output_path)
except:
pass
patients = read_patients_table(args.mimic3_path)
admits = read_admissions_table(args.mimic3_path)
stays = read_icustays_table(args.mimic3_path)
if args.verbose:
print('START:', stays.ICUSTAY_ID.unique().shape[0], stays.HADM_ID.unique().shape[0],
stays.SUBJECT_ID.unique().shape[0])
print(stays.ICUSTAY_ID.dtype)
stays = remove_icustays_with_transfers(stays)
if args.verbose:
print('REMOVE ICU TRANSFERS:', stays.ICUSTAY_ID.unique().shape[0], stays.HADM_ID.unique().shape[0],
stays.SUBJECT_ID.unique().shape[0])
stays = merge_on_subject_admission(stays, admits)
stays = merge_on_subject(stays, patients)
stays = filter_admissions_on_nb_icustays(stays)
if args.verbose:
print('REMOVE MULTIPLE STAYS PER ADMIT:', stays.ICUSTAY_ID.unique().shape[0], stays.HADM_ID.unique().shape[0],
stays.SUBJECT_ID.unique().shape[0])
stays = add_age_to_icustays(stays)
stays = add_inunit_mortality_to_icustays(stays)
stays = add_inhospital_mortality_to_icustays(stays)
stays = filter_icustays_on_age(stays)
if args.verbose:
print('REMOVE PATIENTS AGE < 18:', stays.ICUSTAY_ID.unique().shape[0], stays.HADM_ID.unique().shape[0],
stays.SUBJECT_ID.unique().shape[0])
stays.to_csv(os.path.join(args.output_path, 'all_stays.csv'), index=False)
diagnoses = read_icd_diagnoses_table(args.mimic3_path)
diagnoses = filter_diagnoses_on_stays(diagnoses, stays)
diagnoses.to_csv(os.path.join(args.output_path, 'all_diagnoses.csv'), index=False)
count_icd_codes(diagnoses, output_path=os.path.join(args.output_path, 'diagnosis_counts.csv'))
phenotypes = add_hcup_ccs_2015_groups(diagnoses, yaml.load(open(args.phenotype_definitions, 'r')))
make_phenotype_label_matrix(phenotypes, stays).to_csv(os.path.join(args.output_path, 'phenotype_labels.csv'),
index=False, quoting=csv.QUOTE_NONNUMERIC)
if args.test:
pat_idx = np.random.choice(patients.shape[0], size=1000)
patients = patients.iloc[pat_idx]
stays = stays.merge(patients[['SUBJECT_ID']], left_on='SUBJECT_ID', right_on='SUBJECT_ID')
print('Using only', stays.shape[0], 'stays')
subjects = stays.SUBJECT_ID.unique()
break_up_stays_by_subject(stays, args.output_path, subjects=subjects, verbose=args.verbose)
break_up_diagnoses_by_subject(phenotypes, args.output_path, subjects=subjects, verbose=args.verbose)
items_to_keep = set(
[int(itemid) for itemid in util.from_csv(args.itemids_file)['ITEMID'].unique()]) if args.itemids_file else None
for table in args.event_tables:
read_events_table_and_break_up_by_subject(args.mimic3_path, table, args.output_path, items_to_keep=items_to_keep,
subjects_to_keep=subjects, verbose=args.verbose)
|
Python
| 0.000071
|
@@ -197,22 +197,20 @@
_matrix%0A
-import
+from
mimic3b
@@ -227,15 +227,16 @@
til
-as util
+import *
%0A%0Apa
@@ -1603,39 +1603,8 @@
%5D)%0A%0A
-print(stays.ICUSTAY_ID.dtype)%0A%0A
stay
@@ -3357,16 +3357,63 @@
CT_ID')%0A
+ args.event_tables = %5Bargs.event_tables%5B0%5D%5D%0A
prin
@@ -3450,16 +3450,56 @@
, 'stays
+ and only', args.event_tables%5B0%5D, 'table
')%0A%0Asubj
@@ -3780,13 +3780,18 @@
in
-util.
+dataframe_
from
|
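DataFrame.from_csv defaulted to index_col=0 and parse_dates=True, so a drop-in read_csv replacement has to spell those out; relying on read_csv's own defaults silently changes the index. A minimal sketch of the library-level migration (itemids.csv is a placeholder path):

import pandas as pd

# Deprecated, and later removed from pandas:
# df = pd.DataFrame.from_csv('itemids.csv')

# Equivalent read_csv call -- from_csv's implicit defaults made explicit:
df = pd.read_csv('itemids.csv', index_col=0, parse_dates=True)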
4e418e6168425173c3e6ed44299864d52da286ee
|
fix var reference in gutenberg_filter
|
scripts/gutenberg_filter.py
|
scripts/gutenberg_filter.py
|
import os
import re
class GutenbergIndexFilter(object):
# Extensions excluded from rsync of both ftp and cached/generated content
EXCLUDED_EXT = ['.zip', '.wav', '.mp3', '.ogg', '.iso', '.ISO', '.rar', '.mpeg', '.m4b']
# Additional extensions excluded from cached/generated files
CACHE_EXCLUDED_EXT = ['.log', '.mobi', '.pdb', '.rdf', '.qioo.jar']
def __init__(self):
self.removed_texts = []
self.notitle_count = 0
def filter(self, record):
"""Return true if keep record, false if should discard record"""
if self.is_description_record(record):
has_title = 'title' in record and len(record['title']) > 0
if not has_title:
self.removed_texts.append(record['textId'])
print "[omit %s notitle]" % record['textId']
self.notitle_count += 1
return has_title
else:
# NOTE: Changes to the record persist and are externally visible!
# remove prepended '#' from text reference
record['textId'] = record['textId'][1:]
# adjust the file path (should add warning if path does not match pattern)
FILE_PREFIX = '^http://www.gutenberg.org/dirs/'
record['file'] = re.sub(FILE_PREFIX, 'gutenberg/', record['file'])
CACHE_FILE_PREFIX = '^http://www.gutenberg.org/cache/epub/'
record['file'] = re.sub(CACHE_FILE_PREFIX, 'cache/generated/', record['file'])
# seems ugly - would multiple filters be better? or maybe a filter stage followed by a transform stage?
if record['file'].startswith('http'):
print "[file prefix unexpected %s]" % record['file']
# omit files based on three criteria:
# (a) book description was omitted due to filter criteria above
# (b) rsync script excluded the content (extensions and 'pgdvd')
# (c) rsync script excluded the cached content (extensions and 'pgdvd')
ext = self.get_extension(record['file'])
return (record['textId'] not in self.removed_texts and
u'pgdvd' not in record['file'] and
ext not in self.EXCLUDED_EXT and
(not record['file'].startswith(u'cache/') or ext not in CACHE_EXCLUDED_EXT))
def is_description_record(self, record):
return record['record_type'] == 'DESCRIPTION'
def get_extension(self, filename):
name, ext = os.path.splitext(filename)
return ext
|
Python
| 0
|
@@ -2307,16 +2307,21 @@
not in
+self.
CACHE_EX
|
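The one-token fix reflects a Python scoping rule worth remembering: inside a method, a bare name is resolved as local, then enclosing function, then module global -- the class namespace is never searched -- so the unqualified CACHE_EXCLUDED_EXT raised NameError. A minimal illustration:

class Filter(object):
    EXCLUDED = {'.zip', '.mp3'}

    def keep(self, ext):
        # A bare EXCLUDED here would raise NameError; class attributes
        # must be reached via self. (or Filter.), as the fix does.
        return ext not in self.EXCLUDED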
8a5e49876eae4f2d9bc8ced2fa2e2be0d24ddd68
|
rollback to 1.7.0 release
|
scripts/imgtool/__init__.py
|
scripts/imgtool/__init__.py
|
# Copyright 2017-2020 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.8.0a1"
|
Python
| 0
|
@@ -642,11 +642,9 @@
%221.
-8
+7
.0
-a1
%22%0A
|
e3f53b37a276680bd12806ed14d09065d35d583e
|
Debug logs
|
dataactcore/scripts/agency_move_s3_files.py
|
dataactcore/scripts/agency_move_s3_files.py
|
import boto3
import logging
import argparse
from dataactcore.config import CONFIG_BROKER
from dataactcore.logging import configure_logging
from dataactvalidator.health_check import create_app
logger = logging.getLogger(__name__)
def move_published_agency_files(old_code, new_code):
""" Given the provided old and new agency codes, move the published files from the old agency directory to the new
Args:
old_code: The old agency code to copy from
new_code: The new agency code to move to
"""
if not old_code.endswith('/'):
old_code += '/'
if not new_code.endswith('/'):
new_code += '/'
# Note: the submissions bucket (aws_bucket) is not being used here as that path is based on submission ids
# DABS directory structure
# [certified bucket]/[agency code]/[fy]/[time period]/[publish history id]/[files]
s3 = boto3.resource('s3', region_name=CONFIG_BROKER['aws_region'])
certified_bucket = s3.Bucket(CONFIG_BROKER['certified_bucket'])
files_in_bucket = list(certified_bucket.objects.all())
logger.info('Moving published DABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket if old_file_path.key.startswith(old_code)]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published DABS submission files from {} to {}'.format(old_code, new_code))
# FABS directory structure
# [certified bucket]/FABS/[agency code]/[fy]/[time period]/[files]
logger.info('Moving published FABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket
if old_file_path.key.startswith('FABS/{}'.format(old_code))]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published FABS submission files from {} to {}'.format(old_code, new_code))
def main():
""" Move all submission files in S3 for an agency that has changed its code """
parser = argparse.ArgumentParser(description='Initialize the DATA Act Broker.')
parser.add_argument('-o', '--old_code', help='The old agency code to copy from', required=True)
parser.add_argument('-n', '--new_code', help='The new agency code to move to', required=True)
args = parser.parse_args()
logger.info('Moving published submission files')
move_published_agency_files(args.old_code, args.new_code)
logger.info('Finished moving published submission files')
if __name__ == '__main__':
configure_logging()
with create_app().app_context():
main()
|
Python
| 0.000001
|
@@ -1399,32 +1399,243 @@
e, new_code, 1)%0A
+ logger.info('CERIFIED_BUCKET: %7B%7D'.format(CONFIG_BROKER%5B'certified_bucket'%5D))%0A logger.info('OLD FILE PATH: %7B%7D'.format(old_file_path))%0A logger.info('NEW FILE PATH: %7B%7D'.format(new_file_path))%0A
s3.Objec
@@ -2362,32 +2362,243 @@
e, new_code, 1)%0A
+ logger.info('CERIFIED_BUCKET: %7B%7D'.format(CONFIG_BROKER%5B'certified_bucket'%5D))%0A logger.info('OLD FILE PATH: %7B%7D'.format(old_file_path))%0A logger.info('NEW FILE PATH: %7B%7D'.format(new_file_path))%0A
s3.Objec
|
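One plausible reason for the new debug lines: boto3's copy_from requires CopySource to identify the source bucket as well as the key (a 'bucket/key' string or a dict), but the surrounding code passes only the key, so S3 will treat the first path segment as a bucket name. That diagnosis is an assumption on the editor's part; the correct call shape, with placeholder names, is:

import boto3

s3 = boto3.resource('s3')
s3.Object('dest-bucket', 'new/path').copy_from(
    CopySource={'Bucket': 'src-bucket', 'Key': 'old/path'})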
3308cbeb8c7af7a1c759fc4150a462fb35d12fae
|
Move writing of the possibly highlighted texts into the PlainStatusText base class
|
src/robot/output/monitor.py
|
src/robot/output/monitor.py
|
# Copyright 2008-2010 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from robot import utils
from robot.errors import FrameworkError
from loggerhelper import IsLogged
class CommandLineMonitor:
def __init__(self, width=78, colors=True):
self._width = width
self._colors = colors
self._running_suites = 0
self._is_logged = IsLogged('WARN')
def start_suite(self, suite):
if not self._running_suites:
self._write_separator('=')
self._write_info(suite.longname, suite.doc, start_suite=True)
self._write_separator('=')
self._running_suites += 1
def end_suite(self, suite):
self._write_info(suite.longname, suite.doc)
self._write_status(suite.status)
self._write_message(suite.get_full_message())
self._write_separator('=')
self._running_suites -= 1
def start_test(self, test):
self._write_info(test.name, test.doc)
def end_test(self, test):
self._write_status(test.status)
self._write_message(test.message)
self._write_separator('-')
def output_file(self, name, path):
# called by LOGGER
if not self._running_suites: # ignore split output files
self._write('%s %s' % ((name+':').ljust(8), path))
def message(self, msg):
# called by LOGGER
if self._is_logged(msg.level):
message = '[ %s ] %s' % (self._status_text(msg.level), msg.message)
self._write(message, stream=sys.__stderr__)
def _status_text(self, text):
return StatusText(text, self._colors)
def _write(self, message, newline=True, stream=sys.__stdout__):
if newline:
message += '\n'
stream.write(utils.encode_output(message).replace('\t', ' '*8))
stream.flush()
def _write_info(self, name, doc, start_suite=False):
maxwidth = self._width
if not start_suite:
maxwidth -= len(' | PASS |')
info = self._get_info(name, doc, maxwidth)
self._write(info, newline=start_suite)
def _get_info(self, name, doc, maxwidth):
if utils.get_console_length(name) > maxwidth:
return utils.pad_console_length(name, maxwidth, cut_left=True)
if doc == '':
return utils.pad_console_length(name, maxwidth)
info = '%s :: %s' % (name, doc.splitlines()[0])
return utils.pad_console_length(info, maxwidth)
def _write_status(self, status):
self._write(' | %s |' % self._status_text(status))
def _write_message(self, message):
if message:
self._write(message.strip())
def _write_separator(self, sep_char):
self._write(sep_char * self._width)
def StatusText(msg, colors=True):
if colors:
return HiglightedStatusText(msg)
return PlainStatusText(msg)
class PlainStatusText:
def __init__(self, msg):
self._msg = msg
def __str__(self):
return self._msg
class HiglightedStatusText(PlainStatusText):
ANSI_RED = '\033[31m'
ANSI_GREEN = '\033[32m'
ANSI_YELLOW = '\033[33m'
ANSI_RESET = '\033[0m'
def __str__(self):
color = self._get_highlight_color(self._msg)
reset = color != '' and self.ANSI_RESET or ''
return color + self._msg + reset
def _get_highlight_color(self, text):
if text in ['FAIL','ERROR']:
return self.ANSI_RED
elif text == 'WARN':
return self.ANSI_YELLOW
elif text == 'PASS':
return self.ANSI_GREEN
raise FrameworkError
|
Python
| 0
|
@@ -1954,33 +1954,8 @@
-message = '%5B %25s %5D %25s' %25 (
self
@@ -1982,77 +1982,30 @@
vel)
-, msg.message)%0A self._write(message, stream=sys.__stderr__
+.write_msg(msg.message
)%0A%0A
@@ -2970,32 +2970,8 @@
- self._write(' %7C %25s %7C' %25
sel
@@ -2992,16 +2992,30 @@
(status)
+.write_status(
)%0A%0A d
@@ -3449,16 +3449,424 @@
f._msg%0A%0A
+ def write_status(self, stream=sys.__stdout__):%0A self.write(' %7C %25s %7C' %25 self, stream)%0A%0A def write_msg(self, msg):%0A self.write('%5B %25s %5D %25s' %25 (self, msg), stream=sys.__stderr__)%0A%0A def write(self, message, newline=True, stream=sys.__stdout__):%0A if newline:%0A message += '%5Cn'%0A stream.write(utils.encode_output(message).replace('%5Ct', ' '*8))%0A stream.flush()%0A%0A
%0Aclass H
@@ -4432,16 +4432,16 @@
I_GREEN%0A
-
@@ -4448,20 +4448,21 @@
raise FrameworkError
+%0A
|
ac3697fbb5202437d8285cacaba89dbaba30de69
|
fix refactoring error
|
util.py
|
util.py
|
import logging
A_THRU_H = 'ABCDEFGH'
# pre-compute an array mapping to algebraic notation
NUMERICAL_TO_ALGEBRAIC = ["{}{}".format(l, n) for n in range(8, 0, -1) for l in A_THRU_H]
# pre-compute a dict mapping to the index
ALGEBRAIC_TO_NUMERICAL = {a:n for n, a in enumerate(NUMERICAL_TO_ALGEBRAIC)}
TOP_LEFT_SQUARE = 0
BOTTOM_RIGHT_SQUARE = 63
def to_algebraic(numeric_index):
try:
return NUMERICAL_TO_ALGEBRAIC[numeric_index]
except IndexError:
return index
def to_numeric(algebraic_notation):
try:
return ALGEBRAIC_TO_NUMERICAL[algebraic_notation.upper()]
except IndexError:
return algebraic_notation
def get_move_facts(origin, move):
square_if_moved = origin + move
current_col = origin % 8
col_if_moved = (origin + move) % 8
col_dist_if_moved = abs(current_col - col_if_moved)
row_dist = get_row_distance(origin, move)
return (square_if_moved,
current_col,
col_if_moved,
col_dist_if_moved,
row_dist)
def get_row_distance(src, move):
src_row = src // 8
row_if_moved = (src + move) // 8
return abs(src_row - row_if_moved)
def is_on_board(square):
return TOP_LEFT_SQUARE <= square <= BOTTOM_RIGHT_SQUARE
def is_valid_move(src_square, move):
return is_on_board(src_square + move)
|
Python
| 0.000005
|
@@ -475,16 +475,24 @@
return
+numeric_
index%0A%0Ad
|
ae6bb29262421bcdb9f28bed8fce99517fa4ecc1
|
Update tests.
|
st2common/tests/unit/test_content_utils.py
|
st2common/tests/unit/test_content_utils.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from oslo.config import cfg
from st2common.content.utils import get_packs_base_paths
from st2tests import config as tests_config
class ContentUtilsTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
tests_config.parse_args()
def test_get_pack_base_paths(self):
cfg.CONF.content.system_packs_base_path = ''
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1'])
# Multiple paths, no trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple paths, trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple same paths
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Assert system path is always first
cfg.CONF.content.system_packs_base_path = '/opt/system'
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/system', '/opt/path1'])
|
Python
| 0
|
@@ -2092,32 +2092,43 @@
e_paths = '/opt/
+path2:/opt/
path1'%0A r
@@ -2206,16 +2206,30 @@
system',
+ '/opt/path2',
'/opt/p
|
4d19774cc47e1ec1bacc9eeb18b146fac2476493
|
Fix display of executed command
|
src/stratuslab/BaseSystem.py
|
src/stratuslab/BaseSystem.py
|
import os
import shutil
import subprocess
class BaseSystem(object):
def updatePackageManager(self):
pass
def installPackages(self, packages):
pass
def installNodePackages(self, packages):
pass
def installFrontendDependencies(self):
self.updatePackageManager()
self.installPackages(self.frontendDeps)
def installNodeDependencies(self):
self.installNodePackages(self.nodeDeps)
def installHypervisor(self):
self.installNodePackages(self.hypervisorDeps.get(self.hypervisor))
def cloneGitRepository(self, buildDir, repoUrl, cloneName, branch):
self.ONeRepo = repoUrl
self.ONeSrcDir = buildDir
self.createDirs(self.ONeSrcDir)
os.chdir(self.ONeSrcDir)
self.execute(['git', 'clone', repoUrl, cloneName, '-b', branch])
os.chdir(cloneName)
def buildOpenNebula(self):
self.execute(['scons', '-j2'])
def installOpenNebula(self):
self.setONeAdminOwner(os.getcwd())
self.execute(['bash', 'install.sh', '-d', self.ONeHome, '-u',
self.ONeAdmin, '-g', self.ONeAdminGroup])
def startONeDaemon(self):
self.ONeAdminExecute(['one start'])
def createONeGroup(self, groupname, gid):
self.ONeAdminGroup = groupname
self.ONeAdminGID = gid
self.execute(['groupadd', '-g', gid, groupname])
def createONeAdmin(self, username, uid, homeDir, password):
self.ONeAdmin = username
self.ONeHome = homeDir
self.ONeAdminUID = uid
self.ONeAdminPassword = password
self.createDirs(os.path.dirname(self.ONeHome))
self.execute(['useradd', '-d', self.ONeHome, '-g', self.ONeAdminGroup, '-u', uid,
username, '-s', '/bin/bash', '-p', password, '--create-home'])
def configureONeAdminEnv(self, ONeDPort):
self.append2file('%s/.bashrc' % self.ONeHome,
'export ONE_LOCATION=%s\n' % self.ONeHome)
self.append2file('%s/.bashrc' % self.ONeHome,
'export ONE_XMLRPC=http://localhost:%s/RPC2\n' % ONeDPort)
self.append2file('%s/.bashrc' % self.ONeHome,
'export PATH=%s/bin:%s\n' % (self.ONeHome, os.getenv('PATH')))
self.append2file('%s/.bash_login' % self.ONeHome,
'[ -f ~/.bashrc ] && source ~/.bashrc\n')
self.setONeAdminOwner('%s/.bash_login' % self.ONeHome)
# Hack to always load .bashrc
self.execute(['sed -i \'s/\[ -z \\\"\$PS1\\\" \\] \\&\\& '
'return/#&/\' %s/.bashrc' % self.ONeHome], shell=True)
def configureONeAdminAuth(self):
self.createDirs('%s/.one' % self.ONeHome)
self.setONeAdminOwner('%s/.one' % self.ONeHome)
self.append2file('%s/.one/one_auth' % self.ONeHome, '%s:%s'
% (self.ONeAdmin, self.ONeAdminPassword))
self.setONeAdminOwner('%s/.one/one_auth' % self.ONeHome)
def setupONeAdminSSHCred(self):
keyName = '%s/.ssh/id_rsa' % self.ONeHome
self.createDirs(os.path.dirname(keyName))
self.setONeAdminOwner(os.path.dirname(keyName))
self.execute(['ssh-keygen -f %s -N "" -q' % keyName],
shell=True)
self.setONeAdminOwner(keyName)
self.setONeAdminOwner('%s.pub' % keyName)
shutil.copy('%s.pub' % keyName,
'%s/.ssh/authorized_keys' % self.ONeHome)
self.setONeAdminOwner('%s/.ssh/authorized_keys' % self.ONeHome)
self.append2file('%s/.ssh/config' % self.ONeHome,
'Host *\n\tStrictHostKeyChecking no')
def configureNFSServer(self, networkAddr, networkMask):
self.append2file('/etc/exports',
'%s %s/%s(rw,async,no_subtree_check)\n' %
(self.ONeHome, networkAddr, networkMask))
self.execute(['exportfs', '-a'])
def configureSSHServer(self):
pass
def configureNFSClient(self, frontendIP):
self.nodeShell('mkdir -p %s' % self.ONeHome)
self.nodeShell('echo "%s:%s %s nfs '
'soft,intr,rsize=32768,wsize=32768,rw 0 0"'
' >> /etc/fstab' %
(frontendIP, self.ONeHome, self.ONeHome))
self.nodeShell('mount -a')
def configureSSHClient(self):
# TODO: setup ssh authorized keys
pass
def configureHypervisor(self):
if self.hypervisor == 'xen':
self.configureXEN()
elif self.hypervisor == 'kvm':
self.configureKVM()
def configureKVM(self):
pass
def configureXEN(self):
self.nodeShell('echo "%s ALL=(ALL) NOPASSWD: /usr/sbin/xm, '
'/usr/sbin/xentop" >> /etc/sudoers' % self.ONeAdmin)
def append2file(self, filename, content):
fd = open(filename, 'a+')
fd.write(content)
fd.close()
def execute(self, command, shell=False):
self.displayMessage(command)
process = subprocess.Popen(command, shell=shell)
process.wait()
return process.returncode
def ONeAdminExecute(self, command, shell=False):
su = ['su', '-l', self.ONeAdmin, '-c']
su.extend(command)
return self.execute(su, shell)
def nodeShell(self, command):
self.remoteCmd(self.nodeAddr, command,
port=self.nodePort,
privateKey=self.nodePrivateKey)
def remoteCmd(self, hostAddr, command, user='root', port=22,
privateKey=None):
sshCmd = ['ssh', '-p', str(port), '-l', user]
if privateKey is not None and os.path.isfile(privateKey):
# TODO: with verbose display a message if key not exists
sshCmd.extend(['-i', privateKey])
sshCmd.append(hostAddr)
sshCmd.append(command)
return self.execute(sshCmd)
def setONeAdminOwner(self, path):
self.displayMessage('chown %d:%d %s [python cmd]' % (
int(self.ONeAdminUID), int(self.ONeAdminGID), path))
os.chown(path, int(self.ONeAdminUID), int(self.ONeAdminGID))
def createDirs(self, path):
self.displayMessage('mkdirs -p %s [python cmd]' % path)
if not os.path.isdir(path) and not os.path.isfile(path):
os.makedirs(path)
def setNodeAddr(self, nodeAddr):
self.nodeAddr = nodeAddr
def setNodePort(self, nodePort):
self.nodePort = nodePort
def setNodePrivateKey(self, privateKey):
self.nodePrivateKey = privateKey
def setNodeHypervisor(self, hypervisor):
self.hypervisor = hypervisor
def displayMessage(self, *msg):
print '\n\n\n%s\nExecuting: %s\n%s\n' % (
'-' * 60, ' '.join(msg), '-' * 60)
|
Python
| 0.000002
|
@@ -4850,24 +4850,33 @@
Message(
+' '.join(
command)
%0A %09pr
@@ -4863,24 +4863,25 @@
oin(command)
+)
%0A %09proces
|
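The bug: displayMessage(self, *msg) packs its positional arguments into a tuple, so passing a list made msg a one-element tuple holding a list, and ' '.join(msg) raised TypeError. The fix joins the command into a string before calling. In miniature:

def display_message(*msg):
    print('Executing: %s' % ' '.join(msg))

# display_message(['git', 'clone'])          # TypeError: list, not str
display_message(' '.join(['git', 'clone']))  # the fix: join first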
65524f41729d1ddcda9ecb66947b85119c80cd18
|
format util.py
|
util.py
|
util.py
|
#!/usr/bin/env python
import couchdb, sys
from oaipmh.client import Client
from oaipmh.common import Identify, Metadata, Header
from oaipmh.metadata import MetadataRegistry, oai_dc_reader , MetadataReader
def get_database(url,name):
try:
couch = couchdb.Server(url)
db = couch[name]
return db;
except:
return None
def create_database(url,name):
db = get_database(url,name)
if db == None:
couch = couchdb.Server(url)
db = couch.create(name)
return db
def get_documents(main_url, database_name, url, reader, prefix, format):
registry = MetadataRegistry()
registry.registerReader(prefix, reader)
client = Client(url, registry)
return_stuff = []
for record in client.listRecords(metadataPrefix=prefix):
r = record[1]
value = format(r)
if value != None:
return_stuff.append(value)
if len(return_stuff) >= 10000:
sync_files(main_url, database_name, return_stuff)
return_stuff = []
sync_files(main_url, database_name, return_stuff)
def save_file(db, id, data):
try:
doc = db[id]
except:
doc = None
if doc == None:
db[id] = data
else:
doc['identifier'] = data['identifier']
doc['title']= data['title']
db[id] = doc
def sync_files(main_url, database_name, files_to_replicate):
db = get_database(main_url,database_name)
if db == None:
db = create_database(main_url,database_name)
db.update(files_to_replicate)
def index_documents(oai_url,main_url,database_name, reader, prefix, format):
get_documents(main_url, database_name, oai_url, reader,prefix, format)
#sync_files(main_url, database_name, files_to_replicate)
|
Python
| 0.000009
|
@@ -518,16 +518,17 @@
turn db%0A
+%0A
def get_
@@ -1104,24 +1104,25 @@
%0A
+%0A
def save_fil
@@ -1375,16 +1375,17 @@
%5D = doc%0A
+%0A
def sync
@@ -1593,16 +1593,17 @@
te) %0A
+%0A
def inde
@@ -1670,17 +1670,16 @@
format):
-%0A
%0A
@@ -1752,67 +1752,6 @@
mat)
-%0A #sync_files(main_url, database_name, files_to_replicate)
%0A%0A
|
58412bf4ac5adb78c82060c259803c745c52f861
|
Bump version
|
stock_request_picking_type/__manifest__.py
|
stock_request_picking_type/__manifest__.py
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
'name': 'Stock Request Picking Type',
'summary': 'Add Stock Requests to the Inventory App',
'version': '12.0.1.0.0',
'license': 'LGPL-3',
'website': 'https://github.com/stock-logistics-warehouse',
'author': 'Open Source Integrators, '
'Odoo Community Association (OCA)',
'category': 'Warehouse Management',
'depends': [
'stock_request',
],
'data': [
'data/stock_picking_type.xml',
'views/stock_request_views.xml',
'views/stock_picking_views.xml',
],
'development_status': 'Beta',
'maintainers': ['max3903']
}
|
Python
| 0
|
@@ -230,17 +230,17 @@
'12.0.1.
-0
+1
.0',%0A
|
d7527625bbe4ef8804ee907789df87908b4a02c1
|
Add logging to production configuration
|
ember_django/backend/settings.py
|
ember_django/backend/settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Django settings for ember_django project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
'''
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'some_key'
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = ['*', ]
# Application definition
# rest_framework_ember adapter is added for ember-django communication
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'backend',
'rest_framework',
'rest_framework_ember',
'rest_framework.authtoken',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'backend.urls'
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'escience',
'USER': 'developer',
'PASSWORD': 'escience',
'HOST': 'localhost',
'PORT': '5432',
}
}
# rest_framework settings for the rest_framework_ember
# https://github.com/ngenworks/rest_framework_ember
REST_FRAMEWORK = {
'PAGINATE_BY': 10,
'PAGINATE_BY_PARAM': 'page_size',
'MAX_PAGINATE_BY': 100,
'DEFAULT_PAGINATION_SERIALIZER_CLASS':
'rest_framework_ember.pagination.EmberPaginationSerializer',
'DEFAULT_PARSER_CLASSES': (
'rest_framework_ember.parsers.EmberJSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework_ember.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
'DEFAULT_AUTHENTICATION_CLASSES': [],
}
# By default Django expects a trailing slash on urls and will 301 redirect
# any requests lacking a trailing slash.
# This is why we set APPEND_SLASH = False.
APPEND_SLASH = False
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
if DEBUG:
STATIC_PATH = os.path.join(BASE_DIR, 'frontend/app')
STATIC_URL = '/frontend/app/'
STATICFILES_DIRS = (
STATIC_PATH,
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'frontend/app'),
)
else:
PROJECT_DEFAULT_STATIC_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../frontend/app')
STATIC_PATH = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
PROJECT_DEFAULT_STATIC_DIR,
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# EXTRA FOR NGINX
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
Python
| 0.000001
|
@@ -801,17 +801,16 @@
= %5B'*',
-
%5D%0A%0A%0A# Ap
@@ -3824,28 +3824,515 @@
th.join(BASE_DIR, 'static')%0A
+ LOGGING = %7B%0A 'version': 1,%0A 'disable_existing_loggers': False,%0A 'handlers': %7B%0A 'file': %7B%0A 'level': 'DEBUG',%0A 'class': 'logging.FileHandler',%0A 'filename': '/home/developer/logs/debug.log',%0A %7D,%0A %7D,%0A 'loggers': %7B%0A 'django.request': %7B%0A 'handlers': %5B'file'%5D,%0A 'level': 'DEBUG',%0A 'propagate': True,%0A %7D,%0A %7D,%0A %7D%0A
|
bc6512080bd67413a3136e171be2cc1479254caf
|
Change startup experiment.
|
enactiveagents/EnactiveAgents.py
|
enactiveagents/EnactiveAgents.py
|
"""
Entry module of the application.
"""
import sys
import pygame
from appstate import AppState
import settings
import events
from view import view
from view import agentevents
from controller import controller
import experiment.basic
import webserver
class HeartBeat(events.EventListener):
"""
Class implementing the heartbeat of the application.
"""
def run(self):
"""
Process PyGame events until halt is true.
"""
self.halt = False
print("Starting heartbeat.")
time_elapsed = 0
while True:
AppState.get_state().get_event_manager().post_event(events.ControlEvent())
ticked = False
if AppState.get_state().is_running() and time_elapsed >= settings.SIMULATION_STEP_TIME:
print "------- t = %s" % AppState.get_state().get_t()
AppState.get_state().get_event_manager().post_event(events.TickEvent())
time_elapsed = 0
ticked = True
AppState.get_state().get_event_manager().post_event(events.DrawEvent(ticked and AppState.get_state().get_save_simulation_renders()))
time_elapsed += AppState.get_state().get_clock().tick(settings.MAX_FPS)
if ticked:
AppState.get_state().increment_t()
def notify(self, event):
if isinstance(event, events.QuitEvent):
sys.exit()
def init():
"""
Initialize pygame.
:returns: The surface of the pygame display.
"""
print("Loading pygame modules.")
pygame.display.init()
AppState.get_state().set_clock(pygame.time.Clock())
flags = pygame.DOUBLEBUF
surface = pygame.display.set_mode(
(
AppState.get_state().get_world().get_width() * settings.CELL_WIDTH,
AppState.get_state().get_world().get_height() * settings.CELL_HEIGHT,
),
flags)
surface.set_alpha(None)
pygame.display.set_caption('Enactive Agents v2')
return surface
def main():
"""
Main function of the application.
"""
# Initialize the event manager.
event_manager = events.EventManager()
AppState.get_state().set_event_manager(event_manager)
# Initialize and register the application heartbeat.
heart_beat = HeartBeat()
event_manager.register_listener(heart_beat)
# Initialize and register the world.
#experiment_ = experiment.experiment.Experiment.load_experiment("20161126T003019.p")
experiment_ = experiment.basic.BasicCoexsistenceExperiment()
AppState.get_state().set_experiment(experiment_)
world = experiment_.get_world()
event_manager.register_listener(world)
AppState.get_state().set_world(world)
# Initialize pygame.
surface = init()
# Initialize and register the view.
main_view = view.View(surface)
event_manager.register_listener(main_view)
# Initialize the website trace history view.
trace_view = agentevents.AgentEvents()
event_manager.register_listener(trace_view)
# Initialize and register the controller.
main_controller = controller.Controller()
event_manager.register_listener(main_controller)
# Add the experiment controller to the controller
main_controller.set_experiment_controller(lambda e, coords: experiment_.controller(e, main_view.window_coords_to_world_coords(coords)))
# Start the webserver.
webserver.trace_view = trace_view
webserver.start()
# Start the heartbeat.
heart_beat.run()
if __name__ == '__main__':
"""
Application entry-point.
"""
main()
|
Python
| 0
|
@@ -2511,20 +2511,14 @@
asic
-Coexsistence
+Vision
Expe
|
9d2f25c2a262a992c79ea5a224c5abc616dd4cb8
|
remove space.
|
lib/acli/__init__.py
|
lib/acli/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
usage: acli [--version] [--help]
<command> [<args>...]
options:
-h, --help help
The most common commands are:
account Get account info
ec2 Manage ec2 instances
elb Manage elb instances
ami Manage amis
asg Manage auto-scaling groups
lc Manage launch configurations
eip Manage elastic ips
secgroup Manage security groups
See 'acli help <command>'
"""
from __future__ import (absolute_import, print_function)
from docopt import docopt
from colorama import init
from acli.services import (ec2, elb, account, cloudwatch)
from acli.config import Config
from acli.output.ec2 import (output_ec2_list, output_ec2_info,
output_amis, output_ami_info)
from acli.output.cloudwatch import output_ec2_stats
from acli.output.elb import (output_elbs, output_elb_info)
from acli import utils
init(autoreset=True)
def real_main():
args = docopt(__doc__,
version='0.0.1',
options_first=True)
aws_config = Config(args)
argv = [args['<command>']] + args['<args>']
if args['<command>'] == 'account':
from acli.commands import account as command_account
# acc_res = docopt(command_account.__doc__, argv=argv)
iam_conn = account.get_iam_conn(aws_config)
print("alias: {0} | id: {1}".format(", ".join(account.get_account_aliases(iam_conn)),
account.get_account_id(iam_conn)))
exit()
if args['<command>'] == 'ec2':
from acli.commands import ec2 as command_ec2
ec2_res = docopt(command_ec2.__doc__, argv=argv)
if ec2_res.get('list'):
ec2.ec2_list(aws_config)
elif ec2_res.get('info'):
ec2.ec2_info(aws_config, instance_id=ec2_res.get('<instance_id>'))
elif ec2_res.get('stats'):
cloudwatch.ec2_stats(aws_config=aws_config, instance_id=ec2_res.get('<instance_id>'))
exit()
if args['<command>'] == 'elb':
from acli.commands import elb as command_elb
elb_res = docopt(command_elb.__doc__, argv=argv)
if elb_res.get('list'):
elb.elb_list(aws_config)
elif elb_res.get('info'):
elb.elb_info(aws_config, elb_name=elb_res.get('<elb_name>'))
exit()
if args['<command>'] == 'ami':
from acli.commands import ami as command_ami
ami_res = docopt(command_ami.__doc__, argv=argv)
if ami_res.get('list'):
ec2.list_amis(aws_config)
elif ami_res.get('info'):
ec2.ami_info(aws_config, ami_id=ami_res.get('<ami_id>'))
exit()
elif args['<command>'] in ['help', None] and args['<args>']:
if args['<args>'][0] == 'ec2':
from acli.commands import ec2 as command_ec2
print(docopt(command_ec2.__doc__, argv=argv))
elif args['<command>'] in ['help', None] and not args['<args>']:
print("usage: acli help <command>")
else:
exit("%r is not an acli command. See 'acli help." % args['<command>'])
|
Python
| 0.000413
|
@@ -113,17 +113,16 @@
s%3E...%5D%0A%0A
-%0A
options:
|
f887c7c5fc0be7e86ebddb28b6d785878ae88121
|
Add projects to locals in projects_archive
|
projects/views.py
|
projects/views.py
|
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render, get_object_or_404, redirect
from .models import Project
from .forms import ProjectForm, RestrictedProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/add.html', locals())
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if request.user == project.user and (project.status == 'unrevised'
or project.status == 'returned'):
data = request.POST if request.POST else None
form = ProjectForm(data=data, user=request.user, instance=project)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/edit.html', locals())
else:
return redirect('members:user-projects')
@permission_required('projects.change_project', login_url="members:user-projects")
def edit_status(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
data = request.POST if request.POST else None
form = RestrictedProjectForm(data=data, instance=project)
if form.is_valid():
project.save()
return redirect('members:user-projects')
return render(request, 'projects/edit_status.html', locals())
def projects_archive(request):
unrevised = Project.objects.filter(status='unrevised')
returned = Project.objects.filter(status='returned')
pending = Project.objects.filter(status='pending')
approved = Project.objects.filter(status='approved')
rejected = Project.objects.filter(status='rejected')
return render(request, 'projects/archive.html', locals())
def show_project(request, project_id):
project = get_object_or_404(Project, id=project_id)
return render(request, 'projects/show_project.html', {'project_show' : project})
|
Python
| 0
|
@@ -1891,24 +1891,61 @@
'rejected')%0A
+ projects = Project.objects.all()%0A
return r
|
608f667f8d3a9faa8fc41777b2006c325afff61c
|
Fix var names.
|
vote.py
|
vote.py
|
import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
with open('/tmp/translations_voting_tasks.json', 'w') as out_file:
    out_file.write(json.dumps(tasks))
|
Python
| 0.000002
|
@@ -190,18 +190,19 @@
id%5D%5B'msg
-id
+str
'%5D.keys(
@@ -247,18 +247,19 @@
one, msg
-id
+str
=None)%0A
@@ -310,18 +310,19 @@
ion%5B'msg
-id
+str
'%5D = e.t
@@ -343,18 +343,19 @@
id%5D%5B'msg
-id
+str
'%5D%5Bk%5D%0A
@@ -398,18 +398,19 @@
nfo%5B'msg
-id
+str
_options
|
a8681015902101192caeaff6c755069d406f3d0e
|
Support NonNode << Node, limit scope in conf_load.
|
conf.py
|
conf.py
|
"""
Pyconf DSL for generating JSON or Protobuf configuration.
"""
class Node(object):
def __init__(self, value):
self._value = value
def execute(self):
def _unwrap(item):
if isinstance(item, Node):
return item.execute()
else:
return item
if isinstance(self._value, dict):
meta = {}
data = {}
for k, v in self._value.iteritems():
if k.startswith('__') and k.endswith('__'):
meta[k] = v
else:
data[k] = _unwrap(v)
if '__post__' in meta:
return meta['__post__'](meta, data)
else:
return data
elif isinstance(self._value, (list, tuple)):
return map(_unwrap, self._value)
else:
return self._value
def __call__(self, **kwargs):
output = {}
if self._value is not None:
if not isinstance(self._value, dict):
raise TypeError('Cannot extend non-dict node %s' % (type(self._value),))
output.update(self._value)
for k, v in kwargs.iteritems():
output[k] = v
return Node(output)
def __getattr__(self, attr):
if attr.startswith('_'):
raise AttributeError('Private or meta attr %s' % (attr,))
if self._value is None:
return self
if not isinstance(self._value, dict):
raise TypeError('Cannot get attr of non-dict node %s' % (type(self._value),))
output = self._value.get(attr)
if not isinstance(output, Node):
output = Node(output)
return output
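    # Subscripting with a callable registers it as the '__post__' hook;
    # a string or sequence subscript is treated as a key order and
    # yields the corresponding values as a list at execute() time.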
def __getitem__(self, fn):
if callable(fn):
return self(__post__ = fn)
order = fn
if isinstance(order, (str, unicode)):
order = order.strip().split()
return self(__post__ = lambda meta, value: [value.get(key) for key in order])
def __add__(self, ls):
if isinstance(ls, Node):
ls = ls._value
if not isinstance(ls, (list, tuple)):
raise TypeError('Cannot append node with non-array data %s' % (type(ls),))
orig = self._value
if orig is None:
orig = ()
if not isinstance(orig, (list, tuple)):
raise TypeError('Cannot append non-array node %s' % (type(self._value),))
return Node(tuple(orig) + tuple(ls))
def __lshift__(self, right):
if isinstance(right, Node):
right = right._value
if not isinstance(right, dict):
raise TypeError('Cannot extend node with non-dict data %s' % (type(right),))
return self(**right)
conf = Node(None)
def array(*args):
return Node(args)
def run(path, builtins=None):
import imp
import runpy
conf_builtins = {
'conf': conf,
'array': array,
}
if builtins is not None:
conf_builtins.update(builtins)
def conf_load(path):
result_dict = runpy.run_path(path, init_globals=conf_builtins)
mod = imp.new_module(path)
for k, v in result_dict.iteritems():
if k.startswith('_'):
continue
setattr(mod, k, v)
return mod
conf_builtins['load'] = conf_load
result = runpy.run_path(path, init_globals=conf_builtins)
output = {}
for k, v in result.iteritems():
if isinstance(v, Node):
output[k] = v.execute()
return output
if __name__ == '__main__':
import json
import sys
ret = run(sys.argv[1])['CONFIG']
print json.dumps(ret)
|
Python
| 0
|
@@ -2387,16 +2387,255 @@
right)%0A%0A
+ def __rlshift__(self, left):%0A right = self._value%0A if right is None:%0A right = %7B%7D%0A if not isinstance(right, dict):%0A raise TypeError('Cannot extend node with non-dict data %25s' %25 (type(right),))%0A return left(**right)%0A%0A
conf = N
@@ -2742,16 +2742,33 @@
t runpy%0A
+ import os.path%0A
conf_b
@@ -2904,15 +2904,182 @@
oad(
+dirty_
path):%0A
+ path = os.path.normpath(dirty_path)%0A if path.startswith('.') or path.startswith('/'):%0A raise ValueError('Invalid conf_load path %25s' %25 (dirty_path,))%0A
@@ -3607,16 +3607,16 @@
ONFIG'%5D%0A
+
print
@@ -3631,9 +3631,8 @@
ps(ret)%0A
-%0A
|
755583d69aa6b1145495ef857fb0f139fa6e7e01
|
Remove useless print
|
src/vf2symbols/vf2symbols.py
|
src/vf2symbols/vf2symbols.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a set of Apple custom symbols from a variable font.
Instantiates along wght to populate weight if a wght axis exists.
Assumes a ligature exists for each symbol and infers the name from it.
Requires a very simple GSUB ligature layout for the time being. There is no
good reason for this beyond making proof of concept tool setup easier.
"""
import os
import re
import regex
import subprocess
import sys
from absl import app
from absl import flags
from absl import logging
from fontTools.varLib import instancer
from fontTools import ttLib
from ninja import ninja_syntax
from vf2symbols import icon_font
FLAGS = flags.FLAGS
# internal flags, typically client wouldn't change
flags.DEFINE_string("build_dir", "build/", "Where build runs.")
flags.DEFINE_bool("gen_ninja", True, "Whether to regenerate build.ninja")
flags.DEFINE_bool("exec_ninja", True, "Whether to run ninja.")
flags.DEFINE_string(
"icon_filter", ".*", "Discard icon names that don't contain this regex."
)
flags.DEFINE_string("font", None, "Font filepath to extract the icons from.")
flags.DEFINE_list("svgs", [], "SVG filepaths, for a single variant (Regular-M) symbol generation.")
# TODO(rsheeter) support opsz to populate S/M/L
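# Apple symbol variant names paired with their wght axis positions.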
_SYMBOL_NAME_FONT_WEIGHTS = (
("Ultralight-M", 100),
("Thin-M", 200),
("Light-M", 300),
("Regular-M", 400),
("Medium-M", 500),
("Semibold-M", 600),
("Bold-M", 700),
("Heavy-M", 800),
("Black-M", 900),
)
def _build_dir() -> str:
return os.path.abspath(FLAGS.build_dir)
def _rel_build(path: str) -> str:
return os.path.relpath(path, _build_dir())
def _resolve_rel_build(path):
return os.path.abspath(os.path.join(_build_dir(), path))
def _symbol_wght_names(wght_range):
return tuple(n for n, v in _SYMBOL_NAME_FONT_WEIGHTS if v in wght_range)
def _create_font_for_symbol_wght_name(
ttfont: ttLib.TTFont, symbol_wght_name: str
) -> ttLib.TTFont:
wght_pos = next(v for n, v in _SYMBOL_NAME_FONT_WEIGHTS if n == symbol_wght_name)
if "fvar" not in ttfont:
assert ttfont["OS/2"].usWeightClass == wght_pos
return ttfont
# None: drop axis, leaving font at default position
axis_positions = {a.axisTag: None for a in ttfont["fvar"].axes}
axis_positions["wght"] = wght_pos
logging.debug("Generating instances at %s", axis_positions)
return instancer.instantiateVariableFont(ttfont, axis_positions)
def _write_instance_rule(nw, ttfont, symbol_wght_name):
axis_positions = {a.axisTag: "drop" for a in ttfont["fvar"].axes}
wght_pos = next(v for n, v in _SYMBOL_NAME_FONT_WEIGHTS if n == symbol_wght_name)
axis_positions["wght"] = str(wght_pos)
pos_str = " ".join(f"{k}={v}" for k, v in axis_positions.items())
nw.rule(
f"Gen_{symbol_wght_name}", f"fonttools varLib.instancer -o $out $in {pos_str}"
)
def _write_preamble(nw, font_filename, ttfont, wght_range):
def module_rule(mod_name, arg_pattern):
nw.rule(mod_name, f"{sys.executable} -m vf2symbols.{mod_name} {arg_pattern}")
nw.comment("Generated by vf2symbols")
nw.newline()
nw.variable("src_font", _rel_build(font_filename))
nw.newline()
for symbol_name in _symbol_wght_names(wght_range):
_write_instance_rule(nw, ttfont, symbol_name)
nw.newline()
module_rule("write_symbol_from_fonts", "--out $out $in")
module_rule("write_symbol_from_svg", "--out $out $in")
def _font_file(font_filename, symbol_wght_name):
name, ext = os.path.splitext(os.path.basename(font_filename))
return f"{name}.{symbol_wght_name}{ext}"
def _write_font_builds(nw, font_filename, wght_range):
font_files = []
for symbol_wght_name in _symbol_wght_names(wght_range):
font_files.append(_font_file(font_filename, symbol_wght_name))
nw.build(font_files[-1], f"Gen_{symbol_wght_name}", "$src_font")
return font_files
def _write_vf_symbol_builds(nw, ttfont, font_files):
for icon_name in icon_font.extract_icon_names(
ttfont, regex.compile(FLAGS.icon_filter)
):
nw.build(
os.path.join("symbols", icon_name + ".svg"), "write_symbol_from_fonts", font_files
)
def _write_svg_symbol_builds(nw, svgs):
for svg in svgs:
output = re.sub(r"([.]\w+)$","_symbol\\1",svg)
print(output)
nw.build(output, "write_symbol_from_svg", svg)
def _run(argv):
if len(argv) > 1:
sys.exit("Unexpected non-flag arguments")
font_filename = os.path.abspath(FLAGS.font)
root_font = ttLib.TTFont(font_filename)
wght_range = icon_font.wght_range(root_font)
os.makedirs(_build_dir(), exist_ok=True)
os.makedirs(_resolve_rel_build("symbols"), exist_ok=True)
build_file = _resolve_rel_build("build.ninja")
if FLAGS.gen_ninja:
logging.info(f"Generating %s", os.path.relpath(build_file))
with open(build_file, "w") as f:
nw = ninja_syntax.Writer(f)
_write_preamble(nw, font_filename, root_font, wght_range)
font_files = _write_font_builds(nw, font_filename, wght_range)
_write_vf_symbol_builds(nw, root_font, font_files)
_write_svg_symbol_builds(nw, FLAGS.svgs)
ninja_cmd = ["ninja", "-C", os.path.dirname(build_file)]
if FLAGS.exec_ninja:
print(" ".join(ninja_cmd))
subprocess.run(ninja_cmd, check=True)
else:
print("To run:", " ".join(ninja_cmd))
def main():
# We don't seem to be __main__ when run as cli tool installed by setuptools
app.run(_run)
if __name__ == "__main__":
app.run(_run)
|
Python
| 0.000045
|
@@ -4882,30 +4882,8 @@
vg)%0A
- print(output)%0A
|
3171a05dd7a4c4478d1dfe5023390e5bf7213ac8
|
Improve mergeuser command (#424)
|
standup/status/management/commands/mergeuser.py
|
standup/status/management/commands/mergeuser.py
|
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from standup.status.models import StandupUser
User = get_user_model()
class Command(BaseCommand):
help = 'Merge two user accounts'
def add_arguments(self, parser):
parser.add_argument('--keep', type=int)
parser.add_argument('--delete', type=int)
parser.add_argument('-y', '--yes', dest='assume_yes', action='store_true')
def handle(self, *args, **options):
try:
user_keep = User.objects.get(id=options['keep'])
except User.DoesNotExist:
self.stdout.write('User keep=%s does not exist. Exiting.' % options['keep'])
return 1
try:
user_delete = User.objects.get(id=options['delete'])
except User.DoesNotExist:
self.stdout.write('User delete=%s does not exist. Exiting.' % options['delete'])
return 1
def get_teams(user):
try:
return [team.name for team in user.profile.teams.all()]
except StandupUser.DoesNotExist:
                return []
def get_status_count(user):
try:
return StandupUser.objects.get(user=user).statuses.count()
except StandupUser.DoesNotExist:
return 0
self.stdout.write('Deleting and merging from:')
self.stdout.write(' id: %s' % user_delete.id)
self.stdout.write(' username: %s' % user_delete.username)
self.stdout.write(' email: %s' % user_delete.email)
self.stdout.write(' date_joined: %s' % user_delete.date_joined)
self.stdout.write(' statuses: %s' % get_status_count(user_delete))
self.stdout.write(' teams: %s' % get_teams(user_delete))
self.stdout.write('')
self.stdout.write('Keeping and merging into:')
self.stdout.write(' id: %s' % user_keep.id)
self.stdout.write(' username: %s' % user_keep.username)
self.stdout.write(' email: %s' % user_keep.email)
self.stdout.write(' date_joined: %s' % user_keep.date_joined)
self.stdout.write(' statuses: %s' % get_status_count(user_keep))
self.stdout.write(' teams: %s' % get_teams(user_keep))
self.stdout.write('')
if not options['assume_yes']:
self.stdout.write('Continue?: y/N')
cont = input()
if cont.strip().lower() != 'y':
self.stdout.write('Exiting.')
return 1
# Transfer statuses and profile information
if not hasattr(user_keep, 'profile'):
if hasattr(user_delete, 'profile'):
# Keep has no profile, but delete does, so we transfer it over
self.stdout.write('Keep has no profile--transferring profile')
profile = user_delete.profile
profile.user = user_keep
profile.save()
else:
if hasattr(user_delete, 'profile'):
# They both have profiles, so transfer all the statuses from delete -> keep
                self.stdout.write('Transferring statuses from delete -> keep')
for status in user_delete.profile.statuses.all():
# NOTE(willkg): 'status.user' is a StandupUser instance
status.user = user_keep.profile
status.save()
# Copy email address from delete -> keep
self.stdout.write('Copying email from delete -> keep')
user_keep.email = user_delete.email
user_keep.save()
# Copy teams from delete -> keep
self.stdout.write('Copying teams from delete -> keep')
if hasattr(user_delete, 'profile'):
for team in user_delete.profile.teams.all():
user_keep.profile.teams.add(team)
user_keep.save()
# Delete
self.stdout.write('Deleting delete')
user_delete.delete()
self.stdout.write('Done!')
|
Python
| 0.000001
|
@@ -298,16 +298,29 @@
rgument(
+%0A
'--keep'
@@ -321,32 +321,119 @@
-keep', type=int
+,%0A help='the id of the user record to keep and merge all data into'%0A
)%0A parser
@@ -446,16 +446,29 @@
rgument(
+%0A
'--delet
@@ -483,91 +483,541 @@
=int
-)%0A parser.add_argument('-y', '--yes', dest='assume_yes', action='store_true'
+,%0A help='the id of the user record to delete'%0A )%0A parser.add_argument(%0A '--keep-email', dest='keep_email', action='store_true',%0A help=(%0A 'whether to keep the email address of the --keep record; otherwise the '%0A 'email address is stomped on by the email address in the --delete record'%0A )%0A )%0A parser.add_argument(%0A '-y', '--yes', dest='assume_yes', action='store_true',%0A help='whether to skip prompts'%0A
)%0A%0A
@@ -4077,32 +4077,74 @@
delete -%3E keep%0A
+ if not options%5B'keep_email'%5D:%0A
self.std
@@ -4190,32 +4190,36 @@
keep')%0A
+
user_keep.email
@@ -4238,16 +4238,111 @@
e.email%0A
+ else:%0A self.stdout.write('Keeping email address')%0A%0A # Save user data%0A
|
dbf02b991fb2011992085019933ed9885af34733
|
support requesting the html by revision
|
wiki.py
|
wiki.py
|
#!/usr/bin/env python3
import hmac
import os.path as path
from base64 import b64encode
from hashlib import sha256
from os import urandom
import pygit2 as git
import scrypt
import sqlalchemy as sql
from bottle import get, post, response, request, run, static_file
from docutils.core import publish_file
RST_MIME = "text/x-rst; charset=UTF-8"
engine = sql.create_engine("sqlite:///wiki.sqlite3", echo=True)
metadata = sql.MetaData()
users = sql.Table("users", metadata,
sql.Column("username", sql.String, primary_key = True),
sql.Column("email", sql.String, nullable = False),
sql.Column("email_verified", sql.Boolean, nullable = False,
default = False),
sql.Column("password_hash", sql.Binary, nullable = False))
metadata.create_all(engine)
connection = engine.connect()
repo = git.init_repository("repo", False)
if 'refs/heads/master' not in repo.listall_references():
author = git.Signature('wiki', 'danielmicay@gmail.com')
tree = repo.TreeBuilder().write()
repo.create_commit('refs/heads/master', author, author,
'initialize repository', tree, [])
def generate_html_page(name):
publish_file(source_path=path.join("repo", name + ".rst"),
destination_path=path.join("generated", name + ".html"),
writer_name="html")
# TODO: this should be a persistent key, generated with something like openssl
KEY = b64encode(urandom(256))
def generate_mac(s):
return hmac.new(KEY, s.encode(), sha256).hexdigest()
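# Login tokens have the form "<hex mac>|<username>"; the MAC binds the
# username to the server-side KEY.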
def make_login_token(username):
return "|".join((generate_mac(username), username))
def check_login_token(token):
"Return the username if the token is valid, otherwise None."
mac, username = token.split('|', 1)
if hmac.compare_digest(mac, generate_mac(username)):
return username
@get('/page/<filename>.rst')
def page(filename):
response.content_type = RST_MIME
revision = request.query.get("revision")
if revision is None:
return static_file(filename + '.rst', root="repo",
mimetype=RST_MIME)
else:
return repo[repo[revision].tree[filename + ".rst"].oid].data
@get('/page/<filename>.html')
def html_page(filename):
return static_file(filename + '.html', root="generated")
@get('/log.json')
def log():
commits = repo.walk(repo.head.oid, git.GIT_SORT_TIME)
try:
page = request.query["page"] + ".rst"
commits = filter(lambda c: page in c.tree, commits)
except KeyError:
pass
return {"log": [{"message": c.message,
"author": c.author.name,
"revision": c.hex}
for c in commits]}
@post('/update/json/<filename>')
def update(filename):
message, page, token = request.json["message"], request.json["page"], request.json["token"]
username = check_login_token(token)
if username is None:
return {"error": "invalid login token"}
email, = connection.execute(sql.select([users.c.email],
users.c.username == username)).first()
signature = git.Signature(username, email)
with open(path.join("repo", filename + '.rst'), "w") as f:
f.write(page)
generate_html_page(filename)
oid = repo.write(git.GIT_OBJ_BLOB, page)
bld = repo.TreeBuilder()
    bld.insert(filename + '.rst', oid, 0o100644)  # filemode must be octal (GIT_FILEMODE_BLOB)
tree = bld.write()
repo.create_commit('refs/heads/master', signature, signature, message,
tree, [repo.head.oid])
@post('/register.json')
def register():
email, username, password = request.json["email"], request.json["username"], request.json["password"]
hashed = scrypt.encrypt(b64encode(urandom(64)), password, maxtime=0.5)
connection.execute(users.insert().values(username=username,
email=email,
password_hash=hashed))
return {"token": make_login_token(username)}
@post('/login.json')
def login():
username, password = request.json["username"], request.json["password"]
hashed, = connection.execute(sql.select([users.c.password_hash],
users.c.username == username)).first()
scrypt.decrypt(hashed, password, maxtime=0.5)
return {"token": make_login_token(username)}
run(host='localhost', port=8080)
|
Python
| 0
|
@@ -296,16 +296,32 @@
ish_file
+, publish_string
%0A%0ARST_MI
@@ -1903,16 +1903,125 @@
ername%0A%0A
+def get_page_revision(filename, revision):%0A return repo%5Brepo%5Brevision%5D.tree%5Bfilename + %22.rst%22%5D.oid%5D.data%0A%0A
@get('/p
@@ -2300,34 +2300,33 @@
return
-repo%5Brepo%5B
+get_page_
revision
%5D.tree%5Bf
@@ -2313,39 +2313,33 @@
age_revision
-%5D.tree%5B
+(
filename
+ %22.rst%22%5D.o
@@ -2326,36 +2326,27 @@
filename
- + %22.rst%22%5D.oid%5D.data
+, revision)
%0A%0A@get('
@@ -2404,60 +2404,234 @@
re
-turn static_file(filename + '.html', root=%22generated
+vision = request.query.get(%22revision%22)%0A%0A if revision is None:%0A return static_file(filename + '.html', root=%22generated%22)%0A else:%0A return publish_string(get_page_revision(filename, revision), writer_name=%22html
%22)%0A%0A
|
2f1f9830dc0b87a411022f2b41fdbe3e0b483d4d
|
Add a comment [skip CI]
|
svir/dialogs/load_hmaps_as_layer_dialog.py
|
svir/dialogs/load_hmaps_as_layer_dialog.py
|
# -*- coding: utf-8 -*-
# /***************************************************************************
# Irmt
# A QGIS plugin
# OpenQuake Integrated Risk Modelling Toolkit
# -------------------
# begin : 2013-10-24
# copyright : (C) 2014 by GEM Foundation
# email : devops@openquake.org
# ***************************************************************************/
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from qgis.core import QgsFeature, QgsGeometry, QgsPoint
from svir.dialogs.load_output_as_layer_dialog import LoadOutputAsLayerDialog
from svir.calculations.calculate_utils import add_numeric_attribute
from svir.utilities.utils import (WaitCursorManager,
LayerEditingManager,
log_msg,
extract_npz,
)
from svir.utilities.shared import DEBUG
class LoadHazardMapsAsLayerDialog(LoadOutputAsLayerDialog):
"""
Modal dialog to load hazard maps from an oq-engine output, as layer
"""
def __init__(self, iface, viewer_dock, session, hostname, calc_id,
output_type='hmaps', path=None, mode=None):
assert output_type == 'hmaps'
LoadOutputAsLayerDialog.__init__(
self, iface, viewer_dock, session, hostname, calc_id,
output_type, path, mode)
# FIXME: add layout only for output types that load from file
self.remove_file_hlayout()
self.setWindowTitle(
'Load hazard maps as layer')
self.create_load_selected_only_ckb()
self.create_num_sites_indicator()
self.create_rlz_or_stat_selector()
self.create_imt_selector()
self.create_poe_selector()
self.npz_file = extract_npz(
session, hostname, calc_id, output_type,
message_bar=iface.messageBar(), params=None)
self.populate_out_dep_widgets()
self.adjustSize()
self.set_ok_button()
def set_ok_button(self):
self.ok_button.setEnabled(
bool(self.path) and self.poe_cbx.currentIndex() != -1)
def populate_rlz_or_stat_cbx(self):
# excluding lon, lat (in old calculations, we might also find 'vs30',
        # which has to be discarded too)
self.rlzs_or_stats = [
rlz_or_stat
for rlz_or_stat in self.npz_file['all'].dtype.names[2:]
if rlz_or_stat != 'vs30']
self.rlz_or_stat_cbx.clear()
self.rlz_or_stat_cbx.setEnabled(True)
self.rlz_or_stat_cbx.addItems(self.rlzs_or_stats)
def on_rlz_or_stat_changed(self):
self.dataset = self.npz_file['all'][self.rlz_or_stat_cbx.currentText()]
self.imts = {}
imts_poes = self.dataset.dtype.names
for imt_poe in imts_poes:
imt, poe = imt_poe.split('-')
if imt not in self.imts:
self.imts[imt] = [poe]
else:
self.imts[imt].append(poe)
self.imt_cbx.clear()
self.imt_cbx.setEnabled(True)
self.imt_cbx.addItems(self.imts.keys())
self.set_ok_button()
def show_num_sites(self):
# NOTE: we are assuming all realizations have the same number of sites,
# which currently is always true.
# If different realizations have a different number of sites, we
# need to move this block of code inside on_rlz_or_stat_changed()
rlz_or_stat_data = self.npz_file['all'][
self.rlz_or_stat_cbx.currentText()]
self.num_sites_lbl.setText(
self.num_sites_msg % rlz_or_stat_data.shape)
def on_imt_changed(self):
self.imt = self.imt_cbx.currentText()
self.poe_cbx.clear()
self.poe_cbx.setEnabled(True)
if self.imt:
self.poe_cbx.addItems(self.imts[self.imt])
self.set_ok_button()
def build_layer_name(self, rlz_or_stat, **kwargs):
imt = self.imt_cbx.currentText()
poe = self.poe_cbx.currentText()
self.default_field_name = '%s-%s' % (imt, poe)
investigation_time = self.get_investigation_time()
layer_name = "hazard_map_%s_%sy" % (rlz_or_stat, investigation_time)
return layer_name
def get_field_names(self, **kwargs):
if self.load_selected_only_ckb.isChecked():
field_names = [self.default_field_name]
else: # load everything
# field names will be like "imt-poe"
field_names = self.dataset.dtype.names
return field_names
def add_field_to_layer(self, field_name):
try:
# NOTE: add_numeric_attribute uses LayerEditingManager
added_field_name = add_numeric_attribute(field_name, self.layer)
except TypeError as exc:
log_msg(str(exc), level='C', message_bar=self.iface.messageBar())
return
return added_field_name
def read_npz_into_layer(self, field_names, **kwargs):
with LayerEditingManager(self.layer, 'Reading npz', DEBUG):
lons = self.npz_file['all']['lon']
lats = self.npz_file['all']['lat']
feats = []
for row_idx, row in enumerate(self.dataset):
# add a feature
feat = QgsFeature(self.layer.pendingFields())
for field_name in field_names:
# NOTE: without casting to float, it produces a
# null because it does not recognize the
# numpy type
value = float(row[field_name])
feat.setAttribute(field_name, value)
feat.setGeometry(QgsGeometry.fromPoint(
QgsPoint(lons[row_idx], lats[row_idx])))
feats.append(feat)
added_ok = self.layer.addFeatures(feats, makeSelected=False)
if not added_ok:
msg = 'There was a problem adding features to the layer.'
log_msg(msg, level='C', message_bar=self.iface.messageBar())
def load_from_npz(self):
for rlz_or_stat in self.rlzs_or_stats:
if (self.load_selected_only_ckb.isChecked()
and rlz_or_stat != self.rlz_or_stat_cbx.currentText()):
continue
with WaitCursorManager('Creating layer for "%s"...' % rlz_or_stat,
self.iface.messageBar()):
self.build_layer(rlz_or_stat)
self.style_maps()
if self.npz_file is not None:
self.npz_file.close()
|
Python
| 0
|
@@ -5161,16 +5161,84 @@
mt-poe%22%0A
+ # self.dataset contains data for the chosen rlz or stat%0A
|
5347040b86f02a0abec4da5c3060b094908bb9b5
|
Simplify argument handling logic.
|
wpcr.py
|
wpcr.py
|
#!/usr/bin/python
import numpy
import scipy.signal
tau = numpy.pi * 2
max_samples = 1000000
debug = False
# determine the clock frequency
# input: magnitude spectrum of clock signal (numpy array)
# output: FFT bin number of clock frequency
def find_clock_frequency(spectrum):
maxima = scipy.signal.argrelextrema(spectrum, numpy.greater_equal)[0]
while maxima[0] < 2:
maxima = maxima[1:]
if maxima.any():
threshold = max(spectrum[2:-1])*0.8
indices_above_threshold = numpy.argwhere(spectrum[maxima] > threshold)
return maxima[indices_above_threshold[0]]
else:
return 0
def midpoint(a):
mean_a = numpy.mean(a)
mean_a_greater = numpy.ma.masked_greater(a, mean_a)
high = numpy.ma.median(mean_a_greater)
mean_a_less_or_equal = numpy.ma.masked_array(a, ~mean_a_greater.mask)
low = numpy.ma.median(mean_a_less_or_equal)
return (high + low) / 2
# whole packet clock recovery
# input: real valued NRZ-like waveform (array, tuple, or list)
# must have at least 2 samples per symbol
# must have at least 2 symbol transitions
# output: list of symbols
def wpcr(a):
if len(a) < 4:
return []
b = a > midpoint(a)
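    # diff-and-square turns each symbol transition into a unit impulse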
d = numpy.diff(b)**2
if len(numpy.argwhere(d > 0)) < 2:
return []
f = scipy.fft(d, len(a))
p = find_clock_frequency(abs(f))
if p == 0:
return []
cycles_per_sample = (p*1.0)/len(f)
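    # the phase of the clock bin, offset by half a cycle so that
    # sampling lands mid-symbol rather than on transitions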
clock_phase = 0.5 + numpy.angle(f[p])/(tau)
if clock_phase <= 0.5:
clock_phase += 1
symbols = []
for i in range(len(a)):
if clock_phase >= 1:
clock_phase -= 1
symbols.append(a[i])
clock_phase += cycles_per_sample
if debug:
print("peak frequency index: %d / %d" % (p, len(f)))
print("samples per symbol: %f" % (1.0/cycles_per_sample))
print("clock cycles per sample: %f" % (cycles_per_sample))
print("clock phase in cycles between 1st and 2nd samples: %f" % (clock_phase))
print("clock phase in cycles at 1st sample: %f" % (clock_phase - cycles_per_sample/2))
print("symbol count: %d" % (len(symbols)))
return symbols
# convert soft symbols into bits (assuming binary symbols)
def slice_bits(symbols):
bits=[]
for element in symbols:
if element >= numpy.average(symbols):
bits.append(1)
else:
bits.append(0)
return bits
def read_from_stdin():
return numpy.frombuffer(sys.stdin.buffer.read(), dtype=numpy.float32)
# If called directly from command line, take input file (or stdin) as a stream
# of floats and print binary symbols found therein.
if __name__ == '__main__':
import sys
debug = True
if len(sys.argv) > 1:
if sys.argv[1] == '-':
samples = read_from_stdin()
else:
samples = numpy.fromfile(sys.argv[1], dtype=numpy.float32)
else:
samples = read_from_stdin()
symbols=wpcr(samples)
bits=slice_bits(symbols)
print(bits)
|
Python
| 0.000013
|
@@ -2731,28 +2731,20 @@
rgv) %3E 1
-:%0A if
+ and
sys.arg
@@ -2752,74 +2752,16 @@
%5B1%5D
-=
+!
= '-':%0A
- samples = read_from_stdin()%0A else:%0A
|
3478bf108ce6992239c638e6e662a6e53204ae46
|
Update wsgi.py for port
|
wsgi.py
|
wsgi.py
|
from app import create_app
application = create_app()
if __name__ == '__main__':
application.run()
|
Python
| 0
|
@@ -84,21 +84,91 @@
-application.run(
+port = int(os.environ.get('PORT', 5000))%0A application.run(host='0.0.0.0', port=port
)
|
6d643c1f4fca74e66513d0461fc358bb1dd21349
|
add method to parse out [xml-handlers] section in process.cfg
|
lib/config_parser.py
|
lib/config_parser.py
|
from ConfigParser import ConfigParser
defaults = {'parse': 'defaultparse',
'clean': 'True',
'consolidate': 'True',
'datadir': '/data/patentdata/patents/2013',
            'dataregex': r'ipg\d{6}.xml',
'years': None,
'downloaddir' : None}
def extract_process_options(handler):
"""
Extracts the high level options from the [process] section
of the configuration file. Returns a dictionary of the options
"""
result = {}
result['parse'] = handler.get('process','parse')
result['clean'] = handler.get('process','clean') == 'True'
result['consolidate'] = handler.get('process','consolidate') == 'True'
result['outputdir'] = handler.get('process','outputdir')
return result
def extract_parse_options(handler, section):
"""
Extracts the specific parsing options from the parse section
as given by the [parse] config option in the [process] section
"""
options = {}
options['datadir'] = handler.get(section,'datadir')
options['dataregex'] = handler.get(section,'dataregex')
options['years'] = handler.get(section,'years')
options['downloaddir'] = handler.get(section,'downloaddir')
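    # when specific years were downloaded, read from the download dir instead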
if options['years'] and options['downloaddir']:
options['datadir'] = options['downloaddir']
return options
def get_config_options(configfile):
"""
Takes in a filepath to a configuration file, returns
two dicts representing the process and parse configuration options.
    See `process.cfg` for explanation of the options
"""
handler = ConfigParser(defaults)
handler.read(configfile)
process_config = extract_process_options(handler)
parse_config = extract_parse_options(handler, process_config['parse'])
return process_config, parse_config
|
Python
| 0
|
@@ -1798,8 +1798,1002 @@
_config%0A
+%0Adef get_year_list(yearstring):%0A %22%22%22%0A Given a %5Byearstring%5D of forms%0A year1%0A year1-year2%0A year1,year2,year3%0A year1-year2,year3-year4%0A Expands into a list of year integers, and returns%0A %22%22%22%0A years = %5B%5D%0A for subset in yearstring.split(','):%0A if subset == 'default':%0A years.append('default')%0A continue%0A sublist = subset.split('-')%0A start = int(sublist%5B0%5D)%0A end = int(sublist%5B1%5D)+1 if len(sublist) %3E 1 else start+1%0A years.extend(range(start,end))%0A return years%0A%0A%0Adef get_xml_handlers(configfile):%0A %22%22%22%0A Called by parse.py to generate a lookup dictionary for which parser should%0A be used for a given file. Imports will be handled in %60parse.py%60%0A %22%22%22%0A handler = ConfigParser()%0A handler.read(configfile)%0A xmlhandlers = %7B%7D%0A for yearrange, handler in handler.items('xml-handlers'):%0A for year in get_year_list(yearrange):%0A xmlhandlers%5Byear%5D = handler%0A return xmlhandlers%0A
|
cf0f7f129bb54c70f60e19e2ec9d82a67f430aaf
|
replace urllib2 with requests lib
|
coti.py
|
coti.py
|
#!/usr/bin/python
import json
import urllib2
from bs4 import BeautifulSoup
from datetime import datetime
def chaco():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.cambioschaco.com.py/php/imprimir_.php').read(), "html.parser")
compra = soup.find_all('tr')[3].contents[5].string[:5].replace('.', '')
venta = soup.find_all('tr')[3].contents[7].string[:5].replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def maxi():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.maxicambios.com.py/').read(), "html.parser")
compra = soup.find_all(class_='lineas1')[0].contents[
7].string.replace('.', '')
venta = soup.find_all(class_='lineas1')[0].contents[
5].string.replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def alberdi():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.cambiosalberdi.com/').read(), "html.parser")
compra = soup.find_all(
class_="span2 pagination-right")[0].string.replace('.', '')
venta = soup.find_all(
class_="span2 pagination-right")[1].string.replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def bcp():
pass
def create_json():
mcompra, mventa = maxi()
ccompra, cventa = chaco()
acompra, aventa = alberdi()
respjson = {
'dolarpy': {
'maxicambios': {
'compra': mcompra,
'venta': mventa
},
'cambioschaco': {
'compra': ccompra,
'venta': cventa
},
'cambiosalberdi': {
'compra': acompra,
'venta': aventa
}
},
"updated": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
return json.dumps(respjson, sort_keys=True, indent=4, separators=(',', ': '))
def get_output():
with open('/tmp/dolar.json', 'r') as f:
response = f.read()
return response
def write_output():
response = create_json()
with open('/tmp/dolar.json', 'w') as f:
f.write(response)
write_output()
|
Python
| 0.000205
|
@@ -37,16 +37,32 @@
urllib2
+%0Aimport requests
%0A%0Afrom b
@@ -177,39 +177,36 @@
-urllib2.urlopen
+requests.get
('http://www
@@ -244,24 +244,33 @@
ir_.php'
-).read()
+, timeout=8).text
, %22html.
@@ -444,35 +444,43 @@
%0A except
-urllib2.URL
+requests.Connection
Error:%0A
@@ -600,39 +600,36 @@
-urllib2.urlopen
+requests.get
('http://www
@@ -649,24 +649,33 @@
com.py/'
-).read()
+, timeout=8).text
, %22html.
@@ -891,35 +891,43 @@
%0A except
-urllib2.URL
+requests.Connection
Error:%0A
@@ -1058,23 +1058,20 @@
-urllib2.urlopen
+requests.get
('ht
@@ -1103,16 +1103,25 @@
om/'
-).read()
+, timeout=8).text
, %22h
@@ -1355,19 +1355,27 @@
ept
-urllib2.URL
+requests.Connection
Erro
|
4d5cc0dfc6f9f460cfc54dfebf2061428ae2ee97
|
implement removal of gitlab's objects
|
crud.py
|
crud.py
|
'''
generic CRUD operations for gitlab's objects
'''
import http
class Crud():
def __init__(self, path):
self.path = path
'''
get an object by system's name and id
'''
def byId(self, sysNam, id):
return http.get(sysNam, '%s/%d' % (self.path, id))
'''
add a new instance of an object
'''
def add(self, sysNam, data):
return http.post(sysNam, self.path, data)
|
Python
| 0.999861
|
@@ -374,8 +374,133 @@
, data)%0A
+%0A%09'''%0A%09delete an instcnce by id%0A%09'''%0A%09def delete(self, sysNam, id):%0A%09%09return http.delete(sysNam, '%25s/%25d' %25 (self.path, id))%0A%0A
|
ace9fc7bb2bcb0e3f9f1be9bd03fcfd69698d6c3
|
Save the current data after a keyboard interrupt.
|
main.py
|
main.py
|
import json
import os.path
import re
from urllib.robotparser import RobotFileParser
import requests
from requests_toolbelt import user_agent
ROOT_URL = "http://en.wikipedia.org/wiki/"
ROBOTS_URL = "http://en.wikipedia.org/robots.txt"
USERAGENT = user_agent("Wikigraph", "0.0.1")
BLACK_LIST = [
"Main_Page"
]
class UrlGetter(object):
_robot_parser = RobotFileParser(url=ROBOTS_URL)
_headers = {
"User-Agent": USERAGENT
}
@classmethod
def get_html(cls, page_name):
url = ROOT_URL + page_name
        if cls._robot_parser.can_fetch(USERAGENT, url) and page_name not in BLACK_LIST:
response = requests.get(url, headers=cls._headers)
return response.text
else:
return None
class LinkGetter(object):
_page_nodes = []
_page_links = []
@classmethod
def find_linked_pages(cls, page_name, current_search_depth=0, max_search_depth=2, reset=False, was_leaf=False):
if reset:
cls._page_nodes = []
cls._page_links = []
leaf = current_search_depth > max_search_depth
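        # nodes beyond the depth limit are recorded as leaves so a
        # later run can resume the crawl from them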
page_node = {
"name": page_name,
"leaf": leaf
}
if was_leaf or page_node not in cls._page_nodes:
old_page_node = {
"name": page_name,
"leaf": was_leaf
}
if old_page_node in cls._page_nodes:
cls._page_nodes.remove(old_page_node)
if page_node not in cls._page_nodes:
cls._page_nodes.append(page_node)
if not leaf:
html = UrlGetter.get_html(page_name)
if html is not None:
linked_pages = set(
re.findall(r"href=(?:\"|\')/wiki/([^:#\"\']+)[^:]*?(?:\"|\')", html))
linked_pages -= set(BLACK_LIST + [page_name])
for linked_page in linked_pages:
link = {
"source_name": page_name,
"target_name": linked_page
}
if link not in cls._page_links:
cls._page_links.append(link)
cls.find_linked_pages(
linked_page,
current_search_depth=current_search_depth + 1,
max_search_depth=max_search_depth)
@classmethod
def get_linked_pages(cls, max_search_depth, start_page_name=None, file_name=None, reset=False, continue_search=True):
if not reset and file_name and os.path.isfile(file_name):
with open(file_name, "r") as in_file:
data = json.load(in_file)
cls._page_nodes = data.get("nodes")
cls._page_links = data.get("links")
if reset:
cls._page_nodes = []
cls._page_links = []
if continue_search:
for node in cls._page_nodes:
if node.get("leaf"):
cls.find_linked_pages(
node.get("name"),
current_search_depth=0,
max_search_depth=max_search_depth,
reset=False,
was_leaf=True)
if start_page_name:
cls.find_linked_pages(
start_page_name,
current_search_depth=0,
max_search_depth=max_search_depth,
reset=False,
was_leaf=False)
if file_name:
data = {
"nodes": cls._page_nodes,
"links": cls._page_links
}
with open(file_name, "w") as out_file:
json.dump(data, out_file, indent=4)
return cls._page_nodes, cls._page_links
if __name__ == "__main__":
LinkGetter.get_linked_pages(
1, start_page_name="Python", file_name="data/data.json")
|
Python
| 0
|
@@ -30,16 +30,41 @@
port re%0A
+import signal%0Aimport sys%0A
from url
@@ -783,70 +783,387 @@
ass
-LinkGetter(object):%0A%0A _page_nodes = %5B%5D%0A _page_links = %5B%5D
+MetaLinkGetter(type):%0A%0A def __init__(cls, name, bases, d):%0A type.__init__(cls, name, bases, d)%0A signal.signal(signal.SIGINT, cls.interrupt_handler)%0A%0A%0Aclass LinkGetter(object, metaclass=MetaLinkGetter):%0A%0A _page_nodes = %5B%5D%0A _page_links = %5B%5D%0A _interrupt = False%0A%0A @classmethod%0A def interrupt_handler(cls, signal, frame):%0A cls._interrupt = True
%0A%0A
@@ -1789,32 +1789,105 @@
old_page_node)%0A%0A
+ if cls._interrupt:%0A page_node%5B%22leaf%22%5D = True%0A%0A
if p
@@ -1962,32 +1962,87 @@
end(page_node)%0A%0A
+ if cls._interrupt:%0A return%0A%0A
if n
@@ -2130,20 +2130,8 @@
html
- is not None
:%0A
@@ -3456,16 +3456,39 @@
if
+not cls._interrupt and
node.get
@@ -3777,16 +3777,39 @@
if
+not cls._interrupt and
start_pa
@@ -4282,16 +4282,68 @@
ent=4)%0A%0A
+ if cls._interrupt:%0A sys.exit(1)%0A%0A
|
baa81fb776af4b6811bf434a75f808f0aeae056b
|
fix loading of watering topic from config
|
main.py
|
main.py
|
import argparse
import json
import logging
import logging.config
import os
import paho.mqtt.client as mqtt
import yaml
from services.data_service import DataService
from services.watering_service import WateringService
from services.config_service import ConfigService
def load_args():
# setup commandline argument parser
parser = argparse.ArgumentParser()
parser.add_argument('--env')
return parser.parse_args()
def setup_logging(default_level=logging.INFO):
path = os.path.join(os.getcwd(), 'config', 'logging.yml')
if os.path.exists(path):
# load from config
with open(path, 'rt') as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)
def create_mqtt_client(config):
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
user = config['user']
if user is not None:
# use authentication
client.username_pw_set(user, config['password'])
client.connect(config['host'], config['port'])
return client
def handle_receive_sensor_values(payload):
# transform payload to JSON
sensor_values = json.loads(payload.decode('utf-8'))
try:
temperature = int(sensor_values['Temperature'])
humidity = int(sensor_values['Humidity'])
soil_moisture = int(sensor_values['SoilMoisture'])
sensors_id = data_service.save_sensor_values(temperature, humidity, soil_moisture)
if sensors_id is not None:
watering_milliseconds = watering_service.calculate_milliseconds(soil_moisture)
if watering_milliseconds > 200:
watering(watering_milliseconds)
except ValueError:
logger.error('convert sensor-values error', exc_info=True)
def handle_watering(payload):
try:
watering_milliseconds = int(payload)
data_service.save_watering(watering_milliseconds)
except ValueError:
logger.error('convert watering-milliseconds error', exc_info=True)
def watering(milliseconds):
mqtt_client.publish(mqtt_config['topics']['watering'], milliseconds)
def on_connect(client, userdata, flags_dict, rc):
if rc != 0:
logger.error('MQTT connection error: ' + str(rc))
return
logger.info('MQTT connected')
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe(mqtt_config['topics']['generic'])
def on_message(client, userdata, msg):
logger.debug('receive message "%s": %s', msg.topic, str(msg.payload))
if msg.topic == mqtt_config['topics']['sensors']: # sensor values
handle_receive_sensor_values(msg.payload)
if msg.topic == mqtt_config['topic']['watering']: # watering
handle_watering(msg.payload)
args = load_args()
setup_logging()
logger = logging.getLogger(__name__)
logger.info('starting MQTT client')
try:
config_service = ConfigService(args.env)
mqtt_config = config_service.get_section('mqtt')
mysql_config = config_service.get_section('mysql')
watering_config = config_service.get_section('watering')
data_service = DataService(mysql_config)
watering_service = WateringService(watering_config)
mqtt_client = create_mqtt_client(mqtt_config)
mqtt_client.loop_forever()
except Exception as error:
logger.error('main error', exc_info=True)
|
Python
| 0.000001
|
@@ -2808,16 +2808,17 @@
g%5B'topic
+s
'%5D%5B'wate
|
b694436d4d8b6ee0b4b4a8078e0b34f779b17751
|
Set a nice app-icon
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <apaku@gmx.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from jenkinstray.initsip import setupSipApi
setupSipApi()
from PyQt4 import QtGui
from jenkinstray.gui.jenkinstray import JenkinsTray
from jenkinstray import rcc_jenkinstray
def main(args):
global app
global tray
app = QtGui.QApplication(args)
app.setApplicationVersion("0.1")
app.setApplicationName("Jenkins Tray")
QtGui.QApplication.setQuitOnLastWindowClosed(False)
tray = JenkinsTray(app)
return app.exec_()
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Python
| 0
|
@@ -1723,16 +1723,104 @@
Tray%22)%0A
+ QtGui.QApplication.setWindowIcon(QtGui.QIcon(%22:///images/jenkinstray_success.png%22))%0A
QtGu
|
2fa02183eba3d9a50487b891ddebfe013ac1854b
|
clean sql statement formatting
|
lib/geocode_setup.py
|
lib/geocode_setup.py
|
# sets up the geocoding databases
def geocode_db_initialize(conn):
conn.executescript("""
PRAGMA CACHE_SIZE=20000;
ATTACH DATABASE 'assignee.sqlite3' AS assignees;
ATTACH DATABASE 'inventor.sqlite3' AS inventors;
ATTACH DATABASE 'loctbl' AS loc;
""")
# TODO: Ensure this is the correct schema, that is, the
# schema below needs to match the schema in the existing
# loc table. If it doesn't match, we need to find out
# why, and figure out what to do about that.
def loc_create_table(conn):
conn.executescript("""
/* DROP TABLE IF EXISTS loc; */
CREATE TABLE IF NOT EXISTS loc (
Cnt INTEGER,
City VARCHAR(10), State VARCHAR(2),
Country VARCHAR(2), Zipcode VARCHAR(5),
City3 VARCHAR,
NCity VARCHAR(10), NState VARCHAR(2),
NCountry VARCHAR(2),
UNIQUE(City,State,Country,Zipcode));
DROP INDEX IF EXISTS loc_idxCC;
DROP INDEX IF EXISTS loc_idx;
DROP INDEX IF EXISTS loc_idxCS;
DROP INDEX IF EXISTS loc_ixnCC;
DROP INDEX IF EXISTS loc_ixn;
DROP INDEX IF EXISTS loc_ixnCS;
DROP INDEX IF EXISTS loc3_idxCC;
""")
# TODO: Find a way to unit test fix_city_country
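# NOTE: cityctry() is not a SQLite builtin; it is presumably an
# application-defined function registered on the connection elsewhere.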
def fix_city_country(conn):
conn.executescript("""
CREATE TEMPORARY TABLE temp AS
SELECT Upper(City) as CityX, Upper(State) as StateX,
Upper(Country) as CountryX, count(*) as Cnt
FROM assignees.assignee
WHERE City!=""
GROUP BY CityX, StateX, CountryX;
CREATE TEMPORARY TABLE temp2 AS
SELECT sum(Cnt) as Cnt,
cityctry(CityX, CountryX, 'city') as CityY, StateX as StateY,
cityctry(CityX, CountryX, 'ctry') as CtryY, '' as ZipcodeY
FROM temp
WHERE CityY!=""
GROUP BY CityY, StateY, CtryY;
INSERT OR REPLACE INTO loc
SELECT a.*, SUBSTR(CityY,1,3), b.NewCity, b.NewState, b.NewCountry
FROM temp2 AS a
LEFT JOIN loc.typos AS b
ON a.CityY=b.City AND a.StateY=b.State AND a.CtryY=b.Country;
DROP TABLE temp;
DROP TABLE temp2;
""")
# TODO: Find a way to unit test fix_state_zip
def fix_state_zip(conn):
conn.executescript("""
CREATE TEMPORARY TABLE temp AS
SELECT Upper(City) as CityX, Upper(State) as StateX,
Upper(Country) as CountryX, Zipcode, count(*) as Cnt
FROM inventors.inventor
WHERE City!="" OR (City="" AND Zipcode!="")
GROUP BY CityX, StateX, CountryX, Zipcode;
CREATE TEMPORARY TABLE temp2 AS
SELECT sum(Cnt) as Cnt,
cityctry(CityX, CountryX, 'city') as CityY, StateX as StateY,
cityctry(CityX, CountryX, 'ctry') as CtryY, Zipcode as ZipcodeY
FROM temp
WHERE CityY!=""
GROUP BY CityY, StateY, CtryY, ZipcodeY;
INSERT OR REPLACE INTO loc
SELECT a.*, SUBSTR(CityY,1,3), b.NewCity, b.NewState, b.NewCountry
FROM temp2 AS a
LEFT JOIN loc.typos AS b
ON a.CityY=b.City AND a.StateY=b.State AND a.CtryY=b.Country;
DROP TABLE temp;
DROP TABLE temp2;
""")
# TODO: Find a way to ensure that the correct indexes are created as
# the schemas change.
def create_loc_indexes(conn):
conn.executescript("""
CREATE INDEX IF NOT EXISTS loc_idCC3 ON loc (City3,State,Country);
CREATE INDEX IF NOT EXISTS loc_idxCC ON loc (City,Country);
CREATE INDEX IF NOT EXISTS loc_idx ON loc (City,State,Country,Zipcode);
CREATE INDEX IF NOT EXISTS loc_idxCS ON loc (City,State);
CREATE INDEX IF NOT EXISTS loc_ixnCC ON loc (NCity,NCountry);
CREATE INDEX IF NOT EXISTS loc_ixn ON loc (NCity,NState,NCountry);
CREATE INDEX IF NOT EXISTS loc_ixnCS ON loc (NCity,NState);
""")
# TODO: unit test
def create_usloc_table(conn):
conn.executescript("""
CREATE TABLE IF NOT EXISTS usloc AS
SELECT Zipcode, Latitude, Longitude, Upper(City) as City,
BLK_SPLIT(Upper(City)) as BlkCity,
SUBSTR(UPPER(BLK_SPLIT(City)),1,3) as City3,
REV_WRD(BLK_SPLIT(City), 4) as City4R,
Upper(State) as State, "US" as Country
FROM loc.usloc
GROUP BY City, State;
CREATE INDEX If NOT EXISTS usloc_idxZ on usloc (Zipcode);
CREATE INDEX If NOT EXISTS usloc_idxCS on usloc (City, State);
CREATE INDEX If NOT EXISTS usloc_idBCS on usloc (BlkCity, State);
CREATE INDEX If NOT EXISTS usloc_idC3S on usloc (City3, State);
CREATE INDEX If NOT EXISTS usloc_idC4R on usloc (City4R, State);
DETACH DATABASE assignees;
DETACH DATABASE inventors;
/*DETACH DATABASE loc;
CREATE TEMPORARY TABLE gnsloc AS
SELECT '' AS zipcode, lat, long,
UPPER(full_name_nd) AS city, "" AS State, cc1 AS country
FROM loc.gnsloc;
CREATE INDEX gnsloc_idxCC on gnsloc (City, Country)
*/;
""")
# TODO: unit test
def create_locMerge_table(conn):
conn.executescript("""
CREATE TABLE IF NOT EXISTS locMerge (
Mtch INTEGER,
Val FLOAT, Cnt INTEGER,
City VARCHAR, State VARCHAR,
Country VARCHAR, Zipcode VARCHAR,
NCity VARCHAR, NState VARCHAR,
NCountry VARCHAR, NZipcode VARCHAR,
NLat FLOAT, NLong FLOAT,
City3 VARCHAR,
UNIQUE(City, State, Country, Zipcode));
CREATE INDEX IF NOT EXISTS okM_idxCC ON locMerge (City,Country);
CREATE INDEX IF NOT EXISTS okM_idx ON locMerge (City,State,Country,Zipcode);
CREATE INDEX IF NOT EXISTS okM_idxCS ON locMerge (City,State);
CREATE INDEX IF NOT EXISTS okM_idx3 ON locMerge (City3,State,Country);
""")
|
Python
| 0.000021
|
@@ -693,16 +693,26 @@
HAR(10),
+%0A
State
@@ -755,16 +755,28 @@
CHAR(2),
+%0A
Zipcode
@@ -845,16 +845,27 @@
HAR(10),
+%0A
NState
@@ -951,32 +951,33 @@
ntry,Zipcode));%0A
+%0A
DROP IND
@@ -5473,24 +5473,27 @@
Val FLOAT,
+%0A
Cn
@@ -5528,23 +5528,29 @@
VARCHAR,
+%0A
+
State VA
@@ -5580,24 +5580,33 @@
try VARCHAR,
+%0A
Zipcode
@@ -5614,17 +5614,16 @@
ARCHAR,%0A
-%0A
@@ -5640,16 +5640,24 @@
VARCHAR,
+%0A
NSt
@@ -5698,16 +5698,27 @@
VARCHAR,
+%0A
NZipco
@@ -5752,16 +5752,21 @@
t FLOAT,
+%0A
@@ -5857,16 +5857,17 @@
code));%0A
+%0A
|
c5b7cf7cdd8a91162441a17f9d0b70db197249c0
|
make main runnable
|
main.py
|
main.py
|
from collaborator.http_server.http_server import entryPoint
if __name__ == '__main__':
entryPoint()
|
Python
| 0.000063
|
@@ -1,8 +1,30 @@
+#!/usr/bin/env python3
%0Afrom co
|
0dce5a6524ebc5020991ab301cd0b080ad27ddf6
|
Fix self prefix
|
main.py
|
main.py
|
#!/usr/bin/env python3
import asyncio
from datetime import datetime
import logging
import lzma
from pathlib import Path
import os
import sys
import tarfile
from discord.ext.commands import when_mentioned_or
import yaml
from bot import BeattieBot
try:
import uvloop
except ImportError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open('config/config.yaml') as file:
config = yaml.load(file)
self_bot = 'self' in sys.argv
debug = 'debug' in sys.argv
loop = asyncio.get_event_loop()
if self_bot:
prefixes = config['self_prefix']
token = config['self']
elif config['debug'] or debug:
prefixes = [config['test_prefix']]
token = config['test_token']
else:
prefixes = config['prefixes']
token = config['token']
bot = BeattieBot(when_mentioned_or(*prefixes), self_bot=self_bot)
logger = logging.getLogger('discord')
if self_bot:
logger.setLevel(logging.CRITICAL)
else:
old_logs = Path('.').glob('discord*.log')
logname = 'logs.tar'
if os.path.exists(logname):
mode = 'a'
else:
mode = 'w'
with tarfile.open(logname, mode) as tar:
for log in old_logs:
with open(log, 'rb') as fp:
data = lzma.compress(fp.read())
name = f'{log.name}.xz'
with open(name, 'wb') as fp:
fp.write(data)
tar.add(name)
os.remove(name)
log.unlink()
logger.setLevel(logging.DEBUG)
now = datetime.utcnow()
filename = now.strftime('discord%Y%m%d%H%M.log')
handler = logging.FileHandler(
filename=filename, encoding='utf-8', mode='w')
handler.setFormatter(
logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
bot.logger = logger
extensions = [f'cogs.{f.stem}' for f in Path('cogs').glob('*.py')]
for extension in extensions:
try:
bot.load_extension(extension)
except Exception as e:
print(f'Failed to load extension {extension}\n{type(e).__name__}: {e}')
bot.run(token, bot=not self_bot)
|
Python
| 0.999974
|
@@ -597,32 +597,33 @@
%0A prefixes =
+%5B
config%5B'self_pre
@@ -627,16 +627,17 @@
prefix'%5D
+%5D
%0A tok
|
08650ad083e9ca4790ea627e8ab0ae670f7ef60b
|
Add merge function to rd_models (#3464)
|
angr/knowledge_plugins/key_definitions/rd_model.py
|
angr/knowledge_plugins/key_definitions/rd_model.py
|
from typing import Dict, Tuple, Set, Optional, TYPE_CHECKING
from .uses import Uses
from .live_definitions import LiveDefinitions
if TYPE_CHECKING:
from angr.knowledge_plugins.key_definitions.definition import Definition
# TODO: Make ReachingDefinitionsModel serializable
class ReachingDefinitionsModel:
def __init__(self, func_addr: Optional[int]=None):
self.func_addr = func_addr # do not use. only for pretty-printing
self.observed_results: Dict[Tuple[str, int, int], LiveDefinitions] = {}
self.all_definitions: Set['Definition'] = set()
self.all_uses = Uses()
def __repr__(self):
return "<RDModel{} with {} observations>".format(
"[func %#x]" if self.func_addr is not None else "",
len(self.observed_results),
)
def copy(self) -> "ReachingDefinitionsModel":
new = ReachingDefinitionsModel(self.func_addr)
new.observed_results = self.observed_results.copy()
new.all_definitions = self.all_definitions.copy()
new.all_uses = self.all_uses.copy()
return new
|
Python
| 0
|
@@ -305,16 +305,113 @@
sModel:%0A
+ %22%22%22%0A Models the definitions, uses, and memory of a ReachingDefinitionState object%0A %22%22%22%0A
def
@@ -1183,8 +1183,491 @@
urn new%0A
+%0A def merge(self, model: 'ReachingDefinitionsModel'):%0A for k, v in model.observed_results.items():%0A if k not in self.observed_results:%0A self.observed_results%5Bk%5D = v%0A else:%0A merged, merge_occured = self.observed_results%5Bk%5D.merge(v)%0A if merge_occured:%0A self.observed_results%5Bk%5D = merged%0A self.all_definitions.union(model.all_definitions)%0A self.all_uses.merge(model.all_uses)%0A
|
9eeae893b8e777fa5f50733e6580b731a00a5170
|
kill useless plugin registration logic
|
tenderloin/listeners/message.py
|
tenderloin/listeners/message.py
|
import json
import logging
import time
import zmq
from collections import defaultdict
from zmq.eventloop import zmqstream
from tenderloin.listeners import plugin_data
PLUGIN_TIMEOUT = 300
class PluginData(object):
def __init__(self, name, uuid, fqdn, tags, data):
self.name = name
self.uuid = uuid
self.fqdn = fqdn
self.tags = tags
self.data = data
class MessageListener(object):
def __init__(self, address, port):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.setsockopt(zmq.LINGER, 0)
socket.setsockopt(zmq.HWM, 1000)
logging.info("Starting up message listener on %s:%d", address, port)
socket.bind("tcp://%s:%s" % (address, port))
self.stream = zmqstream.ZMQStream(socket)
def find(self, f, seq):
"""Return first item in sequence where f(item) == True."""
"""h/t http://tomayko.com/writings/cleanest-python-find-in-list-function"""
for item in seq:
if f(item):
return item
def handle(self, message):
d = json.loads(message[0])
if d["data"]:
self.update_data(plugin_id=d["plugin_id"], payload=d["data"],
tags=d["tags"])
def update_data(self, plugin_id, payload, tags):
(plugin_name, uuid, fqdn) = plugin_id
now = int(time.time())
payload["received_at"] = now
self.register_plugin(plugin_id, tags)
if self.registered(plugin_id):
logging.debug("Updating plugin: %s@%d" % (repr(plugin_id), now))
plugin_data.append(PluginData(name=plugin_id[0], uuid=plugin_id[1],
fqdn=plugin_id[2], tags=tags,
data=payload))
else:
logging.info("Ignoring plugin data due to registration "
"collision: %s" % repr(plugin_id))
def consumer_loop(self):
self.stream.on_recv(self.handle)
def register_plugin(self, plugin_id, tags):
global PLUGIN_TIMEOUT
(plugin_name, uuid, fqdn) = plugin_id
now = time.time()
registered = self.registered(plugin_id)
if registered:
if registered == uuid:
if self.expired(plugin_id):
logging.info("Re-registering plugin due to expiry: %s@%d" %
(repr(plugin_id), now))
else:
logging.info("Plugin registration collision: %s@%d "
"[registered=%s]" %
(repr(plugin_id), now, registered))
else:
logging.info("Registering plugin: %s@%d [tags=%s]" %
(repr(plugin_id), now, repr(tags)))
def expired(self, plugin_id):
return self.find(lambda plugin:
plugin_id, plugin_data).data.get("received_at", 0) <\
time.time() - PLUGIN_TIMEOUT
def registered(self, plugin_id):
p = self.find(lambda plugin: (plugin.name, plugin.uuid, plugin.fqdn) ==
plugin_id, plugin_data)
if hasattr(p, 'uuid'):
return plugin_id[1] == p.uuid
else:
return True
|
Python
| 0
|
@@ -804,265 +804,8 @@
t)%0A%0A
- def find(self, f, seq):%0A %22%22%22Return first item in sequence where f(item) == True.%22%22%22%0A %22%22%22h/t http://tomayko.com/writings/cleanest-python-find-in-list-function%22%22%22%0A for item in seq:%0A if f(item):%0A return item%0A%0A
@@ -1181,98 +1181,8 @@
ow%0A%0A
- self.register_plugin(plugin_id, tags)%0A%0A if self.registered(plugin_id):%0A
@@ -1254,28 +1254,24 @@
w))%0A
-
plugin_data.
@@ -1304,38 +1304,25 @@
gin_
-id%5B0%5D
+name
, uuid=
-plugin_id%5B1%5D,%0A
+uuid,%0A
@@ -1364,1553 +1364,107 @@
qdn=
-plugin_id%5B2%5D, tags=tags,%0A data=payload))%0A else:%0A logging.info(%22Ignoring plugin data due to registration %22%0A %22collision: %25s%22 %25 repr(plugin_id))%0A%0A def consumer_loop(self):%0A self.stream.on_recv(self.handle)%0A%0A def register_plugin(self, plugin_id, tags):%0A global PLUGIN_TIMEOUT%0A%0A (plugin_name, uuid, fqdn) = plugin_id%0A now = time.time()%0A registered = self.registered(plugin_id)%0A%0A if registered:%0A if registered == uuid:%0A if self.expired(plugin_id):%0A logging.info(%22Re-registering plugin due to expiry: %25s@%25d%22 %25%0A (repr(plugin_id), now))%0A else:%0A logging.info(%22Plugin registration collision: %25s@%25d %22%0A %22%5Bregistered=%25s%5D%22 %25%0A (repr(plugin_id), now, registered))%0A else:%0A logging.info(%22Registering plugin: %25s@%25d %5Btags=%25s%5D%22 %25%0A (repr(plugin_id), now, repr(tags)))%0A%0A def expired(self, plugin_id):%0A return self.find(lambda plugin:%0A plugin_id, plugin_data).data.get(%22received_at%22, 0) %3C%5C%0A time.time() - PLUGIN_TIMEOUT%0A%0A def registered(self, plugin_id):%0A p = self.find(lambda plugin: (plugin.name, plugin.uuid, plugin.fqdn) ==%0A plugin_id, plugin_data)%0A%0A if hasattr(p, 'uuid'):%0A return plugin_id%5B1%5D == p.uuid%0A else:%0A return True
+fqdn, tags=tags, data=payload))%0A%0A def consumer_loop(self):%0A self.stream.on_recv(self.handle)
%0A
|
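The deleted find() helper in the record above is the generic first-match scan; a minimal sketch of the same idiom using next() over a generator expression, with PluginData and plugin_data as assumed stand-ins for the record's module-level registry:

from collections import namedtuple

# Stand-ins for the record's module-level plugin registry.
PluginData = namedtuple("PluginData", "name uuid fqdn tags data")
plugin_data = [
    PluginData("cpu", "u-1", "host-a", ["metrics"], {"received_at": 100}),
    PluginData("mem", "u-2", "host-b", ["metrics"], {"received_at": 200}),
]

def find(f, seq):
    """Return the first item in seq where f(item) is truthy, else None."""
    return next((item for item in seq if f(item)), None)

plugin_id = ("mem", "u-2", "host-b")
match = find(lambda p: (p.name, p.uuid, p.fqdn) == plugin_id, plugin_data)
print(match.data["received_at"] if match else "not registered")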
44537a6496b1b67511ea7008418b6d1a7a30fdf4
|
move the resolve cache into TLS
|
claripy/result.py
|
claripy/result.py
|
import copy
import collections
import weakref
import threading
class Result(object):
def __init__(self, satness, model=None, approximation=False, backend_model=None):
self.sat = satness
self.model = model if model is not None else { }
self._tls = threading.local()
self._tls.backend_model = backend_model
self.approximation = approximation
self.eval_cache = { }
self.eval_n = { }
self.min_cache = { }
self.max_cache = { }
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
@property
def backend_model(self):
try:
return self._tls.backend_model
except AttributeError:
return None
def branch(self):
r = Result(self.sat, copy.copy(self.model), backend_model=self._tls.backend_model)
r.eval_cache = dict(self.eval_cache)
r.eval_n = dict(self.eval_n)
r.min_cache = dict(self.min_cache)
r.max_cache = dict(self.max_cache)
return r
def __getstate__(self):
return ( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache )
def __setstate__(self, state):
( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache ) = state
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
self._tls = threading.local()
self._tls.backend_model = None
def downsize(self):
self._tls.backend_model = None
def UnsatResult(**kwargs):
return Result(False, **kwargs)
def SatResult(**kwargs):
return Result(True, **kwargs)
|
Python
| 0.000001
|
@@ -493,28 +493,133 @@
e = %7B %7D%0A
+%0A
- self
+@property%0A def resolve_cache(self):%0A if not hasattr(self._tls, 'resolve_cache'):%0A self._tls
.resolve
@@ -677,16 +677,55 @@
tionary)
+%0A return self._tls.resolve_cache
%0A%0A @p
@@ -1146,32 +1146,191 @@
self.max_cache)%0A
+ self._tls.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary, %7B b:weakref.WeakKeyDictionary(c) for b,c in self.resolve_cache.items() %7D)%0A
return r
@@ -1493,24 +1493,24 @@
lf, state):%0A
+
( se
@@ -1604,88 +1604,8 @@
ate%0A
- self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)%0A
|
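The claripy change above moves the resolve cache into thread-local storage; a hedged sketch of that pattern, a per-thread cache created lazily behind a property (names follow the record, but this is an illustration, not claripy's actual code):

import collections
import threading
import weakref

class Cached(object):
    def __init__(self):
        self._tls = threading.local()

    @property
    def resolve_cache(self):
        # Attributes on threading.local() are private to the setting thread,
        # so each thread lazily builds and sees its own cache.
        if not hasattr(self._tls, 'resolve_cache'):
            self._tls.resolve_cache = collections.defaultdict(
                weakref.WeakKeyDictionary)
        return self._tls.resolve_cache

c = Cached()
c.resolve_cache  # main thread's cache

def worker():
    print(len(c.resolve_cache))  # 0: a fresh, private cache in this thread

t = threading.Thread(target=worker)
t.start()
t.join()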
200ea76309c361c6df534f2b0f6e615866f9e85b
|
Modify rpr formatting
|
tcconfig/_iptables.py
|
tcconfig/_iptables.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import re
import dataproperty
from dataproperty.type import IntegerTypeChecker
from subprocrunner import SubprocessRunner
from ._common import sanitize_network
from ._split_line_list import split_line_list
class IptablesMangleMark(object):
@property
def line_number(self):
return self.__line_number
@property
def protocol(self):
return self.__protocol
@property
def source(self):
return self.__source
@property
def destination(self):
return self.__destination
@property
def mark_id(self):
return self.__mark_id
def __init__(
self, mark_id, source, destination, protocol="all",
line_number=None):
self.__chain = "PREROUTING"
self.__line_number = line_number
self.__mark_id = mark_id
self.__source = source
self.__destination = destination
self.__protocol = protocol
def __repr__(self, *args, **kwargs):
str_list = []
if IntegerTypeChecker(self.line_number).is_type():
str_list.append("line-num={}".format(self.line_number))
str_list.append(
"protocol={:s}, src={:s}, dst={:s}, mark-id={:d}".format(
self.protocol, self.source, self.destination, self.mark_id))
return ", ".join(str_list)
def to_append_command(self):
IntegerTypeChecker(self.mark_id).validate()
command_item_list = [
"iptables -A PREROUTING -t mangle -j MARK",
"--set-mark {}".format(self.mark_id),
]
if any([
dataproperty.is_not_empty_string(self.protocol),
IntegerTypeChecker(self.protocol).is_type(),
]):
command_item_list.append("-p {}".format(self.protocol))
if self.__is_valid_srcdst(self.source):
command_item_list.append(
"-s {:s}".format(sanitize_network(self.source)))
if self.__is_valid_srcdst(self.destination):
command_item_list.append(
"-d {:s}".format(sanitize_network(self.destination)))
return " ".join(command_item_list)
def to_delete_command(self):
IntegerTypeChecker(self.line_number).validate()
return "iptables -t mangle -D PREROUTING {}".format(self.line_number)
@staticmethod
def __is_valid_srcdst(srcdst):
return (
dataproperty.is_not_empty_string(srcdst) and
srcdst.lower() != "anywhere")
class IptablesMangleController(object):
__RE_CHAIN_NAME_PREROUTING = re.compile("Chain PREROUTING")
__MAX_MARK_ID = 0xffffffff
@classmethod
def clear(cls):
for mangle in cls.parse():
proc = SubprocessRunner(mangle.to_delete_command())
if proc.run() != 0:
raise RuntimeError(str(proc.stderr))
@classmethod
def get_iptables(cls):
proc = SubprocessRunner("iptables -t mangle --line-numbers -L")
if proc.run() != 0:
raise RuntimeError(str(proc.stderr))
return proc.stdout
@classmethod
def get_unique_mark_id(cls):
mark_id_list = [mangle.mark_id for mangle in cls.parse()]
unique_mark_id = 1
while unique_mark_id < cls.__MAX_MARK_ID:
if unique_mark_id not in mark_id_list:
return unique_mark_id
unique_mark_id += 1
raise RuntimeError("usable mark id not found")
@classmethod
def parse(cls):
for block in split_line_list(cls.get_iptables().splitlines()):
if len(block) <= 1:
# skip if no entry exists
continue
if cls.__RE_CHAIN_NAME_PREROUTING.search(block[0]) is None:
continue
for line in reversed(block[2:]):
item_list = line.split()
if len(item_list) < 6:
continue
line_number = int(item_list[0])
target = item_list[1]
protocol = item_list[2]
source = item_list[4]
destination = item_list[5]
try:
mark = int(item_list[-1], 16)
except ValueError:
continue
if target != "MARK":
continue
yield IptablesMangleMark(
mark, source, destination, protocol, line_number)
@classmethod
def add(cls, mangling_mark):
return SubprocessRunner(mangling_mark.to_append_command()).run()
if __name__ == '__main__':
# temporal tests
iptables = IptablesMangleController()
for mangling_mark in iptables.parse():
print(mangling_mark.to_append_command())
print(mangling_mark)
iptables.clear()
for _i in range(3):
iptables.add(mangling_mark)
|
Python
| 0.000001
|
@@ -1253,31 +1253,32 @@
str_list.
-app
+ext
end(
+%5B
%0A
@@ -1296,127 +1296,230 @@
%7B:s%7D
-, src=%7B:s%7D, dst=%7B:s%7D, mark-id=%7B:d%7D%22.format(%0A self.protocol, self.source, self.destination, self.mark_id)
+%22.format(self.protocol),%0A %22src=%7B:s%7D%22.format(self.source),%0A %22dst=%7B:s%7D%22.format(self.destination),%0A %22mark-id=%7B:d%7D%22.format(self.mark_id),%0A %22charin=%7B:s%7D%22.format(self.chain),%0A %5D
)%0A%0A
|
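The repr change in the diff above swaps one long format string for a list of "key=value" pieces joined with ", ", which keeps optional fields (the line number) easy to include; a minimal sketch under those assumptions:

class MangleMark(object):
    def __init__(self, protocol, source, destination, mark_id, line_number=None):
        self.protocol = protocol
        self.source = source
        self.destination = destination
        self.mark_id = mark_id
        self.line_number = line_number

    def __repr__(self):
        str_list = []
        if isinstance(self.line_number, int):  # stands in for IntegerTypeChecker
            str_list.append("line-num={}".format(self.line_number))
        str_list.extend([
            "protocol={:s}".format(self.protocol),
            "src={:s}".format(self.source),
            "dst={:s}".format(self.destination),
            "mark-id={:d}".format(self.mark_id),
        ])
        return ", ".join(str_list)

print(repr(MangleMark("tcp", "10.0.0.0/8", "anywhere", 3)))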
05f829c2e1116d0b4fdc2711981c3c3f3c0c0665
|
add bucket prefix support
|
directupload/backends/s3.py
|
directupload/backends/s3.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from urllib import quote_plus
from datetime import datetime
from datetime import timedelta
import base64
import hmac
import hashlib
import os
from base import BaseUploadBackend, _set_default_if_none, json
# AWS Options
ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID', None)
SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
BUCKET_NAME = getattr(settings, 'AWS_BUCKET_NAME', None)
SECURE_URLS = getattr(settings, 'AWS_S3_SECURE_URLS', False)
BUCKET_URL = getattr(settings, 'AWS_BUCKET_URL', ('https://' if SECURE_URLS else 'http://') + BUCKET_NAME + '.s3.amazonaws.com')
DEFAULT_ACL = getattr(settings, 'AWS_DEFAULT_ACL', 'public-read')
DEFAULT_KEY_PATTERN = getattr(settings, 'AWS_DEFAULT_KEY_PATTERN', '${targetname}')
DEFAULT_FORM_TIME = getattr(settings, 'AWS_DEFAULT_FORM_LIFETIME', 36000) # 10 HOURS
class S3Backend(BaseUploadBackend):
def __init__(self, request, options={}, post_data={}, conditions={}):
self.conditions = conditions
super(S3Backend, self).__init__(request, options, post_data)
def get_target_url(self):
return BUCKET_URL
def build_options(self):
self.options['forceIframeTransport'] = True
self.options['fileObjName'] = 'file'
def build_post_data(self):
if 'folder' in self.options:
key = os.path.join(self.options['folder'], DEFAULT_KEY_PATTERN)
else:
key = DEFAULT_KEY_PATTERN
#_set_default_if_none(self.post_data, 'key', key) #this is set by update_post_params
_set_default_if_none(self.post_data, 'acl', DEFAULT_ACL)
try:
_set_default_if_none(self.post_data, 'bucket', BUCKET_NAME)
except ValueError:
raise ImproperlyConfigured("Bucket name is a required property.")
try:
_set_default_if_none(self.post_data, 'AWSAccessKeyId', ACCESS_KEY_ID)
except ValueError:
raise ImproperlyConfigured("AWS Access Key ID is a required property.")
self.conditions = self.build_conditions()
if not SECRET_ACCESS_KEY:
raise ImproperlyConfigured("AWS Secret Access Key is a required property.")
expiration_time = datetime.utcnow() + timedelta(seconds=DEFAULT_FORM_TIME)
self.policy_string = self.build_post_policy(expiration_time)
self.policy = base64.b64encode(self.policy_string)
self.signature = base64.encodestring(hmac.new(SECRET_ACCESS_KEY, self.policy, hashlib.sha1).digest()).strip()
self.post_data['policy'] = self.policy
self.post_data['signature'] = self.signature
def build_conditions(self):
conditions = list()
#make s3 happy with uploadify
conditions.append(['starts-with', '$targetname', '']) #variable introduced by this package
conditions.append(['starts-with', '$targetpath', self.options['folder']])
conditions.append({'success_action_status': '200'})
#real conditions
conditions.append(['starts-with', '$key', self.options['folder']])
conditions.append({'bucket': self.post_data['bucket']})
conditions.append({'acl': self.post_data['acl']})
return conditions
def build_post_policy(self, expiration_time):
policy = {'expiration': expiration_time.strftime("%Y-%m-%dT%H:%M:%SZ"),
'conditions': self.conditions,}
return json.dumps(policy)
def update_post_params(self, params):
#instruct s3 that our key is the targetpath
self.build_post_data()
params.update(self.post_data)
params['key'] = params['targetpath']
params['success_action_status'] = '200'
def _uri_encode(str):
try:
# The Uploadify flash component apparently decodes the scriptData once, so we need to encode twice here.
return quote_plus(quote_plus(str, safe='~'), safe='~')
except:
raise ValueError
|
Python
| 0.000001
|
@@ -958,16 +958,137 @@
0 HOURS%0A
+BUCKET_PREFIX = getattr(settings, 'AWS_MEDIA_STORAGE_BUCKET_PREFIX', getattr(settings, 'AWS_BUCKET_PREFIX', None))%0A
%0A%0Aclass
@@ -2952,32 +2952,158 @@
list()%0A %0A
+ path = self.options%5B'folder'%5D%0A if BUCKET_PREFIX:%0A path = os.path.join(BUCKET_PREFIX, path)%0A %0A
#make s3
@@ -3124,16 +3124,16 @@
loadify%0A
-
@@ -3280,38 +3280,20 @@
tpath',
-self.options%5B'folder'%5D
+path
%5D)%0A
@@ -3431,38 +3431,20 @@
'$key',
-self.options%5B'folder'%5D
+path
%5D)%0A
|
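The bucket-prefix support added above boils down to prepending an optional prefix to the upload folder before it is used in the POST policy conditions; a short sketch with BUCKET_PREFIX standing in for the Django setting read in the record:

import os

BUCKET_PREFIX = "media"  # assumed value; None or "" disables the prefix

def build_key_path(folder, bucket_prefix=BUCKET_PREFIX):
    path = folder
    if bucket_prefix:
        path = os.path.join(bucket_prefix, path)
    return path

path = build_key_path("uploads/2024")
conditions = [
    ['starts-with', '$targetpath', path],  # both constraints now use the
    ['starts-with', '$key', path],         # prefixed path
]
print(conditions)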
ecdf23c53c34a3773e2ca10be2c445c01381a7b0
|
on 64 bits python array.array("L").itemsize is 8
|
classification.py
|
classification.py
|
from feature_extraction import FEATURE_DATATYPE
import numpy
import cv2
CLASS_DATATYPE= numpy.uint16
CLASS_SIZE= 1
CLASSES_DIRECTION= 0 #vertical - a classes COLUMN
BLANK_CLASS= chr(35) #marks unclassified elements
def classes_to_numpy( classes ):
'''given a list of unicode chars, transforms it into a numpy array'''
import array
#utf-32 starts with constant ''\xff\xfe\x00\x00', then has little endian 32 bits chars
#this assumes little endian architecture!
assert unichr(15).encode('utf-32')=='\xff\xfe\x00\x00\x0f\x00\x00\x00'
int_classes= array.array( "L", "".join(classes).encode('utf-32')[4:])
assert len(int_classes) == len(classes)
classes= numpy.array( int_classes, dtype=CLASS_DATATYPE, ndmin=2) #each class in a column. numpy is strange :(
classes= classes if CLASSES_DIRECTION==1 else numpy.transpose(classes)
return classes
def classes_from_numpy(classes):
'''reverses classes_to_numpy'''
classes= classes if CLASSES_DIRECTION==0 else classes.tranpose()
classes= map(unichr, classes)
return classes
class Classifier( object ):
def train( self, features, classes ):
'''trains the classifier with the classified feature vectors'''
raise NotImplementedError()
@staticmethod
def _filter_unclassified( features, classes ):
classified= (classes != classes_to_numpy(BLANK_CLASS)).reshape(-1)
return features[classified], classes[classified]
def classify( self, features):
'''returns the classes of the feature vectors'''
raise NotImplementedError
class KNNClassifier( Classifier ):
def __init__(self, k=1, debug=False):
self.knn= cv2.KNearest()
self.k=k
self.debug= debug
def train( self, features, classes ):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
if CLASS_DATATYPE!=numpy.float32:
classes= numpy.asarray( classes, dtype=numpy.float32 )
features, classes= Classifier._filter_unclassified( features, classes )
self.knn.train( features, classes )
def classify( self, features):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
retval, result_classes, neigh_resp, dists= self.knn.find_nearest(features, k= 1)
return result_classes
|
Python
| 0.999988
|
@@ -567,16 +567,56 @@
00%5Cx00'%0A
+ assert array.array(%22I%22).itemsize==4%0A
int_
@@ -638,17 +638,17 @@
array( %22
-L
+I
%22, %22%22.jo
|
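The fix above replaces array type code "L" with "I" because "L" maps to a C unsigned long, whose size is platform-dependent; a quick demonstration (printed values assume a 64-bit Unix build):

import array

print('L itemsize:', array.array("L").itemsize)  # 8 on 64-bit Linux/macOS, 4 on Windows
print('I itemsize:', array.array("I").itemsize)  # 4 on common platforms

# The little-endian UTF-32 decoding above needs exactly 4 bytes per char,
# so the patched code asserts the assumption up front:
assert array.array("I").itemsize == 4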
76648057b18055afc3724769aa9240eb477e4533
|
Handle HJSON decode exception
|
main.py
|
main.py
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.txt]
"""
from docopt import docopt
import hjson
if __name__ == '__main__':
options = docopt(__doc__)
try:
chronicle = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
chronicle = hjson.load(chronicle)
except HjsonDecodeError:
print("This chronicle can't be deciphered.")
print(chronicle)
|
Python
| 0.000003
|
@@ -154,20 +154,8 @@
%22%22%22%0A
-from docopt
impo
@@ -164,16 +164,16 @@
docopt%0A
+
import h
@@ -219,16 +219,23 @@
tions =
+docopt.
docopt(_
@@ -449,16 +449,22 @@
except
+hjson.
HjsonDec
@@ -467,24 +467,29 @@
nDecodeError
+ as e
:%0A pr
@@ -527,24 +527,99 @@
ciphered.%22)%0A
+ print(%22L%25d, C%25d: %25s%22 %25 (e.lineno, e.colno, e.msg))%0A exit(1)%0A
print(ch
|
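The exception fix above qualifies the handler as hjson.HjsonDecodeError and reports the error position; the exception carries lineno, colno and msg attributes (exactly what the diff prints). A minimal sketch:

import hjson

try:
    hjson.loads('{')  # truncated input: the object is never closed
except hjson.HjsonDecodeError as e:
    print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))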
e80dce758a17c304fd938dda62f0a5e2e7d7bcec
|
change 1
|
main.py
|
main.py
|
import webapp2
import jinja2
import requests
import os
import sys
import time
import logging
import urllib2
import json
import re
from operator import itemgetter
from datetime import datetime
from google.appengine.ext import db
from webapp2_extras import sessions
from google.appengine.api import mail
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request!
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
def render(self, template, **kw):
self.response.out.write(render_str(template, **kw))
class ToNotify(db.Model):
email = db.StringProperty()
class Main(BaseHandler):
def get(self):
self.render('index.html')
def post(self):
email = self.request.get('email')
if email:
instance = ToNotify(key_name=email,email=email)
instance.put()
self.render('thankyou.html')
else:
self.render('index.html')
config = {}
config['webapp2_extras.sessions'] = {'secret_key': ' ','cookie_args':{'max_age':86400}}
app = webapp2.WSGIApplication([
('/',Main)
],config=config, debug=True)
|
Python
| 0.000005
|
@@ -297,16 +297,29 @@
t mail%0A%0A
+#demo change1
%0A%0Atempla
|
db6203757d145923813c06b62ddf3739bac79991
|
Update __init__.py
|
tendrl/commons/objects/cluster/__init__.py
|
tendrl/commons/objects/cluster/__init__.py
|
from tendrl.commons import objects
class Cluster(objects.BaseObject):
def __init__(self, integration_id=None, public_network=None,
cluster_network=None, node_configuration=None,
conf_overrides=None, node_identifier=None, sync_status=None,
last_sync=None, *args, **kwargs):
super(Cluster, self).__init__(*args, **kwargs)
self.integration_id = integration_id
self.public_network = public_network
self.cluster_network = cluster_network
self.node_configuration = node_configuration
self.conf_overrides = conf_overrides
self.node_identifier = node_identifier
self.sync_status = sync_status
self.last_sync = last_sync
self.value = 'clusters/{0}'
def render(self):
self.value = self.value.format(
self.integration_id or NS.tendrl_context.integration_id
)
return super(Cluster, self).render()
|
Python
| 0.000002
|
@@ -304,16 +304,34 @@
nc=None,
+ is_managed=False,
*args,
@@ -752,16 +752,53 @@
st_sync%0A
+ self.is_managed = is_managed%0A
|
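The tendrl change above is the standard additive API tweak: a new keyword argument with a safe default, so existing call sites keep working; a tiny sketch:

class Cluster(object):
    def __init__(self, integration_id=None, is_managed=False, **kwargs):
        self.integration_id = integration_id
        self.is_managed = is_managed

print(Cluster(integration_id="abc").is_managed)                   # False for old callers
print(Cluster(integration_id="abc", is_managed=True).is_managed)  # True when opted in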
81943166d5b8c2606c1506bb1b6567fd0ce82282
|
update check_dimension and webm supports
|
main.py
|
main.py
|
import os
import logging
from glob import glob
import youtube_dl
from telegram.ext import Updater, MessageHandler, Filters
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
updater = Updater(token='TOKEN') # put here the bot's token
dispatcher = updater.dispatcher
ydl_opts = {
'restrictfilenames': True,
}
def download(bot, update):
for f in glob('*.mp4'):
os.remove(f) # remove old video(s)
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([update.message.text])
for f in glob('*.mp4'): # TODO this way for find the file(s) IMHO is not elegant
bot.send_document(chat_id=update.message.chat_id, document=open(f, 'rb'))
except Exception as e:
bot.sendMessage(chat_id=update.message.chat_id, text='Error')
logger.info(e)
download_handler = MessageHandler(Filters.text, download)
dispatcher.add_handler(download_handler)
updater.start_polling()
updater.idle()
|
Python
| 0
|
@@ -116,16 +116,54 @@
Filters
+%0Afrom vid_utils import check_dimension
%0A%0Aloggin
@@ -242,14 +242,9 @@
)s',
-%0A%09%09%09%09%09
+
leve
@@ -485,19 +485,109 @@
b('*.mp4
-'):
+*') + glob('*.webm*'): # with glob it isn't possible to check multiple extension in one regex
%0A
@@ -772,68 +772,221 @@
.mp4
-'): # TODO this way for find the file(s) IMHO is not elegant
+*') + glob('*.webm*'): # if the video is bigger than 50MB split it%0A check_dimension(f)%0A break # check first file%0A %0A for f in glob('*.mp4*') + glob('*.webm*'): # send document(s)
%0A
@@ -1068,16 +1068,21 @@
'rb'))%0A
+ %0A
exce
@@ -1167,17 +1167,31 @@
t='Error
-'
+: %7B%7D'.format(e)
)%0A
|
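The webm support above works around the fact that a single glob pattern cannot alternate extensions, so two globs are concatenated; the trailing wildcard also catches suffixed names such as ".mp4.part". Sketch:

from glob import glob

def downloaded_files():
    # glob has no {mp4,webm} alternation, hence the concatenation.
    return glob('*.mp4*') + glob('*.webm*')

for f in downloaded_files():
    print(f)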
255d6d990a3de88a03b591c574aa82373287e490
|
make initial connection handle connection error and reconnects with original params
|
tcelery/connection.py
|
tcelery/connection.py
|
from __future__ import absolute_import
try:
from urlparse import urlparse
except ImportError: # py3k
from urllib.parse import urlparse
from functools import partial
from itertools import cycle
from datetime import timedelta
import pika
import logging
from pika.adapters.tornado_connection import TornadoConnection
from pika.exceptions import AMQPConnectionError
from tornado import ioloop
class Connection(object):
content_type = 'application/x-python-serialize'
def __init__(self, io_loop=None):
self.channel = None
self.connection = None
self.url = None
self.io_loop = io_loop or ioloop.IOLoop.instance()
def connect(self, url=None, options=None, callback=None):
if url is not None:
self.url = url
purl = urlparse(self.url)
credentials = pika.PlainCredentials(purl.username, purl.password)
virtual_host = purl.path[1:]
host = purl.hostname
port = purl.port
options = options or {}
options = dict([(k.lstrip('DEFAULT_').lower(), v) for k, v in options.items()])
options.update(host=host, port=port, virtual_host=virtual_host,
credentials=credentials)
params = pika.ConnectionParameters(**options)
try:
TornadoConnection(
params, stop_ioloop_on_close=False,
on_open_callback=partial(self.on_connect, callback),
custom_ioloop=self.io_loop)
except AMQPConnectionError:
logging.info('Retrying to connect in 2 seconds')
self.io_loop.add_timeout(
timedelta(seconds=2),
partial(self.connect, url=url,
options=options, callback=callback))
def on_connect(self, callback, connection):
self.connection = connection
self.connection.add_on_close_callback(self.on_closed)
self.connection.channel(partial(self.on_channel_open, callback))
def on_channel_open(self, callback, channel):
self.channel = channel
if callback:
callback()
def on_exchange_declare(self, frame):
pass
def on_basic_cancel(self, frame):
self.connection.close()
def on_closed(self, connection, reply_code, reply_text):
"""This method is invoked by pika when the connection to RabbitMQ is
closed unexpectedly. Since it is unexpected, we will reconnect to
RabbitMQ if it disconnects.
:param pika.connection.Connection connection: The closed connection obj
:param int reply_code: The server provided reply_code if given
:param str reply_text: The server provided reply_text if given
"""
self._channel = None
logging.warning('Connection closed, reopening in 5 seconds: (%s) %s',
reply_code, reply_text)
connection.add_timeout(5, self.connect)
def publish(self, body, exchange=None, routing_key=None,
mandatory=False, immediate=False, content_type=None,
content_encoding=None, serializer=None,
headers=None, compression=None, retry=False,
retry_policy=None, declare=[], **properties):
assert self.channel
content_type = content_type or self.content_type
properties = pika.BasicProperties(content_type=content_type)
self.channel.basic_publish(exchange=exchange, routing_key=routing_key,
body=body, properties=properties,
mandatory=mandatory, immediate=immediate)
def consume(self, queue, callback, x_expires=None, persistent=True):
assert self.channel
self.channel.queue_declare(self.on_queue_declared, queue=queue,
exclusive=False, auto_delete=True,
nowait=True, durable=persistent,
arguments={'x-expires': x_expires})
self.channel.basic_consume(callback, queue, no_ack=True)
def on_queue_declared(self, *args, **kwargs):
pass
class ConnectionPool(object):
def __init__(self, limit, io_loop=None):
self._limit = limit
self._connections = []
self._connection = None
self.io_loop = io_loop
def connect(self, broker_url, options=None, callback=None):
self._on_ready = callback
for _ in range(self._limit):
conn = Connection(io_loop=self.io_loop)
conn.connect(broker_url, options=options,
callback=partial(self._on_connect, conn))
def _on_connect(self, connection):
self._connections.append(connection)
if len(self._connections) == self._limit:
self._connection = cycle(self._connections)
if self._on_ready:
self._on_ready()
def connection(self):
assert self._connection is not None
return next(self._connection)
|
Python
| 0
|
@@ -775,16 +775,83 @@
l = url%0A
+ if options is not None:%0A self.options = options%0A
@@ -1042,32 +1042,37 @@
l.port%0A%0A
+self.
options = option
@@ -1061,24 +1061,29 @@
f.options =
+self.
options or %7B
@@ -1088,24 +1088,29 @@
%7B%7D%0A
+self.
options = di
@@ -1159,16 +1159,21 @@
k, v in
+self.
options.
@@ -1186,24 +1186,29 @@
)%5D)%0A
+self.
options.upda
@@ -1274,32 +1274,37 @@
+
+
credentials=cred
@@ -1358,16 +1358,21 @@
eters(**
+self.
options)
@@ -1537,16 +1537,94 @@
lback),%0A
+ on_close_callback=partial(self.on_closed, callback=callback),%0A
@@ -2030,70 +2030,8 @@
ion%0A
- self.connection.add_on_close_callback(self.on_closed)%0A
@@ -2411,16 +2411,31 @@
ply_text
+, callback=None
):%0A
@@ -3041,16 +3041,24 @@
eout(5,
+partial(
self.con
@@ -3061,16 +3061,36 @@
.connect
+, callback=callback)
)%0A%0A d
|
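The reconnect fix above stores the original url and options on the instance so a retry re-dials with the same parameters rather than the bare method defaults; a hedged, tornado-free sketch of that idea (_dial stands in for TornadoConnection, IOError for AMQPConnectionError):

import logging
import threading

class Connection(object):
    def __init__(self):
        self.url = None
        self.options = None
        self.attempts = 0

    def connect(self, url=None, options=None):
        # Keep the parameters from the first call so retries reuse them.
        if url is not None:
            self.url = url
        if options is not None:
            self.options = options
        try:
            self._dial()
        except IOError:
            logging.info('Retrying to connect in 2 seconds')
            threading.Timer(2.0, self.connect).start()  # no args: stored params

    def _dial(self):
        self.attempts += 1
        if self.attempts < 2:
            raise IOError("broker not up yet")
        print("connected to", self.url, "with", self.options)

conn = Connection()
conn.connect("amqp://guest:guest@localhost:5672//", options={"heartbeat": 30})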
d25f860c56e4e51203574ee8da4297c7aaa6195a
|
Bump version to 0.1.3
|
td_biblio/__init__.py
|
td_biblio/__init__.py
|
"""TailorDev Biblio
Scientific bibliography management with Django.
"""
__version__ = '0.1.2'
|
Python
| 0.000001
|
@@ -90,7 +90,7 @@
0.1.
-2
+3
'%0A
|
8d63ed96de0aa29cf709d28b1ad57385fcca3de2
|
Fix for depends on (#44)
|
tdi_python/tdiInfo.py
|
tdi_python/tdiInfo.py
|
#
# Copyright(c) 2021 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from ctypes import *
import pdb
class TdiInfo:
"""
This class contains abstractions on the TDI Runtime C API for all
tables described by a TDI Runtime Info object. This includes infos
for both p4 programs and for pd_fixed APIs exposed through
TDI Runtime.
"""
def __init__(self, cintf, name):
self._cintf = cintf
self.name = name
self.tbl_id_map = {}
self.tbl_dep_map = {}
self.lrn_id_map = {}
self.nested_tables = []
sts = self._init_handle()
if not sts == 0:
print("TdiInfo init hanle failed for {}!".format(self.name))
return -1
sts = self._init_tables()
if not sts == 0:
print("TdiInfo init tables failed for {}!".format(self.name))
return -1
def _init_handle(self):
# get tdi_info handler
self._handle = self._cintf.handle_type()
sts = self._cintf.get_driver().tdi_info_get(self._cintf.get_dev_id(), self.name, byref(self._handle))
if not sts == 0:
print("CLI Error: get info failed for {}".format(self.name))
return sts
def _init_tables(self):
num_tables = c_int(-1)
sts = self._cintf.get_driver().tdi_num_tables_get(self._handle, byref(num_tables))
if not sts == 0:
print("CLI Error: get num tables for {} failed with status {}.".format(self.name, self._cintf.err_str(sts)))
return sts
array_type = self._cintf.handle_type * num_tables.value
tables = array_type()
sts = self._cintf.get_driver().tdi_tables_get(self._handle, tables)
if not sts == 0:
print("CLI Error: get table handles for {} failed with status {}.".format(self.name, self._cintf.err_str(sts)))
return sts
        # Python Tables Object Initialization
# print("{:40s} | {:30s} | {:10s}".format("TableName","Table Type","Status"))
self.tables = {}
for table in tables:
table_info = self._cintf.handle_type()
self._cintf.get_driver().tdi_table_info_get(table, byref(table_info));
tbl_obj = self._cintf.TdiTable(self._cintf, table, self, table_info)
if tbl_obj == -1:
print("CLI Error: bad table object init")
return -1
elif tbl_obj.table_type_map(tbl_obj.get_type()) == "INVLD":
print("CLI Error: bad table type init")
return -1
else:
tbl_id = c_uint(0)
sts = self._cintf.get_driver().tdi_table_id_from_handle_get(table_info, byref(tbl_id))
tbl_obj.set_id(tbl_id.value)
has_const_default_action = c_bool(False)
sts = self._cintf.get_driver().tdi_table_has_const_default_action(table_info,
byref(has_const_default_action))
tbl_obj.set_has_const_default_action(has_const_default_action)
self.tables[tbl_obj.name] = tbl_obj
self.tbl_id_map[tbl_id.value] = tbl_obj
        # Tables Dependencies Initialization
for tbl_id, tbl_obj in self.tbl_id_map.items():
table_hdl = self._cintf.handle_type()
self._cintf.get_driver().tdi_table_from_id_get(self._handle, tbl_id, byref(table_hdl))
table_info = self._cintf.handle_type()
self._cintf.get_driver().tdi_table_info_get(table_hdl, byref(table_info))
num_deps = c_int()
self._cintf.get_driver().tdi_num_tables_this_table_depends_on_get(table_info, byref(num_deps))
if num_deps.value == 0:
continue
array_type = c_uint * num_deps.value
deps = array_type()
self._cintf.get_driver().tdi_tables_this_table_depends_on_get(table_info_hdl, tbl_id, deps)
self.tbl_dep_map[tbl_id] = deps
# Nested tables are to be included in the parent table from depends_on field
if tbl_obj.table_type in self.nested_tables:
prefix = self.tbl_id_map[deps[0]].name
# New name is used to create node tree
new_name = prefix + tbl_obj.name[tbl_obj.name.rfind('.'):]
self.tables[new_name] = tbl_obj
del self.tables[tbl_obj.name]
tbl_obj.name = new_name
return 0
def _init_learns(self):
num_learns = c_int(-1)
sts = self._cintf.get_driver().tdi_num_learns_get(self._handle, byref(num_learns))
if not sts == 0:
print("CLI Error: get num learns for {} failed with status {}.".format(self.name, self._cintf.err_str(sts)))
return sts
array_type = self._cintf.handle_type * num_learns.value
learns = array_type()
sts = self._cintf.get_driver().tdi_learns_get(self._handle, learns)
if not sts == 0:
print("CLI Error: get learn handles for {} failed with status {}.".format(self.name, self._cintf.err_str(sts)))
return sts
self.learns = {}
for learn in learns:
lrn_obj = self._cintf.TdiLearn(self._cintf, learn, self)
if lrn_obj == -1:
return -1
lrn_id = c_uint(0)
sts = self._cintf.get_driver().tdi_learn_id_get(learn, byref(lrn_id))
lrn_obj.set_id(lrn_id.value)
self.learns[lrn_obj.name] = lrn_obj
self.lrn_id_map[lrn_id.value] = lrn_obj
return 0
|
Python
| 0
|
@@ -4437,20 +4437,8 @@
info
-_hdl, tbl_id
, de
|
(Note: the one-hunk fix above corrects a call that referenced the undefined name table_info_hdl and passed a stray tbl_id argument; the patched call uses the table_info handle already in scope.)
c749e5e4c47a9a63dc0e44bbc8df3b103dc1db7c
|
update to screen manager
|
main.py
|
main.py
|
'''
# Author: Aaron Gruneklee, Michael Asquith
# Created: 2014.12.08
# Last Modified: 2014.12.19
this is the main controller class; it is responsible for displaying the 3 views and
controls the 5 input buttons.
'''
from kivy import require
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.properties import ListProperty, ObjectProperty
# from codeblock import CodeBlock
from functionlist import FunctionList
from kivy.uix.screenmanager import ScreenManager, Screen
from programmerview import ProgrammerView
from robotcontroller import RobotController
#Kivy version check
require('1.8.0')
__version__ = '0.1'
class FunctionalProgrammerWidget(BoxLayout):
# set all the properties of the Controller
text_colour = ListProperty([1, 0, 0, 1])
# current_view = ObjectProperty(None)
# programmer_view = ObjectProperty(None)
# run_View = ObjectProperty(None)
# debug_View = ObjectProperty(None)
save_Buttton = ObjectProperty(None)
load_Buttton = ObjectProperty(None)
run_Buttton = ObjectProperty(None)
debug_Buttton = ObjectProperty(None)
exit_Buttton = ObjectProperty(None)
def __init__(self, **kwargs):
super(FunctionalProgrammerWidget, self).__init__(**kwargs)
pass
''' saves user program to user defined location '''
def save_Button(self):
self.ids.save_Button.text = 'not yet implemented'
''' loads a user program from a user defined location '''
def load_Button(self):
self.ids.load_Button.text = 'not yet implemented'
''' displays maze and robot traversing through the maze '''
def run_Button(self):
program = open('user_file', 'r').read()
if 'x' not in program:
self.ids.run_Button.text = program
run_robot = RobotController()
run_robot.executeProgram('user_file')
else:
self.ids.run_Button.text = 'variables not defined'
def reset_Button(self):
self.ids.reset_button.text = 'not yet implemented'
''' displays maze and robot traversing through the maze alongside
the user program as it steps through the code'''
def debug_Button(self):
# current_view = debug_View
self.ids.debug_Button.text = 'not yet implemented'
class FPWScreenManager(ScreenManager):
pass
class FunctionalProgrammerApp(App):
def build(self):
return FunctionalProgrammerWidget()
if __name__ == '__main__':
# import pdb; pdb.set_trace()
FunctionalProgrammerApp().run()
|
Python
| 0
|
@@ -1331,20 +1331,275 @@
-pass
+self.current_view = ScreenManager()%0A self.pv = ProgrammerView(name='pv')%0A self.current_view.add_widget(pv)%0A self.rv = RunScreen(name='rv')%0A self.current_view.add_widget(rv)%0A self.current_view.current = 'pv'%0A%0A self.add
%0A
@@ -2185,32 +2185,116 @@
%0A
+ self.current_view.current = 'rv'%0A self.rv.run_code%0A #
run_robot = Rob
@@ -2323,16 +2323,18 @@
+ #
run_rob
|
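The screen-manager update above wires two screens onto a kivy ScreenManager and flips between them via .current; a minimal runnable sketch (needs a display; the ProgrammerView/RunScreen names follow the record and are assumptions):

from kivy.app import App
from kivy.uix.screenmanager import ScreenManager, Screen

class ProgrammerView(Screen):
    pass

class RunScreen(Screen):
    pass

class DemoApp(App):
    def build(self):
        sm = ScreenManager()
        sm.add_widget(ProgrammerView(name='pv'))
        sm.add_widget(RunScreen(name='rv'))
        sm.current = 'pv'  # later: sm.current = 'rv' to show the run view
        return sm

if __name__ == '__main__':
    DemoApp().run()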
f06f81251d7c8d1a12e88d54c1856756979edb7d
|
Fix tests for Django 1.5
|
django_socketio/example_project/settings.py
|
django_socketio/example_project/settings.py
|
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
full_path = lambda *parts: os.path.join(PROJECT_ROOT, *parts)
example_path = full_path("..", "..")
if example_path not in sys.path:
sys.path.append(example_path)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.db',
}
}
SECRET_KEY = 'i_!&$f5@^%y*i_qa$*o&0$3q*1dcv^@_-l2po8-%_$_gwo+i-l'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
LOGIN_URL = "/admin/"
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django_socketio',
'chat',
)
|
Python
| 0.00001
|
@@ -1071,16 +1071,17 @@
lates%22)%0A
+#
LOGIN_UR
|
b20614673a122bbc6940f9103275dcf84e3f298f
|
check if article exists in wallabag before adding
|
main.py
|
main.py
|
import asyncio
import logging
import sys
from time import mktime
from urllib.parse import urljoin
import aiohttp
import feedparser
import yaml
from raven import Client
from wallabag_api.wallabag import Wallabag
import github_stars
logger = logging.getLogger()
logger.handlers = []
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
logger.setLevel(logging.DEBUG)
with open("config.yaml", 'r') as stream:
try:
config = yaml.load(stream)
except (yaml.YAMLError, FileNotFoundError) as exception:
config = None
exit(1)
ch = logging.StreamHandler(stream=sys.stdout)
ch.setLevel(logging.WARNING if "debug" not in config or not config["debug"] else logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
fh = logging.FileHandler('debug.log')
fh.setFormatter(formatter)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with open("sites.yaml", 'r') as stream:
try:
sites = yaml.load(stream)
except (yaml.YAMLError, FileNotFoundError) as exception:
logger.error(exception)
sites = None
exit(1)
if "sentry_url" in config and ("debug" not in config or not config["debug"]):
client = Client(
dsn=config["sentry_url"],
processors=(
'raven.processors.SanitizePasswordsProcessor',
)
)
async def fetch(session, url):
try:
async with session.get(url) as response:
return await response.text()
except Exception as e:
logging.exception("failed to fetch {url}".format(url=url))
if 'client' in locals():
client.captureException(data={url:url})
async def main(loop, sites):
token = await Wallabag.get_token(**config["wallabag"])
async with aiohttp.ClientSession(loop=loop) as session:
wall = Wallabag(host=config["wallabag"]["host"], client_secret=config["wallabag"]["client_secret"],
client_id=config["wallabag"]["client_id"], token=token, aio_sess=session)
sites = github_stars.get_starred_repos(config["github_username"], sites)
await asyncio.gather(*[handle_feed(session, wall, sitetitle, site) for sitetitle, site in sites.items()])
async def handle_feed(session, wall, sitetitle, site):
logger.info("Downloading feed: " + sitetitle)
rss = await fetch(session, site["url"])
logger.info("Parsing feed: " + sitetitle)
f = feedparser.parse(rss)
logger.debug("finished parsing: " + sitetitle)
# feedtitle = f["feed"]["title"]
if "latest_article" in site:
for article in f.entries:
if article.title == site["latest_article"]:
logger.debug("already added: " + article.title)
break
logger.info("article found: " + article.title)
taglist = [sitetitle]
if site["tags"]:
taglist.extend(site["tags"])
tags = ",".join(taglist)
if "published_parsed" in article:
published = mktime(article.published_parsed)
elif "updated_parsed" in article:
published = mktime(article.updated_parsed)
else:
published = None
logger.info("add to wallabag: " + article.title)
if "github" in site and site["github"]:
title = sitetitle + ": " + article.title
else:
title = article.title
if "debug" not in config or not config["debug"]:
await wall.post_entries(url=urljoin(site["url"], article.link), title=title, tags=tags)
else:
logger.debug("no latest_article: " + sitetitle)
if f.entries:
sites[sitetitle]["latest_article"] = f.entries[0].title
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main(loop, sites))
with open("sites.yaml", 'w') as stream:
yaml.dump(sites, stream, default_flow_style=False)
|
Python
| 0
|
@@ -1624,16 +1624,17 @@
ta=%7Burl:
+
url%7D)%0A%0A%0A
@@ -3394,32 +3394,275 @@
= article.title%0A
+ url = urljoin(site%5B%22url%22%5D, article.link)%0A exists = await wall.entries_exists(url)%0A print(exists,url)%0A if exists%5B%22exists%22%5D:%0A logger.info(%22already found in wallabag: %22 + article.title)%0A
if %22
@@ -3757,39 +3757,8 @@
=url
-join(site%5B%22url%22%5D, article.link)
, ti
|
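The wallabag change above asks the server whether a URL is already stored (entries_exists, as used in the diff) before posting it; a self-contained sketch with a fake client standing in for wallabag_api.Wallabag and an assumed {"exists": ...} response shape:

import asyncio
from urllib.parse import urljoin

class FakeWallabag:
    """Stand-in exposing the two calls used above."""
    def __init__(self):
        self.stored = {"https://example.com/a"}

    async def entries_exists(self, url):
        return {"exists": url in self.stored}

    async def post_entries(self, url, title):
        self.stored.add(url)
        print("added:", title)

async def add_if_new(wall, base_url, link, title):
    url = urljoin(base_url, link)
    exists = await wall.entries_exists(url)
    if exists["exists"]:
        print("already found in wallabag:", title)
        return
    await wall.post_entries(url=url, title=title)

wall = FakeWallabag()
asyncio.run(add_if_new(wall, "https://example.com/", "/a", "old post"))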
3fbbba8dae5c97cedf414eea8a39482c01a269e6
|
Add `debug=True` to avoid restarting the server after each change
|
main.py
|
main.py
|
import io
import json
import logging
import os
import pdb
import traceback
from logging import config
from functools import wraps
from flask import (
Flask,
render_template,
request,
send_file,
send_from_directory,
)
app = Flask(__name__)
config.fileConfig('logger.conf')
logger = logging.getLogger('video_annotation')
@app.route('/', methods=['GET'])
def home():
return render_template('login.html')
@app.route('/js/<path:path>')
def send_js(path):
return send_from_directory('www/js', path)
@app.route('/css/<path:path>')
def send_css(path):
return send_from_directory('www/css', path)
def _error_as_json(ex, status=500, trace=True):
logger.error(" -- Got exception in the tagger backend!")
logger.error(" -- %r" % ex)
if trace:
logger.error(traceback.format_exc())
return json.dumps({'error': "{}".format(ex)}), status
if __name__ == '__main__':
app.run('0.0.0.0', port=5152)
|
Python
| 0.000003
|
@@ -946,10 +946,22 @@
ort=5152
+, debug=True
)%0A
|
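The one-line change above enables Flask's debug mode, which turns on the Werkzeug reloader (no manual restart after each edit) and the interactive debugger; it should never be left on in production. Sketch:

from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    app.run('0.0.0.0', port=5152, debug=True)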
4bd4281f85649468c7e8b6f1c3abaad5ec2a8c1e
|
Set first column to display in outputting functions
|
main.py
|
main.py
|
import Library
import IOHelper
import argparse
import sys
from tabulate import tabulate
wordgensettings = {}
formrules = {}
def add():
'''Interface for addWord().'''
meaning = input("Enter meaning in English: ")
word = input("Enter word in conlang: ")
form = input("Enter part of speech (verb/noun/other): ")
if Library.addWord(meaning, word, form) == 0:
print("Word added")
else:
print("An error occured")
def list():
'''Interface for listWords().'''
t = IOHelper.chooseOption("Enter list type", ["all", "form"])
if t == "form":
pos = ["verb", "noun", "other"]
f = IOHelper.chooseOption("Enter desired part of speech", pos)
else:
f = None
l = Library.listWords(t, f)
print(tabulate(l, headers=["English", "Conlang", "Form"]))
def quit():
sys.exit(0)
def decline():
''' Allows user to select word to decline and declension, then outputs the
declined word.
'''
word = input("Enter word (in conlang) to decline: ")
try:
result = Library.findConWord(word)
except LookupError:
print("Word not found in database")
return 1
prompt = "Select declension"
dec = IOHelper.createMenu(prompt, Library.getAvailableDeclensions())
output = Library.declineWord(result, dec)
outputWord(output)
def outputWord(word, first="english"):
table = [[], [], []]
headers = []
phonetic = Library.transcribePhonemes(word["word"])
allophonetic = Library.transcribeAllophones(phonetic)
if first == "english":
table[0].append(word["english"])
table[1].append("")
table[2].append("")
headers.append("English")
table[0].append(word["word"])
table[1].append(phonetic)
table[2].append(allophonetic)
headers.append("Conlang")
elif first == "conlang":
table[0].append(word["word"])
table[1].append(phonetic)
table[2].append(allophonetic)
headers.append("Conlang")
table[0].append(word["english"])
table[1].append("")
table[2].append("")
headers.append("English")
for item in word:
if item != "word" and item != "english" and item != "id":
table[0].append(word[item])
table[1].append("")
table[2].append("")
headers.append(item.capitalize())
print(tabulate(table, headers=headers))
def statistics():
'''Interface for getStatistics().'''
print("Words: " + str(Library.getStatistics()))
def search():
'''Interface for searchWords().'''
term = input("Enter search term: ")
results = Library.searchWords(term)
if len(results[0]) == 0 and len(results[1]) == 0:
print("Word not found")
else:
for word in results[0]:
outputWord(word)
print("")
for word in results[1]:
outputWord(word)
print("")
def quickgenerate():
print("Generating using Quickgen...")
def generate():
'''Outputs word according to output type: english (English first),
onlyconlang (No English column), or conlang first.
'''
form = IOHelper.chooseOption("Enter word type", ["noun", "verb", "other"])
english = input("Enter word in English: ")
if Library.wordExists(english):
print("Word already exists!")
w = Library.findEnglishWord(english)
outputWord(w)
return 1
categories = Library.getCategories()
accepted = False
while accepted is not True:
word = Library.generateWord(english, form, categories, wordgensettings,
formrules)
while Library.wordExists(word['word']):
word = Library.generateWord(english, form, categories,
wordgensettings, formrules)
outputWord(word)
accepted = IOHelper.yesNo("Accept word")
Library.addWord(word['english'], word['word'], word['form'])
print("Wod saved in database!")
def loadData(filename):
'''Loads data from config file and passes it to Library.'''
result = IOHelper.parseConfig(filename)
phonemes = result[0]
allophones = result[1]
declensions = result[2]
wordgencats = result[3]
global wordgensettings
wordgensettings = result[4]
global formrules
formrules = result[5]
Library.setPhonemes(phonemes)
Library.setAllophones(allophones)
Library.setCategories(wordgencats)
Library.setDeclensions(declensions)
def export():
'''Interface for exportWords().'''
filename = input("Enter filename to export: ")
Library.exportWords(filename)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-g", "--generator", choices=["quickgen", "builtin"],
help="select generator to use")
args = parser.parse_args()
commands = {"add": add,
"list": list,
"decline": decline,
"statistics": statistics,
"search": search,
"generate": generate,
"export": export,
"quit": quit}
if args.generator == "quickgen":
commands["generate"] = quickgenerate
commandList = ""
for key in commands.keys():
commandList = commandList + key + ", "
commandList = commandList[:-2] + "."
print("Available commands: " + commandList)
loadData("config.txt")
command = input("Please enter a command: ")
while command != "quit":
commands[command]()
command = input("Please enter a command: ")
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -1343,16 +1343,27 @@
d(output
+, %22conlang%22
)%0A%0A%0Adef
@@ -2844,32 +2844,43 @@
outputWord(word
+, %22english%22
)%0A pr
@@ -2939,32 +2939,43 @@
outputWord(word
+, %22conlang%22
)%0A pr
@@ -3910,24 +3910,35 @@
putWord(word
+, %22conlang%22
)%0A ac
|
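The column-ordering change above threads a first argument through outputWord so callers decide whether the English or the conlang column leads; a compact sketch of the same idea with tabulate:

from tabulate import tabulate

def output_word(word, first="english"):
    order = ["english", "word"] if first == "english" else ["word", "english"]
    headers = ["English" if k == "english" else "Conlang" for k in order]
    table = [[word[k] for k in order]]
    print(tabulate(table, headers=headers))

output_word({"english": "water", "word": "aqua"}, first="conlang")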
466eabcb57c590dce1342710c8ae331899046417
|
Simplify postwork
|
main.py
|
main.py
|
import csv
import importlib
import logging
import operator
import os
import time
import sys
from functools import reduce
from datetime import datetime
from dev.logger import logger_setup
from helpers.config import Config
from helpers.data_saver import DataSaver
from helpers.module_loader import ModuleLoader
def init_loggers():
logger_setup(
os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_DEBUG_FILE')),
['ddd_site_parse'], True)
logger_setup(
os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_GRAB_FILE')), [
'grab.document',
'grab.spider.base',
'grab.spider.task',
'grab.spider.base.verbose'
'grab.proxylist',
'grab.stat',
'grab.script.crawl'
]
)
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
return logger
def process_stats(stats):
output = ''
if not stats:
return output
_stats = sorted(stats.items(), key=operator.itemgetter(1), reverse=True)
_max = reduce(lambda a, b: a+b, stats.values())
for row in _stats:
output += 'Code: {}, count: {}% ({} / {})\n'.format(row[0], row[1]/_max * 100, row[1], _max)
return output
def fix_dirs():
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
os.makedirs(Config.get('APP_OUTPUT_DIR'))
log_dir = os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'))
if not os.path.exists(log_dir):
os.makedirs(log_dir)
def load_config():
if len(sys.argv) > 1:
Config.load(os.path.join(os.path.dirname(__file__), 'config'), sys.argv[1])
return True
return False
def main():
# load config
if not load_config():
exit(2)
# output dirs
fix_dirs()
# log
logger = init_loggers()
logger.info(' --- ')
logger.info('Start app...')
# output category for detect save mode
# need for use after parse, but read before for prevent useless parse (if will errors)
cat = Config.get('APP_OUTPUT_CAT')
# parser loader
loader = ModuleLoader('d_parser.{}'.format(Config.get('APP_PARSER')))
d_spider = loader.get('DSpider')
# load post-worker
need_post = Config.get('APP_NEED_POST', '')
if need_post == 'True':
d_post_work = loader.get('do_post_work')
else:
d_post_work = None
# main
try:
# bot parser
logger.info('{} :: Start...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = d_spider(thread_number=threads_counter, try_limit=int(Config.get('APP_TRY_LIMIT')))
bot.run()
# post work
if need_post and d_post_work:
d_post_work()
# save output
saver = DataSaver(bot.result, Config.get('APP_OUTPUT_DIR'), Config.get('APP_OUTPUT_ENC'))
# single file
if cat == '':
saver.save()
# separate categories
else:
saver.save_by_category(cat)
logger.info('End with stats: \n{}'.format(process_stats(bot.status_counter)))
except Exception as e:
logger.fatal('App core fatal error: {}'.format(e))
logger.info('{} :: End...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
if __name__ == '__main__':
main()
|
Python
| 0.000006
|
@@ -2291,194 +2291,8 @@
')%0A%0A
- # load post-worker%0A need_post = Config.get('APP_NEED_POST', '')%0A if need_post == 'True':%0A d_post_work = loader.get('do_post_work')%0A else:%0A d_post_work = None%0A%0A
@@ -2633,33 +2633,39 @@
if
-need_post and d_post_work
+Config.get('APP_NEED_POST', '')
:%0A
@@ -2674,16 +2674,20 @@
+bot.
d_post_w
|
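The simplification above drops the separately loaded do_post_work function and instead calls a post-work method on the spider, guarded directly by the config flag; a sketch with stand-in Config and DSpider classes:

class Config:
    _data = {'APP_NEED_POST': 'True'}

    @classmethod
    def get(cls, key, default=None):
        return cls._data.get(key, default)

class DSpider:
    def run(self):
        print('parsing...')

    def d_post_work(self):
        print('post-processing results...')

bot = DSpider()
bot.run()
if Config.get('APP_NEED_POST', ''):  # any non-empty value enables post work
    bot.d_post_work()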
ce9f5f4072c38f8b31f0d8c01228caede4ff5897
|
disable int export
|
main.py
|
main.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from modules.utils import export_obj
from modules.utils import load_obj
from modules.utils import random_unit_vec
from modules.utils import get_surface_edges
PROCS = 4
NMAX = int(10e6)
ITT = int(10e9)
OPT_ITT = 1
NEARL = 0.003
H = NEARL*1.2
FARL = 0.03
FLIP_LIMIT = NEARL*0.5
EXPORT_ITT = 1000
STAT_ITT = 10
SCALE = [0.009]*3
MOVE = [0.5]*3
#STP = 1.0e-6
STP = 1.0e-7
REJECT_STP = STP*1.0
TRIANGLE_STP = STP*0.1
ATTRACT_STP = STP*0.2
UNFOLD_STP = STP*0.01
COHESION_STP = STP*0.
def main(argv):
from differentialMesh3d import DifferentialMesh3d
from time import time
from modules.helpers import print_stats
from numpy import unique
from numpy import array
from numpy.random import random
from numpy.random import randint
name = argv[0]
fn_obj = './data/base.obj'
fn_out = './res/{:s}'.format(name)
DM = DifferentialMesh3d(NMAX, FARL, NEARL, FARL, PROCS)
data = load_obj(
fn_obj,
sx = SCALE,
mx = MOVE
)
info = DM.initiate_faces(data['vertices'], data['faces'])
if info['minedge']<NEARL:
return
noise = random_unit_vec(DM.get_vnum(), STP*1000.)
DM.position_noise(noise, scale_intensity=-1)
#alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
print(alive_vertices)
DM.optimize_edges(H, FLIP_LIMIT)
for he in xrange(DM.get_henum()):
DM.set_edge_intensity(he, 1.0)
for i in xrange(ITT):
try:
t1 = time()
DM.optimize_position(
REJECT_STP,
TRIANGLE_STP,
ATTRACT_STP,
UNFOLD_STP,
COHESION_STP,
OPT_ITT,
scale_intensity=1
)
if i%10 == 0:
DM.optimize_edges(H, FLIP_LIMIT)
DM.diminish_all_vertex_intensity(0.99)
if i%100 == 0:
alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
#alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
print('number of alive vertices: {:d}'.format(len(alive_vertices)))
if len(alive_vertices)>0:
DM.set_vertices_intensity(array([v for v in alive_vertices]), 1.0)
DM.smooth_intensity(0.08)
if i%STAT_ITT==0:
print_stats(i, time()-t1, DM)
if i%EXPORT_ITT==0:
fn = '{:s}_{:08d}.obj'.format(fn_out, i)
export_obj(DM, 'thing_mesh', fn, write_intensity=True)
except KeyboardInterrupt:
break
if __name__ == '__main__' :
import sys
argv = sys.argv
if False:
import pstats, cProfile
fn = './profile/profile'
cProfile.run('main(argv[1:])',fn)
p = pstats.Stats(fn)
p.strip_dirs().sort_stats('cumulative').print_stats()
else:
main(argv[1:])
|
Python
| 0
|
@@ -2452,11 +2452,12 @@
ity=
-Tru
+Fals
e)%0A%0A
|
40b102b00f86bd375bbdab86bdec62f85496f601
|
Add proper logging
|
main.py
|
main.py
|
#!/usr/bin/env python
import RPi.GPIO as GPIO
import datetime
import requests
import settings
import time
import threading
class Pin(object):
URL = settings.API_URL + settings.NAME + '/'
def post(self, data):
data['api_key'] = settings.API_KEY
r = requests.post(self.URL + self.relative_url, data=data)
if settings.DEBUG:
print 'POST:', self.URL + self.relative_url
class Coffee(Pin):
pots = 0
relative_url = 'coffee'
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
self.day = datetime.date.today()
GPIO.setup(self.PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        # Running in its own thread
GPIO.add_event_detect(self.PIN, GPIO.RISING, callback=self.update, bouncetime=5000)
def update(self, signal):
today = datetime.date.today()
if today > self.day:
self.pots = 0
self.day = today
self.pots += 1
self.notipi.blink(2)
# Date formatted like '06. October 2014 13:13:19'
coffee_date = datetime.datetime.now().strftime('%d. %B %Y %H:%M:%S')
self.post({'pots': self.pots, 'datetime': coffee_date})
time.sleep(1)
self.notipi.blink(2)
if settings.DEBUG:
print 'New coffee pot:', coffee_date
class Light(Pin):
relative_url = 'status'
interval = 60 * 30 # 30min
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
self.status = None
GPIO.setup(self.PIN, GPIO.IN)
        # Running in its own thread
GPIO.add_event_detect(self.PIN, GPIO.BOTH, callback=self.update)
# Update once every hour too
self.periodic_update()
def update(self, signal=0):
time.sleep(0.2)
if GPIO.input(self.PIN) == GPIO.LOW:
status = 'true'
else:
status = 'false'
# Only update if status has changed
if self.status != status:
self.status = status
self.post({'status': status})
self.notipi.blink()
if settings.DEBUG:
print 'Light status updated:', status
def periodic_update(self):
self.update()
threading.Timer(self.interval, self.periodic_update).start()
class Led(Pin):
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
GPIO.setup(self.PIN, GPIO.OUT)
def blink(self, n=1):
for _ in range(n):
GPIO.output(self.PIN, False)
time.sleep(0.3)
GPIO.output(self.PIN, True)
time.sleep(0.3)
class Notipi(object):
def __init__(self):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
self.led = Led(self, settings.LED_PIN)
self.coffee = Coffee(self, settings.BUTTON_PIN)
self.light = Light(self, settings.LIGHT_PIN)
self.blink(5)
def blink(self, *args, **kwargs):
self.led.blink(*args, **kwargs)
def main():
notipi = Notipi()
# Wait forever
while True:
time.sleep(1)
if __name__ == '__main__':
main()
|
Python
| 0.000013
|
@@ -51,24 +51,39 @@
rt datetime%0A
+import logging%0A
import reque
@@ -86,16 +86,16 @@
equests%0A
-
import s
@@ -228,16 +228,142 @@
data):%0A
+ logging.debug('Ready to send a POST request for %7Burl%7D with data %7Bdata%7D'.format(url=self.relative_url, data=data))%0A
@@ -488,86 +488,91 @@
-if settings.DEBUG:%0A print 'POST:', self.URL + self.relative_url
+logging.debug('POST Request sent with response %7Bresponse%7D'.format(response=r.text))
%0A%0A%0Ac
@@ -955,16 +955,63 @@
me=5000)
+%0A logging.info('Coffee button is ready')
%0A%0A de
@@ -1470,75 +1470,85 @@
-if settings.DEBUG:%0A print 'New coffee pot:', coffee_date
+logging.info('New coffee pot at %7Bdate%7D'.format(date=datetime.datetime.now()))
%0A%0A%0Ac
@@ -1957,16 +1957,62 @@
update()
+%0A logging.info('Light sensor is ready')
%0A%0A de
@@ -2378,80 +2378,84 @@
-if settings.DEBUG:%0A print 'Light status updated:', status
+logging.debug('Light status changed to %7Bstatus%7D'.format(status=self.status))
%0A%0A
@@ -2720,16 +2720,53 @@
PIO.OUT)
+%0A logging.info('LED is ready')
%0A%0A de
@@ -2949,16 +2949,85 @@
ep(0.3)%0A
+ logging.debug('LED blinked %7Btimes%7D time(s)'.format(times=n))%0A
%0A%0Aclass
@@ -3408,24 +3408,259 @@
-notipi = Notipi(
+# Logging%0A log_level = logging.DEBUG if settings.DEBUG else logging.INFO%0A logging.basicConfig(format='%25(asctime)s %25(message)s', level=log_level)%0A%0A logging.info('Starting NotiPi')%0A notipi = Notipi()%0A logging.info('NotPi handlers started'
)%0A
|
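The logging change above replaces settings.DEBUG-guarded prints with the logging module: one basicConfig call picks the level from the flag, and the rest of the code logs unconditionally. Minimal sketch (DEBUG stands in for settings.DEBUG):

import logging

DEBUG = True  # stands in for settings.DEBUG

log_level = logging.DEBUG if DEBUG else logging.INFO
logging.basicConfig(format='%(asctime)s %(message)s', level=log_level)

logging.info('Starting NotiPi')
logging.debug('LED blinked {times} time(s)'.format(times=2))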
ccceeb75740b0b39a7c43a09dbf684651803f12a
|
Remove some leftover debug statements
|
pdns_cassandra/pdns_cassandra.py
|
pdns_cassandra/pdns_cassandra.py
|
'''
Cassandra remote backend for PowerDNS
'''
__author__ = 'Ruben Kerkhof <ruben@tilaa.com>'
__license__ = 'MIT'
__version__ = '0.0.1'
import os
import cassandra.cluster
import cassandra.query
from flask import Flask, jsonify, abort, request
app = Flask(__name__)
@app.errorhandler(404)
def return_404(error):
return jsonify(result=False), 404
def get_or_404(query, *args):
result = db_session.execute(query, *args)
if not result:
abort(404)
return result
@app.route('/lookup/<qname>/<qtype>')
def lookup(qname, qtype):
''' do a basic query '''
app.logger.debug(request.headers)
rrset = []
if qtype == 'ANY':
rrset = get_or_404(
'SELECT * FROM records WHERE qname = %s', (qname,)
)
else:
rrset = get_or_404(
'SELECT * FROM records WHERE qname = %s AND qtype = %s',
(qname, qtype)
)
return jsonify(result=rrset)
@app.route('/getDomainMetadata/<name>/<kind>')
def get_domain_metadata(name, kind):
''' get metadata for a domain '''
result = []
rrset = get_or_404(
'SELECT content FROM domain_metadata WHERE name = %s and kind = %s',
(name, kind)
)
for rr in rrset:
result.append(rr['content'])
return jsonify(result=result)
@app.route('/list/<qname>')
def axfr(qname):
''' AXFR requests '''
rrset = get_or_404(
'SELECT * FROM records WHERE qname = %s', (qname,)
)
return jsonify(result=rrset)
@app.route('/getDomainInfo/<zone>')
def get_domain_info(zone):
''' get info for a domain '''
result = 1
return jsonify(result=result)
rows = get_or_404(
'SELECT * FROM domains WHERE zone = %s LIMIT 1', (zone,)
)
r = rows[0]
result = dict(
zone=r['zone'],
kind=r['kind'],
masters=r['masters'],
id=1,
serial=1,
notified_serial=1,
last_check=0,
)
@app.route('/superMasterBackend/<ip>/<domain>', methods=['POST'])
def super_master_backend(ip, domain):
''' check if we can be a slave for a domain '''
for key, value in request.form.items(multi=True):
if 'content' in key:
rows = db_session.execute(
'''
SELECT account from supermasters
WHERE ip = %s AND nameserver = %s
''',
(ip, value)
)
if not rows:
continue
#if rows[0]['account'] is None:
# remotebackend doesn't like json null
# return jsonify(result=True)
return jsonify(result={'account': rows[0]['account']})
abort(404)
@app.route('/createSlaveDomain/<ip>/<domain>', methods=['PUT'])
def create_slave_domain(ip, domain):
''' create a new slave domain '''
db_session.execute(
"""
INSERT INTO domains (zone, kind, masters)
VALUES (%s, 'SLAVE', %s)
""", (domain, [ip]))
return jsonify(result=True)
if __name__ == '__main__':
app.config['HOST'] = os.getenv('HOST', '::1')
    app.config['PORT'] = os.getenv('PORT', 5000)
app.config['DEBUG'] = os.getenv('DEBUG', False)
app.config['KEYSPACE'] = os.getenv('KEYSPACE', 'powerdns')
cassandra_nodes = os.getenv('CASSANDRA_NODES')
if not cassandra_nodes:
raise SystemExit("CASSANDRA_NODES is not set")
app.config['cassandra_nodes'] = cassandra_nodes.split(',')
cluster = cassandra.cluster.Cluster(app.config['cassandra_nodes'])
db_session = cluster.connect(app.config['KEYSPACE'])
db_session.row_factory = cassandra.query.dict_factory
app.run(host=app.config['HOST'], port=app.config['PORT'])
|
Python
| 0.000004
|
@@ -581,46 +581,8 @@
''%0A%0A
- app.logger.debug(request.headers)%0A
@@ -1551,57 +1551,8 @@
'''%0A
- result = 1%0A return jsonify(result=result)%0A
@@ -1841,16 +1841,50 @@
,%0A )%0A
+ return jsonify(result=result)%0A
%0A%0A@app.r
|
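Decoding the three hunks above: the first drops the app.logger.debug(request.headers) call from lookup, and the other two fix get_domain_info by removing the leftover early return and re-adding the return after the result dict is built. A sketch of the function as the diff leaves it (the route decorator and helpers are unchanged):

@app.route('/getDomainInfo/<zone>')
def get_domain_info(zone):
    ''' get info for a domain '''
    rows = get_or_404(
        'SELECT * FROM domains WHERE zone = %s LIMIT 1', (zone,)
    )
    r = rows[0]
    result = dict(
        zone=r['zone'],
        kind=r['kind'],
        masters=r['masters'],
        id=1,
        serial=1,
        notified_serial=1,
        last_check=0,
    )
    return jsonify(result=result)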
a658b1268f8a2a31d3a5cb56ab0b12f8290d474c
|
Add functions to calculate cluster statistics averages over many realizations
|
percolation/analysis/clusters.py
|
percolation/analysis/clusters.py
|
import numpy as np
# % Single value % #
def cluster_densities(count, L):
return count/(L*L)
def percolating_cluster_mass(size, percolated):
idx_percolated = np.where(percolated > 0)[0]
if idx_percolated.size == 0:
return 0
return np.average(size[idx_percolated], weights=percolated[idx_percolated])
def percolating_cluster_density(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
def percolating_cluster_strength(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
# % Multiple values (list) % #
def cluster_densities_list(count, L):
data = [cluster_densities(count[i], L[i]) for i in range(L.size)]
return data
def percolating_cluster_mass_list(size, percolated):
data = [percolating_cluster_mass(size[i], percolated[i]) for i in range(len(size))]
return np.array(data)
def percolating_cluster_density_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
def percolating_cluster_strength_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
|
Python
| 0
|
@@ -1111,28 +1111,549 @@
ist(size, percolated)/(L*L)%0A
+%0A%0A# %25 Averaged values over many realizations %25 #%0Adef percolating_cluster_mass_average(size, percolated, p_percolation):%0A return percolating_cluster_mass_list(size, percolated) * p_percolation%0A%0A%0Adef percolating_cluster_density_average(size, percolated, p_percolation, L):%0A return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)%0A%0A%0Adef percolating_cluster_strength_average(size, percolated, p_percolation, L):%0A return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)%0A%0A
|
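Decoded, the single hunk appends realization-averaged variants that scale the per-realization percolating-cluster mass by the percolation probability. The added block, reflowed:

# % Averaged values over many realizations % #
def percolating_cluster_mass_average(size, percolated, p_percolation):
    return percolating_cluster_mass_list(size, percolated) * p_percolation


def percolating_cluster_density_average(size, percolated, p_percolation, L):
    return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)


def percolating_cluster_strength_average(size, percolated, p_percolation, L):
    return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)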
eb446496cf625dc99fea2f15cf04d29bcde57453
|
Add an explicit gc call in the combiner script (#5658)
|
hail/python/scripts/run_combiner.py
|
hail/python/scripts/run_combiner.py
|
"""A high level script for running the hail gVCF combiner/joint caller"""
import argparse
import time
import sys
import uuid
import hail as hl
from hail.experimental import vcf_combiner as comb
MAX_COMBINER_LENGTH = 100
DEFAULT_REF = 'GRCh38'
def chunks(seq, size):
return (seq[pos:pos + size] for pos in range(0, len(seq), size))
def run_combiner(sample_list, json, out_path, tmp_path, summary_path=None, overwrite=False):
# make the temp path a directory, no matter what
tmp_path += f'/combiner-temporary/{uuid.uuid4()}/'
vcfs = [comb.transform_one(vcf)
for vcf in hl.import_vcfs(sample_list, json, array_elements_required=False)]
combined = [comb.combine_gvcfs(mts) for mts in chunks(vcfs, MAX_COMBINER_LENGTH)]
if len(combined) == 1:
combined[0].write(out_path, overwrite=overwrite)
else:
hl.utils.java.info(f'Writing combiner temporary files to: {tmp_path}')
i = 0
while len(combined) > 1:
pad = len(str(len(combined)))
hl.experimental.write_matrix_tables(combined, tmp_path + f'{i}/', overwrite=True)
paths = [tmp_path + f'{i}/' + str(n).zfill(pad) + '.mt' for n in range(len(combined))]
i += 1
wmts = [hl.read_matrix_table(path) for path in paths]
combined = [comb.combine_gvcfs(mts) for mts in chunks(wmts, MAX_COMBINER_LENGTH)]
combined[0].write(out_path, overwrite=overwrite)
if summary_path is not None:
mt = hl.read_matrix_table(out_path)
comb.summarize(mt).rows().write(summary_path, overwrite=overwrite)
def build_sample_list(sample_map_file, sample_list_file=None):
if sample_list_file is None:
with open(sample_map_file) as smap:
return [l.strip().split('\t')[1] for l in smap]
# else
with open(sample_map_file) as smap:
sample_map = dict()
for l in smap:
k, v = l.strip().split('\t')
sample_map[k] = v
with open(sample_list_file) as slist:
sample_set = {l.strip() for l in slist}
sample_lst = list(sample_set)
sample_lst.sort()
samples = []
missing = []
for sample in sample_lst:
try:
samples.append(sample_map[sample])
except KeyError:
missing.append(sample)
    if missing:
        print(f'No gVCF path for samples {", ".join(missing)}', file=sys.stderr)
return samples
def main():
parser = argparse.ArgumentParser(description="Driver for hail's gVCF combiner")
parser.add_argument('--sample-map', help='path to the sample map (must be filesystem local)',
required=True)
    parser.add_argument('--sample-file', help='path to a file containing a line separated list '
                                              'of samples to combine (must be filesystem local)')
parser.add_argument('--tmp-path', help='path to folder for temp output (can be a cloud bucket)',
default='/tmp')
parser.add_argument('--out-file', '-o', help='path to final combiner output', required=True)
parser.add_argument('--summarize', help='if defined, run summarize, placing the rows table '
'of the output at the argument value')
    parser.add_argument('--json', help='json to use for the import of the gVCFs '
                                       '(must be filesystem local)', required=True)
args = parser.parse_args()
samples = build_sample_list(args.sample_map, args.sample_file)
with open(args.json) as j:
json = j.read()
hl.init(default_reference=DEFAULT_REF,
log='/hail-joint-caller-' + time.strftime('%Y%m%d-%H%M') + '.log')
run_combiner(samples, json, args.out_file, args.tmp_path, summary_path=args.summarize,
overwrite=True)
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -422,24 +422,38 @@
ite=False):%0A
+ import gc%0A
# make t
@@ -1389,24 +1389,93 @@
ER_LENGTH)%5D%0A
+ gc.collect() # need to try to free memory on the master%0A
comb
|
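Decoded, the two hunks add import gc at the top of run_combiner and a gc.collect() right after each combine round inside the while loop, nudging the driver into releasing the previous round's intermediates. A self-contained illustration of the pattern (the names here are illustrative, not from the combiner):

import gc

def iterative_reduce(items, combine, chunk_size=100):
    """Repeatedly combine fixed-size chunks until one result remains."""
    while len(items) > 1:
        items = [combine(items[pos:pos + chunk_size])
                 for pos in range(0, len(items), chunk_size)]
        gc.collect()  # need to try to free memory on the master
    return items[0]

print(iterative_reduce(list(range(1000)), sum))  # 499500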
544b8efd33cc99f37f108fb87a603c6c9d8901c2
|
Rename variable "map_header_library" to "header_to_library_map"
|
header_toolkit_dependency_walker.py
|
header_toolkit_dependency_walker.py
|
#!/usr/bin/python
def extract_headers_directly_included(cpp_source_file, header_pattern):
"""Given a CPP source file, list all directly included headers matching the given pattern."""
import re, fnmatch
header_pattern_as_regex = fnmatch.translate(header_pattern).replace("\Z(?ms)", "")
matches = []
lines = open(cpp_source_file, "r")
for line in lines:
match = re.match("\#include\s+[\"<](" + header_pattern_as_regex + ")[\">]", line)
if match:
matches.append(match.group(1))
return matches;
def recursively_list_file_within_directory(directory, pattern):
"""The returned list of file including the relative path to the provided directory."""
import fnmatch
import os
matches = []
for root, dirnames, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(root, filename))
return matches
def generate_map_of_header_to_subdirectory(headers_with_relative_path):
import os
header_to_subdirectory = {}
for header in headers_with_relative_path:
subdirectory = os.path.basename(os.path.dirname(header))
header_to_subdirectory[os.path.basename(header)] = subdirectory
return header_to_subdirectory
if __name__ == '__main__':
from optparse import OptionParser
parser = OptionParser()
parser.add_option("--toolkit-source-directory", dest="toolkit_source_directory",
help="specify the directory containing the source of the toolkit to consider")
parser.add_option("--project-source-directory", default='.', dest="project_source_directory",
help="specify the directory containing the source of the project to process")
parser.add_option("--toolkit-pattern",
dest="toolkit_pattern", default='*.h',
help="Pattern used to match toolkit header file")
parser.add_option("--project-patterns", default='*.h *.cxx *.cpp',
dest="project_patterns",
help="patterns used to match source files. Example: \"*.h *.cxx *.cpp\"")
parser.add_option("--verbose",
dest="verbose", action="store_true",
help="Print verbose information")
parser.add_option("--extra-verbose",
dest="extra_verbose", action="store_true",
help="Print extra verbose information")
(options, args) = parser.parse_args()
requiredArgumentErrorMessage = "argument '%s' is required !";
if not options.toolkit_source_directory:
parser.error(requiredArgumentErrorMessage % '--toolkit-source-directory');
if options.extra_verbose:
options.verbose = True
import os.path
options.toolkit_source_directory = os.path.expanduser(options.toolkit_source_directory);
options.project_source_directory = os.path.expanduser(options.project_source_directory);
toolkit_headers = recursively_list_file_within_directory(options.toolkit_source_directory, options.toolkit_pattern)
if options.verbose:
print "%d headers found in [%s] using [%s]" % (len(toolkit_headers), options.toolkit_source_directory, options.toolkit_pattern)
map_header_library = generate_map_of_header_to_subdirectory(toolkit_headers)
if options.verbose:
print "%d entries added to 'Header -> Library' map " % (len(map_header_library.keys()))
expected_libraries = [];
all_project_files = [];
project_patterns = options.project_patterns.split(" ");
for project_pattern in project_patterns:
project_files = recursively_list_file_within_directory(options.project_source_directory, project_pattern)
all_project_files.extend(project_files);
if options.verbose:
print "Found %s files walking [%s] using [%s] pattern" % (len(project_files), options.project_source_directory, project_pattern)
for filepath in all_project_files:
project_headers = extract_headers_directly_included(filepath, options.toolkit_pattern)
#print "Found %s header matching [%s] in file [%s]" % (len(project_headers), options.toolkit_pattern, filepath)
for header in project_headers:
if header in map_header_library:
if options.extra_verbose:
print "[%s] found in [%s]" % (header, map_header_library[header])
expected_libraries.append(map_header_library[header])
expected_libraries = sorted(list(set(expected_libraries)))
for lib in expected_libraries:
print lib
|
Python
| 0.000261
|
@@ -3134,34 +3134,37 @@
n)%0A %0A
-map_
header_
+to_
library
+_map
= gener
@@ -3300,34 +3300,37 @@
%25 (len(
-map_
header_
+to_
library
+_map
.keys())
@@ -4106,34 +4106,37 @@
ader in
-map_
header_
+to_
library
+_map
:%0A
@@ -4211,34 +4211,37 @@
header,
-map_
header_
+to_
library
+_map
%5Bheader%5D
@@ -4280,26 +4280,29 @@
end(
-map_
header_
+to_
library
+_map
%5Bhea
|
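The hunks above are a pure rename of map_header_library to header_to_library_map; behavior is unchanged. As a side note, the map-building helper is easy to sanity-check in isolation (the paths below are hypothetical, not from any toolkit):

import os

def generate_map_of_header_to_subdirectory(headers_with_relative_path):
    header_to_subdirectory = {}
    for header in headers_with_relative_path:
        subdirectory = os.path.basename(os.path.dirname(header))
        header_to_subdirectory[os.path.basename(header)] = subdirectory
    return header_to_subdirectory

print(generate_map_of_header_to_subdirectory(
    ['toolkit/Common/itkArray.h', 'toolkit/IO/itkImageIO.h']))
# {'itkArray.h': 'Common', 'itkImageIO.h': 'IO'}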
c1855da6ebcccf44c24dca7a25949f33c84fe668
|
Add the physical_device info to the UI
|
server/arsenalweb/views/nodes.py
|
server/arsenalweb/views/nodes.py
|
'''Arsenal nodes UI.'''
# Copyright 2015 CityGrid Media, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from pyramid.view import view_config
from arsenalweb.views import (
_api_get,
get_authenticated_user,
get_nav_urls,
get_pag_params,
site_layout,
)
LOG = logging.getLogger(__name__)
@view_config(route_name='node', permission='view', renderer='arsenalweb:templates/node.pt')
def view_node(request):
'''Handle requests for node UI route.'''
page_title = 'Node'
auth_user = get_authenticated_user(request)
uri = '/api/nodes/{0}'.format(request.matchdict['id'])
resp = _api_get(request, uri)
node = resp['results'][0]
# We need all the info about network_interfaces for display in the UI.
net_ifs = []
for net_if in node['network_interfaces']:
LOG.info('Getting network interface: {0}'.format(net_if))
uri = '/api/network_interfaces/{0}'.format(net_if['id'])
resp = _api_get(request, uri)
net_ifs.append(resp['results'][0])
node['network_interfaces'] = sorted(net_ifs, key=lambda k: k['name'])
LOG.debug('network interfaces: {0}'.format(node['network_interfaces']))
return {
'au': auth_user,
'node': node,
'page_title': page_title,
}
@view_config(route_name='nodes', permission='view', renderer='arsenalweb:templates/nodes.pt')
def view_nodes(request):
'''Handle requests for nodes UI route.'''
page_title_type = 'objects/'
page_title_name = 'nodes'
auth_user = get_authenticated_user(request)
(perpage, offset) = get_pag_params(request)
payload = {}
for k in request.GET:
payload[k] = request.GET[k]
# Force the UI to 50 results per page
if not perpage:
perpage = 50
payload['perpage'] = perpage
uri = '/api/nodes'
LOG.info('UI requesting data from API={0},payload={1}'.format(uri, payload))
resp = _api_get(request, uri, payload)
total = 0
nodes = []
if resp:
total = resp['meta']['total']
nodes = resp['results']
nav_urls = get_nav_urls(request.path, offset, perpage, total, payload)
# Used by the columns menu to determine what to show/hide.
column_selectors = [
{'name': 'created', 'pretty_name': 'Date Created'},
{'name': 'hardware_profile', 'pretty_name': 'Hardware Profile'},
{'name': 'last_registered', 'pretty_name': 'Last Registered'},
{'name': 'node_groups', 'pretty_name': 'Node Groups'},
{'name': 'serial_number', 'pretty_name': 'Serial Number'},
{'name': 'node_id', 'pretty_name': 'Node ID'},
{'name': 'node_name', 'pretty_name': 'Node Name'},
{'name': 'operating_system', 'pretty_name': 'Operating System'},
{'name': 'status', 'pretty_name': 'Status'},
{'name': 'unique_id', 'pretty_name': 'Unique ID'},
{'name': 'updated', 'pretty_name': 'Date Updated'},
{'name': 'updated_by', 'pretty_name': 'Updated By'},
]
return {
'au': auth_user,
'column_selectors': column_selectors,
'layout': site_layout('max'),
'nav_urls': nav_urls,
'nodes': nodes,
'offset': offset,
'page_title_name': page_title_name,
'page_title_type': page_title_type,
'perpage': perpage,
'total': total,
}
|
Python
| 0
|
@@ -1344,20 +1344,21 @@
LOG.
-info
+debug
('Gettin
@@ -1620,16 +1620,16 @@
ame'%5D)%0A%0A
-
LOG.
@@ -1693,24 +1693,403 @@
rfaces'%5D))%0A%0A
+ # We need all the info about the physical_device for display in the UI.%0A if node%5B'physical_device'%5D:%0A LOG.debug('Getting physical_device: %7B0%7D'.format(node%5B'physical_device'%5D%5B'serial_number'%5D))%0A uri = '/api/physical_devices/%7B0%7D'.format(node%5B'physical_device'%5D%5B'id'%5D)%0A resp = _api_get(request, uri)%0A node%5B'physical_device'%5D = resp%5B'results'%5D%5B0%5D%0A%0A
return %7B
|
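Decoded, the hunks demote the per-interface log line from info to debug and append a physical_device lookup to view_node that mirrors the network-interface fetch. The added block (it runs inside view_node, so request, LOG and _api_get come from the surrounding module):

    # We need all the info about the physical_device for display in the UI.
    if node['physical_device']:
        LOG.debug('Getting physical_device: {0}'.format(
            node['physical_device']['serial_number']))
        uri = '/api/physical_devices/{0}'.format(node['physical_device']['id'])
        resp = _api_get(request, uri)
        node['physical_device'] = resp['results'][0]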
74ab963a424592299005f41a4d557ce9c7c2be4c
|
make board string representation GTP conform
|
Board.py
|
Board.py
|
"""Class to purely handle everything that concerns the board"""
from typing import Tuple, List
import numpy as np
# from scipy import ndimage
"""Just to adjust the internal representation of color at a single location,
instead of all over the code ;) Just in case. Maybe something other than -1 and 1
could be interesting, see the tic-tac-toe example"""
WHITE = -1
BLACK = 1
EMPTY = 0
class Board(np.matrix):
"""Class that purely handles the board, as well as board_related functions
The motivation for this was also that we can make a copy of the real board,
and evaluate all the `get_chain`, `check_dead` etc on the copy
"""
# def get_chain(self, loc: Tuple[int, int]) -> List[Tuple[int, int]]:
# # This method uses morphological operations to find out the
# # connected components ie., chains
# player = self[loc]
# test_matrix = self == self[loc]
# label_im, nb_labels = ndimage.label(test_matrix)
# label_im = label_im == label_im[loc]
# locations = np.where(label_im)
# group = list(zip(locations[0],locations[1]))
# return group
# old get_chain method without scipy-dependency
def get_chain(self, loc: Tuple[int, int]) -> List[Tuple[int, int]]:
player = self[loc]
# Check if neighbors of same player
to_check = [loc]
group = []
while len(to_check) > 0:
current = to_check.pop()
neighbors = self.get_adjacent_coords(current)
for n in neighbors:
if self[n] == player and n not in group and n not in to_check:
to_check.append(n)
group.append(current)
return group
def check_dead(self, group: List[Tuple[int, int]]) -> bool:
"""Check if a group is dead
Currently done by getting all the neighbors, and checking if any
of them is 0.
"""
total_neighbors = []
for loc in group:
total_neighbors += self.get_adjacent_coords(loc)
for n in total_neighbors:
if self[n] == EMPTY:
return False
return True
def get_adjacent_coords(self, loc: Tuple[int, int]):
neighbors = []
if loc[0] > 0:
neighbors.append((loc[0]-1, loc[1]))
if loc[0] < self.shape[0]-1:
neighbors.append((loc[0]+1, loc[1]))
if loc[1] > 0:
neighbors.append((loc[0], loc[1]-1))
if loc[1] < self.shape[1]-1:
neighbors.append((loc[0], loc[1]+1))
return neighbors
def to_number(self):
"""Create a unique representation for a board
Does this by creating an integer, with each position indicating a
        location on the board. I do this because performance gets bad once
the board history is large
"""
number = 0
i = 0
for entry in np.nditer(self):
if entry == WHITE:
number += 1 * 10**i
elif entry == BLACK:
number += 2 * 10**i
else:
number += 3 * 10**i
i += 1
return number
def __str__(self):
"""String representation of the board!
Just a simple ascii output, quite cool but the code is a bit messy"""
b = self.copy()
rows = list(range(b.shape[0]))
rows = [chr(i + ord('a')) for i in rows]
cols = list(range(b.shape[1]))
cols = [chr(i + ord('a')) for i in cols]
        # You might wonder why I do the following, but it's so that numpy
# formats the str representation using a single space
b[b == BLACK] = 2
b[b == WHITE] = 3
matrix_repr = super(Board, b).__str__()
matrix_repr = matrix_repr.replace('2', 'X')
matrix_repr = matrix_repr.replace('3', 'O')
matrix_repr = matrix_repr.replace('0', '·')
matrix_repr = matrix_repr.replace('[[', ' [')
matrix_repr = matrix_repr.replace(']]', ']')
col_index = ' '.join(cols)
board_repr = ''
for i in zip(rows, matrix_repr.splitlines()):
board_repr += i[0]+i[1]+'\n'
board_repr = ' '*3 + col_index+'\n'+board_repr
return board_repr
###########################################################################
# Not used yet, but more relevant to `Board` than to `Game`
def _matrix2csv(self, matrix):
"""Transform a matrix to a string, using ';' as the separator"""
ls = matrix.tolist()
ls = [str(entry) for row in ls for entry in row]
s = ';'.join(ls)
return s
def board2file(self, file, mode='a'):
"""Store board to a file
The idea is also to create csv files that contain
all boards that were part of a game, so that we can
use those to train a network on.
"""
string = self._matrix2csv(self.board)
with open(file, mode) as f:
f.write(string)
f.write('\n')
if __name__ == '__main__':
import doctest
# doctest.testmod(extraglobs={'g': Game()})
doctest.testmod()
|
Python
| 0.000496
|
@@ -3365,21 +3365,150 @@
-rows = %5Bchr(i
+cols = list(range(b.shape%5B1%5D))%0A rows = %5Bstr(self.shape%5B0%5D - i) for i in rows%5D%0A cols = %5Bchr(i + ord('a')) if i %3C 8 else chr(i + 1
+ o
@@ -3517,35 +3517,35 @@
('a')) for i in
-row
+col
s%5D%0A cols
@@ -3543,46 +3543,100 @@
-cols = list(range(b.shape%5B1%5D))
+# was previously not GTP conform:%0A # rows = %5Bchr(i + ord('a')) for i in rows%5D
%0A
col
@@ -3623,32 +3623,34 @@
in rows%5D%0A
+ #
cols = %5Bchr(i +
@@ -3667,32 +3667,33 @@
for i in cols%5D%0A
+%0A
# You mi
|
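Decoded, the hunks rewrite the row/column labels in __str__ to match GTP conventions: rows are numbered top-down from the board size, and column letters skip 'i' (the old all-alphabetic labeling is kept as a comment). The label construction in isolation:

board_size = 19
rows = [str(board_size - i) for i in range(board_size)]
cols = [chr(i + ord('a')) if i < 8 else chr(i + 1 + ord('a'))
        for i in range(board_size)]
print(' '.join(cols))     # a b c d e f g h j k l m n o p q r s t -- no 'i'
print(rows[0], rows[-1])  # 19 1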
1941d34e5cecf33090e73665034a8196b220e690
|
Mask more password fields by default
|
horizon/middleware/operation_log.py
|
horizon/middleware/operation_log.py
|
# Copyright 2016 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import re
from django.conf import settings
from django.contrib import messages as django_messages
from django.core.exceptions import MiddlewareNotUsed
import six.moves.urllib.parse as urlparse
LOG = logging.getLogger(__name__)
class OperationLogMiddleware(object):
"""Middleware to output operation log.
    This log can include the information below:
- ``domain name``
- ``domain id``
- ``project name``
- ``project id``
- ``user name``
- ``user id``
- ``request scheme``
- ``referer url``
- ``request url``
- ``message``
- ``method``
- ``http status``
- ``request parameters``
    and the log format is defined in OPERATION_LOG_OPTIONS.
"""
@property
def OPERATION_LOG(self):
        # In order to allow access from mock in test cases.
return self._logger
def __init__(self):
if not getattr(settings, "OPERATION_LOG_ENABLED", False):
raise MiddlewareNotUsed
# set configurations
_log_option = getattr(settings, "OPERATION_LOG_OPTIONS", {})
_available_methods = ['POST', 'GET', 'PUT', 'DELETE']
_methods = _log_option.get("target_methods", ['POST'])
self._default_format = (
"[%(client_ip)s] [%(domain_name)s]"
" [%(domain_id)s] [%(project_name)s]"
" [%(project_id)s] [%(user_name)s] [%(user_id)s]"
" [%(request_scheme)s] [%(referer_url)s] [%(request_url)s]"
" [%(message)s] [%(method)s] [%(http_status)s] [%(param)s]")
_default_ignored_urls = ['/js/', '/static/', '^/api/']
self.target_methods = [x for x in _methods if x in _available_methods]
self.mask_fields = _log_option.get("mask_fields", ['password'])
self.format = _log_option.get("format", self._default_format)
self._logger = logging.getLogger('horizon.operation_log')
ignored_urls = _log_option.get("ignore_urls", _default_ignored_urls)
self._ignored_urls = [re.compile(url) for url in ignored_urls]
def process_response(self, request, response):
"""Log user operation."""
log_format = self._get_log_format(request)
if not log_format:
return response
params = self._get_parameters_from_request(request)
# log a message displayed to user
messages = django_messages.get_messages(request)
result_message = None
if messages:
result_message = ', '.join('%s: %s' % (message.tags, message)
for message in messages)
elif 'action' in request.POST:
result_message = request.POST['action']
params['message'] = result_message
params['http_status'] = response.status_code
self.OPERATION_LOG.info(log_format, params)
return response
def process_exception(self, request, exception):
"""Log error info when exception occurred."""
log_format = self._get_log_format(request)
if log_format is None:
return
params = self._get_parameters_from_request(request, True)
params['message'] = exception
params['http_status'] = '-'
self.OPERATION_LOG.info(log_format, params)
def _get_log_format(self, request):
"""Return operation log format."""
user = getattr(request, 'user', None)
if not user:
return
if not request.user.is_authenticated():
return
method = request.method.upper()
if not (method in self.target_methods):
return
request_url = urlparse.unquote(request.path)
for rule in self._ignored_urls:
if rule.search(request_url):
return
return self.format
def _get_parameters_from_request(self, request, exception=False):
"""Get parameters to log in OPERATION_LOG."""
user = request.user
referer_url = None
try:
referer_dic = urlparse.urlsplit(
urlparse.unquote(request.META.get('HTTP_REFERER')))
referer_url = referer_dic[2]
if referer_dic[3]:
referer_url += "?" + referer_dic[3]
if isinstance(referer_url, str):
referer_url = referer_url.decode('utf-8')
except Exception:
pass
return {
'client_ip': request.META.get('REMOTE_ADDR', None),
'domain_name': getattr(user, 'domain_name', None),
'domain_id': getattr(user, 'domain_id', None),
'project_name': getattr(user, 'project_name', None),
'project_id': getattr(user, 'project_id', None),
'user_name': getattr(user, 'username', None),
'user_id': request.session.get('user_id', None),
'request_scheme': request.scheme,
'referer_url': referer_url,
'request_url': urlparse.unquote(request.path),
'method': request.method if not exception else None,
'param': self._get_request_param(request),
}
def _get_request_param(self, request):
"""Change POST data to JSON string and mask data."""
params = {}
try:
params = request.POST.copy()
if not params:
params = json.loads(request.body)
except Exception:
pass
for key in params:
            # replace the value with masked characters
if key in self.mask_fields:
params[key] = '*' * 8
        # when a file is uploaded (e.g. create image)
files = request.FILES.values()
if len(list(files)) > 0:
filenames = ', '.join(
[up_file.name for up_file in files])
params['file_name'] = filenames
return json.dumps(params, ensure_ascii=False)
|
Python
| 0
|
@@ -2217,16 +2217,148 @@
/api/'%5D%0A
+ _default_mask_fields = %5B'password', 'current_password',%0A 'new_password', 'confirm_password'%5D%0A
@@ -2490,20 +2490,28 @@
s%22,
-%5B'password'%5D
+_default_mask_fields
)%0A
|
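Decoded, the hunk replaces the single-item default ['password'] with a wider _default_mask_fields list covering the password-change form fields. A standalone demo of the masking loop using that default:

import json

mask_fields = ['password', 'current_password',
               'new_password', 'confirm_password']
params = {'user': 'alice', 'current_password': 'old', 'new_password': 'hunter2'}
for key in params:
    # replace each sensitive value with masked characters
    if key in mask_fields:
        params[key] = '*' * 8
print(json.dumps(params, ensure_ascii=False))
# {"user": "alice", "current_password": "********", "new_password": "********"}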
6dac96b5be82c0aa4005045c65820d7e95f330c1
|
Remove lightning layer from patterns which already have obvious button effects
|
pier14/opc-client/soma_client.py
|
pier14/opc-client/soma_client.py
|
#!/usr/bin/env python
from model import SomaModel
from renderer import Renderer
from controller import AnimationController
from effectlayer import *
from effects.color_cycle import *
from effects.random_phase import *
from effects.random_blink_cycle import *
from effects.chase import AxonChaseLayer
from effects.colorwave import ColorWave
from effects.colorwiper import ColorWiper
from effects.invert import InvertColorsLayer, InvertColorByRegionLayer
from effects.color_palette_battle import *
from effects.photo_colors import *
from effects.clamp import *
from effects.dim_bright_button_layer import *
from effects.button_flash import ButtonFlash
from effects.specklayer import SpeckLayer
from effects.lower import LowerLayer
from effects.upper import UpperLayer
from effects.axon import AxonLayer
from effects.morse2 import MorseLayer2
from effects.lightning import Lightning
from effects.repair import Repair
from playlist import Playlist
from threads import PlaylistAdvanceThread, KeyboardMonitorThread, ButtonMonitorThread
from random import random
from math import *
import os
import sys
def main(screen, interval):
# master parameters, used in rendering and updated by playlist advancer thread
masterParams = EffectParameters()
# if we got a curses screen, use it for button emulation through the keyboard
if screen:
# re-open stdout with a buffer size of 0. this makes print commands work again.
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
screen.clear()
screen.refresh()
# put keyboard state into effect parameters
keymonitor = KeyboardMonitorThread(masterParams, screen)
keymonitor.start()
else:
ButtonMonitorThread(masterParams).start()
model = SomaModel() #address_filename="../addresses.txt")
# a playlist. each entry in a playlist can contain one or more effect layers
# (if more than one, they are all rendered into the same frame...mixing method
# is determined by individual effect layers' render implementations)
playlist = Playlist([
# This is a very handy layer for debugging. Steps through LEDs in
# order of frame index in response to a button push, printing the
# address of the lit LED.
#[ControlledAddressTestLayer()],
#[TriangleWaveLayer()],
[
PhotoColorsLayer(model),
DimBrightButtonLayer(),
SpeckLayer(button=0),
SpeckLayer(button=1),
Lightning(),
Repair(),
],
[
MultiplierLayer(ColorWave(model, grayscale=True), ColorWiper(model)),
Lightning(),
Repair(),
],
[
RandomPhaseLayer(model),
ColorCycleLayer(0.00003, 0.0001),
Lightning(),
Repair(),
],
[
ColorPaletteBattleLayer(model),
Repair(),
],
[
MorseLayer2(["figure", "action", "light", "yang", "synergy", "unity in dual", "SOMA"], ["ground", "intention", "darkness", "yin", "discord", "order from chaos", "FLG"]),
ColorCycleLayer(0.0003, 0.0005),
Lightning(),
Repair(),
],
])
# the renderer manages a playlist (or dict of multiple playlists), as well as transitions
# and gamma correction
renderer = Renderer(playlists={'all': playlist}, gamma=2.2)
# the controller manages the animation loop - creates frames, calls into the renderer
# at appropriate intervals, updates the time stored in master params, and sends frames
# out over OPC
controller = AnimationController(model, renderer, masterParams)
# a thread that periodically advances the active playlist within the renderer.
# TODO: example to demonstrate swapping between multiple playlists with custom fades
advancer = PlaylistAdvanceThread(renderer, switchInterval=interval)
advancer.start()
# go!
controller.drawingLoop()
if __name__ == '__main__':
#try:
# # try to import curses for keyboard button emulator
# import curses.wrapper
# curses.wrapper(main)
#except ImportError:
# # otherwise just run main with no curses screen
# main(None)
# Unbuffer stdout (simulating python's "-u" flag)
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
# Redirect stderror to stdout
old = sys.stderr
sys.stderr = sys.stdout
old.close()
print "Starup, PID", os.getpid()
#interval = 10*60
interval = 60*3
main(None, interval)
|
Python
| 0
|
@@ -2481,33 +2481,8 @@
1),%0A
- Lightning(),%0A
@@ -2607,33 +2607,8 @@
)),%0A
- Lightning(),%0A
|
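Decoded, both hunks simply delete the Lightning(), entry from the first two playlist items, which already flash in response to button presses. The first entry as the diff leaves it:

        [
            PhotoColorsLayer(model),
            DimBrightButtonLayer(),
            SpeckLayer(button=0),
            SpeckLayer(button=1),
            Repair(),
        ],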
7119c07b422f823f40939691fa84f0c2581ae70d
|
Fix the REST module name.
|
test/unit/helpers/test_qiprofile_helper.py
|
test/unit/helpers/test_qiprofile_helper.py
|
import datetime
import pytz
from nose.tools import (assert_is_none)
from qipipe.helpers.qiprofile_helper import QIProfile
from qiprofile.models import Project
from test import project
from test.helpers.logging_helper import logger
SUBJECT = 'Breast099'
"""The test subject."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfileHelper(object):
"""The Imaging Profile helper unit tests."""
def setUp(self):
if not Project.objects.filter(name=project()):
Project(name=project()).save()
self._db = QIProfile()
self._clear()
def tearDown(self):
self._clear()
def test_save_subject(self):
self._db.save_subject(project(), SUBJECT)
def test_save_session(self):
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
def _clear(self):
sbj = self._db.find_subject(project(), SUBJECT)
if sbj:
sbj.delete()
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
|
Python
| 0.000003
|
@@ -130,16 +130,21 @@
iprofile
+_rest
.models
|
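Decoded, the hunk inserts _rest into the module path, so the import becomes (assuming the qiprofile_rest package is on the path):

from qiprofile_rest.models import Project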