commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
41d6d64ca46438e22ee69514a84357402c8d7869
|
Add unique_id to WOL integration (#49604)
|
homeassistant/components/wake_on_lan/switch.py
|
homeassistant/components/wake_on_lan/switch.py
|
"""Support for wake on lan."""
import logging
import platform
import subprocess as sp
import voluptuous as vol
import wakeonlan
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_BROADCAST_ADDRESS,
CONF_BROADCAST_PORT,
CONF_HOST,
CONF_MAC,
CONF_NAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.script import Script
_LOGGER = logging.getLogger(__name__)
CONF_OFF_ACTION = "turn_off"
DEFAULT_NAME = "Wake on LAN"
DEFAULT_PING_TIMEOUT = 1
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MAC): cv.string,
vol.Optional(CONF_BROADCAST_ADDRESS): cv.string,
vol.Optional(CONF_BROADCAST_PORT): cv.port,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OFF_ACTION): cv.SCRIPT_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a wake on lan switch."""
broadcast_address = config.get(CONF_BROADCAST_ADDRESS)
broadcast_port = config.get(CONF_BROADCAST_PORT)
host = config.get(CONF_HOST)
mac_address = config[CONF_MAC]
name = config[CONF_NAME]
off_action = config.get(CONF_OFF_ACTION)
add_entities(
[
WolSwitch(
hass,
name,
host,
mac_address,
off_action,
broadcast_address,
broadcast_port,
)
],
host is not None,
)
class WolSwitch(SwitchEntity):
"""Representation of a wake on lan switch."""
def __init__(
self,
hass,
name,
host,
mac_address,
off_action,
broadcast_address,
broadcast_port,
):
"""Initialize the WOL switch."""
self._hass = hass
self._name = name
self._host = host
self._mac_address = mac_address
self._broadcast_address = broadcast_address
self._broadcast_port = broadcast_port
domain = __name__.split(".")[-2]
self._off_script = (
Script(hass, off_action, name, domain) if off_action else None
)
self._state = False
self._assumed_state = host is None
@property
def is_on(self):
"""Return true if switch is on."""
return self._state
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def assumed_state(self):
"""Return true if no host is provided."""
return self._assumed_state
@property
def should_poll(self):
"""Return false if assumed state is true."""
return not self._assumed_state
def turn_on(self, **kwargs):
"""Turn the device on."""
service_kwargs = {}
if self._broadcast_address is not None:
service_kwargs["ip_address"] = self._broadcast_address
if self._broadcast_port is not None:
service_kwargs["port"] = self._broadcast_port
_LOGGER.info(
"Send magic packet to mac %s (broadcast: %s, port: %s)",
self._mac_address,
self._broadcast_address,
self._broadcast_port,
)
wakeonlan.send_magic_packet(self._mac_address, **service_kwargs)
if self._assumed_state:
self._state = True
self.async_write_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off if an off action is present."""
if self._off_script is not None:
self._off_script.run(context=self._context)
if self._assumed_state:
self._state = False
self.async_write_ha_state()
def update(self):
"""Check if device is on and update the state. Only called if assumed state is false."""
if platform.system().lower() == "windows":
ping_cmd = [
"ping",
"-n",
"1",
"-w",
str(DEFAULT_PING_TIMEOUT * 1000),
str(self._host),
]
else:
ping_cmd = [
"ping",
"-c",
"1",
"-W",
str(DEFAULT_PING_TIMEOUT),
str(self._host),
]
status = sp.call(ping_cmd, stdout=sp.DEVNULL, stderr=sp.DEVNULL)
self._state = not bool(status)
|
Python
| 0
|
@@ -330,16 +330,72 @@
NAME,%0A)%0A
+from homeassistant.helpers import device_registry as dr%0A
import h
@@ -2389,16 +2389,69 @@
is None
+%0A self._unique_id = dr.format_mac(mac_address)
%0A%0A @p
@@ -2915,24 +2915,146 @@
umed_state%0A%0A
+ @property%0A def unique_id(self):%0A %22%22%22Return the unique id of this switch.%22%22%22%0A return self._unique_id%0A%0A
def turn
|
c5fff613e5b860d3df51fa45189d379c3e1aeb68
|
Support for namespace in graphite bridge #49
|
prometheus_client/bridge/graphite.py
|
prometheus_client/bridge/graphite.py
|
#!/usr/bin/python
from __future__ import unicode_literals
import logging
import re
import socket
import time
import threading
from .. import core
# Roughly, have to keep to what works as a file name.
# We also remove periods, so labels can be distinguished.
_INVALID_GRAPHITE_CHARS = re.compile(r"[^a-zA-Z0-9_-]")
def _sanitize(s):
return _INVALID_GRAPHITE_CHARS.sub('_', s)
class _RegularPush(threading.Thread):
def __init__(self, pusher, interval):
super(_RegularPush, self).__init__()
self._pusher = pusher
self._interval = interval
def run(self):
wait_until = time.time()
while True:
while True:
now = time.time()
if now >= wait_until:
# May need to skip some pushes.
while wait_until < now:
wait_until += self._interval
break
# time.sleep can return early.
time.sleep(wait_until - now)
try:
self._pusher.push()
except IOError:
logging.exception("Push failed")
class GraphiteBridge(object):
def __init__(self, address, registry=core.REGISTRY, timeout_seconds=30, _time=time):
self._address = address
self._registry = registry
self._timeout = timeout_seconds
self._time = _time
def push(self, prefix=''):
now = int(self._time.time())
output = []
prefixstr = ''
if prefix:
prefixstr = prefix + '.'
for metric in self._registry.collect():
for name, labels, value in metric._samples:
if labels:
labelstr = '.' + '.'.join(
['{0}.{1}'.format(
_sanitize(k), _sanitize(v))
for k, v in sorted(labels.items())])
else:
labelstr = ''
output.append('{0}{1}{2} {3} {4}\n'.format(
prefixstr, _sanitize(name), labelstr, float(value), now))
conn = socket.create_connection(self._address, self._timeout)
conn.sendall(''.join(output).encode('ascii'))
conn.close()
def start(self, interval=60.0):
t = _RegularPush(self, interval)
t.daemon = True
t.start()
|
Python
| 0
|
@@ -456,16 +456,24 @@
interval
+, prefix
):%0A
@@ -575,16 +575,46 @@
interval
+%0A self._prefix = prefix
%0A%0A de
@@ -1093,16 +1093,35 @@
er.push(
+prefix=self._prefix
)%0A
@@ -2328,16 +2328,27 @@
val=60.0
+, prefix=''
):%0A
@@ -2381,16 +2381,24 @@
interval
+, prefix
)%0A
|
9ecd320d41be2d20e37c95f79cc827eea31e0639
|
Fix error in email exception handling
|
etd_drop_app/forms.py
|
etd_drop_app/forms.py
|
import os
import sys
import zipfile
import json
import shutil
from datetime import datetime
from django import forms, template
from django.conf import settings
from django.forms.extras import SelectDateWidget
import bagit
from .validators import *
class NewSubmissionForm(forms.Form):
"""
Form for submitting an ETD.
"""
document_file = forms.FileField(
label="Main PDF File",
required=True,
allow_empty_file=False,
validators=[MimetypeValidator(('application/pdf'))],
help_text="Upload a PDF version of your thesis or dissertation. "
"Please take care to ensure that any custom fonts are properly "
"embedded and that your PDF displays correctly on different devices "
"before submitting."
)
supplemental_file = forms.FileField(
label="Supplemental Data",
required=settings.SUBMISSION_FORM_FIELDS['supplemental_file']['required'],
allow_empty_file=False,
validators=[MimetypeValidator(('application/zip'))],
help_text="Upload a ZIP file containing any supplemental "
"files you wish to deposit along with your thesis or dissertation."
)
license_file = forms.FileField(
label="License Agreement",
required=settings.SUBMISSION_FORM_FIELDS['license_file']['required'],
allow_empty_file=False,
validators=[MimetypeValidator(('application/pdf'))],
help_text="Upload a signed copy of a copyright license "
"agreement, as per the policy of your institution."
)
title = forms.CharField(
label="Title",
required=settings.SUBMISSION_FORM_FIELDS['title']['required'],
help_text="Title of your thesis or dissertation"
)
author = forms.CharField(
label="Author",
required=settings.SUBMISSION_FORM_FIELDS['author']['required'],
help_text="Name of the author of this work as it appears on your title page"
)
subject = forms.CharField(
label="Subject(s)",
required=settings.SUBMISSION_FORM_FIELDS['subject']['required'],
help_text="Any topics or subjects as they appear on your title page, separated with commas"
)
date = forms.DateField(
label="Date",
required=settings.SUBMISSION_FORM_FIELDS['date']['required'],
widget=SelectDateWidget,
help_text="Date of publication as it appears on your title page"
)
abstract = forms.CharField(
label="Abstract",
required=settings.SUBMISSION_FORM_FIELDS['abstract']['required'],
widget=forms.Textarea,
help_text="Abstract of your thesis or dissertation"
)
agreement = forms.BooleanField(
label="I agree to the terms.",
required=True
)
# TODO: Custom validation that PDF is really a PDF, etc...
def save(self, author):
"""
Saves the submission, taking care of BagIt creation and any
other necessary ingest behavior.
author is the User who submitted the request.
return value is the name of the bag directory created, or None.
"""
# Generate a submission ID. Must be unique.
datestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
etd_id = "%s-%s" % (datestamp, author.username)
# Set up staging directory for this bag (e.g. "STAGING_20140326-160532_lbroglie")
staging_name = "STAGING_%s" % etd_id
staging_path = os.path.abspath(os.path.join(settings.ETD_STORAGE_DIRECTORY, staging_name))
try:
# Create the staging directory
os.makedirs(staging_path)
# Move the main document to the staging area
document_path = os.path.join(staging_path, "etd.pdf")
with open(document_path, 'wb+') as destination:
for chunk in self.cleaned_data['document_file']:
destination.write(chunk)
# Move the license document to the staging area, if provided
if self.cleaned_data['license_file']:
document_path = os.path.join(staging_path, "license.pdf")
with open(document_path, 'wb+') as destination:
for chunk in self.cleaned_data['license_file']:
destination.write(chunk)
# Try to process the supplemental file as a ZipFile, if provided
if self.cleaned_data['supplemental_file']:
supplemental_path = os.path.join(staging_path, "supplemental")
supplemental_zip = zipfile.ZipFile(self.cleaned_data['supplemental_file'], 'r')
supplemental_zip.extractall(supplemental_path)
supplemental_zip.close()
# Create a dict representing all the form data
form_record_path = os.path.join(staging_path, "form.json")
form_record_file = open(form_record_path, 'w')
form_record = {
'document_file': {
'original_filename': self.cleaned_data['document_file'].name,
'size': self.cleaned_data['document_file'].size,
'content_type': self.cleaned_data['document_file'].content_type
},
}
for name in ('title', 'author', 'subject', 'date', 'abstract'):
if self.cleaned_data[name]:
form_record[name] = str(self.cleaned_data[name])
if self.cleaned_data['supplemental_file']:
form_record['supplemental_file'] = {
'original_filename': self.cleaned_data['supplemental_file'].name,
'size': self.cleaned_data['supplemental_file'].size,
'content_type': self.cleaned_data['supplemental_file'].content_type
}
if self.cleaned_data['license_file']:
form_record['license_file'] = {
'original_filename': self.cleaned_data['license_file'].name,
'size': self.cleaned_data['license_file'].size,
'content_type': self.cleaned_data['license_file'].content_type
}
json.dump(form_record, form_record_file,
skipkeys=True,
indent=2
)
form_record_file.close()
# TODO: Maybe write an XML version also
# Turn the staging directory into a bag
bag_info = {}
if self.cleaned_data['title']:
bag_info['Internal-Sender-Identifier'] = self.cleaned_data['title'].replace('\n', ' ').replace('\r', '')
bagit.make_bag(staging_path, bag_info)
# Remove "STAGING_" from the name of the directory to signify completion
final_path = os.path.abspath(os.path.join(settings.ETD_STORAGE_DIRECTORY, etd_id))
os.rename(staging_path, final_path)
# Fire any emails/notifications/webhooks the institution wants to receive
try:
recipients = getattr(settings, 'SUBMISSION_EMAIL_RECIPIENTS', None)
if recipients:
subject = "New ETD Submission"
body = "New submission came in!"
sender = settings.SUBMISSION_EMAIL_FROM_ADDRESS
send_mail(subject, body, sender, recipients)
# TODO
# Return the id to signify success to the caller
return etd_id
except Exception as e:
# Log this event
# TODO
# Clean up the staging directory if it exists
if os.path.isdir(staging_path):
shutil.rmtree(staging_path)
if settings.DEBUG:
raise e
return None
|
Python
| 0.000088
|
@@ -7297,35 +7297,135 @@
pients)%0A
-%0A # TODO
+ except Exception as e:%0A # Log this email failure%0A # TODO%0A pass
%0A%0A
|
a390d2551a5e39ad35888c4b326f50212b60cabf
|
add description of exception
|
eventbus/exception.py
|
eventbus/exception.py
|
__author__ = 'Xsank'
class EventTypeError(Exception):
'''Event type is invalid!'''
class UnregisterError(Exception):
'''No listener to unregister!'''
|
Python
| 0.000002
|
@@ -83,16 +83,68 @@
id!'''%0A%0A
+ def __str__(self):%0A return self.__doc__%0A%0A
%0Aclass U
@@ -202,12 +202,64 @@
register!'''
+%0A%0A def __str__(self):%0A return self.__doc__
|
bc083b7fc9ea0598dde9abe584365ba9e697bb9d
|
Fix export samples output message bug.
|
bayesdb/client.py
|
bayesdb/client.py
|
#
# Copyright (c) 2010-2013, MIT Probabilistic Computing Project
#
# Lead Developers: Jay Baxter and Dan Lovell
# Authors: Jay Baxter, Dan Lovell, Baxter Eaves, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import inspect
import pickle
import gzip
import prettytable
import re
import os
import time
import ast
import crosscat.utils.api_utils as au
import utils
from parser import Parser
from engine import Engine
class Client(object):
def __init__(self, hostname=None, port=8008, crosscat_engine_type='multiprocessing'):
self.parser = Parser()
if hostname is None or hostname=='localhost':
self.online = False
self.engine = Engine(crosscat_engine_type)
else:
self.online = True
self.hostname = hostname
self.port = port
self.URI = 'http://' + hostname + ':%d' % port
def call_bayesdb_engine(self, method_name, args_dict):
if self.online:
out, id = au.call(method_name, args_dict, self.URI)
else:
method = getattr(self.engine, method_name)
argnames = inspect.getargspec(method)[0]
args = [args_dict[argname] for argname in argnames if argname in args_dict]
out = method(*args)
return out
def __call__(self, call_input, pretty=True, timing=False, wait=False, plots=None):
return self.execute(call_input, pretty, timing, wait, plots)
def execute(self, call_input, pretty=True, timing=False, wait=False, plots=None):
if type(call_input) == file:
bql_string = call_input.read()
path = os.path.abspath(call_input.name)
self.parser.set_root_dir(os.path.dirname(path))
elif type(call_input) == str:
bql_string = call_input
else:
print "Invalid input type: expected file or string."
if not pretty:
return_list = []
lines = self.parser.parse(bql_string)
for line in lines:
if type(call_input) == file:
print '> %s' % line
if wait:
user_input = raw_input()
if len(user_input) > 0 and (user_input[0] == 'q' or user_input[0] == 's'):
continue
result = self.execute_line(line, pretty, timing)
if not pretty:
return_list.append(result)
if type(call_input) == file:
print
self.parser.reset_root_dir()
if not pretty:
return return_list
def execute_line(self, bql_string, pretty=True, timing=False, plots=None):
if timing:
start_time = time.time()
out = self.parser.parse_line(bql_string)
if out is None:
print "Could not parse command. Try typing 'help' for a list of all commands."
return
elif not out:
return
method_name, args_dict = out
result = self.call_bayesdb_engine(method_name, args_dict)
result = self.callback(method_name, args_dict, result)
if timing:
end_time = time.time()
print 'Elapsed time: %.2f seconds.' % (end_time - start_time)
if plots is None:
plots = 'DISPLAY' in os.environ.keys()
if bql_string.lower().strip().startswith('estimate'):
## Special logic to display matrices.
if not (result['filename'] or plots):
print "No GUI available to display graphics: please enter the filename where the graphics should be saved, with the extension indicating the filetype (e.g. .png or .pdf). Enter a blank filename to instead view the matrix as text."
filename = raw_input()
if len(filename) > 0:
result['filename'] = filename
utils.plot_matrix(result['matrix'], result['column_names'], title=result['title'], filename=result['filename'])
else:
pp = self.pretty_print(result)
print pp
return pp
else:
utils.plot_matrix(result['matrix'], result['column_names'], title=result['message'], filename=result['filename'])
elif pretty:
if type(result) == dict and 'message' in result.keys():
print result['message']
pp = self.pretty_print(result)
print pp
return pp
else:
if type(result) == dict and 'message' in result.keys():
print result['message']
return result
def callback(self, method_name, args_dict, result):
if method_name == 'export_samples':
samples_dict = result
samples_file = gzip.GzipFile(args_dict['pkl_path'], 'w')
pickle.dump(samples_dict, samples_file)
return dict(message="Successfully exported the samples to %s" % pklpath)
else:
return result
def pretty_print(self, query_obj):
result = ""
if type(query_obj) == dict and 'data' in query_obj and 'columns' in query_obj:
pt = prettytable.PrettyTable()
pt.field_names = query_obj['columns']
for row in query_obj['data']:
pt.add_row(row)
result = pt
elif type(query_obj) == list and type(query_obj[0]) == tuple:
pt = prettytable.PrettyTable()
## TODO
return "TODO"
elif type(query_obj) == list:
result = str(query_obj)
elif type(query_obj) == dict and 'column_names' in query_obj:
colnames = query_obj['column_names']
zmatrix = query_obj['matrix']
pt = prettytable.PrettyTable(hrules=prettytable.ALL, vrules=prettytable.ALL, header=False)
pt.add_row([''] + list(colnames))
for row, colname in zip(zmatrix, list(colnames)):
pt.add_row([colname] + list(row))
result = pt
return result
|
Python
| 0
|
@@ -5517,15 +5517,29 @@
%22 %25
+args_dict%5B'
pkl
+_
path
+'%5D
)%0A
|
0f1fdb93c8005a26fcea10f708252a9e5f358270
|
add compare string in JRC
|
src/JRCFileParserService.py
|
src/JRCFileParserService.py
|
'''
Created on Jan 30, 2017
@author: Subhasis
'''
import csv
from MongoManager import MongoManager
class JRCFileParserService(object):
'''
This class takes care of reading the input file parsing the text line by line and pushing it into MongoDB.
'''
def __init__(self, file_path, db_config, schema, table, batch_size):
self.file_path = file_path
self.manager = MongoManager(schema, table, batch_size, db_config)
def process(self):
print "Reading File ", self.file_path
count_record = 0
entity_count = 0
similar_record = []
previous_record_id = '0'
with open(self.file_path, 'rb') as csvfile:
reader = csv.reader(csvfile, delimiter='\t')
for row in reader:
if previous_record_id != row[0]:
self.manager.pushRecords(self.getInsertObject(similar_record))
entity_count += 1
similar_record = []
similar_record.append(row)
previous_record_id = row[0]
count_record += 1
self.manager.pushRecords(self.getInsertObject(similar_record))
print "Records Processed ", count_record
print "Entity Processed ", entity_count
return self.manager.flushBatch()
def getInsertObject(self, data_list):
d = {}
d['id'] = int(data_list[0][0])
d['type'] = 'UNKNOWN'
if data_list[0][1] == 'P':
d['type'] = 'PERSON'
if data_list[0][1] == 'O':
d['type'] = 'ORGANIZATION'
variations = []
for r in data_list:
v = {}
v['lang'] = r[2]
v['name'] = r[3]
variations.append(v)
d['variations'] = variations
return d
|
Python
| 0.00002
|
@@ -1595,24 +1595,53 @@
ations = %5B%5D%0A
+ compare_strings = %5B%5D%0A
for
@@ -1656,16 +1656,16 @@
a_list:%0A
-
@@ -1778,35 +1778,131 @@
-d%5B'variations'%5D = variation
+ compare_strings.append(r%5B3%5D.lower())%0A d%5B'variations'%5D = variations%0A d%5B'compare_strings'%5D = compare_string
s%0A
|
0ff9ccacf20d2896353df906426db06ce8c24605
|
Update ASIC count from 7 to 10
|
scripts/avalon3-a3233-modular-test.py
|
scripts/avalon3-a3233-modular-test.py
|
#!/usr/bin/env python2.7
# This simple script was for test A3255 modular. there are 128 cores in one A3255 chip.
# If all cores are working the number should be 0.
# If some of them not working the number is the broken cores count.
from serial import Serial
from optparse import OptionParser
import binascii
import sys
parser = OptionParser()
parser.add_option("-s", "--serial", dest="serial_port", default="/dev/ttyUSB0", help="Serial port")
(options, args) = parser.parse_args()
ser = Serial(options.serial_port, 115200, 8, timeout=8)
cmd="415614010100000000000000000000000000000000000000000000000000000000000000000000"
#cmd="415614010100000000000000000000000000000000000000000000000000000000000000011021"
#cmd="415614010100000000000000000000000000000000000000000000000000000000000000022042"
while (1):
print ("Reading result ...")
ser.write(cmd.decode('hex'))
count = 0
while (1):
res_s = ser.read(39)
if not res_s:
print(str(count) + ": Something is wrong or modular id not correct")
else :
result = binascii.hexlify(res_s)
for i in range(0, 8):
number = '{:03}'.format(int(result[10 + i * 2:12 + i * 2], 16))
if (i == 0):
sys.stdout.write(number + ":\t")
else :
sys.stdout.write(number + "\t")
sys.stdout.flush()
print("")
count = count + 1
if (count == 5):
raw_input('Press enter to continue:')
break
|
Python
| 0.000013
|
@@ -1064,17 +1064,18 @@
ange(0,
-8
+11
):%0A%09%09%09%09n
|
1b704c24eaeb412e0636e5a0111ce2ac990998fd
|
remove confirm_text option in example
|
example/app/tables.py
|
example/app/tables.py
|
#!/usr/bin/env python
# coding: utf-8
from table.columns import Column, LinkColumn, Link
from table.utils import A
from table import Table
from models import Person
class PersonTable(Table):
id = Column(field='id', header=u'序号', header_attrs={'width': '50%'})
name = Column(field='name', header=u'姓名', header_attrs={'width': '50%'})
class Meta:
model = Person
ext_button_link = "http://www.baidu.com"
class LinkColumnTable(Table):
id = Column(field='id', header=u'序号', header_attrs={'width': '33%'})
name = Column(field='name', header=u'姓名', header_attrs={'width': '33%'})
action = LinkColumn(header=u'操作', header_attrs={'width': '33%'}, links=[
Link(text=u'编辑', viewname='app.views.edit', args=('id',), confirm=True, confirm_text=u"确定吗?")])
class Meta:
model = Person
|
Python
| 0
|
@@ -763,27 +763,8 @@
irm=
-True, confirm_text=
u%22%E7%A1%AE%E5%AE%9A
|
47582594a20fdef8a02e8386a7e6565284fdd18e
|
Remove repeated line
|
dojo/product_type/views.py
|
dojo/product_type/views.py
|
import logging
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from dojo.filters import ProductTypeFilter
from dojo.forms import Product_TypeForm, Product_TypeProductForm, Delete_Product_TypeForm
from dojo.models import Product_Type
from dojo.utils import get_page_items, add_breadcrumb
from dojo.notifications.helper import create_notification
from django.db.models import Count, Q
from django.db.models.query import QuerySet
logger = logging.getLogger(__name__)
"""
Jay
Status: in prod
Product Type views
"""
def product_type(request):
# query for names outside of query with prefetch to avoid the complex prefetch query from executing twice
name_words = Product_Type.objects.all().values_list('name', flat=True)
prod_types = Product_Type.objects.all()
ptl = ProductTypeFilter(request.GET, queryset=prod_types)
pts = get_page_items(request, ptl.qs, 25)
pts.object_list = prefetch_for_product_type(pts.object_list)
add_breadcrumb(title="Product Type List", top_level=True, request=request)
return render(request, 'dojo/product_type.html', {
'name': 'Product Type List',
'metric': False,
'user': request.user,
'pts': pts,
'ptl': ptl,
'name_words': name_words})
def prefetch_for_product_type(prod_types):
prefetch_prod_types = prod_types
if isinstance(prefetch_prod_types, QuerySet): # old code can arrive here with prods being a list because the query was already executed
active_findings_query = Q(prod_type__engagement__test__finding__active=True,
prod_type__engagement__test__finding__mitigated__isnull=True,
prod_type__engagement__test__finding__verified=True,
prod_type__engagement__test__finding__false_p=False,
prod_type__engagement__test__finding__duplicate=False,
prod_type__engagement__test__finding__out_of_scope=False)
prefetch_prod_types = prefetch_prod_types.prefetch_related('authorized_users')
prefetch_prod_types = prefetch_prod_types.annotate(findings_count=Count('prod_type__engagement__test__finding__id', filter=active_findings_query))
prefetch_prod_types = prefetch_prod_types.annotate(prod_count=Count('prod_type', distinct=True))
prefetch_prod_types = prefetch_prod_types.annotate(user_count=Count('authorized_users', distinct=True))
else:
logger.debug('unable to prefetch because query was already executed')
return prefetch_prod_types
@user_passes_test(lambda u: u.is_staff)
def add_product_type(request):
form = Product_TypeForm()
if request.method == 'POST':
form = Product_TypeForm(request.POST)
if form.is_valid():
form.save()
messages.add_message(request,
messages.SUCCESS,
'Product type added successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('product_type'))
add_breadcrumb(title="Add Product Type", top_level=False, request=request)
return render(request, 'dojo/new_product_type.html', {
'name': 'Add Product Type',
'metric': False,
'user': request.user,
'form': form,
})
@user_passes_test(lambda u: u.is_staff)
def edit_product_type(request, ptid):
pt = get_object_or_404(Product_Type, pk=ptid)
authed_users = pt.authorized_users.all()
pt_form = Product_TypeForm(instance=pt, initial={'authorized_users': authed_users})
delete_pt_form = Delete_Product_TypeForm(instance=pt)
if request.method == "POST" and request.POST.get('edit_product_type'):
pt_form = Product_TypeForm(request.POST, instance=pt)
if pt_form.is_valid():
pt.authorized_users.set(pt_form.cleaned_data['authorized_users'])
pt = pt_form.save()
pt.authorized_users.set(pt_form.cleaned_data['authorized_users'])
messages.add_message(
request,
messages.SUCCESS,
'Product type updated successfully.',
extra_tags="alert-success",
)
return HttpResponseRedirect(reverse("product_type"))
if request.method == "POST" and request.POST.get("delete_product_type"):
form2 = Delete_Product_TypeForm(request.POST, instance=pt)
if form2.is_valid():
pt.delete()
messages.add_message(
request,
messages.SUCCESS,
"Product type Deleted successfully.",
extra_tags="alert-success",
)
create_notification(event='other',
title='Deletion of %s' % pt.name,
description='The product type "%s" was deleted by %s' % (pt.name, request.user),
url=request.build_absolute_uri(reverse('product_type')),
icon="exclamation-triangle")
return HttpResponseRedirect(reverse("product_type"))
add_breadcrumb(title="Edit Product Type", top_level=False, request=request)
return render(request, 'dojo/edit_product_type.html', {
'name': 'Edit Product Type',
'metric': False,
'user': request.user,
'pt_form': pt_form,
'pt': pt})
@user_passes_test(lambda u: u.is_staff)
def add_product_to_product_type(request, ptid):
pt = get_object_or_404(Product_Type, pk=ptid)
form = Product_TypeProductForm(initial={'prod_type': pt})
add_breadcrumb(title="New %s Product" % pt.name, top_level=False, request=request)
return render(request, 'dojo/new_product.html',
{'form': form,
})
|
Python
| 0.024529
|
@@ -4135,86 +4135,8 @@
e()%0A
- pt.authorized_users.set(pt_form.cleaned_data%5B'authorized_users'%5D)%0A
|
1591faf725844ae76bdf0a4343837e1c3d2e16c0
|
update weight clipping params
|
common/models/discriminators.py
|
common/models/discriminators.py
|
import numpy as np
import math
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import cuda, optimizers, serializers, Variable
from chainer import function
from chainer.utils import type_check
from .ops import *
class DCGANDiscriminator(chainer.Chain):
def __init__(self, in_ch=3, base_size=128, down_layers=4, use_bn=True, noise_all_layers=False, conv_as_last=False, w_init=None):
layers = {}
self.down_layers = down_layers
self.conv_as_last = conv_as_last
if use_bn:
norm = 'bn'
else:
norm = None
act = F.leaky_relu
if w_init is None:
w_init = chainer.initializers.Normal(0.02)
layers['c_first'] = NNBlock(in_ch, base_size, nn='down_conv', norm=None, activation=act, noise=noise_all_layers, w_init=w_init)
base = base_size
for i in range(down_layers-1):
layers['c'+str(i)] = NNBlock(base, base*2, nn='down_conv', norm=norm, activation=act, noise=noise_all_layers, w_init=w_init)
base*=2
if conv_as_last:
layers['c_last'] = NNBlock(base, 1, nn='conv', norm=None, activation=None, w_init=w_init)
else:
layers['c_last'] = NNBlock(None, 1, nn='linear', norm=None, activation=None, w_init=w_init)
super(DCGANDiscriminator, self).__init__(**layers)
def __call__(self, x, test=False, retain_forward=False):
h = self.c_first(x, test=test, retain_forward=retain_forward)
for i in range(self.down_layers-1):
h = getattr(self, 'c'+str(i))(h, test=test, retain_forward=retain_forward)
if not self.conv_as_last:
_b, _ch, _w, _h = h.data.shape
self.last_shape=(_b, _ch, _w, _h)
h = F.reshape(h, (_b, _ch*_w*_h))
h = self.c_last(h, test=test, retain_forward=retain_forward)
return h
def clip(self, upper=0.1, lower=-0.1):
weight_clipping(self, upper=upper, lower=lower)
def differentiable_backward(self, g):
g = self.c_last.differentiable_backward(g)
if not self.conv_as_last:
_b, _ch, _w, _h = self.last_shape
g = F.reshape(g, (_b, _ch, _w, _h))
for i in reversed(range(self.down_layers-1)):
g = getattr(self, 'c'+str(i)).differentiable_backward(g)
g = self.c_first.differentiable_backward(g)
return g
|
Python
| 0
|
@@ -1920,16 +1920,17 @@
upper=0.
+0
1, lower
@@ -1933,16 +1933,17 @@
ower=-0.
+0
1):%0A
|
1c06ea2b4558851cb013dc908abc0c3e3c8f1955
|
add support for queuing system in _write method
|
communication/communications.py
|
communication/communications.py
|
#!/usr/bin/env python
from serial import Serial
from Queue import Queue
import fileinput
import readline
from time import sleep
from threading import Thread
# constants
TEAM = 0
# Operation Codes (OpCodes)
STOP = 0
FORWARD = 1
BACKWARD = 2
LEFT = 3
RIGHT = 4
GRAB = 5
class CommsToArduino(object):
queue = Queue()
internal_queue = Queue()
write_queue = Queue()
# these should be hard-coded, the values should not change
def __init__(self, port="/dev/ttyACM0",
rate=115200,
timeout=0,
connected=False):
self.isConnected = connected
self.port = port
self.comn = None # updated when we establish the connection
self.rate = rate
self.timeout = timeout
self.connect()
seqNo = False
ready = True
# this function establishes the connection between the devices
# and updates the boolean variable isConnected
def connect(self):
if self.isConnected is False and self.comn is None:
try:
self.comn = Serial(port=self.port,
baudrate=self.rate,
timeout=self.timeout)
self.isConnected = True
except OSError as ex:
print("Cannot connect to Arduino.")
print(ex.strerror)
def create_checksum(self, arg, opcode):
"""
Creates the checksum that is used to verify the opcode.
"""
return (arg + opcode) % 10
def _write(self, sig, opcode, arg, seqNo):
"""
Repeatedly sends the opcode to the robot until an OK is received.
Then waits until DONE, and sets "ready" true.
"""
received = None
checksum = self.create_checksum(arg, opcode)
# If message start with robotGrab
# The robot executed the action
while received[:5] != "Robot":
opcode_string = "%d%d%03d%d%d\r" % (sig, opcode, arg, checksum, seqNo)
self.comn.write(opcode_string)
sleep(0.2) # Possibly unnecessary
received = self.comn.readline()
# Checksum failure
if received == "Checksum failed\r\n":
print "Checksum Failed", opcode_string
seqNo = not seqNo
# Command did not get recognized
if received == "Wat?\r\n":
print "WAT WAT? Arduino did not understand its input", opcode_string
# Print robot executed action
print received
print
self.ready = True
def write(self, sig, opcode, arg):
"""
Public interface for sending opcodes to the robot
"""
if self.isConnected:
self.ready = False
self.seqNo = not self.seqNo
self.write_queue.put({
'sig': sig,
'opcode': opcode,
'arg': arg,
'seqNo': self.seqNo
})
else:
print("Not connected to Arduino.")
# plan on keeping this as a skeleton used purely for communication
class RobotComms(CommsToArduino):
_close = False
def __init__(self, port):
self.write_thread = Thread(target = self.write_stream)
self.read_thread = Thread(target = self.read_stream)
self.read_thread.start()
self.write_thread.start()
super(RobotComms, self).__init__(port)
def close(self):
self._close = True
self.comn.close()
self.queue.put("Robot Closed")
def write_stream(self):
while True:
if self._close:
self.queue.put("Read Stream Closed")
break
if not self.write_queue.empty():
msg_dict = self.write_queue.get()
self._write(
msg_dict["sig"],
msg_dict["opcode"],
msg_dict["arg"],
msg_dict["seqNo"]
)
def read_stream(self):
while True:
if self._close:
self.queue.put("Read Stream Closed")
break
sleep(0.5)
if self.comn and self.comn.is_open:
line = self.comn.readline()
if line.strip() != "":
self.queue.put(line)
def stop(self):
self.write(TEAM, STOP, 0)
def forward(self, speed):
self.write(TEAM, FORWARD, int(speed))
def backward(self, speed):
self.write(TEAM, BACKWARD, int(speed))
def left(self, speed):
self.write(TEAM, LEFT, int(speed))
def right(self, speed):
self.write(TEAM, RIGHT, int(speed))
def grab(self, speed):
self.write(TEAM, GRAB, speed)
if __name__ == "__main__":
print("This class is not designed to be run by hand")
|
Python
| 0
|
@@ -1601,20 +1601,9 @@
-Repeatedly s
+S
ends
@@ -1603,26 +1603,24 @@
Sends the
-op
code to the
@@ -1623,307 +1623,81 @@
the
-robot until an OK is received.%0A Then waits until DONE, and sets %22ready%22 true.%0A %22%22%22%0A%0A received = None%0A checksum = self.create_checksum(arg, opcode)%0A%0A # If message start with robotGrab%0A # The robot executed the action%0A while received%5B:5%5D != %22Robot%22:%0A
+Arduino%0A %22%22%22%0A checksum = self.create_checksum(arg, opcode)%0A
@@ -1783,25 +1783,21 @@
-
self.
-comn.write
+to_robot
(opc
@@ -1820,536 +1820,81 @@
- sleep(0.2) # Possibly unnecessary%0A received = self.comn.readline()%0A %0A # Checksum failure%0A if received == %22Checksum failed%5Cr%5Cn%22:%0A print %22Checksum Failed%22, opcode_string%0A seqNo = not seqNo%0A %0A # Command did not get recognized%0A if received == %22Wat?%5Cr%5Cn%22:%0A print %22WAT WAT? Arduino did not understand its input%22, opcode_string%0A %0A # Print robot executed action%0A print received%0A print%0A self.ready = True
+return%0A%0A def to_robot(self, message):%0A self.comn.write(message)
%0A%0A
|
0e409813d360c8036d4a6a74c82d986f1b32472d
|
Improve error message when base64 data is an invalid type
|
drf_extra_fields/fields.py
|
drf_extra_fields/fields.py
|
import base64
import binascii
import imghdr
import uuid
from django.core.exceptions import ValidationError
from django.core.files.base import ContentFile
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from rest_framework.fields import (
DateField,
DateTimeField,
DictField,
FileField,
FloatField,
ImageField,
IntegerField,
)
from rest_framework.utils import html
from .compat import (
DateRange,
DateTimeTZRange,
NumericRange,
postgres_fields,
)
DEFAULT_CONTENT_TYPE = "application/octet-stream"
class Base64FieldMixin(object):
@property
def ALLOWED_TYPES(self):
raise NotImplementedError
@property
def INVALID_FILE_MESSAGE(self):
raise NotImplementedError
@property
def INVALID_TYPE_MESSAGE(self):
raise NotImplementedError
EMPTY_VALUES = (None, '', [], (), {})
def __init__(self, *args, **kwargs):
self.represent_in_base64 = kwargs.pop('represent_in_base64', False)
super(Base64FieldMixin, self).__init__(*args, **kwargs)
def to_internal_value(self, base64_data):
# Check if this is a base64 string
if base64_data in self.EMPTY_VALUES:
return None
if isinstance(base64_data, six.string_types):
# Strip base64 header.
if ';base64,' in base64_data:
header, base64_data = base64_data.split(';base64,')
# Try to decode the file. Return validation error if it fails.
try:
decoded_file = base64.b64decode(base64_data)
except (TypeError, binascii.Error, ValueError):
raise ValidationError(self.INVALID_FILE_MESSAGE)
# Generate file name:
file_name = str(uuid.uuid4())[:12] # 12 characters are more than enough.
# Get the file name extension:
file_extension = self.get_file_extension(file_name, decoded_file)
if file_extension not in self.ALLOWED_TYPES:
raise ValidationError(self.INVALID_TYPE_MESSAGE)
complete_file_name = file_name + "." + file_extension
data = ContentFile(decoded_file, name=complete_file_name)
return super(Base64FieldMixin, self).to_internal_value(data)
raise ValidationError(_('This is not an base64 string'))
def get_file_extension(self, filename, decoded_file):
raise NotImplementedError
def to_representation(self, file):
if self.represent_in_base64:
try:
with open(file.path, 'rb') as f:
return base64.b64encode(f.read()).decode()
except Exception:
raise IOError("Error encoding file")
else:
return super(Base64FieldMixin, self).to_representation(file)
class Base64ImageField(Base64FieldMixin, ImageField):
"""
A django-rest-framework field for handling image-uploads through raw post data.
It uses base64 for en-/decoding the contents of the file.
"""
ALLOWED_TYPES = (
"jpeg",
"jpg",
"png",
"gif"
)
INVALID_FILE_MESSAGE = _("Please upload a valid image.")
INVALID_TYPE_MESSAGE = _("The type of the image couldn't be determined.")
def get_file_extension(self, filename, decoded_file):
extension = imghdr.what(filename, decoded_file)
extension = "jpg" if extension == "jpeg" else extension
return extension
class HybridImageField(Base64ImageField):
"""
A django-rest-framework field for handling image-uploads through
raw post data, with a fallback to multipart form data.
"""
def to_internal_value(self, data):
"""
Try Base64Field first, and then try the ImageField
``to_internal_value``, MRO doesn't work here because
Base64FieldMixin throws before ImageField can run.
"""
try:
return Base64FieldMixin.to_internal_value(self, data)
except ValidationError:
return ImageField.to_internal_value(self, data)
class Base64FileField(Base64FieldMixin, FileField):
"""
A django-rest-framework field for handling file-uploads through raw post data.
It uses base64 for en-/decoding the contents of the file.
"""
@property
def ALLOWED_TYPES(self):
raise NotImplementedError('List allowed file extensions')
INVALID_FILE_MESSAGE = _("Please upload a valid file.")
INVALID_TYPE_MESSAGE = _("The type of the file couldn't be determined.")
def get_file_extension(self, filename, decoded_file):
raise NotImplementedError('Implement file validation and return matching extension.')
class RangeField(DictField):
range_type = None
default_error_messages = {
'not_a_dict': _('Expected a dictionary of items but got type "{input_type}".'),
'too_much_content': _('Extra content not allowed "{extra}".'),
}
def to_internal_value(self, data):
"""
Range instances <- Dicts of primitive datatypes.
"""
if html.is_html_input(data):
data = html.parse_html_dict(data)
if not isinstance(data, dict):
self.fail('not_a_dict', input_type=type(data).__name__)
validated_dict = {}
for key in ('lower', 'upper'):
try:
value = data.pop(key)
except KeyError:
continue
validated_dict[six.text_type(key)] = self.child.run_validation(value)
for key in ('bounds', 'empty'):
try:
value = data.pop(key)
except KeyError:
continue
validated_dict[six.text_type(key)] = value
if data:
self.fail('too_much_content', extra=', '.join(map(str, data.keys())))
return self.range_type(**validated_dict)
def to_representation(self, value):
"""
Range instances -> dicts of primitive datatypes.
"""
if value.isempty:
return {'empty': True}
lower = self.child.to_representation(value.lower) if value.lower is not None else None
upper = self.child.to_representation(value.upper) if value.upper is not None else None
return {'lower': lower,
'upper': upper,
'bounds': value._bounds}
class IntegerRangeField(RangeField):
child = IntegerField()
range_type = NumericRange
class FloatRangeField(RangeField):
child = FloatField()
range_type = NumericRange
class DateTimeRangeField(RangeField):
child = DateTimeField()
range_type = DateTimeTZRange
class DateRangeField(RangeField):
child = DateField()
range_type = DateRange
if postgres_fields is not None:
# monkey patch modelserializer to map Native django Range fields to
# drf_extra_fiels's Range fields.
from rest_framework.serializers import ModelSerializer
ModelSerializer.serializer_field_mapping[postgres_fields.DateTimeRangeField] = DateTimeRangeField
ModelSerializer.serializer_field_mapping[postgres_fields.DateRangeField] = DateRangeField
ModelSerializer.serializer_field_mapping[postgres_fields.IntegerRangeField] = IntegerRangeField
ModelSerializer.serializer_field_mapping[postgres_fields.FloatRangeField] = FloatRangeField
|
Python
| 0.000021
|
@@ -2324,16 +2324,30 @@
rror(_('
+Invalid type.
This is
@@ -2366,17 +2366,60 @@
4 string
-'
+: %7B%7D'.format(%0A type(base64_data))
))%0A%0A
|
cb34162de51f36e2d6b846cfbb6e9d6fe8801e48
|
implement send message
|
client/client.py
|
client/client.py
|
class OTC_Client(object):
def __init__(self):
raise NotImplementedError("TODO: write a client")
def send(message):
raise NotImplementedError("TODO: write send method")
def recieve():
raise NotImplementedError("TODO: write recieve")
def decrypt(message, pad_index=current_index):
raise NotImplementedError("TODO: clients need to decrypt messages")
def encrypt(encrypt):
raise NotImplementedError("TODO: clients need to encrypt messages")
def connect():
raise NotImplementedError("TODO:clients need to be able to connect to server")
if __name__ == "__main__":
client = OTC_Client()
|
Python
| 0.000029
|
@@ -43,16 +43,17 @@
(self):%0A
+%0A
@@ -125,16 +125,113 @@
ssage):%0A
+ payload = %7B'message':message%7D%0A r = requets.post(self.server_address,data=payload)%0A
@@ -604,18 +604,77 @@
connect(
-):
+server_address):%0A self.server_address = server_address
%0A
|
2ac53588f9d0cc5ce32f6fe15b715f5178a1a264
|
Send more info from i2pcontrol.
|
client/client.py
|
client/client.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# client.py - i2p clients share what they know to a centralized server.
# Author: Chris Barry <chris@barry.im>
# License: This is free and unencumbered software released into the public domain.
import argparse
import i2py.netdb
import i2py.control
import i2py.control.pyjsonrpc
import os
routers = []
VERSION = 1
# Aggregreates a buncha data
def print_entry(ent):
n = ent.dict()
country = '??'
ipv6 = False
firewalled = False
for a in n['addrs']:
if a.location and a.location.country:
country = a.location.country
ipv6 = 1 if ':' in a.location.ip else 0
firewalled = 1 if a.firewalled else 0
break
routers.append({
'public_key' : n['pubkey'],
'sign_key' : n['cert']['signature_type'],
'crypto_key' : n['cert']['crypto_type'],
'version' : n['options']['coreVersion'],
'caps' : n['options']['caps'],
'country' : country,
'ipv6' : ipv6,
'firewalled' : firewalled,
})
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', help='prints request json instead of sending',type=bool, default=False)
parser.add_argument('-i', '--i2p-directory', help='I2P home',type=str, default=os.path.join(os.environ['HOME'],'.i2p','netDb'))
parser.add_argument('-s', '--server', help='where to send data',type=str, default='tuuql5avhexhn7oq4lhyfythxejgk4qpavxvtniu3u3hwfwkogmq.b32.i2p')
parser.add_argument('-p', '--port', help='where to send data',type=int, default='80')
parser.add_argument('-t', '--token', help='token to prove yourself',type=str, default='')
args = parser.parse_args()
if not args.token:
print 'Use a token. See --help for usage.'
raise SystemExit, 1
rpc = i2py.control.pyjsonrpc.HttpClient(
url = ''.join(['http://',args.server,':',str(args.port)]),
gzipped = True
)
# Local router stuff
try:
a = i2py.control.I2PController()
except:
print 'I2PControl not installed, or router is down.'
raise SystemExit, 1
ri_vals = a.get_router_info()
this_router = {
'activepeers' : ri_vals['i2p.router.netdb.activepeers'],
'fastpeers' : ri_vals['i2p.router.netdb.fastpeers'],
'tunnelsparticipating' : ri_vals['i2p.router.net.tunnels.participating'],
'crypto.garlic.decryptFail' : a.get_rate(stat='crypto.garlic.decryptFail', period=3600),
#'peer.failedLookupRate' : a.get_rate(stat='peer.failedLookupRate', period=3600),
'stream.trend' : a.get_rate(stat='stream.trend', period=3600),
'stream.con.windowSizeAtCongestion' : a.get_rate(stat='stream.con.windowSizeAtCongestion', period=3600),
}
# NetDB Stuff
i2py.netdb.inspect(hook=print_entry,netdb_dir=args.i2p_directory)
try:
if args.debug:
# To check the approximate size of a request, run this. No network call is sent. Results in bytes.
# $ python client.py | gzip --stdout | wc --bytes
print i2py.control.pyjsonrpc.create_request_json('collect', token=args.token, netdb=routers, local=this_router, version=VERSION)
else:
rpc.collect(token=args.token, netdb=routers, local=this_router, version=VERSION)
except i2py.control.pyjsonrpc.JsonRpcError, err:
print 'Error code {}: {} -- {}'.format(err.code, err.message, err.data)
except:
print 'Could not submit due to other error.'
|
Python
| 0
|
@@ -2063,35 +2063,24 @@
-
: ri_vals%5B'i
@@ -2137,27 +2137,16 @@
-
-
: ri_val
@@ -2202,27 +2202,16 @@
ipating'
-
: ri_
@@ -2259,30 +2259,16 @@
g'%5D,%0A%09%09'
-crypto.garlic.
decryptF
@@ -2271,16 +2271,19 @@
yptFail'
+
@@ -2344,16 +2344,51 @@
=3600),%0A
+%09%09# TODO: This is being all weird.%0A
%09%09#'peer
@@ -2405,28 +2405,16 @@
kupRate'
-
: a.get
@@ -2470,31 +2470,52 @@
%0A%09%09'
-stream.trend'
+failedLookupRate' : 0,%0A%09%09'streamtrend'
@@ -2573,27 +2573,16 @@
00),%0A%09%09'
-stream.con.
windowSi
|
afc39de6009b28c6378d1ce605d8ca91e240accd
|
remove obsolete import
|
osmaxx/conversion/converters/converter_gis/helper/osm_boundaries_importer.py
|
osmaxx/conversion/converters/converter_gis/helper/osm_boundaries_importer.py
|
from itertools import chain
from sqlalchemy import MetaData, Table, create_engine, func
from sqlalchemy.engine.url import URL
from sqlalchemy.sql import select, insert, expression
from geoalchemy2 import Geometry, Geography
class OSMBoundariesImporter:
def __init__(self):
self._osm_boundaries_tables = ['coastline_l', 'landmass_a', 'sea_a']
_osm_boundaries_db_connection_parameters = dict(
username='osmboundaries',
password='osmboundaries',
port=5432,
database='osmboundaries',
host='osmboundaries-database',
)
osm_boundaries_db_connection = URL('postgresql', **_osm_boundaries_db_connection_parameters)
self._osm_boundaries_db_engine = create_engine(osm_boundaries_db_connection)
_local_db_connection_parameters = dict(
username='postgres',
password='postgres',
port=5432,
database='osmaxx_db',
)
local_db_connection = URL('postgresql', **_local_db_connection_parameters)
self._local_db_engine = create_engine(local_db_connection)
assert Geometry, Geography # assert classes needed for GIS-reflection are available
self._db_meta_data = MetaData()
self._table_metas = self._get_meta_tables()
def _autoinspect_tables(self, tables, autoloader):
return {
table: Table(table, self._db_meta_data, autoload=True, autoload_with=autoloader)
for table in tables
}
def _get_meta_tables(self):
meta_boundaries = self._autoinspect_tables(
tables=self._osm_boundaries_tables, autoloader=self._osm_boundaries_db_engine
)
return meta_boundaries
def load_area_specific_data(self, *, extent):
self._create_tables_on_local_db()
self._load_boundaries_tables(extent)
def _create_tables_on_local_db(self):
self._db_meta_data.create_all(self._local_db_engine)
def _load_boundaries_tables(self, extent):
multipolygon_cast = Geometry(geometry_type='MULTIPOLYGON', srid=4326)
multilinestring_cast = Geometry(geometry_type='MULTILINESTRING', srid=4326)
table_casts = {
'sea_a': multipolygon_cast,
'landmass_a': multipolygon_cast,
'coastline_l': multilinestring_cast,
}
for table_name in self._osm_boundaries_tables:
source_table_meta = self._table_metas[table_name]
query = select([
source_table_meta.c.ogc_fid,
source_table_meta.c.fid,
source_table_meta.c.wkb_geometry
])
query = query.where(func.ST_Intersects(source_table_meta.c.wkb_geometry, extent.ewkt))
self._execute_and_insert_into_local_db(query, source_table_meta, source_engine=self._osm_boundaries_db_engine)
from sqlalchemy_views import CreateView
view_definition_query = select([
source_table_meta.c.ogc_fid,
source_table_meta.c.fid,
expression.cast(
func.ST_Multi(func.ST_Intersection(source_table_meta.c.wkb_geometry, extent.ewkt)),
table_casts[table_name]
).label('geom')
]).where(func.ST_Intersects(source_table_meta.c.wkb_geometry, extent.ewkt))
view_meta = MetaData()
view = Table(table_name, view_meta, schema='view_osmaxx')
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import text
query_defintion_string = str(
view_definition_query.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True})
)
query_defintion_string = query_defintion_string.replace('ST_AsEWKB(CAST', 'CAST')
query_defintion_string = query_defintion_string.replace('))) AS geom', ')) AS geom')
query_defintion_text = text(query_defintion_string)
create_view = CreateView(view, query_defintion_text, or_replace=True)
self._local_db_engine.execute(create_view)
def _execute_and_insert_into_local_db(self, query, table_meta, source_engine=None):
query_result = source_engine.execute(query)
if query_result.rowcount > 0:
results = query_result.fetchall()
for result in results:
self._local_db_engine.execute(
insert(table_meta, values=result)
)
|
Python
| 0.000998
|
@@ -1,32 +1,4 @@
-from itertools import chain%0A
from
|
f1ed9cf573ec8aaa61e9aefb124b453a5a353db4
|
fix pointe-claire
|
ca_qc_pointe_claire/people.py
|
ca_qc_pointe_claire/people.py
|
from pupa.scrape import Scraper
from utils import lxmlize, CanadianLegislator as Legislator
import re
COUNCIL_PAGE = 'http://www.ville.pointe-claire.qc.ca/en/city-hall-administration/your-council/municipal-council.html'
class PointeClairePersonScraper(Scraper):
def get_people(self):
page = lxmlize(COUNCIL_PAGE)
mayor = page.xpath('.//div[@class="item-page clearfix"]//table[1]//p')[1]
name = mayor.xpath('.//strong/text()')[0]
p = Legislator(name=name, post_id='Pointe-Claire', role='Maire')
p.add_source(COUNCIL_PAGE)
phone = re.findall(r'[0-9]{3} [0-9]{3}-[0-9]{4}', mayor.text_content())[0].replace(' ', '-')
p.add_contact('voice', phone, 'legislature')
yield p
rows = page.xpath('//tr')
for i, row in enumerate(rows):
if i % 2 == 0:
continue
councillors = row.xpath('./td')
for j, councillor in enumerate(councillors):
name = councillor.text_content()
# rows[i + 1].xpath('.//td//a[contains(@href, "maps")]/text()')[j] # district number
district = rows[i + 1].xpath('.//td/p[1]/text()')[j].replace(' / ', '/')
p = Legislator(name=name, post_id=district, role='Conseiller')
p.add_source(COUNCIL_PAGE)
p.image = councillor.xpath('.//img/@src')[0]
phone = re.findall(r'[0-9]{3} [0-9]{3}-[0-9]{4}', rows[i + 1].xpath('.//td')[j].text_content())[0].replace(' ', '-')
p.add_contact('voice', phone, 'legislature')
yield p
|
Python
| 0.000092
|
@@ -570,33 +570,36 @@
ndall(r'%5B0-9%5D%7B3%7D
-
+%5B -%5D
%5B0-9%5D%7B3%7D-%5B0-9%5D%7B4
@@ -1312,17 +1312,20 @@
%5B0-9%5D%7B3%7D
-
+%5B -%5D
%5B0-9%5D%7B3%7D
|
9a3081c58818ad28e216e9a14fc573c4a392f55f
|
Add method for getting csv Hours Worked reports for Jobs Board and Payment Plan
|
invoice/management/commands/ticket_time_csv.py
|
invoice/management/commands/ticket_time_csv.py
|
# -*- encoding: utf-8 -*-
import csv
import os
from django.core.management.base import BaseCommand
from invoice.models import TimeRecord
class Command(BaseCommand):
help = "Export ticket time to a CSV file"
def handle(self, *args, **options):
"""Export ticket time to a CSV file.
Columns:
- ticket number
- user name
- billable - True or False
- date started
- minutes
"""
tickets = (
732,
746,
747,
748,
749,
750,
751,
752,
753,
754,
755,
756,
757,
758,
759,
906
)
tickets = list(tickets)
tickets.sort()
file_name = '{}_ticket_time.csv'.format(
'_'.join([str(i) for i in tickets])
)
if os.path.exists(file_name):
raise Exception(
"Export file, '{}', already exists. "
"Cannot export time.".format(file_name)
)
with open(file_name, 'w', newline='') as csv_file:
csv_writer = csv.writer(csv_file, dialect='excel-tab')
for tr in TimeRecord.objects.filter(ticket__pk__in=tickets):
csv_writer.writerow([
tr.ticket.pk,
tr.user.username,
tr.billable,
tr.date_started,
tr._timedelta_minutes(),
])
print("Exported time to {}".format(file_name))
|
Python
| 0
|
@@ -209,16 +209,20 @@
V file%22%0A
+
%0A def
@@ -226,256 +226,187 @@
def
-handle(self, *args, **options):%0A %22%22%22Export ticket time to a CSV file.%0A%0A Columns:%0A%0A - ticket number%0A - user name%0A - billable - True or False%0A - date started%0A - minutes%0A%0A %22%22%22%0A tickets = (
+_jobs_board_tickets(self):%0A return (%0A 732,%0A 746,%0A 747,%0A 748,%0A 749,%0A 750,%0A 751,%0A 752,
%0A
@@ -407,34 +407,34 @@
2,%0A 7
+5
3
-2
,%0A 74
@@ -424,34 +424,34 @@
3,%0A 7
+5
4
-6
,%0A 74
@@ -441,34 +441,34 @@
4,%0A 7
-47
+55
,%0A 74
@@ -458,34 +458,34 @@
5,%0A 7
-48
+56
,%0A 74
@@ -475,34 +475,34 @@
6,%0A 7
-49
+57
,%0A 75
@@ -493,33 +493,33 @@
,%0A 75
-0
+8
,%0A 75
@@ -510,33 +510,33 @@
,%0A 75
-1
+9
,%0A 75
@@ -525,35 +525,35 @@
59,%0A
-752
+906
,%0A 75
@@ -542,35 +542,35 @@
06,%0A
-753
+976
,%0A 75
@@ -555,38 +555,107 @@
976,%0A
+)%0A
-75
+%0A def _payment_plan_tickets(self):%0A return (%0A 64
4,%0A 7
@@ -653,109 +653,344 @@
+)%0A
-755,%0A 756,%0A 757,%0A 758,%0A 759,%0A 906%0A
+%0A def handle(self, *args, **options):%0A %22%22%22Export ticket time to a CSV file.%0A%0A Columns:%0A%0A - ticket number%0A - user name%0A - billable - True or False%0A - date started%0A - minutes%0A%0A %22%22%22%0A tickets = self._jobs_board_tickets()%0A # tickets = self._payment_plan_tickets(
)%0A
@@ -1683,16 +1683,57 @@
llable,%0A
+ tr.has_invoice_line,%0A
|
c0e2fb3068270ebe7183587db1264c193c43b6cc
|
fix previous commit wrong domain
|
redpanal/redpanal/social/models.py
|
redpanal/redpanal/social/models.py
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.db.models.signals import post_save
from django.core.urlresolvers import reverse
from actstream import action
from taggit.managers import TaggableManager
from taggit.models import Tag
from redpanal.audio.models import Audio
from redpanal.project.models import Project
class Message(models.Model):
msg = models.TextField(verbose_name=_('message'))
user = models.ForeignKey(User, verbose_name=_('user'), editable=False)
created_at = models.DateTimeField(verbose_name=_('created at'), auto_now_add=True)
tags = TaggableManager(verbose_name=_('hashtags'), blank=True)
mentioned_users = models.ManyToManyField(User, verbose_name=_('hashtags'), blank=True,
null=True, editable=False,
related_name="mentioned_messages")
content_type = models.ForeignKey(ContentType, null=True, editable=False)
object_id = models.PositiveIntegerField(null=True, editable=False)
content_object = generic.GenericForeignKey('content_type', 'object_id')
_msg_html_cache = models.TextField(editable=False, blank=True, null=True)
def __unicode__(self):
return mark_safe(self.as_html())
def as_html(self):
if not self._msg_html_cache:
self._msg_html_cache = Message.to_html(self.msg)
self.save()
return self._msg_html_cache
@staticmethod
def to_html(msg):
import re
USER_REGEX = re.compile(r'@(\w+)')
HASHTAG_REGEX = re.compile(r'#(\w+)')
# ToDo: deberia obtenerse el dominio del sitio de forma dinamica?
OBJECTS_URL_REGEX = re.compile(r'(https?://)(grafiks\.info:8080)/([p|a])/([0-9a-zA-Z_-]+)/?') #beta\.redpanal\.org
URL_REGEX = re.compile(r'(https?://)(www\.)?(\S+)')
def replace_user(match):
if match:
username = match.group(1)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
return match.group()
return '<a href="%s">@%s</a>' % (user.get_absolute_url(), username)
def replace_hashtags(match):
if match:
tag = match.group(1)
try:
tagobj = Tag.objects.get(name=tag)
except Tag.DoesNotExist:
return match.group()
return '<a href="%s">#%s</a>' % (reverse("hashtaged-list", None, (tagobj.slug,)), tag)
def replace_objects_urls(match):
if match:
slug = match.group(4)
if match.group(3) == 'a':
try:
obj = Audio.objects.get(slug=slug)
except Audio.DoesNotExist:
return match.group()
elif match.group(3) == 'p':
try:
obj = Project.objects.get(slug=slug)
except Project.DoesNotExist:
return match.group()
else:
return match.group()
text = obj.name[:25] + (obj.name[25:] and '..')
return '<a href="%s"><i class="fa alias-%s"></i>%s</a>' % (obj.get_absolute_url(), obj._meta.verbose_name, text)
def replace_urls(match):
if match:
url = match.group(0)
text = match.group(3)[:25] + (match.group(3)[25:] and '..')
return '<a href="%s" target="_blank">%s</a>' % (url, text)
msg = msg.replace("\n", "<br>")
html = re.sub(USER_REGEX, replace_user, msg)
html = re.sub(HASHTAG_REGEX, replace_hashtags, html)
html = re.sub(OBJECTS_URL_REGEX, replace_objects_urls, html)
html = re.sub(URL_REGEX, replace_urls, html)
return html
@staticmethod
def extract_mentioned_users(msg):
"""Returns a list of users that are mentioned with @userfoo @UserBar"""
words = msg.split()
users = filter(lambda word: word.startswith('@'), words)
users = [u[1:] for u in users]
return User.objects.filter(username__in=users)
@staticmethod
def extract_hashtags(msg):
"""Returns the list of hashtags in the msg"""
msg = msg.replace(".", " ").replace(";", " ").replace(",", " ")
words = msg.split()
tags = filter(lambda word: word.startswith('#'), words)
return [tag[1:] for tag in tags]
def save(self, *args, **kwargs):
super(Message, self).save(*args, **kwargs)
tags = Message.extract_hashtags(self.msg)
self.tags.clear()
if tags:
self.tags.add(*tags)
mentioned_users = Message.extract_mentioned_users(self.msg)
self.mentioned_users.clear()
if mentioned_users:
self.mentioned_users.add(*mentioned_users)
class Meta:
ordering = ['-created_at']
def message_created_signal(sender, instance, created, **kwargs):
if created:
action.send(instance.user, verb='commented', action_object=instance)
post_save.connect(message_created_signal, sender=Message, dispatch_uid="message_created_signal")
|
Python
| 0.000002
|
@@ -1941,26 +1941,27 @@
//)(
-grafiks%5C.info:8080
+beta%5C.redpanal%5C.org
)/(%5B
@@ -1988,35 +1988,34 @@
+)/?') #
-beta%5C.redpanal%5C.org
+grafiks%5C.info:8080
%0A
|
93ff10b9add58dbc045c6656715bf67409a51404
|
fix style-check error (#416)
|
gapic/schema/naming.py
|
gapic/schema/naming.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import dataclasses
import os
import re
from typing import cast, List, Match, Tuple
from google.protobuf import descriptor_pb2
from gapic import utils
from gapic.generator import options
# See https://github.com/python/mypy/issues/5374 for details on the mypy false
# positive.
@dataclasses.dataclass(frozen=True) # type: ignore
class Naming(abc.ABC):
"""Naming data for an API.
This class contains the naming nomenclature used for this API
within templates.
An concrete child of this object is made available to every template
(as ``api.naming``).
"""
name: str = ''
namespace: Tuple[str, ...] = dataclasses.field(default_factory=tuple)
version: str = ''
product_name: str = ''
proto_package: str = ''
def __post_init__(self):
if not self.product_name:
self.__dict__['product_name'] = self.name
@staticmethod
def build(
*file_descriptors: descriptor_pb2.FileDescriptorProto,
opts: options.Options = options.Options(),
) -> 'Naming':
"""Return a full Naming instance based on these file descriptors.
This is pieced together from the proto package names as well as the
``google.api.metadata`` file annotation. This information may be
present in one or many files; this method is tolerant as long as
the data does not conflict.
Args:
file_descriptors (Iterable[~.FileDescriptorProto]): A list of
file descriptor protos. This list should only include the
files actually targeted for output (not their imports).
Returns:
~.Naming: A :class:`~.Naming` instance which is provided to
templates as part of the :class:`~.API`.
Raises:
ValueError: If the provided file descriptors contain contradictory
information.
"""
# Determine the set of proto packages.
proto_packages = {fd.package for fd in file_descriptors}
root_package = os.path.commonprefix(tuple(proto_packages)).rstrip('.')
# Sanity check: If there is no common ground in the package,
# we are obviously in trouble.
if not root_package:
raise ValueError(
'The protos provided do not share a common root package. '
'Ensure that all explicitly-specified protos are for a '
'single API. '
f'The packages we got are: {", ".join(proto_packages)}'
)
# Define the valid regex to split the package.
#
# It is not necessary for the regex to be as particular about package
# name validity (e.g. avoiding .. or segments starting with numbers)
# because protoc is guaranteed to give us valid package names.
pattern = r'^((?P<namespace>[a-z0-9_.]+)\.)?(?P<name>[a-z0-9_]+)'
# Only require the version portion of the regex if the version is
# present.
#
# This code may look counter-intuitive (why not use ? to make it
# optional), but the engine's greediness routine will decide that
# the version is the name, which is not what we want.
version = r'\.(?P<version>v[0-9]+(p[0-9]+)?((alpha|beta)[0-9]+)?)'
if re.search(version, root_package):
pattern += version
# Okay, do the match
match = cast(Match,
re.search(pattern=pattern, string=root_package)).groupdict()
match['namespace'] = match['namespace'] or ''
klass = OldNaming if opts.old_naming else NewNaming
package_info = klass(
name=match['name'].capitalize(),
namespace=tuple(
i.capitalize() for i in match['namespace'].split('.') if i
),
product_name=match['name'].capitalize(),
proto_package=root_package,
version=match.get('version', ''),
)
# Sanity check: Ensure that the package directives all inferred
# the same information.
if not package_info.version and len(proto_packages) > 1:
raise ValueError('All protos must have the same proto package '
'up to and including the version.')
# If a naming information was provided on the CLI, override the naming
# value.
#
# We are liberal about what formats we take on the CLI; it will
# likely make sense to many users to use dot-separated namespaces and
# snake case, so handle that and do the right thing.
if opts.name:
package_info = dataclasses.replace(package_info, name=' '.join((
i.capitalize() for i in opts.name.replace('_', ' ').split(' ')
)))
if opts.namespace:
package_info = dataclasses.replace(package_info, namespace=tuple(
# The join-and-split on "." here causes us to expand out
# dot notation that we may have been sent; e.g. a one-tuple
# with ('x.y',) will become a two-tuple: ('x', 'y')
i.capitalize() for i in '.'.join(opts.namespace).split('.')
))
# Done; return the naming information.
return package_info
def __bool__(self):
"""Return True if any of the fields are truthy, False otherwise."""
return any(
(getattr(self, i.name) for i in dataclasses.fields(self)),
)
@property
def long_name(self) -> str:
"""Return an appropriate title-cased long name."""
return ' '.join(tuple(self.namespace) + (self.name,))
@property
def module_name(self) -> str:
"""Return the appropriate Python module name."""
return utils.to_valid_module_name(self.name)
@property
def module_namespace(self) -> Tuple[str, ...]:
"""Return the appropriate Python module namespace as a tuple."""
return tuple(utils.to_valid_module_name(i) for i in self.namespace)
@property
def namespace_packages(self) -> Tuple[str, ...]:
"""Return the appropriate Python namespace packages."""
answer: List[str] = []
for cursor in (i.lower() for i in self.namespace):
answer.append(f'{answer[-1]}.{cursor}' if answer else cursor)
return tuple(answer)
@property
@abc.abstractmethod
def versioned_module_name(self) -> str:
"""Return the versiond module name (e.g. ``apiname_v1``).
If there is no version, this is the same as ``module_name``.
"""
raise NotImplementedError
@property
def warehouse_package_name(self) -> str:
"""Return the appropriate Python package name for Warehouse."""
# Piece the name and namespace together to come up with the
# proper package name.
answer = list(self.namespace) + self.name.split(' ')
return '-'.join(answer).lower()
class NewNaming(Naming):
@property
def versioned_module_name(self) -> str:
"""Return the versiond module name (e.g. ``apiname_v1``).
If there is no version, this is the same as ``module_name``.
"""
return self.module_name + (f'_{self.version}' if self.version else '')
class OldNaming(Naming):
@property
def versioned_module_name(self) -> str:
"""Return the versiond module name (e.g. ``apiname_v1``).
If there is no version, this is the same as ``module_name``.
"""
return self.module_name + (f'.{self.version}' if self.version else '')
|
Python
| 0
|
@@ -860,16 +860,18 @@
sitive.%0A
+%0A%0A
@datacla
|
a3db681a63a5908d38ca5fcac4bdd96f5e4fed7e
|
use typing.TYPE_CHECKING to avoid flake8 failure (#398)
|
launch/launch/event_handlers/on_execution_complete.py
|
launch/launch/event_handlers/on_execution_complete.py
|
# Copyright 2019 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections.abc
from typing import Callable
from typing import cast
from typing import List # noqa
from typing import Optional
from typing import Text
from typing import Union
from ..event import Event
from ..event_handler import EventHandler
from ..events import ExecutionComplete
from ..launch_context import LaunchContext
from ..launch_description_entity import LaunchDescriptionEntity
from ..some_actions_type import SomeActionsType
class OnExecutionComplete(EventHandler):
"""
Convenience class for handling an action completion event.
It may be configured to only handle the completion of a specific action,
or to handle them all.
"""
def __init__(
self,
*,
target_action: Optional['Action'] = None,
on_completion: Union[SomeActionsType, Callable[[int], Optional[SomeActionsType]]],
**kwargs
) -> None:
"""Create an OnExecutionComplete event handler."""
from ..action import Action # noqa
if not isinstance(target_action, (Action, type(None))):
raise ValueError("OnExecutionComplete requires an 'Action' as the target")
super().__init__(
matcher=(
lambda event: (
isinstance(event, ExecutionComplete) and (
target_action is None or
event.action == target_action
)
)
),
entities=None,
**kwargs,
)
self.__target_action = target_action
# TODO(wjwwood) check that it is not only callable, but also a callable that matches
# the correct signature for a handler in this case
self.__on_completion = on_completion
self.__actions_on_completion = [] # type: List[LaunchDescriptionEntity]
if callable(on_completion):
# Then on_completion is a function or lambda, so we can just call it, but
# we don't put anything in self.__actions_on_completion because we cannot
# know what the function will return.
pass
else:
# Otherwise, setup self.__actions_on_completion
if isinstance(on_completion, collections.abc.Iterable):
for entity in on_completion:
if not isinstance(entity, LaunchDescriptionEntity):
raise ValueError(
"expected all items in 'on_completion' iterable to be of type "
"'LaunchDescriptionEntity' but got '{}'".format(type(entity)))
self.__actions_on_completion = list(on_completion)
else:
self.__actions_on_completion = [on_completion]
# Then return it from a lambda and use that as the self.__on_completion callback.
self.__on_completion = lambda event, context: self.__actions_on_completion
def handle(self, event: Event, context: LaunchContext) -> Optional[SomeActionsType]:
"""Handle the given event."""
return self.__on_completion(cast(ExecutionComplete, event), context)
@property
def handler_description(self) -> Text:
"""Return the string description of the handler."""
# TODO(jacobperron): revisit how to describe known actions that are passed in.
# It would be nice if the parent class could output their description
# via the 'entities' property.
if self.__actions_on_completion:
return '<actions>'
return '{}'.format(self.__on_completion)
@property
def matcher_description(self) -> Text:
"""Return the string description of the matcher."""
if self.__target_action is None:
return 'event == ExecutionComplete'
return 'event == ExecutionComplete and event.action == Action({})'.format(
hex(id(self.__target_action))
)
|
Python
| 0
|
@@ -750,24 +750,57 @@
import Text%0A
+from typing import TYPE_CHECKING%0A
from typing
@@ -1075,16 +1075,69 @@
nsType%0A%0A
+if TYPE_CHECKING:%0A from .. import Action # noqa%0A%0A
%0Aclass O
|
8fa0ca6a307f7b23545d297d17f8eb05f037978f
|
fix one e2e test problem (#459)
|
test/e2e/utils.py
|
test/e2e/utils.py
|
# Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from kfserving import KFServingClient
KFServing = KFServingClient(config_file="~/.kube/config")
def wait_for_kfservice_ready(name, namespace='kfserving-ci-e2e-test', Timeout_seconds=600):
for _ in range(round(Timeout_seconds/10)):
time.sleep(10)
kfsvc_status = KFServing.get(name, namespace=namespace)
for condition in kfsvc_status['status'].get('conditions', {}):
if condition.get('type', '') == 'Ready':
status = condition.get('status', 'Unknown')
if status == 'True':
return
raise RuntimeError("Timeout to start the KFService.")
|
Python
| 0.000001
|
@@ -905,16 +905,43 @@
espace)%0A
+ status = 'Unknown'%0A
|
0097a4022ec8ab57e76c86bd202dd7a5fad8a076
|
Revert to original
|
cardiffshop/products/admin.py
|
cardiffshop/products/admin.py
|
from products.models import Product, Category, ProductImage
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import SimpleListFilter
from suit.admin import SortableTabularInline
class CanBeSoldListFilter(SimpleListFilter):
title = "Can be sold"
parameter_name = "can_be_sold"
def lookups(self, request, model_admin):
return (
("1", "Yes"),
("0", "No")
)
def queryset(self, request, queryset):
value = self.value()
if value == "1":
return queryset.filter(stock_count__gt=0).filter(is_visible=True)
if value == "0":
return queryset.exclude(stock_count__gt=0).exclude(is_visible=True)
class ProductImageInline(SortableTabularInline):
model = ProductImage
fields = ("image", "image_preview", "alt_text")
readonly_fields = ("image_preview",)
extra = 0
ordering = ("order",)
def image_preview(self, obj):
if obj.image:
return '<img src="%s" width="100">' % obj.image.url
else:
return '<img src="%s%s" width="100">' % (settings.STATIC_URL, "images/no-image.jpg")
image_preview.allow_tags = True
class ProductAdmin(admin.ModelAdmin):
list_display = ("name", "category", "stock_count", "can_be_sold")
list_filter = ("category", "date_created", CanBeSoldListFilter)
search_fields = ("name", "description", "sku_number", "barcode")
inlines = (ProductImageInline, )
readonly_fields = ("date_created", )
prepopulated_fields = {"slug": ("name",)}
radio_fields = {"campaign": admin.VERTICAL}
fieldsets = (
(None, {
"fields": ("category", "name", "slug", "description"),
"classes": ("suit-tab", "suit-tab-identity",),
}),
(None, {
"fields": (("price", "price_unit"), "campaign", "campaign_end_date", "damaged"),
"classes": ("suit-tab", "suit-tab-price",),
}),
(None, {
"fields": ("barcode", "sku_number", "stock_count", "is_visible", "date_created"),
"classes": ("suit-tab", "suit-tab-stock",),
}),
)
suit_form_tabs = (
('identity', 'Identity'),
('price', 'Price'),
('stock', 'Stock')
)
def can_be_sold(self, obj):
"""
Determines whether the product can be sold or not.
"""
if obj.stock_count > 0 and obj.is_visible:
return True
else:
return False
can_be_sold.boolean = True
admin.site.register(Category)
admin.site.register(Product, ProductAdmin)
admin.site.register(ProductImage)
|
Python
| 0.999647
|
@@ -600,32 +600,8 @@
t=0)
-.filter(is_visible=True)
%0A
@@ -650,23 +650,22 @@
ueryset.
-exclude
+filter
(stock_c
@@ -672,39 +672,10 @@
ount
-__gt=0).exclude(is_visible=True
+=0
)%0A%0A%0A
@@ -2375,27 +2375,8 @@
%3E 0
- and obj.is_visible
:%0A
|
618c5bd2dee90565a97ab744f620ae8de4a74b91
|
refactor plymouth importer to use get_srid()
|
polling_stations/apps/data_collection/management/commands/import_plymouth.py
|
polling_stations/apps/data_collection/management/commands/import_plymouth.py
|
"""
Imports Plymouth
"""
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
from data_collection.google_geocoding_api_wrapper import (
GoogleGeocodingApiWrapper,
PostcodeNotFoundException
)
class Command(BaseKamlImporter):
"""
Imports the Polling Station data from Plymouth Council
"""
council_id = 'E06000026'
districts_name = 'Plymouth_Polling_Districts.kml'
stations_name = 'Plymouth Polling Stations.csv'
def district_record_to_dict(self, record):
# this kml has no altitude co-ordinates so the data is ok as it stands
geojson = record.geom.geojson
# The SRID for the KML is 4326 but the CSV is 2770 so we
# set it each time we create the polygon.
# We could probably do with a more elegant way of doing
# this longer term.
self._srid = self.srid
self.srid = 4326
poly = self.clean_poly(GEOSGeometry(geojson, srid=self.srid))
self.srid = self._srid
# manually deal with dodgy/missing data
if record['DISTRICT'].value == '' and record['NOTES1'].value == 'EGGBUCKLAND' and record['AREA'].value == 689766:
id = 'HD'
elif record['DISTRICT'].value == '' and record['NOTES1'].value == 'EGGBUCKLAND' and record['AREA'].value == 594904:
id = 'HF'
elif record['DISTRICT'].value == '' and record['NOTES1'].value == '':
# Drake's Island ( https://en.wikipedia.org/wiki/Drake's_Island )
# seems to have a polling district but no associated station so can't work out the code.
# We'll just give it a name:
id = "Drake's Island"
else:
id = record['DISTRICT'].value
return {
'internal_council_id': id,
'name' : id,
'area' : poly
}
def station_record_to_dict(self, record):
location = Point(float(record.east), float(record.north), srid=self.srid)
address = "\n".join([record.addressl1, record.addressl2, record.addressl3])
if address[-1:] == '\n':
address = address[:-1]
# attempt to attach postcodes
gwrapper = GoogleGeocodingApiWrapper(address + ", Plymouth, UK")
try:
postcode = gwrapper.address_to_postcode()
except PostcodeNotFoundException:
postcode = ''
return {
'internal_council_id': record.statno,
'postcode': postcode,
'address': address,
'location': location
}
|
Python
| 0
|
@@ -682,271 +682,8 @@
on%0A%0A
- # The SRID for the KML is 4326 but the CSV is 2770 so we%0A # set it each time we create the polygon.%0A # We could probably do with a more elegant way of doing%0A # this longer term.%0A self._srid = self.srid%0A self.srid = 4326%0A
@@ -745,45 +745,31 @@
elf.
-srid))%0A self.srid = self._srid
+get_srid('districts')))
%0A%0A
@@ -1761,20 +1761,26 @@
id=self.
+get_
srid
+()
)%0A%0A
|
49be60d27b5d5ce40c20847f79a8dd09f580a830
|
Update _var_dump.py
|
var_dump/_var_dump.py
|
var_dump/_var_dump.py
|
from __future__ import print_function
import sys
try:
from types import NoneType
except:
NoneType = type(None)
if sys.version_info > (3,):
long = int
unicode = str
__author__ = "Shamim Hasnath"
__copyright__ = "Copyright 2013, Shamim Hasnath"
__license__ = "BSD License"
__version__ = "1.0.1"
TAB_SIZE = 4
def display(o, space, num, key, typ, display):
st = ""
l = []
if key:
if typ is dict:
st += " " * space + "['%s'] => "
else:
st += " " * space + "%s => "
l.append(key)
elif space > 0:
st += " " * space + "[%d] => "
l.append(num)
else: # at the very start
st += "#%d "
l.append(num)
if type(o) in (tuple, list, dict, int, str, float, long, bool, NoneType, unicode):
st += "%s(%s) "
l.append(type(o).__name__)
if type(o) in (int, float, long, bool, NoneType):
l.append(o)
else:
l.append(len(o))
if type(o) in (str, unicode):
st += '"%s"'
l.append(o)
elif isinstance(o, object):
st += "object(%s) (%d)"
l.append(o.__class__.__name__)
l.append(len(o.__dict__))
if display:
print(st % tuple(l))
else:
return st % tuple(l)
def dump(o, space, num, key, typ, proret):
if type(o) in (str, int, float, long, bool, NoneType, unicode):
display(o, space, num, key, typ, proret)
elif isinstance(o, object):
display(o, space, num, key, typ, proret)
num = 0
if type(o) in (tuple, list, dict):
typ = type(o) # type of the container of str, int, long, float etc
elif isinstance(o, object):
o = o.__dict__
typ = object
for i in o:
space += TAB_SIZE
if type(o) is dict:
dump(o[i], space, num, i, typ, proret)
else:
dump(i, space, num, '', typ, proret)
num += 1
space -= TAB_SIZE
def var_dump(*obs):
"""
shows structured information of a object, list, tuple etc
"""
i = 0
for x in obs:
dump(x, 0, i, '', object, True)
i += 1
def var_export(*obs):
"""
returns output as as string
"""
r = ''
i = 0
for x in obs:
r += dump(x, 0, i, '', object, False)
i += 1
return r
|
Python
| 0.000005
|
@@ -368,23 +368,22 @@
y, typ,
-display
+proret
):%0D%0A%09st
@@ -1086,23 +1086,22 @@
%0D%0A%0D%0A%09if
-display
+proret
:%0D%0A%09%09pri
@@ -1119,25 +1119,18 @@
le(l))%0D%0A
-%09else:
%0D%0A
-%09
%09return
@@ -1192,16 +1192,26 @@
oret):%0D%0A
+%09r = '';%0D%0A
%09if type
@@ -1266,24 +1266,29 @@
nicode):%0D%0A%09%09
+r +=
display(o, s
@@ -1351,16 +1351,21 @@
ct):%0D%0A%09%09
+r +=
display(
@@ -1647,24 +1647,29 @@
dict:%0D%0A%09%09%09%09
+r +=
dump(o%5Bi%5D, s
@@ -1710,16 +1710,21 @@
e:%0D%0A%09%09%09%09
+r +=
dump(i,
@@ -1788,16 +1788,27 @@
B_SIZE%0D%0A
+%09return r%0D%0A
%0D%0A%0D%0Adef
|
206696e82f3e5be4a64e60abdb59ca51d2b1461e
|
Add a test for rgb+mp to verify that it continues to work.
|
pysc2/tests/multi_player_env_test.py
|
pysc2/tests/multi_player_env_test.py
|
#!/usr/bin/python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that the multiplayer environment works."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from future.builtins import range # pylint: disable=redefined-builtin
from pysc2.agents import random_agent
from pysc2.env import run_loop
from pysc2.env import sc2_env
from pysc2.tests import utils
class TestMultiplayerEnv(utils.TestCase):
def test_multi_player_env(self):
steps = 100
step_mul = 16
players = 2
with sc2_env.SC2Env(
map_name="Simple64",
players=[sc2_env.Agent(sc2_env.Race.zerg),
sc2_env.Agent(sc2_env.Race.terran)],
feature_screen_size=84,
feature_minimap_size=64,
step_mul=step_mul,
game_steps_per_episode=steps * step_mul // 2) as env:
agents = [random_agent.RandomAgent() for _ in range(players)]
run_loop.run_loop(agents, env, steps)
if __name__ == "__main__":
absltest.main()
|
Python
| 0
|
@@ -1077,16 +1077,25 @@
ayer_env
+_features
(self):%0A
@@ -1570,16 +1570,523 @@
steps)%0A%0A
+ def test_multi_player_env_rgb(self):%0A steps = 100%0A step_mul = 16%0A players = 2%0A with sc2_env.SC2Env(%0A map_name=%22Simple64%22,%0A players=%5Bsc2_env.Agent(sc2_env.Race.zerg),%0A sc2_env.Agent(sc2_env.Race.terran)%5D,%0A rgb_screen_size=84,%0A rgb_minimap_size=64,%0A step_mul=step_mul,%0A game_steps_per_episode=steps * step_mul // 2) as env:%0A agents = %5Brandom_agent.RandomAgent() for _ in range(players)%5D%0A run_loop.run_loop(agents, env, steps)%0A%0A
%0Aif __na
|
48e3dab4ce044554b0ff606dea340ff8b6e5d928
|
Update __init__.py
|
edbo_connector/__init__.py
|
edbo_connector/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
edbo_connector
Author: Eldar Aliiev
Email: e.aliiev@vnmu.edu.ua
"""
from .client import EDBOWebApiClient
__name__ = 'python-edbo-connector'
__author__ = 'Eldar Aliiev'
__copyright__ = 'Copyright 2018, National Pirogov Memorial Medical University, Vinnytsya'
__credits__ = ['Eldar Aliiev']
__license__ = 'MIT'
__version__ = '1.0.4-12'
__maintainer__ = 'Eldar Aliiev'
__email__ = 'e.aliiev@vnmu.edu.ua'
__status__ = 'Production'
__all__ = ['EDBOWebApiClient']
|
Python
| 0.000072
|
@@ -377,17 +377,17 @@
'1.0.4-1
-2
+3
'%0A__main
|
949934ea7a34fcd71f65118b741a51a28e815e5d
|
update from trunk r9256
|
pywikibot/families/wowwiki_family.py
|
pywikibot/families/wowwiki_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
self.name = 'wowwiki'
self.langs = {
'cs': 'cs.wow.wikia.com',
'da': 'da.wowwiki.com',
'de': 'de.wow.wikia.com',
'el': 'el.wow.wikia.com',
'en': 'www.wowwiki.com',
'es': 'es.wow.wikia.com',
'fa': 'fa.wow.wikia.com',
'fi': 'fi.wow.wikia.com',
'fr': 'fr.wowwiki.com',
'he': 'he.wow.wikia.com',
'hr': 'hr.wow.wikia.com',
'hu': 'hu.wow.wikia.com',
'is': 'is.wow.wikia.com',
'it': 'it.wow.wikia.com',
'ja': 'ja.wow.wikia.com',
'ko': 'ko.wow.wikia.com',
'lt': 'lt.wow.wikia.com',
'lv': 'lv.wow.wikia.com',
'nl': 'nl.wow.wikia.com',
'no': 'no.wow.wikia.com',
'pl': 'pl.wow.wikia.com',
'pt': 'pt.wow.wikia.com',
'pt-br': 'pt-br.wow.wikia.com',
'ro': 'ro.wow.wikia.com',
'ru': 'ru.wow.wikia.com',
'sk': 'sk.wow.wikia.com',
'sr': 'sr.wow.wikia.com',
'sv': 'sv.warcraft.wikia.com',
'tr': 'tr.wow.wikia.com',
'zh-tw': 'zh-tw.wow.wikia.com',
'zh': 'zh.wow.wikia.com'
}
self.content_id = "article"
self.disambiguationTemplates['en'] = ['disambig', 'disambig/quest',
'disambig/quest2',
'disambig/achievement2']
self.disambcatname['en'] = "Disambiguations"
def scriptpath(self, code):
return ''
def version(self, code):
return '1.16.2'
|
Python
| 0
|
@@ -954,30 +954,28 @@
no': 'no.wow
-.
wiki
-a
.com',%0A
@@ -1717,16 +1717,319 @@
tions%22%0A%0A
+ # Wikia's default CategorySelect extension always puts categories last%0A self.categories_last = %5B'cs', 'da', 'de', 'el', 'en', 'es', 'fa', 'fi', 'fr', 'he', 'hr', 'hu', 'is', 'it', 'ja', 'ko', 'lt', 'lv', 'nl', 'no', 'pl', 'pt', 'pt-br', 'ro', 'ru', 'sk', 'sr', 'sv', 'tr', 'zh-tw', 'zh'%5D%0A
def
@@ -2121,11 +2121,11 @@
n '1.16.
-2
+4
'%0A
|
2d24ddd4e98972cdda69b5337ff2532f4319366e
|
add new line after test class
|
pywinauto/unittests/test_Calendar.py
|
pywinauto/unittests/test_Calendar.py
|
"""Tests for CalendarWrapper"""
import time
import datetime
import ctypes
import locale
import re
import sys
import os
import unittest
sys.path.append(".")
from pywinauto import win32structures
from pywinauto import win32defines
from pywinauto.application import Application
from pywinauto.sysinfo import is_x64_Python
from pywinauto.sysinfo import is_x64_OS
mfc_samples_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\MFC_samples")
if is_x64_Python():
mfc_samples_folder = os.path.join(mfc_samples_folder, 'x64')
class CalendarWrapperTests(unittest.TestCase):
"""Unit tests for the CalendarWrapperTests class"""
def setUp(self):
"""Start the application set some data and ensure the application
is in the state we want it."""
self.app = Application().start(os.path.join(mfc_samples_folder, u"CmnCtrl1.exe"))
self.dlg = self.app.Common_Controls_Sample
self.dlg.TabControl.Select(4)
self.calendar = self.app.Common_Controls_Sample.CalendarWrapper
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.type_keys("%{F4}")
def test_can_get_current_date_from_calendar(self):
date = self.calendar.get_current_date()
self.assert_system_time_is_equal_to_current_date_time(date,datetime.date.today())
def test_should_throw_runtime_error_when_try_to_get_current_date_from_calendar_if_calendar_state_is_multiselect(self):
self.set_calendar_state_into_multiselect()
self.assertRaises(RuntimeError, self.calendar.get_current_date)
def test_can_set_current_date_in_calendar(self):
self.calendar.set_current_date(2016, 4, 3, 13)
self.assert_system_time_is_equal_to_current_date_time(self.calendar.get_current_date(), datetime.date(2016, 4, 13))
def test_should_throw_runtime_error_when_try_to_set_invalid_date(self):
self.assertRaises(RuntimeError, self.calendar.set_current_date, -2016, -4, -3, -13)
def test_can_get_calendar_border(self):
width = self.calendar.get_border()
self.assertEqual(width, 4)
def test_can_set_calendar_border(self):
self.calendar.set_border(6)
self.assertEqual(self.calendar.get_border(), 6)
def test_can_get_calendars_count(self):
count = self.calendar.count()
self.assertEqual(count, 1)
def test_can_get_calendars_view(self):
view = self.calendar.get_view()
self.assertEqual(view, 0)
def test_should_throw_runtime_error_when_try_to_set_invalid_view(self):
self.assertRaises(RuntimeError, self.calendar.set_view, -1)
def test_can_set_calendars_view_into_month(self):
self.calendar.set_view(win32defines.MCMV_MONTH)
self.assertEqual(self.calendar.get_view(), win32defines.MCMV_MONTH)
def test_can_set_calendars_view_into_years(self):
self.calendar.set_view(win32defines.MCMV_YEAR)
self.assertEqual(self.calendar.get_view(), win32defines.MCMV_YEAR)
def test_can_set_calendars_view_into_decade(self):
self.calendar.set_view(win32defines.MCMV_DECADE)
self.assertEqual(self.calendar.get_view(), win32defines.MCMV_DECADE)
def test_can_set_calendars_view_into_century(self):
self.calendar.set_view(win32defines.MCMV_CENTURY)
self.assertEqual(self.calendar.get_view(), win32defines.MCMV_CENTURY)
def assert_system_time_is_equal_to_current_date_time(self,systemTime, now):
self.assertEqual(systemTime.wYear, now.year)
self.assertEqual(systemTime.wMonth, now.month)
self.assertEqual(systemTime.wDay, now.day)
def set_calendar_state_into_multiselect(self):
self.app['Common Controls Sample']['MCS_MULTISELECT'].WrapperObject().Click()
if __name__ == "__main__":
unittest.main()
|
Python
| 0.000004
|
@@ -578,16 +578,17 @@
tCase):%0A
+%0A
%22%22%22U
|
327ba9de1fbe0ac3411c0192f944c3fae960d61b
|
fix tests
|
qiita_pet/test/test_qiita_redbiom.py
|
qiita_pet/test/test_qiita_redbiom.py
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from unittest import main
from copy import deepcopy
from json import loads
from qiita_pet.test.tornado_test_base import TestHandlerBase
class TestRedbiom(TestHandlerBase):
def test_get(self):
response = self.get('/redbiom/')
self.assertEqual(response.code, 200)
def test_post_metadata(self):
post_args = {
'search': 'Diesel',
'search_on': 'metadata'
}
response = self.post('/redbiom/', post_args)
self.assertEqual(response.code, 200)
exp_artifact_biom_ids = {
'5': ['1.SKD2.640178'],
'4': ['1.SKD2.640178', '1.SKD8.640184']}
response_body = loads(response.body)
obs_artifact_biom_ids = response_body['data'][0].pop(
'artifact_biom_ids')
self.assertDictEqual(obs_artifact_biom_ids, exp_artifact_biom_ids)
exp = {'status': 'success', 'message': '', 'data': DATA}
self.assertEqual(response_body, exp)
post_args = {
'search': 'inf',
'search_on': 'metadata'
}
response = self.post('/redbiom/', post_args)
self.assertEqual(response.code, 200)
exp = {'status': 'success',
'message': 'No samples were found! Try again ...', 'data': []}
self.assertEqual(loads(response.body), exp)
post_args = {
'search': '4353076',
'search_on': 'metadata'
}
response = self.post('/redbiom/', post_args)
self.assertEqual(response.code, 200)
exp = {'status': 'success',
'message': ('The query ("4353076") did not work and may be '
'malformed. Please check the search help for more '
'information on the queries.'), 'data': []}
self.assertEqual(loads(response.body), exp)
def test_post_features(self):
post_args = {
'search': '4479944',
'search_on': 'feature'
}
response = self.post('/redbiom/', post_args)
data = deepcopy(DATA)
data[0]['artifact_biom_ids'] = {
'5': ['1.SKM3.640197'], '4': ['1.SKM3.640197']}
exp = {'status': 'success', 'message': '', 'data': data}
self.assertEqual(response.code, 200)
self.assertEqual(loads(response.body), exp)
post_args = {
'search': 'TT',
'search_on': 'feature'
}
response = self.post('/redbiom/', post_args)
exp = {'status': 'success',
'message': 'No samples were found! Try again ...', 'data': []}
self.assertEqual(response.code, 200)
self.assertEqual(loads(response.body), exp)
def test_post_taxon(self):
post_args = {
'search': 'o__0319-7L14',
'search_on': 'taxon'
}
data = deepcopy(DATA)
data[0]['artifact_biom_ids'] = {
'5': ['1.SKM3.640197'], '4': ['1.SKM3.640197']}
response = self.post('/redbiom/', post_args)
print data[0]['artifact_biom_ids']
print loads(response.body)
exp = {'status': 'success', 'message': '', 'data': data}
self.assertEqual(response.code, 200)
self.assertEqual(loads(response.body), exp)
post_args = {
'search': 'o_0319-7L14',
'search_on': 'taxon'
}
response = self.post('/redbiom/', post_args)
exp = {'status': 'success',
'message': 'No samples were found! Try again ...', 'data': []}
self.assertEqual(response.code, 200)
self.assertEqual(loads(response.body), exp)
def test_post_errors(self):
post_args = {
'search_on': 'metadata'
}
response = self.post('/redbiom/', post_args)
self.assertEqual(response.code, 400)
post_args = {
'search': 'infant',
'search_on': 'error'
}
response = self.post('/redbiom/', post_args)
self.assertEqual(response.code, 200)
exp = {'status': 'success',
'message': ('Incorrect search by: you can use metadata, '
'features or taxon and you passed: error'),
'data': []}
self.assertEqual(loads(response.body), exp)
DATA = [
{'status': 'private',
'study_title': 'Identification of the Microbiomes for Cannabis Soils',
'metadata_complete': True, 'publication_pid': ['123456', '7891011'],
'ebi_submission_status': 'submitted',
'study_id': 1, 'ebi_study_accession': 'EBI123456-BB',
'study_abstract': ('This is a preliminary study to examine the '
'microbiota associated with the Cannabis plant. Soils '
'samples from the bulk soil, soil associated with the '
'roots, and the rhizosphere were extracted and the '
'DNA sequenced. Roots from three independent plants '
'of different strains were examined. These roots were '
'obtained November 11, 2011 from plants that had been '
'harvested in the summer. Future studies will attempt '
'to analyze the soils and rhizospheres from the same '
'location at different time points in the plant '
'lifecycle.'),
'pi': ['PI_dude@foo.bar', 'PIDude'],
'publication_doi': ['10.100/123456', '10.100/7891011'],
'study_alias': 'Cannabis Soils', 'number_samples_collected': 27}]
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -3254,36 +3254,65 @@
'5': %5B'1.SK
-M3.640197'%5D,
+D2.640178', '1.SKM3.640197'%5D,%0A
'4': %5B'1.SK
@@ -3313,32 +3313,49 @@
%5B'1.SKM3.640197'
+, '1.SKD2.640178'
%5D%7D%0A respo
@@ -3398,86 +3398,8 @@
gs)%0A
- print data%5B0%5D%5B'artifact_biom_ids'%5D%0A print loads(response.body)%0A
|
c95c222384c2c0d887d435017196c9af4137d1b2
|
set numba parallel option
|
hpat/__init__.py
|
hpat/__init__.py
|
from __future__ import print_function, division, absolute_import
import numba
from numba import *
from .compiler import add_hpat_stages
set_user_pipeline_func(add_hpat_stages)
del add_hpat_stages
def jit(signature_or_function=None, **options):
# set nopython by default
if 'nopython' not in options:
options['nopython'] = True
return numba.jit(signature_or_function, **options)
|
Python
| 0.000002
|
@@ -339,16 +339,47 @@
= True%0A
+ options%5B'parallel'%5D = True%0A
retu
|
678d96a624af1d3d4fa991ec41abde8919c9977c
|
add choices for some config options
|
yabgp/config.py
|
yabgp/config.py
|
# Copyright 2015 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" basic config """
import logging
import sys
import os
from oslo_config import cfg
CONF = cfg.CONF
CONF.register_cli_opts([
cfg.BoolOpt('standalone', default=True, help='The BGP Agent running mode'),
cfg.StrOpt('pid-file', default=None, help='pid file name')
])
msg_process_opts = [
cfg.BoolOpt('write_disk',
default=True,
help='Whether the BGP message is written to disk'),
cfg.StrOpt('write_dir',
default=os.path.join(os.environ['HOME'], 'data/bgp/'),
help='The BGP messages storage path'),
cfg.IntOpt('write_msg_max_size',
default=500,
help='The Max size of one BGP message file, the unit is MB'),
cfg.BoolOpt('write_keepalive',
default=False,
help='Whether write keepalive message to disk'),
cfg.StrOpt('format',
default='json',
help='The output format of bgp messagees.')
]
CONF.register_opts(msg_process_opts, group='message')
bgp_config_opts = [
cfg.IntOpt('peer_start_interval',
default=10,
help='The interval to start each BGP peer'),
cfg.ListOpt('afi_safi',
default=['ipv4'],
help='The Global config for address family and sub address family'),
cfg.BoolOpt('four_bytes_as',
default=True,
help='If support 4bytes AS'),
cfg.BoolOpt('route_refresh',
default=True,
help='If support sending and receiving route refresh message'),
cfg.BoolOpt('cisco_route_refresh',
default=True,
help='If support sending and receiving cisco route refresh message'),
cfg.BoolOpt('enhanced_route_refresh',
default=True,
help='If support enhanced route refresh'),
cfg.StrOpt('add_path',
help='BGP additional path feature and supported address family'),
cfg.BoolOpt('graceful_restart',
default=True,
help='if support graceful restart'),
cfg.BoolOpt('cisco_multi_session',
default=True,
help='if support cisco multi session'),
cfg.DictOpt('running_config',
default={},
help='The running configuration for BGP'),
cfg.StrOpt('config_file',
help='BGP peers configuration file')
]
CONF.register_opts(bgp_config_opts, group='bgp')
bgp_peer_conf_cli_opts = [
cfg.IntOpt('remote_as',
help='The remote BGP peer AS number'),
cfg.IntOpt('local_as',
help='The Local BGP AS number'),
cfg.StrOpt('remote_addr',
help='The remote address of the peer'),
cfg.StrOpt('local_addr',
default='0.0.0.0',
help='The local address of the BGP'),
cfg.StrOpt('md5',
help='The MD5 string use to auth',
secret=True),
cfg.BoolOpt('rib',
default=False,
help='Whether maintain BGP rib table'),
cfg.StrOpt('tag', help='The agent role tag')
]
CONF.register_cli_opts(bgp_peer_conf_cli_opts, group='bgp')
LOG = logging.getLogger(__name__)
def get_bgp_config():
"""
Get BGP running config
:return:
"""
# check bgp_conf_file
if CONF.bgp.config_file:
LOG.info('Try to load BGP configuration from %s', CONF.bgp.config_file)
LOG.error('Failed to load BGP configuration')
# TODO parse xml config file to get multi bgp config
# will be supported in future
sys.exit()
else:
# check bgp configuration from CLI input
LOG.info('Try to load BGP configuration from CLI input')
if CONF.bgp.local_as and CONF.bgp.remote_as and CONF.bgp.local_addr and CONF.bgp.remote_addr:
CONF.bgp.running_config[CONF.bgp.remote_addr] = {
'remote_as': CONF.bgp.remote_as,
'remote_addr': CONF.bgp.remote_addr,
'local_as': CONF.bgp.local_as,
'local_addr': CONF.bgp.local_addr,
'md5': CONF.bgp.md5,
'afi_safi': CONF.bgp.afi_safi,
'capability': {
'local': {
'four_bytes_as': CONF.bgp.four_bytes_as,
'route_refresh': CONF.bgp.route_refresh,
'cisco_route_refresh': CONF.bgp.cisco_route_refresh,
'enhanced_route_refresh': CONF.bgp.enhanced_route_refresh,
'graceful_restart': CONF.bgp.graceful_restart,
'cisco_multi_session': CONF.bgp.cisco_multi_session,
'add_path': CONF.bgp.add_path},
'remote': {}
},
'tag': CONF.bgp.tag
}
LOG.info('Get BGP running configuration for peer %s', CONF.bgp.remote_addr)
for item in CONF.bgp.running_config[CONF.bgp.remote_addr]:
if item == 'capability':
LOG.info('capability local:')
for capa in CONF.bgp.running_config[CONF.bgp.remote_addr][item]['local']:
LOG.info('-- %s: %s' % (
capa,
CONF.bgp.running_config[CONF.bgp.remote_addr][item]['local'][capa]
))
continue
LOG.info("%s = %s", item, CONF.bgp.running_config[CONF.bgp.remote_addr][item])
return
else:
LOG.error('Please provide enough parameters!')
sys.exit()
|
Python
| 0
|
@@ -1531,24 +1531,65 @@
ult='json',%0A
+ choices=%5B'json', 'list'%5D,%0A
@@ -2546,24 +2546,91 @@
'add_path',%0A
+ choices=%5B'ipv4_send', 'ipv4_receive', 'ipv4_both'%5D,%0A
@@ -3815,16 +3815,78 @@
t('tag',
+%0A choices=%5B'SRC', 'DST', 'BOTH'%5D,%0A
help='T
@@ -3903,16 +3903,32 @@
ole tag'
+%0A
)%0A%5D%0A%0ACON
|
07032d47dc103f6436989fdecf679bee92be0786
|
Rework question indexing
|
apps/questions/es_search.py
|
apps/questions/es_search.py
|
import elasticutils
import logging
import pyes
import time
from search.es_utils import (TYPE, LONG, INDEX, STRING, ANALYZED, ANALYZER,
SNOWBALL, TERM_VECTOR, STORE, YES, BOOLEAN,
WITH_POS_OFFSETS, DATE, INTEGER, get_index)
ID_FACTOR = 100000
log = logging.getLogger('k.questions.es_search')
def setup_mapping(index):
from questions.models import Question
mapping = {
'properties': {
'id': {TYPE: LONG},
'question_id': {TYPE: LONG},
'title': {TYPE: STRING, INDEX: ANALYZED, ANALYZER: SNOWBALL},
'question_content':
{TYPE: STRING, INDEX: ANALYZED, ANALYZER: SNOWBALL,
STORE: YES, TERM_VECTOR: WITH_POS_OFFSETS},
'answer_content':
{TYPE: STRING, INDEX: ANALYZED, ANALYZER: SNOWBALL},
'replies': {TYPE: INTEGER},
'is_solved': {TYPE: BOOLEAN},
'is_locked': {TYPE: BOOLEAN},
'has_answers': {TYPE: BOOLEAN},
'has_helpful': {TYPE: BOOLEAN},
'created': {TYPE: DATE},
'updated': {TYPE: DATE},
'question_creator': {TYPE: STRING},
'answer_creator': {TYPE: STRING},
'question_votes': {TYPE: INTEGER},
'answer_votes': {TYPE: INTEGER},
}
}
es = elasticutils.get_es()
# TODO: If the mapping is there already and we do a put_mapping,
# does that stomp on the existing mapping or raise an error?
try:
es.put_mapping(Question._meta.db_table, mapping, index)
except pyes.exceptions.ElasticSearchException, e:
log.error(e)
def extract_question(question):
"""Extracts indexable attributes from a Question and its answers."""
question_data = {}
question_data['id'] = question.id
question_data['title'] = question.title
question_data['question_content'] = question.content
question_data['replies'] = question.num_answers
question_data['is_solved'] = bool(question.solution_id)
question_data['is_locked'] = question.is_locked
question_data['has_answers'] = bool(question.num_answers)
question_data['created'] = question.created
question_data['updated'] = question.updated
question_data['question_creator'] = question.creator.username
question_data['question_votes'] = question.num_votes_past_week
# answer_content is a \n\n delimited mish-mosh of all the
# answer content.
answer_content = []
# has_helpful is true if at least one answer is marked as
# helpful.
has_helpful = False
# answer_creator is the set of all answer creator user names.
answer_creator = set()
# answer_votes is the sum of votes for all of the answers.
answer_votes = 0
for ans in question.answers.all():
answer_content.append(ans.content)
has_helpful = has_helpful or bool(ans.num_helpful_votes)
answer_creator.add(ans.creator.username)
answer_votes += ans.upvotes
question_data['answer_content'] = '\n\n'.join(answer_content)
question_data['has_helpful'] = has_helpful
question_data['answer_creator'] = list(answer_creator)
question_data['answer_votes'] = answer_votes
return question_data
def index_doc(doc, bulk=False, force_insert=False, es=None):
from questions.models import Question
if es is None:
es = elasticutils.get_es()
index = get_index(Question)
try:
es.index(doc, index, doc_type=Question._meta.db_table,
id=doc['id'], bulk=bulk, force_insert=force_insert)
except pyes.urllib3.TimeoutError:
# If we have a timeout, try it again rather than die. If we
# have a second one, that will cause everything to die.
es.index(doc, index, doc_type=Question._meta.db_table,
id=doc['id'], bulk=bulk, force_insert=force_insert)
def index_docs(documents, bulk=False, force_insert=False, es=None):
for doc in documents:
index_doc(doc, bulk, force_insert, es)
def unindex_questions(ids):
"""Removes Questions from the index."""
from questions.models import Question
es = elasticutils.get_es()
index = get_index(Question)
for question_id in ids:
# TODO wrap this in a try/except--amongst other things, this will
# only be in the index if the Question had no Answers.
try:
es.delete(index, doc_type=Question._meta.db_table,
id=question_id)
except pyes.exceptions.NotFoundException:
# If the document isn't in the index, then we ignore it.
# TODO: Is that right?
pass
def unindex_answers(ids):
"""Removes Answers from the index.
:arg ids: list of question ids
"""
# Answers are rolled up in Question documents, so we reindex the
# Question.
from questions.models import Question
for question_id in ids:
try:
# TODO: test the case where we delete the question
# twice.
question = Question.objects.get(id=question_id)
index_doc(extract_question(question))
except Question.ObjectDoesNotExist:
pass
def reindex_questions(percent=100):
"""Updates the mapping and indexes all questions.
Note: This gets run from the command line, so we log stuff to let
the user know what's going on.
:arg percent: The percentage of questions to index. Defaults to
100--e.g. all of them.
"""
from questions.models import Question
from django.conf import settings
index = get_index(Question)
start_time = time.time()
log.info('reindex questions: %s %s', index,
Question._meta.db_table)
es = pyes.ES(settings.ES_HOSTS, timeout=10.0)
log.info('setting up mapping....')
setup_mapping(index)
log.info('iterating through questions....')
total = Question.objects.count()
to_index = int(total * (percent / 100.0))
log.info('total questions: %s (to be indexed: %s)', total, to_index)
total = to_index
t = 0
for q in Question.objects.all():
t += 1
if t % 1000 == 0:
time_to_go = (total - t) * ((time.time() - start_time) / t)
if time_to_go < 60:
time_to_go = "%d secs" % time_to_go
else:
time_to_go = "%d min" % (time_to_go / 60)
log.info('%s/%s... (%s to go)', t, total, time_to_go)
es.flush_bulk(forced=True)
if t > total:
break
index_doc(extract_question(q), bulk=True, es=es)
es.flush_bulk(forced=True)
log.info('done!')
es.refresh()
|
Python
| 0.000016
|
@@ -3918,151 +3918,8 @@
)%0A%0A%0A
-def index_docs(documents, bulk=False, force_insert=False, es=None):%0A for doc in documents:%0A index_doc(doc, bulk, force_insert, es)%0A%0A%0A
def
|
81cbcf66ce2e4344a951092080a8be9bb0b86302
|
Complete tests for Group with invitation only
|
sigma_core/tests/test_group_member.py
|
sigma_core/tests/test_group_member.py
|
import json
from django.core import mail
from rest_framework import status
from rest_framework.test import APITestCase
from sigma_core.models.user import User
from sigma_core.models.group import Group
from sigma_core.models.group_member import GroupMember
from sigma_core.serializers.user import DetailedUserSerializer as UserSerializer
from sigma_core.tests.factories import UserFactory, AdminUserFactory, GroupFactory, GroupMemberFactory
class OpenGroupMemberCreationTests(APITestCase):
@classmethod
def setUpTestData(self):
super(APITestCase, self).setUpTestData()
# Routes
self.members_url = "/group-member/"
self.member_url = self.members_url + "%d/"
# Group open to anyone
self.group = GroupFactory()
self.group.default_member_rank = 1
self.group.save()
# Users already in group
self.users = [UserFactory()]
# Associated GroupMember
self.group_member1 = GroupMember(user=self.users[0], group=self.group, perm_rank=Group.ADMINISTRATOR_RANK)
# Testing user
self.user = UserFactory()
# Misc
self.new_membership_data = {"group": self.group.id, "user": self.user.id}
def test_create_not_authed(self):
self.client.force_authenticate(user=None)
response = self.client.post(self.members_url, self.new_membership_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_not_for_self(self):
# Attempt to add somebody else to a group
self.client.force_authenticate(user=self.users[0])
response = self.client.post(self.members_url, self.new_membership_data)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_success(self):
# Succesful attempt to join an open group
self.client.force_authenticate(user=self.user)
response = self.client.post(self.members_url, self.new_membership_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['perm_rank'], self.group.default_member_rank)
class RequestGroupMemberCreationTests(APITestCase):
@classmethod
def setUpTestData(self):
super(APITestCase, self).setUpTestData()
# Routes
self.members_url = "/group-member/"
self.member_url = self.members_url + "%d/"
# Group with membership request
self.group = GroupFactory()
self.group.default_member_rank = 0
self.group.req_rank_accept_join_requests = 5
self.group.save()
# Users already in group
self.users = [UserFactory.batch_create(3)]
# Associated GroupMember
self.group_member1 = GroupMember(user=self.users[0], group=self.group, perm_rank=Group.ADMINISTRATOR_RANK) # can validate requests
self.group_member2 = GroupMember(user=self.users[1], group=self.group, perm_rank=1) # cannot validate requests
self.group_member3 = GroupMember(user=self.users[2], group=self.group, perm_rank=0) # request to be validated
# Testing user
self.user = UserFactory()
# Misc
self.new_membership_data = {"group": self.group.id, "user": self.user.id}
def test_create_not_authed(self):
self.client.force_authenticate(user=None)
response = self.client.post(self.members_url, self.new_membership_data)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_success(self):
# Succesful attempt to request group membership
self.client.force_authenticate(user=self.user)
response = self.client.post(self.members_url, self.new_membership_data)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data['perm_rank'], self.group.default_member_rank)
def test_validate_forbidden(self):
# Attempt to validate a request but not enough permission
self.client.force_authenticate(user=self.users[1])
response = self.client.put(self.member_url + "accept_join_request/" % self.group_member3.id, {})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.data['perm_rank'], 0)
def test_validate_success(self):
# Succesful attempt to validate a request
self.client.force_authenticate(user=self.users[0])
response = self.client.put(self.member_url + "accept_join_request/" % self.group_member3.id, {})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['perm_rank'], 1)
class InvitationGroupMemberCreationTests(APITestCase):
@classmethod
def setUpTestData(self):
super(APITestCase, self).setUpTestData()
# Routes
self.members_url = "/group-member/"
self.member_url = self.members_url + "%d/"
# Group with invitation only
self.group = GroupFactory()
|
Python
| 0
|
@@ -5058,28 +5058,820 @@
self.group = GroupFactory()%0A
+ self.group.req_rank_invite = 5%0A self.group.save()%0A%0A # Testing user%0A self.user = UserFactory()%0A%0A # Misc%0A self.new_membership_data = %7B%22user%22: self.user.id, %22group%22: self.group.id%7D%0A%0A def test_create_not_authed(self):%0A self.client.force_authenticate(user=None)%0A response = self.client.post(self.members_url, self.new_membership_data)%0A self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)%0A%0A def test_create_forbidden(self):%0A # Attempt to get group membership%0A self.client.force_authenticate(user=self.user)%0A response = self.client.post(self.members_url, self.new_membership_data)%0A self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)%0A
|
80d3fe7d2c69fd960a5b585d60085f33e109e455
|
solution found text incorrect
|
simbad/command_line/simbad_lattice.py
|
simbad/command_line/simbad_lattice.py
|
#!/usr/bin/env python
__author__ = "Felix Simkovic & Adam Simpkin"
__date__ = "06 Mar 2017"
__version__ = "0.1"
import argparse
import os
import platform
import sys
import time
import simbad.command_line
import simbad.util.exit_util
import simbad.util.simbad_util
import simbad.version
__version__ = simbad.version.__version__
def lattice_argparse():
"""Create the argparse options"""
p = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
simbad.command_line._argparse_core_options(p)
simbad.command_line._argparse_lattice_options(p)
simbad.command_line._argparse_mtz_options(p)
simbad.command_line._argparse_mr_options(p)
p.add_argument('mtz', help="The path to the input mtz file")
return p.parse_args()
def main():
"""Main function to run SIMBAD's lattice search"""
args = lattice_argparse()
if args.work_dir and os.path.isdir(args.work_dir):
raise ValueError("Named working directory exists, please rename or remove")
elif args.work_dir:
os.mkdir(args.work_dir)
args.work_dir = args.work_dir
elif args.run_dir and os.path.isdir(args.run_dir):
args.work_dir = simbad.command_line.make_workdir(args.run_dir, ccp4_jobid=args.ccp4_jobid)
elif args.run_dir:
os.mkdir(args.run_dir)
args.work_dir = simbad.command_line.make_workdir(args.run_dir, ccp4_jobid=args.ccp4_jobid)
else:
raise RuntimeError("Not entirely sure what has happened here but I should never get to here")
# Logger setup
debug_log = os.path.join(args.work_dir, 'debug.log')
logger = simbad.command_line.setup_logging(logfile=debug_log)
# Check the CCP4 installation
ccp4_root = simbad.command_line.setup_ccp4()
ccp4_version = simbad.util.simbad_util.ccp4_version()
# Print some fancy info
logger.info(simbad.command_line.header)
logger.info("SIMBAD version: %s", __version__)
logger.info("Running with CCP4 version: %s from directory: %s", ccp4_version, ccp4_root)
logger.info("Running on host: %s", platform.node())
logger.info("Running on platform: %s", platform.platform())
logger.info("Job started at: %s", time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()))
logger.info("Invoked with command-line:\n%s\n", " ".join(map(str, sys.argv)))
logger.info("Running in directory: %s\n", args.work_dir)
# Take the start time
time_start = time.time()
# Perform the contaminante search
solution_found = simbad.command_line._simbad_lattice_search(args)
if solution_found:
logger.info("Check you out, crystallizing contaminants! But don't worry, SIMBAD figured it out and found a solution.")
else:
logger.info("No results found - lattice search was unsuccessful")
# Calculate and display the runtime in hours
days, hours, mins, secs = simbad.command_line.calculate_runtime(time_start, time.time())
logger.info("All processing completed in %d days, %d hours, %d minutes, %d and seconds", days, hours, mins, secs)
if __name__ == "__main__":
try:
main()
except Exception as e:
msg = "Error running main SIMBAD program: {0}".format(e.message)
simbad.util.exit_util.exit_error(msg, sys.exc_info()[2])
|
Python
| 0.999998
|
@@ -2626,110 +2626,76 @@
fo(%22
-Check you out, crystallizing contaminants! But don't worry, SIMBAD figured it out and found a solution
+Lucky you! SIMBAD worked its charm and found a lattice match for you
.%22)%0A
|
f6bb142bf965a7ac4f842d73235e2db17795e25e
|
Update sock_conn_test.py
|
apps/sock/sock_conn_test.py
|
apps/sock/sock_conn_test.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author : jeonghoonkang, https://github.com/jeonghoonkang
# Author : Kowonsik, https://github.com/kowonsik
import socket
import os
import sys
import struct
import time
ETYPE_VALUE_MAX = 8000
def main():
SERVER_ADDR = "125.xx.xx.41"
SERVER_PORT = 8283
print " "*10, "try connecting .... "
sock = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
ret_conn = sock.connect( ( SERVER_ADDR, SERVER_PORT ) )
readBuffer = ""
receivedLines = []
sendLines = []
while True:
# provide buffer
recv = sock.recv( 1024 )
readBuffer += recv
print recv
# consume buffer
l = readBuffer.split( ":" )
if ( len(l) < 2 ):
pass # nothing to do
else:
# dump, it appears in the tcollector screen
#print >> sys.stderr, "*readBuffer='" + readBuffer + "'"
readBuffer = ""
# check last
e = l[-1]
l = l[:-1]
if e != "":
readBuffer = e
# dump, it appears in the tcollector screen
#print >> sys.stderr, "+readBuffer='" + readBuffer + "'"
# check first
s = l[0]
if s.startswith( "Welcome" ):
s = s[ len( "Welcome"): ]
l[0] = s
receivedLines += l
#for s in l:
# print >> sys.stderr, "+line='" + s + "'"
# make packets
for s in receivedLines:
#
if len( s ) < 48:
print >> sys.stderr, "wrong data:" + s
print >> sys.stderr, "dump receivedLines"
for ss in receivedLines:
print >> sys.stderr, ss
assert len(s)>=48
# common
head = s[:20]
type = s[20:24]
serialID = s[24:36]
nodeID = s[36:40]
seq = s[40:44]
battery = s[44:48]
#
if type == "0064": # TH
temperature = bigEndian( s[48:52] )
humidity = bigEndian( s[52:56] )
light = bigEndian( s[56:60] )
# T
v1 = -39.6 + 0.01 * temperature
# H
tmp = -4 + 0.0405 * humidity + (-0.0000028) * humidity * humidity
v2 = (v1 - 25) * (0.01 + 0.00008 * humidity) + tmp
# L
tmp = (light * 100) / 75
v3 = tmp * 10
#
t = int( time.time() )
print "thl.temperature %d %f nodeid=%d" % ( t, v1, bigEndian( nodeID ) )
print "thl.humidity %d %f nodeid=%d" % ( t, v2, bigEndian( nodeID ) )
print "thl.light %d %f nodeid=%d" % ( t, v3, bigEndian( nodeID ) )
elif type == "0065":# PIR
pass # ignore this.
elif type == "0066":# CO2
ppm = s[48:52]
t = int( time.time() )
tmp = float( bigEndian( ppm ) )
value = float( 1.5 * ( tmp / 4086 ) * 2 * 1000 )
print "co2.ppm %d %f nodeid=%d" % ( t, value, bigEndian( nodeID ) )
elif type == "006D" or type == "006d": # SPlug
#current = s[48:54]
#t_current = s[60:66]
rawData = s[54:60]
tmp = bigEndian( rawData )
if tmp > 15728640:
tmp = 0
else:
tmp = float( tmp / 4.127 / 10 )
watt = tmp
t = int( time.time() )
print "splug.watt %d %f nodeid=%d" % ( t, watt, bigEndian( nodeID ) )
elif type == "00D3" or type == "00d3": # etype
# length check
if len( s ) < 72:
print >> sys.stderr, "ignore too short data for etype:" + s
continue
t_current = s[48:56]
current = s[64:72]
current = toFloat( swapBytes( current ) )
t_current = littleEndian( swapBytes( t_current ) )
nodeID = bigEndian( nodeID )
else:
print >> sys.stderr, "Invalid type:" + type
pass
# clear
receivedLines = []
sys.stdout.flush()
if __name__ == "__main__":
main()
|
Python
| 0.000004
|
@@ -146,17 +146,16 @@
wonsik%0A%0A
-%0A
import s
|
77651861fc5a27d1d62293e3bc66d62ae193221d
|
add tolerance option to F.sqrt
|
tests/chainer_tests/functions_tests/math_tests/test_sqrt.py
|
tests/chainer_tests/functions_tests/math_tests/test_sqrt.py
|
import unittest
import numpy
import chainer.functions as F
from chainer import testing
# sqrt
def make_data(shape, dtype):
x = numpy.random.uniform(0.1, 5, shape).astype(dtype)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype)
ggx = numpy.random.uniform(-1, 1, shape).astype(dtype)
return x, gy, ggx
@testing.unary_math_function_unittest(F.sqrt, make_data=make_data)
class TestSqrt(unittest.TestCase):
pass
# rsqrt
def rsqrt(x):
return numpy.reciprocal(numpy.sqrt(x))
class TestRsqrt(unittest.TestCase):
def test_rsqrt(self):
x = numpy.random.uniform(0.1, 5, (3, 2)).astype(numpy.float32)
testing.assert_allclose(F.rsqrt(x).data, rsqrt(x))
testing.run_module(__name__, __file__)
|
Python
| 0.000003
|
@@ -362,15 +362,24 @@
est(
+%0A
F.sqrt,
+%0A
mak
@@ -394,16 +394,69 @@
ake_data
+,%0A backward_options=%7B'atol': 1e-3, 'rtol': 1e-3%7D,%0A
)%0Aclass
|
cc17390eada091da34fed92ee7e2090adc1fa87e
|
Fix for `plot_field` function failing on non-square grids #666
|
examples/cfd/tools.py
|
examples/cfd/tools.py
|
from mpl_toolkits.mplot3d import Axes3D # noqa
import numpy as np
from matplotlib import pyplot, cm
def plot_field(field, xmax=2., ymax=2., zmax=None, view=None, linewidth=0):
"""Utility plotting routine for 2D data
:param field: Numpy array with field data to plot
:param xmax: (Optional) Length of the x-axis
:param ymax: (Optional) Length of the y-axis
:param view: (Optional) View point to intialise
"""
x_coord = np.linspace(0, xmax, field.shape[0])
y_coord = np.linspace(0, ymax, field.shape[1])
fig = pyplot.figure(figsize=(11, 7), dpi=100)
ax = fig.gca(projection='3d')
X, Y = np.meshgrid(x_coord, y_coord)
ax.plot_surface(X, Y, field[:], cmap=cm.viridis, rstride=1, cstride=1,
linewidth=linewidth, antialiased=False)
# Enforce axis measures and set view if given
ax.set_xlim(0., xmax)
ax.set_ylim(0., ymax)
if zmax is not None:
ax.set_zlim(1., zmax)
if view is not None:
ax.view_init(*view)
# Label axis
ax.set_xlabel('$x$')
ax.set_ylabel('$y$')
pyplot.show()
def init_hat(field, dx, dy, value=2., bgvalue=1.):
"""Set "hat function" initial condition on an array:
u(.5<=x<=1 && .5<=y<=1 ) is 2
:param field: Numpy array with field data to plot
:param dx: Spacing in the x-dimension
:param dy: Spacing in the y-dimension
:param value: Value of the top part of the function, default=2.
:param bgvalue: Background value for the bottom of the function, default=1.
"""
field[:] = bgvalue
field[int(.5 / dx):int(1 / dx + 1), int(.5 / dy):int(1 / dy + 1)] = value
def gaussian(x, mu, sig):
return np.exp(-np.power(x - mu, 2.) / (2 * np.power(sig, 2.)))
def fin_bump(x):
if x <= 0 or x >= 1:
return 0
else:
return 100*np.exp(-1./(x-np.power(x, 2.)))
def init_smooth(field, dx, dy):
nx, ny = field.shape
for ix in range(nx):
for iy in range(ny):
x = ix * dx
y = iy * dy
field[ix, iy] = fin_bump(x/1.5) * fin_bump(y/1.5) + 1.
|
Python
| 0
|
@@ -655,16 +655,31 @@
y_coord
+, indexing='ij'
)%0A ax
|
173317003a59afb639e6f4f5d5eca41a1f390979
|
Revise q05 to successfully use partial derivatives of u and v for gradient descent of E (error).
|
hw05/hw05ex05.py
|
hw05/hw05ex05.py
|
# dE/du (u e^v - 2v e^(-u))^2 = 2 (u e^v - 2v e^(-u))(e^v + 2v e^(-u))
#from decimal import Decimal
from math import exp #natural exponent, e**x
def calcE(u,v):
return 2 * ( u*exp(v) - 2*v*exp(-u) ) * ( exp(v) + 2*v*exp(-u) )
i = 0
eta = 0.1 # u"\u03B7"
#E = float(10^(-14))
#E = 10^(-14)
#E = Decimal(0.0000000000001)
#E = 0.0000000000001
E_threshold = 10e-14
print calcE(1,1)
'''
while True:
if E < E_threshold:
print E, '<', E_threshold, ' in', i, 'iterations'
break
else:
'''
|
Python
| 0
|
@@ -110,19 +110,31 @@
import
-exp
+sqrt, exp, fabs
#natura
@@ -149,16 +149,36 @@
nt, e**x
+. and absolute value
%0A%0Adef ca
@@ -180,16 +180,20 @@
ef calcE
+wrtu
(u,v):%0A
@@ -199,80 +199,549 @@
-return 2 * ( u*exp(v) - 2*v*exp(-u) ) * ( exp(v) + 2*v*exp(-u) )%0A%0A
+'''%0A Given u and v, the hypothesis and the target function, %0A return the partial deriv w.r.t. u for gradient descent of the error.%0A '''%0A return 2 * ( u*exp(v) - 2*v*exp(-u) ) * ( exp(v) + 2*v*exp(-u) )%0A%0Adef calcEwrtv(u,v):%0A '''%0A Given u and v, the hypothesis and the target function, %0A return the partial deriv w.r.t. v for gradient descent of the error.%0A '''%0A return 2 * ( u * exp(v) - 2*v*exp(-u) ) * ( u*exp(v) - 2*exp(-u))%0A%0Adef calcE(u,v):%0A return ( u*exp(v) - 2.*v*exp(-u) )**2%0A%0A%0Adef q05():%0A
i = 0%0A
+
eta
@@ -758,16 +758,42 @@
%5Cu03B7%22%0A
+ u = 1.%0A v = 1.%0A
#E = flo
@@ -805,16 +805,20 @@
%5E(-14))%0A
+
#E = 10%5E
@@ -823,16 +823,20 @@
0%5E(-14)%0A
+
#E = Dec
@@ -857,16 +857,20 @@
000001)%0A
+
#E = 0.0
@@ -882,16 +882,20 @@
0000001%0A
+
E_thresh
@@ -911,30 +911,28 @@
-14%0A
-%0Aprint calcE(1,1)%0A'''%0A
+ E = 99999.%0A%0A
whil
@@ -939,16 +939,20 @@
e True:%0A
+
if E
@@ -967,16 +967,20 @@
eshold:%0A
+
@@ -1037,16 +1037,20 @@
+
break%0A
@@ -1055,13 +1055,284 @@
+
+
else:%0A
-'''
+ dE_du = calcEwrtu(u,v)%0A dE_dv = calcEwrtv(u,v)%0A u = u - eta * dE_du%0A v = v - eta * dE_dv%0A E = calcE(u,v)%0A #print 'E:', E, 'u:', u, 'v:', v, 'iter:', i%0A i+=1%0A return u, v, E, i%0A%0Aprint q05()%0A
|
61609c6b1a93316c1b8a5e512ed310a38d6c772b
|
Add gss_mnist description
|
examples/gss_mnist.py
|
examples/gss_mnist.py
|
import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss
from torch.optim import SGD
from avalanche.benchmarks import Experience
from avalanche.benchmarks.classic import SplitMNIST
from avalanche.benchmarks.generators.benchmark_generators import \
data_incremental_benchmark
from avalanche.benchmarks.utils import AvalancheSubset
from avalanche.evaluation.metrics import accuracy_metrics, loss_metrics
from avalanche.logging import InteractiveLogger
from avalanche.training.plugins import EvaluationPlugin
from avalanche.training.strategies import GSS_greedy
class FlattenP(nn.Module):
'''A nn-module to flatten a multi-dimensional tensor to 2-dim tensor.'''
def forward(self, x):
batch_size = x.size(0) # first dimenstion should be batch-dimension.
return x.view(batch_size, -1)
def __repr__(self):
tmpstr = self.__class__.__name__ + '()'
return tmpstr
class MLP(nn.Module):
def __init__(self, sizes, bias=True):
super(MLP, self).__init__()
layers = []
for i in range(0, len(sizes) - 1):
if i < (len(sizes)-2):
layers.append(nn.Linear(sizes[i], sizes[i + 1]))
layers.append(nn.ReLU())
else:
layers.append(nn.Linear(sizes[i], sizes[i + 1], bias=bias))
self.net = nn.Sequential(FlattenP(), *layers)
def forward(self, x):
return self.net(x)
def shrinking_experience_size_split_strategy(
experience: Experience):
experience_size = 1000
exp_dataset = experience.dataset
exp_indices = list(range(len(exp_dataset)))
result_datasets = []
exp_indices = \
torch.as_tensor(exp_indices)[
torch.randperm(len(exp_indices))
].tolist()
result_datasets.append(AvalancheSubset(
exp_dataset, indices=exp_indices[0:experience_size]))
return result_datasets
def setup_mnist():
scenario = data_incremental_benchmark(SplitMNIST(
n_experiences=5, seed=1), experience_size=0, custom_split_strategy=shrinking_experience_size_split_strategy)
n_inputs = 784
nh = 100
nl = 2
n_outputs = 10
model = MLP([n_inputs] + [nh] * nl + [n_outputs])
return model, scenario
if __name__ == "__main__":
dev = "cuda:0"
device = torch.device(dev)
#_______________________________________Model and scenario
model, scenario = setup_mnist()
eval_plugin = EvaluationPlugin(
accuracy_metrics(epoch=True, experience=True, stream=True),
loss_metrics(stream=True), loggers=[InteractiveLogger()])
# _____________________________Strategy
optimizer = SGD(model.parameters(), lr=0.05)
strategy = GSS_greedy(model, optimizer, criterion=CrossEntropyLoss(), train_mb_size=10, mem_strength=10, input_size=[
1, 28, 28], train_epochs=3, eval_mb_size=10, mem_size=300, evaluator=eval_plugin)
# ___________________________________________train
for experience in scenario.train_stream:
print(">Experience ", experience.current_experience)
res = strategy.train(experience)
res = strategy.eval(scenario.test_stream)
|
Python
| 0.000003
|
@@ -573,16 +573,188 @@
_greedy%0A
+%22%22%22%0AThis example the strategy GSS_greedy on Split MNIST.%0AThe final accuracy is around 82.6%25 (std 2.9) %0Aas stated in the original paper: https://arxiv.org/abs/1903.08671%0A%22%22%22
%0A%0Aclass
|
c0eedfeca0e19a65e4484e63790319cf18433343
|
change optimizer in example
|
examples/mnist_mlp.py
|
examples/mnist_mlp.py
|
'''Trains a simple deep NN on the MNIST dataset.
Gets to 98.40% test accuracy after 20 epochs
(there is *a lot* of margin for parameter tuning).
2 seconds per epoch on a K520 GPU.
'''
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils
batch_size = 128
nb_classes = 10
nb_epoch = 20
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(512, input_shape=(784,)))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(512))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
model.summary()
model.compile(loss='categorical_crossentropy',
optimizer=Adam(),
metrics=['accuracy'])
history = model.fit(X_train, Y_train,
batch_size=batch_size, nb_epoch=nb_epoch,
verbose=1, validation_data=(X_test, Y_test))
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])
|
Python
| 0
|
@@ -1403,20 +1403,23 @@
timizer=
-Adam
+RMSprop
(),%0A
|
a5be3784d0cfce42c0cdb6bc83b37a07dff7a164
|
Implement accuracy on GPU
|
chainer/functions/accuracy.py
|
chainer/functions/accuracy.py
|
import numpy
from chainer import cuda, Function
class Accuracy(Function):
"""Compute accuracy within minibatch."""
def forward_cpu(self, inputs):
y, t = inputs
y = y.reshape(y.shape[0], y.size / y.shape[0]) # flatten
pred = y.argmax(axis=1)
return (pred == t).mean(dtype=numpy.float32),
def forward_gpu(self, inputs):
# Fallback to CPU
# TODO(beam2d): Pure GPU version
accuracy, = self.forward_cpu((a.get() for a in inputs))
return cuda.to_gpu_async(numpy.array(accuracy)),
def accuracy(y, t):
return Accuracy()(y, t)
|
Python
| 0.000008
|
@@ -6,16 +6,44 @@
t numpy%0A
+from pycuda import gpuarray%0A
from cha
@@ -69,16 +69,16 @@
unction%0A
-
%0Aclass A
@@ -399,186 +399,685 @@
-# Fallback to CPU%0A # TODO(beam2d): Pure GPU version%0A accuracy, = self.forward_cpu((a.get() for a in inputs))%0A return cuda.to_gpu_async(numpy.array(accuracy))
+x, t = inputs%0A fragments = cuda.empty((x.shape%5B0%5D,), dtype=numpy.int8)%0A cuda.elementwise(%0A 'char* fragments, const float* x, const int* t, int c',%0A '''%0A x += i * c;%0A float maxval = x%5B0%5D;%0A int argmax = 0;%0A for (int j = 1; j %3C c; ++j) %7B%0A if (maxval %3C x%5Bj%5D) %7B%0A maxval = x%5Bj%5D;%0A argmax = j;%0A %7D%0A %7D%0A fragments%5Bi%5D = argmax == t%5Bi%5D;%0A ''', 'accuracy_fwd_map')(fragments, x, t, x.shape%5B1%5D)%0A y = gpuarray.sum(fragments, dtype=numpy.float32)%0A y /= x.shape%5B0%5D%0A return y
,%0A%0Ad
|
f3c5a477141e5f3845641111f775ea90398be633
|
Add numpy.ndarray and cupy.ndarray as input type
|
chainer/functions/math/erf.py
|
chainer/functions/math/erf.py
|
import math
import warnings
import numpy
import chainer
from chainer import cuda
from chainer import function_node
from chainer import utils
from chainer.utils import type_check
_erf_cpu = None
class Erf(function_node.FunctionNode):
@property
def label(self):
return 'erf'
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
type_check.expect(in_types[0].dtype.kind == 'f')
def forward_cpu(self, x):
global _erf_cpu
if _erf_cpu is None:
try:
from scipy import special
_erf_cpu = special.erf
except ImportError:
warnings.warn(
"SciPy is not available. Forward computation of erf in CPU"
" can be slow without SciPy.")
_erf_cpu = numpy.vectorize(math.erf)
self.retain_inputs((0,))
return utils.force_array(_erf_cpu(x[0]), dtype=x[0].dtype),
def forward_gpu(self, x):
self.retain_inputs((0,))
return cuda.elementwise(
'T x', 'T y',
'y = erf(x)',
'elementwise_erf',
)(x[0]),
def backward(self, indexes, gy):
x = self.get_retained_inputs()[0]
return 2 / numpy.pi ** 0.5 * chainer.functions.exp(-x ** 2) * gy[0],
def erf(x):
"""Elementwise error function.
.. note::
Forward computation in CPU can be slow if
`SciPy <https://www.scipy.org/>`_ is not available.
Args:
x (~chainer.Variable): Input variable.
Returns:
~chainer.Variable: Output variable.
"""
return Erf().apply((x,))[0]
|
Python
| 0.000127
|
@@ -1517,16 +1517,24 @@
x (
+:class:%60
~chainer
@@ -1542,16 +1542,78 @@
Variable
+%60 or :class:%60numpy.ndarray%60 or %5C%0A :class:%60cupy.ndarray%60
): Input
|
0eeacf39140ae204bcea59a497acb8e58d949f5a
|
Remove unused relation join
|
changes/utils/originfinder.py
|
changes/utils/originfinder.py
|
from __future__ import absolute_import
from collections import defaultdict
from sqlalchemy.orm import subqueryload_all
from changes.config import db
from changes.constants import Result, Status
from changes.models import Build, Job, TestGroup, Source
def first(key, iterable):
for x in iterable:
if key(x):
return x
return None
def find_failure_origins(build, test_failures):
"""
Attempt to find originating causes of failures.
Returns a mapping of {TestGroup.name_sha: Job}.
"""
project = build.project
if not test_failures:
return {}
# find any existing failures in the previous runs
# to do this we first need to find the last passing job
last_pass = Build.query.join(
Source, Source.id == Build.source_id,
).filter(
Build.project == project,
Build.date_created <= build.date_created,
Build.status == Status.finished,
Build.result == Result.passed,
Build.id != build.id,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc()).first()
if last_pass is None:
return {}
# We have to query all runs between build and last_pass. Because we're
# paranoid about performance, we limit this to 100 results.
previous_runs = Build.query.join(
Source, Source.id == build.source_id,
).options(
subqueryload_all(Build.jobs),
).filter(
Build.project == project,
Build.date_created <= build.date_created,
Build.date_created >= last_pass.date_created,
Build.status == Status.finished,
Build.result.in_([Result.failed, Result.passed]),
Build.id != build.id,
Build.id != last_pass.id,
Source.patch == None, # NOQA
).order_by(Build.date_created.desc())[:100]
if not previous_runs:
return {}
# we now have a list of previous_runs so let's find all test failures in
# these runs
queryset = db.session.query(
TestGroup.name_sha, Job.build_id,
).join(
Job, Job.id == TestGroup.job_id,
).filter(
Job.build_id.in_(b.id for b in previous_runs),
Job.status == Status.finished,
Job.result == Result.failed,
TestGroup.result == Result.failed,
TestGroup.num_leaves == 0,
TestGroup.name_sha.in_(t.name_sha for t in test_failures),
).group_by(
TestGroup.name_sha, Job.build_id
)
previous_test_failures = defaultdict(set)
for name_sha, build_id in queryset:
previous_test_failures[build_id].add(name_sha)
failures_at_build = dict()
searching = set(t for t in test_failures)
last_checked_run = build
for p_build in previous_runs:
p_build_failures = previous_test_failures[p_build.id]
# we have to copy the set as it might change size during iteration
for f_test in list(searching):
if f_test.name_sha not in p_build_failures:
failures_at_build[f_test] = last_checked_run
searching.remove(f_test)
last_checked_run = p_build
for f_test in searching:
failures_at_build[f_test] = last_checked_run
return failures_at_build
|
Python
| 0.000001
|
@@ -1361,61 +1361,8 @@
id,%0A
- ).options(%0A subqueryload_all(Build.jobs),%0A
|
569e21f9ad9f9668c7dcfcd4dba64806c9c07d7d
|
Support origination contexts in V2.
|
ambassador/ambassador/envoy/v2/v2cluster.py
|
ambassador/ambassador/envoy/v2/v2cluster.py
|
# Copyright 2018 Datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License
import urllib
from typing import List, TYPE_CHECKING
from ...ir.ircluster import IRCluster
from .v2tls import V2TLSContext
if TYPE_CHECKING:
from . import V2Config
class V2Cluster(dict):
def __init__(self, config: 'V2Config', cluster: IRCluster) -> None:
super().__init__()
fields = {
'name': cluster.name,
'type': cluster.type.upper(),
'lb_policy': cluster.lb_type.upper(),
'connect_timeout': "3s",
'load_assignment': {
'cluster_name': cluster.name,
'endpoints': [
{
'lb_endpoints': self.get_endpoints(cluster)
}
]
}
}
if cluster.get('grpc', False):
self["http2_protocol_options"] = {}
ctx = cluster.get('tls_context', None)
if ctx is not None:
# If TLS Context is enabled, then we at least need to specify `tls_context` to enabled HTTPS origination
if ctx.get('enabled'):
fields['tls_context'] = {
'common_tls_context': {}
}
envoy_ctx = V2TLSContext(ctx=ctx, host_rewrite=cluster.get('host_rewrite', None))
if envoy_ctx:
fields['tls_context'] = envoy_ctx
self.update(fields)
def get_endpoints(self, cluster: IRCluster):
result = []
for u in cluster.urls:
p = urllib.parse.urlparse(u)
address = {
'address': p.hostname,
'port_value': int(p.port)
}
if p.scheme:
address['protocol'] = p.scheme.upper()
result.append({'endpoint': {'address': {'socket_address': address}}})
return result
@classmethod
def generate(self, config: 'V2Config') -> None:
config.clusters = []
for ircluster in sorted(config.ir.clusters.values(), key=lambda x: x.name):
cluster = config.save_element('cluster', ircluster, V2Cluster(config, ircluster))
config.clusters.append(cluster)
|
Python
| 0
|
@@ -1465,16 +1465,17 @@
, None)%0A
+%0A
@@ -1515,30 +1515,64 @@
If
-TLS Context is enabled
+this is a null TLS Context (_ambassador_enabled is True)
, th
@@ -1584,14 +1584,8 @@
at
-least
need
@@ -1596,16 +1596,40 @@
specify
+a%0A # minimal
%60tls_con
@@ -1643,17 +1643,16 @@
o enable
-d
HTTPS o
@@ -1661,16 +1661,18 @@
gination
+.%0A
%0A
@@ -1692,16 +1692,28 @@
et('
+_ambassador_
enabled'
):%0A
@@ -1708,16 +1708,23 @@
enabled'
+, False
):%0A
@@ -1815,33 +1815,54 @@
%7D%0A
-%0A
+ else:%0A
envo
@@ -1951,16 +1951,20 @@
+
if envoy
@@ -1969,16 +1969,20 @@
oy_ctx:%0A
+
|
017413dedca384dad72a8159358f372ceff4949c
|
version bump
|
iiab/__init__.py
|
iiab/__init__.py
|
__version__ = '0.4.2'
|
Python
| 0.000001
|
@@ -16,7 +16,7 @@
0.4.
-2
+3
'%0A
|
95d6c63dfd527f3ffc19a713b2a2dfa2b97dfb1a
|
remove unnecessary lines
|
client/python/modeldb/tests/sklearn/testRandomSplitEvent.py
|
client/python/modeldb/tests/sklearn/testRandomSplitEvent.py
|
import unittest
import sys
from ModelDbSyncerTest import SyncerTest
import modeldb.tests.utils as utils
from modeldb.thrift.modeldb import ttypes as modeldb_types
from modeldb.sklearn_native.ModelDbSyncer import *
from modeldb.sklearn_native import SyncableRandomSplit
import pandas as pd
import random
FMIN = sys.float_info.min
FMAX = sys.float_info.max
class TestRandomSplitEvent(unittest.TestCase):
def setUp(self):
name = "random split test"
author = "srinidhi"
description = "70/30 split"
SyncerObj = SyncerTest(
NewOrExistingProject(name, author, description),
DefaultExperiment(),
NewExperimentRun("Abc"))
X = pd.DataFrame(np.random.randint(0,100,size=(100, 4)), columns=list('ABCD'))
y = pd.DataFrame(np.random.randint(0,100,size=(100, 1)), columns=['output'])
X.tag("digits-dataset")
seed = 1
weights = [0.7, 0.3]
SyncerTest.instance.clearBuffer()
X_set, y_set = SyncableRandomSplit.randomSplit(X, [0.7, 0.3], seed, y)
events = SyncerTest.instance.sync()
self.randomSplitEvent = events[0]
def test_random_split_event(self):
utils.validate_random_split_event_struct(self.randomSplitEvent, self)
self.assertEquals(self.randomSplitEvent.weights, [0.7, 0.3])
self.assertEquals(self.randomSplitEvent.seed, 1)
def test_old_dataframe(self):
old_df = self.randomSplitEvent.oldDataFrame
expected_df = modeldb_types.DataFrame(
-1,
[
modeldb_types.DataFrameColumn('A', 'int64'),
modeldb_types.DataFrameColumn('B', 'int64'),
modeldb_types.DataFrameColumn('C', 'int64'),
modeldb_types.DataFrameColumn('D', 'int64'),
],
100,
'digits-dataset')
utils.is_equal_dataframe(old_df, expected_df, self)
def test_split_dataframes(self):
split_data_frames = self.randomSplitEvent.splitDataFrames
self.assertEquals(len(split_data_frames), 2)
dataframe1 = split_data_frames[0]
dataframe2 = split_data_frames[1]
utils.validate_dataframe_struct(dataframe1, self)
utils.validate_dataframe_struct(dataframe2, self)
# Check if dataframes are split according to weights (within some margin of error)
self.assertIn(dataframe1.numRows, range(60,80))
self.assertIn(dataframe2.numRows, range(20,40))
self.assertEquals(dataframe1.numRows + dataframe2.numRows, 100)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.999144
|
@@ -303,61 +303,8 @@
om%0A%0A
-FMIN = sys.float_info.min%0AFMAX = sys.float_info.max%0A%0A
clas
|
8c4a54690cb99b63a9cf825e2958bb2b48cd7e5d
|
Complete lc009_palindrome_number.py
|
lc009_palindrome_number.py
|
lc009_palindrome_number.py
|
"""Leetcode 9. Palindrome Number
Easy
Determine whether an integer is a palindrome. An integer is a palindrome when
it reads the same backward as forward.
Example 1:
Input: 121
Output: true
Example 2:
Input: -121
Output: false
Explanation: From left to right, it reads -121. From right to left, it becomes
121-. Therefore it is not a palindrome.
Example 3:
Input: 10
Output: false
Explanation: Reads 01 from right to left. Therefore it is not a palindrome.
"""
class Solution(object):
def isPalindrome(self, x):
"""
:type x: int
:rtype: bool
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
Python
| 0.999989
|
@@ -455,16 +455,90 @@
indrome.
+%0A%0AFollow up:%0ACoud you solve it without converting the integer to a string?
%0A%22%22%22%0A%0Acl
@@ -559,17 +559,20 @@
bject):%0A
-%09
+
def isPa
@@ -594,10 +594,16 @@
x):%0A
-%09%09
+
%22%22%22%0A
@@ -668,32 +668,704 @@
-pass%0A%0A%0Adef main():%0A%09pass
+x_str = str(x)%0A return x_str == x_str%5B::-1%5D%0A%0Aclass Solution2(object):%0A def isPalindrome(self, x):%0A %22%22%22%0A :type x: int%0A :rtype: bool%0A %22%22%22%0A if x %3C 0:%0A return False%0A%0A ls = %5B%5D%0A while x %3E 0:%0A div, mod = divmod(x, 10)%0A x = div%0A ls.append(mod)%0A%0A return ls == ls%5B::-1%5D%0A%0A%0Adef main():%0A x = 121 # Ans: True.%0A print(Solution().isPalindrome(x))%0A print(Solution2().isPalindrome(x))%0A%0A x = -121 # Ans: False.%0A print(Solution().isPalindrome(x))%0A print(Solution2().isPalindrome(x))%0A%0A x = 10 # Ans: False.%0A print(Solution().isPalindrome(x))%0A print(Solution2().isPalindrome(x))
%0A%0A%0Ai
@@ -1394,9 +1394,12 @@
_':%0A
-%09
+
main
|
d325ed4aade30378e050cf2443081a86a6d4438c
|
Revise to var min_hq
|
lc0253_meeting_rooms_ii.py
|
lc0253_meeting_rooms_ii.py
|
"""Leetcode 253. Meeting Rooms II (Premium)
Medium
URL: https://leetcode.com/problems/meeting-rooms-ii
Given an array of meeting time intervals consisting of start and end times
[[s1,e1],[s2,e2],...] (si < ei),
find the minimum number of conference rooms required.
Example1
Input: intervals = [[0,30],[5,10],[15,20]]
Output: 2
Explanation:
We need two meeting rooms
room1: (0,30)
room2: (5,10),(15,20)
Example2
Input: intervals = [[7, 10], [2, 4]]
Output: 1
Explanation:
Only need one meeting room
"""
class SolutionSortEndMinHeapEnd(object):
def minMeetingRooms(self, intervals):
"""
:type intervals: List[List[int]]
:rtype: int
Time complexity: O(n*logn).
Space complexity: O(n).
"""
import heapq
if not intervals or not intervals[0]:
return 0
# Sort intervals by start time.
intervals.sort()
# Use min heap to store end times.
end_minheap = []
heapq.heappush(end_minheap, intervals[0][1])
for i in range(1, len(intervals)):
# If next start time is after min end time, remove min end time.
if intervals[i][0] >= end_minheap[0]:
heapq.heappop(end_minheap)
# Add next end time to min heap.
heapq.heappush(end_minheap, intervals[i][1])
return len(end_minheap)
class SolutionTimeCounterListInsort(object):
def minMeetingRooms(self, intervals):
"""
:type intervals: List[List[int]]
:rtype: int
Time complexity: O(n).
Space complexity: O(n).
"""
from bisect import insort
# Sort times and add increment/decrement counters by start/end.
time_counters = []
for i in range(len(intervals)):
insort(time_counters, (intervals[i][0], 1))
insort(time_counters, (intervals[i][1], -1))
cur_n, max_n = 0, 0
for t, counter in time_counters:
cur_n += counter
max_n = max(max_n, cur_n)
return max_n
def main():
# Output: 2.
intervals = [[0,30],[5,10],[15,20]]
print SolutionSortEndMinHeapEnd().minMeetingRooms(intervals)
print SolutionTimeCounterListInsort().minMeetingRooms(intervals)
# Output: 1.
intervals = [[7, 10], [2, 4]]
print SolutionSortEndMinHeapEnd().minMeetingRooms(intervals)
print SolutionTimeCounterListInsort().minMeetingRooms(intervals)
if __name__ == '__main__':
main()
|
Python
| 0.001577
|
@@ -953,19 +953,17 @@
end_minh
-eap
+q
= %5B%5D%0A
@@ -983,35 +983,33 @@
eappush(end_minh
-eap
+q
, intervals%5B0%5D%5B1
@@ -1174,19 +1174,17 @@
end_minh
-eap
+q
%5B0%5D:%0A
@@ -1218,19 +1218,17 @@
end_minh
-eap
+q
)%0A%0A
@@ -1302,19 +1302,17 @@
end_minh
-eap
+q
, interv
@@ -1350,19 +1350,17 @@
end_minh
-eap
+q
)%0A%0A%0Aclas
|
7e20350e4199a5cbb4c89bd6593a93b9a2e56cff
|
add in old commands
|
lean_workbench/commands.py
|
lean_workbench/commands.py
|
# -*- coding:utf-8 -*-
from flask.ext.script import Command, Option, prompt_bool
import os
import config
from main import app_factory
class CreateDB(Command):
"""
Creates sqlalchemy database
"""
def run(self):
from database import create_all
create_all()
class DropDB(Command):
"""
Drops sqlalchemy database
"""
def run(self):
from database import drop_all
drop_all()
class Scale(Command):
"""
For now, just hit the VC-matcher server
"""
# allow user to enter
# python manage.py scale --new=True
# to mine only new API keys
option_list = (
Option('--new', '-n', dest='new'),
)
def run(self, new=False):
"""
Run the mining
args:
new- if true, check for users that haven't been mined yet and mine only their data.
"""
app = app_factory(config.Dev)
with app.app_context():
from scale.scale_model import Startup_data_model
from scale.scale_mine import get_vcs
users = Startup_data_model.query.filter_by(vc_matcher_done=False).filter(Startup_data_model.description != None).all()
if users:
print 'there are users to vc mine'
get_vcs(users)
class Mine(Command):
"""
Mines the data sources
"""
# allow user to enter
# python manage.py mine --new=True
# to mine only new API keys
option_list = (
Option('--new', '-n', dest='new'),
)
def run(self, new=False):
"""
Run the mining
args:
new- if true, check for users that haven't been mined yet and mine only their data.
"""
from twitter.twitter_model import Twitter_model
from quickbooks.quickbooks_model import Quickbooks_model
from facebook.facebook_model import Facebook_model
from google_analytics.google_analytics_models import Google_Analytics_User_Model
from twitter.twitter_mine import track_keywords
from google_analytics.ga_mine import mine_visits
from facebook.fb_mine import mine_fb_page_data
#from quickbooks.qb_mine import mine_qb_data
app = app_factory(config.Dev)
with app.app_context():
consumer_key = app.config.get('QUICKBOOKS_OAUTH_CONSUMER_KEY')
consumer_secret = app.config.get('QUICKBOOKS_OAUTH_CONSUMER_SECRET')
app_token = app.config.get('QUICKBOOKS_APP_TOKEN')
if new:
new_twitters = Twitter_model.query.filter_by(active=False).all()
#new_qbs = Quickbooks_model.query.filter_by(active=False).all()
new_fbs = Facebook_model.query.filter_by(active=False).all()
new_gas = Google_Analytics_User_Model.query.filter_by(active=False).all()
for user in new_twitters:
track_keywords(username=user.username)
for user in new_fbs:
mine_fb_page_data(username=user.username)
for user in new_gas:
mine_visits(username=user.username)
else:
#mine_fb_page_data()
mine_visits()
#track_keywords()
#mine_qb_data(consumer_key,consumer_secret,app_token)
class PrintUsers(Command):
"""
Mines the data sources
"""
def run(self):
from users.user_model import User
users = User.query.all()
for user in users:
print users
class Test(Command):
"""
Run tests
"""
start_discovery_dir = "tests"
def get_options(self):
return [
Option('--start_discover', '-s', dest='start_discovery',
help='Pattern to search for features',
default=self.start_discovery_dir),
]
def run(self, start_discovery):
import unittest
if os.path.exists(start_discovery):
argv = [config.project_name, "discover"]
argv += ["-s", start_discovery]
print argv
unittest.main(argv=argv)
else:
print("Directory '%s' was not found in project root." % start_discovery)
|
Python
| 0.000118
|
@@ -3189,25 +3189,24 @@
-#
mine_fb_page
@@ -3259,17 +3259,16 @@
-#
track_ke
|
a2dcb94726d0bcc58b08eddcab6ebf433778af2c
|
Fix error handler to correct view.
|
zoll_me/urls.py
|
zoll_me/urls.py
|
'''
James D. Zoll
1/20/2013
Purpose: Defines URL rules for the project.
License: This is a public work.
'''
# Library Imports
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^scarf/', include('scarf.urls')),
url(r'^rss/', include('rss.urls')),
url(r'^files/', include('files.urls')),
url(r'^xbmc_photos/', include('xbmc_photos.urls')),
url(r'^leapday/', include('leapday.urls')),
url(r'^lensoftruth/', include('lensoftruth.urls')),
url(r'^dk_optimize/', include('dk_optimize.urls')),
url(r'^$', 'zoll_me.views.index'),
url(r'^resume/$', 'zoll_me.views.resume'),
url(r'^projects/$', 'zoll_me.views.projects'),
url(r'^account/login/$','django.contrib.auth.views.login', {'template_name': 'zoll_me/account/login.html'}, name="zoll_me-login"),
url(r'^account/logout/$','django.contrib.auth.views.logout', {'template_name': 'zoll_me/account/logged_out.html'}),
url(r'^account/password_change/$','django.contrib.auth.views.password_change', {'template_name': 'zoll_me/account/password_change.html'}),
url(r'^account/password_change_done/$','django.contrib.auth.views.password_change_done', {'template_name': 'zoll_me/account/password_change_done.html'}),
url(r'^account/password_reset/$','django.contrib.auth.views.password_reset', {'template_name': 'zoll_me/account/password_reset.html', 'email_template_name': 'zoll_me/account/password_reset_email.txt', 'subject_template_name':'zoll_me/account/password_reset_subject.txt'}),
url(r'^account/password_reset_done/$','django.contrib.auth.views.password_reset_done', {'template_name': 'zoll_me/account/password_reset_done.html'}),
url(r'^account/password_reset_complete/$','django.contrib.auth.views.password_reset_complete', {'template_name': 'zoll_me/account/password_reset_complete.html'}),
url(r'^account/password_reset_confirm/(?P<uidb36>\d+)/(?P<token>[\d\w-]+)$','django.contrib.auth.views.password_reset_confirm', {'template_name': 'zoll_me/account/password_reset_confirm.html'})
)
# When we're in DEBUG mode, I want to be able to access some
# resources that aren't accessible directly in production. These URLconfs
# are added to make that possible.
if settings.DEBUG:
urlpatterns += patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,
}),
url(r'^400/$', 'zoll_me.views.error_400'),
url(r'^403/$', 'zoll_me.views.error_403'),
url(r'^404/$', 'zoll_me.views.error_404'),
url(r'^500/$', 'zoll_me.views.error_500'),
)
# Define error handling views.
handler403 = 'zoll_me.views.error_404'
handler404 = 'zoll_me.views.error_404'
handler500 = 'zoll_me.views.error_500'
|
Python
| 0
|
@@ -2799,33 +2799,33 @@
e.views.error_40
-4
+3
'%0Ahandler404 = '
|
9e9f831a757af01cc3b1edfe590e27f7ab53c2ce
|
define interfaces
|
zvmsdk/vmops.py
|
zvmsdk/vmops.py
|
from log import LOG
import utils as zvmutils
class VMOps(object):
def __init__(self):
self._xcat_url = zvmutils.get_xcat_url()
def _power_state(self, instance_name, method, state):
"""Invoke xCAT REST API to set/get power state for a instance."""
body = [state]
url = self._xcat_url.rpower('/' + instance_name)
return zvmutils.xcat_request(method, url, body)
def get_power_state(self, instance_name):
"""Get power status of a z/VM instance."""
LOG.debug('Query power stat of %s' % instance_name)
res_dict = self._power_state(instance_name, "GET", "stat")
@zvmutils.wrap_invalid_xcat_resp_data_error
def _get_power_string(d):
tempstr = d['info'][0][0]
return tempstr[(tempstr.find(':') + 2):].strip()
power_stat = _get_power_string(res_dict)
return power_stat
|
Python
| 0.000003
|
@@ -42,16 +42,2193 @@
utils%0A%0A%0A
+VMOPS = None%0A%0A%0Adef _get_vmops():%0A if VMOPS is None:%0A VMOPS = VMOps()%0A return VMOPS%0A%0A%0Adef run_instance(instance_name, image_id, cpu, memory,%0A login_password, ip_addr):%0A %22%22%22Deploy and provision a virtual machine.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A :image_id: Image ID%0A :cpu: vcpu%0A :memory: memory%0A :login_password: login password%0A :ip_addr: ip address%0A %22%22%22%0A pass%0A%0A%0Adef terminate_instance(instance_name):%0A %22%22%22Destroy a virtual machine.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A %22%22%22%0A pass%0A%0Adef start_instance(instance_name):%0A %22%22%22Power on a virtual machine.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A %22%22%22%0A _get_vmops()._power_state(instance_name, %22PUT%22, %22on%22)%0A%0Adef stop_instance(instance_name):%0A %22%22%22Shutdown a virtual machine.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A %22%22%22%0A pass%0A%0A%0Adef create_volume(volume_name, size):%0A %22%22%22Create a volume.%0A%0A Input parameters:%0A :volume_name: volume name%0A :size: size%0A %22%22%22%0A pass%0A%0A%0Adef delete_volume(volume_name):%0A %22%22%22Create a volume.%0A%0A Input parameters:%0A :volume_name: volume name%0A %22%22%22%0A pass%0A%0A%0Adef attach_volume(instance_name, volume_name):%0A %22%22%22Create a volume.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A :volume_name: volume name%0A %22%22%22%0A pass%0A%0A%0Adef capture_instance(instance_name, image_name):%0A %22%22%22Caputre a virtual machine image.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A :image_name: Image name%0A %22%22%22%0A pass%0A%0A%0Adef delete_image(image_name):%0A %22%22%22Delete image.%0A%0A Input parameters:%0A :image_name: Image name%0A 
%22%22%22%0A pass%0A%0A%0Adef detach_volume(instance_name, volume_name):%0A %22%22%22Create a volume.%0A%0A Input parameters:%0A :instance_name: USERID of the instance, last 8 if length %3E 8%0A :volume_name: volume name%0A %22%22%22%0A pass%0A%0A%0A
class VM
@@ -2240,16 +2240,17 @@
bject):%0A
+%0A
def
|
c88060026243dee24ab96af35784b38ca8b1c2e9
|
Fix migration conflict. (#1929)
|
rdr_service/alembic/versions/2c3a71f9fc04_add_genomic_set_member_columns_for_aw3_.py
|
rdr_service/alembic/versions/2c3a71f9fc04_add_genomic_set_member_columns_for_aw3_.py
|
"""add_genomic_set_member columns_for_aw3_aw4
Revision ID: 2c3a71f9fc04
Revises: df7c40b6a209
Create Date: 2020-08-25 08:57:17.987756
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '2c3a71f9fc04'
down_revision = 'df7c40b6a209'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('genomic_set_member', sa.Column('aw3_manifest_job_run_id', sa.Integer(), nullable=True))
op.add_column('genomic_set_member_history', sa.Column('aw3_manifest_job_run_id', sa.Integer(), nullable=True))
op.add_column('genomic_set_member', sa.Column('aw4_manifest_job_run_id', sa.Integer(), nullable=True))
op.add_column('genomic_set_member_history', sa.Column('aw4_manifest_job_run_id', sa.Integer(), nullable=True))
op.drop_constraint('genomic_set_member_ibfk_23', 'genomic_set_member', type_='foreignkey')
op.drop_constraint('genomic_set_member_ibfk_24', 'genomic_set_member', type_='foreignkey')
op.create_foreign_key(None, 'genomic_set_member', 'genomic_job_run', ['aw3_manifest_job_run_id'], ['id'])
op.create_foreign_key(None, 'genomic_set_member', 'genomic_job_run', ['aw4_manifest_job_run_id'], ['id'])
op.drop_column('genomic_set_member', 'wgs_aw3_manifest_job_run_id')
op.drop_column('genomic_set_member_history', 'wgs_aw3_manifest_job_run_id')
op.drop_column('genomic_set_member', 'arr_aw3_manifest_job_run_id')
op.drop_column('genomic_set_member_history', 'arr_aw3_manifest_job_run_id')
# ### end Alembic commands ###
def downgrade_rdr():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('genomic_set_member', sa.Column('arr_aw3_manifest_job_run_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('genomic_set_member_history', sa.Column('arr_aw3_manifest_job_run_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('genomic_set_member', sa.Column('wgs_aw3_manifest_job_run_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('genomic_set_member_history', sa.Column('wgs_aw3_manifest_job_run_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.drop_constraint(None, 'genomic_set_member', type_='foreignkey')
op.drop_constraint(None, 'genomic_set_member', type_='foreignkey')
op.create_foreign_key('genomic_set_member_ibfk_24', 'genomic_set_member', 'genomic_job_run', ['wgs_aw3_manifest_job_run_id'], ['id'])
op.create_foreign_key('genomic_set_member_ibfk_23', 'genomic_set_member', 'genomic_job_run', ['arr_aw3_manifest_job_run_id'], ['id'])
op.drop_column('genomic_set_member', 'aw4_manifest_job_run_id')
op.drop_column('genomic_set_member_history', 'aw4_manifest_job_run_id')
op.drop_column('genomic_set_member', 'aw3_manifest_job_run_id')
op.drop_column('genomic_set_member_history', 'aw3_manifest_job_run_id')
# ### end Alembic commands ###
def upgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade_metrics():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
|
Python
| 0.000002
|
@@ -75,28 +75,28 @@
evises:
-df7c40b6a209
+c069abb92cc0
%0ACreate
@@ -307,20 +307,20 @@
= '
-df7c40b6a209
+c069abb92cc0
'%0Abr
|
dfaa289465a2cdc837884718624b9a8a65e511b3
|
Improve proxy rax example
|
examples/proxy_rax.py
|
examples/proxy_rax.py
|
import random
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
class ProxyRaxBot(sc2.BotAI):
async def on_step(self, state, iteration):
cc = self.units(COMMANDCENTER)
if not cc.exists:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for unit in self.workers | self.units(MARINE):
await self.do(unit.attack(target))
return
else:
cc = cc.first
if self.units(MARINE).idle.amount > 15 and iteration % 50 == 1:
target = self.known_enemy_structures.random_or(self.enemy_start_locations[0]).position
for marine in self.units(MARINE).idle:
await self.do(marine.attack(target))
if self.can_afford(SCV) and self.workers.amount < 16 and cc.noqueue:
await self.do(cc.train(SCV))
elif self.supply_left < 2:
if self.can_afford(SUPPLYDEPOT):
await self.build(SUPPLYDEPOT, near=cc.position.towards(self.game_info.map_center, 5))
elif self.units(BARRACKS).amount < 3 or self.minerals > 400:
if self.can_afford(BARRACKS):
p = self.game_info.map_center.towards(self.enemy_start_locations[0], 25)
await self.build(BARRACKS, near=p)
for rax in self.units(BARRACKS).ready.noqueue:
if not self.can_afford(MARINE):
break
await self.do(rax.train(MARINE))
for scv in self.units(SCV).idle:
await self.do(scv.gather(self.state.mineral_field.closest_to(cc)))
def main():
sc2.run_game(sc2.maps.get("Sequencer LE"), [
Bot(Race.Terran, ProxyRaxBot()),
Computer(Race.Zerg, Difficulty.Hard)
], realtime=True)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -956,17 +956,61 @@
_left %3C
-2
+(2 if self.units(BARRACKS).amount %3C 3 else 4)
:%0A
@@ -1199,16 +1199,17 @@
%3C 3 or
+(
self.min
@@ -1219,16 +1219,53 @@
ls %3E 400
+ and self.units(BARRACKS).amount %3C 5)
:%0A
@@ -1896,11 +1896,12 @@
ime=
-Tru
+Fals
e)%0A%0A
|
a4b7878880f5a8d275129949179b4b30044f0c86
|
Update __init__.py
|
eniric_scripts/__init__.py
|
eniric_scripts/__init__.py
|
__all__ = [
"bary_shift_atmmodel",
"phoenix_precision.py",
"precision_four_panel",
"split_atmmodel",
]
|
Python
| 0.000072
|
@@ -58,11 +58,8 @@
sion
-.py
%22,%0A
|
9f24a53297a1d196127baf15f3fd6c025716207e
|
use the right netclient call
|
test/repostest.py
|
test/repostest.py
|
#!/usr/bin/python2.4
#
# Copyright (c) 2004-2006 rPath, Inc.
#
from time import sleep
import testsuite
testsuite.setup()
from mint_rephelp import MintRepositoryHelper
import recipes
from conary.conarycfg import ConaryConfiguration, UserInformation
from conary.conaryclient import ConaryClient
from conary import repository
from conary import versions
testRecipe = """
class TestCase(PackageRecipe):
name = "testcase"
version = "1.0"
def setup(r):
r.Create("/temp/foo")
"""
testGroup = """
class GroupTest(GroupRecipe):
name = "group-test"
version = "1.0"
def setup(r):
r.add('testcase')
"""
class RepositoryTest(MintRepositoryHelper):
def testCommitStats(self):
client, userId = self.quickMintUser("testuser", "testpass")
projectId = self.newProject(client)
client.server.registerCommit('testproject.rpath.local', 'testuser',
'mytrove:source',
'/testproject.rpath.local@rpl:devel/1.0-1')
project = client.getProject(projectId)
assert([x[:2] for x in project.getCommits()] == [('mytrove:source',
'1.0-1')])
# using a bogus username should not fail
client.server.registerCommit('testproject.rpath.local',
'nonexistentuser', 'mytrove:source',
'/testproject.rpath.local@rpl:devel/1.0-1')
def testBasicRepository(self):
self.openRepository()
client, userId = self.quickMintUser("testuser", "testpass")
projectId = self.newProject(client)
self.makeSourceTrove("testcase", testRecipe)
project = client.getProject(projectId)
cfg = project.getConaryConfig()
nc = ConaryClient(cfg).getRepos()
# test that the source trove landed properly
troveNames = nc.troveNames(versions.Label("testproject.rpath.local@rpl:devel"))
assert(troveNames == ["testcase:source"])
# test that the commits table was updated
# give some time for the commit action to run
iters = 0
while True:
sleep(0.1)
iters += 1
if project.getCommits() != []:
break
if iters > 50:
self.fail("commits didn't show up")
assert([x[:2] for x in project.getCommits()] == [('testcase:source', '1.0-1')])
def testHooksResponse(self):
self.openRepository()
cfg = ConaryConfiguration(readConfigFiles = False)
cfg.installLabelPath = ['notfound.rpath.local@rpl:devel']
cfg.repositoryMap = {'notfound.rpath.local': 'http://test.rpath.local:%d/repos/notfound/' % self.port}
cfg.root = ':memory:'
cfg.dbPath = ':memory:'
repos = ConaryClient(cfg).getRepos()
try:
repos.troveNames('notfound.rpath.local')
except repository.errors.OpenError, e:
assert "404 Not Found" in str(e), \
"accessing a non-existent repository did not return a "
"404 Not Found error"
pass
else:
self.fail("accessing a non-existent repository did not return "
"an error")
def testCook(self):
self.openRepository()
client, userId = self.quickMintUser("testuser", "testpass")
projectId = self.newProject(client)
project = client.getProject(projectId)
self.makeSourceTrove("testcase", testRecipe)
self.cookFromRepository("testcase",
versions.Label("testproject.rpath.local@rpl:devel"),
ignoreDeps = True)
self.makeSourceTrove("group-test", testGroup)
self.cookFromRepository("group-test",
versions.Label("testproject.rpath.local@rpl:devel"))
cfg = project.getConaryConfig()
nc = ConaryClient(cfg).getRepos()
troveNames = nc.troveNames(versions.Label("testproject.rpath.local@rpl:devel"))
assert(troveNames == ['testcase', 'testcase:runtime', 'group-test',
'group-test:source', 'testcase:source'])
groupTroves = client.server.getGroupTroves(projectId)
assert(groupTroves == {'testproject.rpath.local@rpl:devel': ['group-test']})
if __name__ == "__main__":
testsuite.main()
|
Python
| 0
|
@@ -2924,16 +2924,24 @@
oveNames
+OnServer
('notfou
|
39f50aadc98f493a32810af0ba4d0fd87c8108e1
|
Update runsegment.py
|
bin/runsegment.py
|
bin/runsegment.py
|
#!/usr/bin/python
import os
import numpy as np
import shutil
import common
from segment import normalizefile, segmentfile
def runAll(args):
    """Normalize bincount files and, unless args.normalizeonly, CBS-segment them.

    args: argparse namespace providing CountDirectory, output, samples,
    infofile, columns, species, gconly and normalizeonly attributes.
    Results are written under LowessBinCounts/ and Segments/ next to (or in)
    the requested output directory.
    """
    print('\n\n\nYou have requested to normalize and segment bincounts files')
    print('\tWARNING:')
    print('\t\tIF USING ANY REFERENCES OTHER THAN THOSE I PROVIDE I CANNOT GUARANTEE RESULT ACCURACY')
    print('\n')

    #Set up environment#
    args.CountDirectory = common.fixDirName(args.CountDirectory)
    lowessDir = os.path.dirname(args.CountDirectory[:-1]) + '/LowessBinCounts/'
    segmentDir = os.path.dirname(args.CountDirectory[:-1]) + '/Segments/'
    tempDir = os.path.dirname(args.CountDirectory[:-1]) + '/Temp/'
    if args.output:
        lowessDir = common.fixDirName(args.output) + 'LowessBinCounts/'
        segmentDir = common.fixDirName(args.output) + 'Segments/'
    common.makeDir(lowessDir)
    if not args.normalizeonly:
        common.makeDir(segmentDir)
    common.makeDir(tempDir)

    sampleFiles = common.getSampleList(args.CountDirectory, args.samples, 'bincounts')
    info = common.importInfoFile(args.infofile, args.columns, 'normalize')
    if args.infofile:
        refArray = info
    else:
        # No info file: fabricate one entry per sample with an unknown method.
        thisDtype = info
        refArray = np.array(
            [ (os.path.basename(x)[:-14], 'unk', 1,) for x in sampleFiles],
            dtype=thisDtype)
    sampleDict = {x: [y for y in sampleFiles if x == os.path.basename(y)[:len(x)]][0] for x in refArray['name']}

    #Run normalization for all samples#
    methodDict = {x: False for x in np.unique(refArray['method'])}
    # 'NA' is the fallback for samples without a usable method reference
    # (e.g. when --gconly is set); without this entry the normArgs lookup
    # below raises KeyError for those samples.
    methodDict['NA'] = False
    # BUG FIX: keys must be sample names (looked up per-sample below), not
    # method names.
    sampleNormMethodDict = {x: 'NA' for x in sampleDict}
    if not args.gconly:
        for i in methodDict:
            refSlice = refArray[(refArray['method'] == i) & (refArray['cells'] == 1)]
            methodSamples = [sampleDict[x] for x in refSlice['name']]
            methodDict[i] = normalizefile.runMakeMethodRef(args.species, methodSamples, i, lowessDir)
            # Only samples whose method reference was built successfully are
            # normalized against it; the rest stay on the GC-only 'NA' path.
            if methodDict[i] != False:
                for j in refSlice['name']:
                    sampleNormMethodDict[j] = i

    #run multiprocessing for gc (+ method) correction
    normArgs = [(args.species, sampleDict[x], methodDict[sampleNormMethodDict[x]], lowessDir + x + '.lowess.txt') for x in sampleDict.keys()]
    common.daemon(normalizefile.runNormalizeOne, normArgs, 'normalize bincount files')
    print('\nNormalization complete\n\n\n')

    #Run CBS for all samples#
    if not args.normalizeonly:
        segArgs = [(x, args.species, tempDir, lowessDir, segmentDir) for x in refArray['name']]
        common.daemon(segmentfile.segmentOne, segArgs, 'segment bincount data')
        shutil.rmtree(tempDir[:-1])
        print('\nSegmentation complete\n\n\n')
|
Python
| 0.000001
|
@@ -1688,26 +1688,8 @@
1)%5D%0A
-%09%09%09print refSlice%0A
%09%09%09m
@@ -1749,34 +1749,8 @@
'%5D%5D%0A
-%09%09%09print methodSamples%0A%09%09%09
%0A%09%09%09
@@ -1883,17 +1883,17 @@
Dict%5Bi%5D
-!
+=
= False:
@@ -1893,16 +1893,38 @@
False:%0A
+%09%09%09%09continue%0A%09%09%09else:%0A
%09%09%09%09for
|
10d5e90e65e792d0fae3879dd5f512bdc7b95da6
|
Add missing dependency to perl-xml-parser (#12903)
|
var/spack/repos/builtin/packages/perl-xml-parser/package.py
|
var/spack/repos/builtin/packages/perl-xml-parser/package.py
|
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#
from spack import *
class PerlXmlParser(PerlPackage):
    """XML::Parser - A perl module for parsing XML documents"""

    homepage = "http://search.cpan.org/perldoc/XML::Parser"
    url = "http://search.cpan.org/CPAN/authors/id/T/TO/TODDR/XML-Parser-2.44.tar.gz"

    version('2.44', 'af4813fe3952362451201ced6fbce379')

    depends_on('expat')
    # XML::Parser's LWP-based extensions and its test suite use libwww-perl,
    # so it is needed both while building and at run time.
    depends_on('perl-libwww-perl', type=('build', 'run'))

    def configure_args(self):
        """Point Makefile.PL at the Spack-provided expat installation."""
        args = []
        p = self.spec['expat'].prefix.lib
        args.append('EXPATLIBPATH={0}'.format(p))
        p = self.spec['expat'].prefix.include
        args.append('EXPATINCPATH={0}'.format(p))
        return args
|
Python
| 0.000025
|
@@ -545,16 +545,74 @@
'expat')
+%0A depends_on('perl-libwww-perl', type=('build', 'run'))
%0A%0A de
|
76f60e58f1011d2ae3589a08efe183775430a44c
|
create result dir if does not exist
|
test/run_tests.py
|
test/run_tests.py
|
#!/usr/bin/env python
# Copyright 2008-2010 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from subprocess import Popen, call
from tempfile import TemporaryFile
from run_unit_tests import run_unit_tests
ROOT = os.path.dirname(__file__)
TESTDATADIR = os.path.join(ROOT, 'acceptance')
RESOURCEDIR = os.path.join(ROOT, 'resources')
SRCDIR = os.path.join(ROOT, '..', 'src')
UTESTDIR = os.path.join(ROOT, 'unit')
RESULTDIR = os.path.join(ROOT, 'results')
HTPPSERVER = os.path.join(RESOURCEDIR, 'testserver', 'testserver.py')
ROBOT_ARGS = [
'--doc', 'SeleniumSPacceptanceSPtestsSPwithSP%(browser)s',
'--outputdir', '%(outdir)s',
'--variable', 'browser:%(browser)s',
'--escape', 'space:SP',
'--report', 'none',
'--log', 'none',
#'--suite', '...',
'--loglevel', 'DEBUG',
'--pythonpath', '%(pythonpath)s',
]
REBOT_ARGS = [
'--outputdir', '%(outdir)s',
'--name', '%(browser)sSPAcceptanceSPTests',
'--escape', 'space:SP',
'--critical', 'regression',
'--noncritical', 'inprogress',
]
ARG_VALUES = {'outdir': RESULTDIR, 'pythonpath': SRCDIR}
def acceptance_tests(interpreter, browser, args):
    # Run the Selenium acceptance suite with the given interpreter/browser,
    # bracketed by starting/stopping the test HTTP server.
    # Returns the number of failed critical tests (rebot's return code).
    ARG_VALUES['browser'] = browser.replace('*', '')
    start_http_server()
    # Runner is pybot/jybot, with a .bat suffix on Windows (os.sep == '\\').
    suffix = os.sep == '\\' and 'ybot.bat' or 'ybot'
    runner = "%s%s" % ('jython' == interpreter and 'j' or 'p', suffix)
    execute_tests(runner)
    stop_http_server()
    return process_output()
def start_http_server():
    """Launch the Selenium test HTTP server in the background.

    Its stdout/stderr are redirected into an anonymous temp file so the
    server's chatter does not pollute the test runner's output.
    """
    sink = TemporaryFile()
    command = ['python', HTPPSERVER, 'start']
    Popen(command, stdout=sink, stderr=sink)
def execute_tests(runner):
    """Run the Robot acceptance tests using *runner* (pybot/jybot).

    BUG FIX: RESULTDIR is created first -- the syslog path below lives inside
    it and Robot cannot write ROBOT_SYSLOG_FILE into a missing directory.
    Note: ``args`` here is the module-level global set in the __main__ block.
    """
    if not os.path.exists(RESULTDIR):
        os.mkdir(RESULTDIR)
    command = [runner] + [ arg % ARG_VALUES for arg in ROBOT_ARGS] + args +\
            [ TESTDATADIR ]
    syslog = os.path.join(RESULTDIR, 'syslog.txt')
    call(command, env=dict(os.environ, ROBOT_SYSLOG_FILE=syslog))
def stop_http_server():
    # Ask the test HTTP server launched by start_http_server() to shut down.
    call(['python', HTPPSERVER, 'stop'])
def process_output():
    # Post-process Robot output: rewrite test statuses via statuschecker.py,
    # then combine results with rebot.  Returns rebot's return code, which is
    # the number of failed critical tests (0 == all passed).
    print
    call(['python', os.path.join(RESOURCEDIR, 'statuschecker.py'),
         os.path.join(RESULTDIR, 'output.xml')])
    # rebot needs the .bat suffix on Windows (os.sep == '\\').
    rebot = os.sep == '\\' and 'rebot.bat' or 'rebot'
    rebot_cmd = [rebot] + [ arg % ARG_VALUES for arg in REBOT_ARGS ] + \
                [os.path.join(ARG_VALUES['outdir'], 'output.xml') ]
    rc = call(rebot_cmd, env=os.environ)
    if rc == 0:
        print 'All critical tests passed'
    else:
        print '%d critical test%s failed' % (rc, 's' if rc != 1 else '')
    return rc
if __name__ == '__main__':
    # Usage: run_tests.py python|jython browser [robot options...]
    if not len(sys.argv) > 2:
        print 'Usage: python run_tests.py python|jython browser [options]'
        print
        print 'See README.txt for details.'
        sys.exit(1)
    interpreter = ('jython' in sys.argv[1]) and 'jython' or 'python'
    browser = sys.argv[2].lower()
    args = sys.argv[3:]
    # With no extra robot options, run the unit tests first as a gate.
    if not args:
        print 'Running unit tests'
        failures = run_unit_tests()
        if failures != 0:
            print '\n%d unit tests failed - not running acceptance tests!' % failures
            sys.exit(1)
        print 'All unit tests passed'
    # browser == 'unit' means unit tests only; otherwise run acceptance too.
    if browser != 'unit':
        sys.exit(acceptance_tests(interpreter, browser, args))
|
Python
| 0.000001
|
@@ -2100,16 +2100,82 @@
unner):%0A
+ if not os.path.exists(RESULTDIR):%0A os.mkdir(RESULTDIR)%0A
comm
|
0153a20201cfeff37512733b2e85106f69ba5f47
|
replace use of 'unicode' builtin
|
monasca_log_api/app/base/model.py
|
monasca_log_api/app/base/model.py
|
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
import six
from monasca_common.rest import utils as rest_utils
def serialize_envelope(envelope):
    """Returns json representation of an envelope.

    :return: json object of envelope
    :rtype: six.text_type
    """
    json = rest_utils.as_json(envelope, ensure_ascii=False)

    if six.PY2:
        # BUG FIX: use six.text_type instead of the py2-only `unicode`
        # builtin so the module stays importable/lintable under Python 3.
        raw = six.text_type(json.replace(r'\\', r'\\\\'), encoding='utf-8',
                            errors='replace')
    else:
        raw = json

    return raw
class LogEnvelopeException(Exception):
    """Raised when an Envelope cannot be built from the supplied log/meta."""
    pass
class Envelope(dict):
    """Dict-shaped log envelope with 'log', 'creation_time' and 'meta' keys."""

    def __init__(self, log, meta):
        if not log:
            raise LogEnvelopeException(
                'Envelope cannot be created without log')
        if 'tenantId' not in meta or not meta.get('tenantId'):
            raise LogEnvelopeException(
                'Envelope cannot be created without tenant')

        super(Envelope, self).__init__(
            log=log,
            creation_time=self._get_creation_time(),
            meta=meta,
        )

    @staticmethod
    def _get_creation_time():
        # Seconds since epoch (UTC), recorded at envelope creation.
        return timeutils.utcnow_ts()

    @classmethod
    def new_envelope(cls, log, tenant_id, region, dimensions=None):
        """Build an envelope for *log*, tagged with tenant and region.

        Combines:

        * log - the original log dict (message, dimensions, ...)
        * creation_time - timestamp taken now
        * meta - {'tenantId': ..., 'region': ...}

        Example resulting json:

        .. code-block:: json

            {
                "log": {
                    "message": "Some message",
                    "dimensions": {
                        "hostname": "devstack"
                    }
                },
                "creation_time": 1447834886,
                "meta": {
                    "tenantId": "e4bd29509eda473092d32aadfee3e7b1",
                    "region": "pl"
                }
            }

        :param dict log: original log element (message and other params)
        :param str tenant_id: tenant id stored in the meta block
        :param str region: region stored in the meta block
        :param dict dimensions: extra dimensions merged into log['dimensions']
        """
        if dimensions:
            log['dimensions'].update(dimensions)

        return cls(log, {'region': region, 'tenantId': tenant_id})

    @property
    def log(self):
        return self.get('log')

    @property
    def creation_time(self):
        return self.get('creation_time')

    @property
    def meta(self):
        return self.get('meta')
Python
| 0.000617
|
@@ -926,14 +926,20 @@
w =
-unicod
+six.text_typ
e(js
@@ -984,16 +984,22 @@
utf-8',%0A
+
|
ee75d9530bb6b9e409449c8e9d5ffb3a3578f5d8
|
Fix not coloring for new repositories
|
bin/commands/stateextensions/status.py
|
bin/commands/stateextensions/status.py
|
import os
import re
import subprocess
from colorama import Fore
def title():
    """Name of this state extension, shown as its section title."""
    return 'status'
def accent(**kwargs):
    # Build the "(branch)" accent shown next to the status title.
    #
    # Keyword Args:
    #     new_repository (bool): True when the repo has no commits yet, in
    #         which case `git status` has no branch line to parse.
    #     show_color (str): value passed to git's color.status setting.
    new_repository = kwargs.get('new_repository', False)
    show_color = kwargs.get('show_color', 'always')

    if new_repository:
        # No commits yet: synthesize "(master)" since git reports nothing.
        status_title = '{no_color}({green}master{no_color})'.format(no_color=Fore.RESET, green=Fore.GREEN)
    else:
        status_title = subprocess.check_output(
            ('git', '-c', 'color.status=' + show_color, 'status', '--branch', '--short')
        ).splitlines()[0]
        # First line looks like "## <branch info>"; keep what follows "## ".
        status_title = re.match('.*##.*? (.*)', status_title).group(1)
        status_title = '{}({})'.format(Fore.RESET, status_title)

    return status_title
def get(**kwargs):
    """Return `git status --short` output for the status section.

    Keyword Args:
        new_repository (bool): True when the repo has no commits yet.
        show_color (str): value for git's color.status setting.
        show_clean_message (bool): emit a message when there is nothing
            to commit instead of returning empty output.
    """
    new_repository = kwargs.get('new_repository', False)
    show_color = kwargs.get('show_color', 'always')
    show_clean_message = kwargs.get('show_clean_message', True)

    if new_repository:
        # BUG FIX: pass color.status here too so a brand-new repository
        # honors the requested color mode just like an established one.
        status_output = subprocess.check_output(['git', '-c', 'color.status=' + show_color, 'status', '--short'])
        # check if status is empty
        if not status_output:
            status_output = 'Empty repository'
    else:
        status_output = subprocess.check_output(['git', '-c', 'color.status=' + show_color, 'status', '--short', '--untracked-files=all'])
        if not status_output and show_clean_message:
            status_output = 'nothing to commit, working directory is clean' + os.linesep

    return status_output
|
Python
| 0.000001
|
@@ -1001,16 +1001,52 @@
(%5B'git',
+ '-c', 'color.status=' + show_color,
'status
|
6258026193d52de5168007b833a2fd35c807b734
|
fix main to pass until further dev is done
|
bin/slacksible.py
|
bin/slacksible.py
|
#! /Users/jhefner/python_dev/uw_python/project/bin/python
from slackclient import SlackClient
import os
import sys
# import logging
# TODOS:
# * create logs:
# 1. Debug log. slacksible_debug.log (10)
# 2. stderr log. slacksible_stderr.log (40)
# 3. usage log. slacksible_metrics.log
# * create slack bot class:
# 1. solve token issue (dont show it in code)
# TODO: change token to non-test token
# TODO: move token out to env var or file and loaded during app boot
# Example: os.environ[" ENV VAR TOKEN ALREADY LOADED "]
# SECURITY NOTE(review): hard-coded Slack API token committed to source
# control -- it should be revoked and loaded from an environment variable or
# secrets store instead (as the TODOs above already say).
token = "xoxb-168959872961-Clds2jLyYvCQY3syhyEUSjKs"
sc = SlackClient(token)
class slacksible():
    """
    Ansible slack bot class
    """
    def __init__(self, args, **kwargs):
        # NOTE(review): reads the token from the "slacksible_token" env var,
        # while the module-level `token` above is hard-coded -- these two
        # sources should be unified.
        self.token = os.environ["slacksible_token"]

        # TODO: find a better way, this is ugly as hell
        # this finds one directory up from where the script is being run in /bin
        self.log_path = os.path.split(os.path.abspath(os.path.dirname(sys.argv[0])))[0]+"/logs"

    def setup_dirs(self):
        '''
        Creates directory structure for a working application environment
        No return, makes changes directly on the filesystem
        '''
        if not os.path.exists(self.log_path):
            os.makedirs(self.log_path)
            # add dir creation to debug log
        else:
            pass
            # TODO: note existence of already existing log dir to debug log.

    @staticmethod
    def seppuku():
        '''
        Restart application
        '''
        # TODO: note restarting of application in debug log
        # Replaces the current process image with a fresh run of this script.
        os.execv(__file__, sys.argv)
        # TODO: app should restart and not get to next line. raise error if it does

    def bot_listen(self):
        '''
        Connect to slack api and listen to data stream it has access to
        '''
        # Uses the module-level SlackClient `sc`; rtm_read() is polled in a
        # busy loop and "message" events are dumped field-by-field to stdout.
        if sc.rtm_connect():
            print("====================Listening====================") # move to debug log
            while True: # TODO: capture exit from this loop in debug log
                slack_data = sc.rtm_read() # TODO: multi-thread/async this blocking action.
                if slack_data != [] and "text" in slack_data[0]:
                    # move to debug log
                    if "message" in slack_data[0]["type"]:
                        print("--------------------------")
                        print(slack_data)
                        if "user" in slack_data[0]:
                            print("user is:", slack_data[0]["user"])
                        if "type" in slack_data[0]:
                            print("type is:", slack_data[0]["type"])
                        if "text" in slack_data[0]:
                            print("message is:", slack_data[0]["text"])
                        if "channel" in slack_data[0]:
                            print("channel is:", slack_data[0]["channel"])
        else:
            print("Connection failed to Slack") # move to error log

    def bot_query_ARA(self):
        # TODO: create cli parser for reading existing runs
        pass

    def bot_query_ansible(self):
        # TODO:
        pass

    def collect_usage_metrics(self):
        # TODO: capture commands directed at bot and sort by order of usage.
        pass
# simple api test for bot
def test():
    # Smoke test: post a canned message to #slack_bot through the
    # module-level SlackClient to verify the token/connection work.
    sc.api_call(
        "chat.postMessage",
        channel="#slack_bot",
        text="Hello from Python! :tada:"
    )
def main():
    """Entry point placeholder; bot wiring is still under development."""
    # Previously printed the computed log path for debugging; keep this a
    # no-op until the slacksible bot is actually hooked up here.
    pass
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -3448,85 +3448,11 @@
p
-rint(os.path.split(os.path.abspath(os.path.dirname(sys.argv%5B0%5D)))%5B0%5D+%22/logs%22)
+ass
%0A%0A%0Ai
|
c0841b6a38fc042f95f931eead6bc5733d975644
|
Update forms.py
|
cla_public/apps/base/forms.py
|
cla_public/apps/base/forms.py
|
# coding: utf-8
"Base forms"
from flask import render_template, current_app, request
from flask_wtf import Form
from flask.ext.babel import lazy_gettext as _, get_translations
from wtforms import TextAreaField, RadioField, SelectMultipleField, StringField, widgets
from wtforms.validators import InputRequired, Length
from cla_public.apps.base.constants import (
FEEL_ABOUT_SERVICE,
HELP_FILLING_IN_FORM,
REASONS_FOR_CONTACTING_CHOICES,
REASONS_FOR_CONTACTING,
)
from cla_public.libs.honeypot import Honeypot
class BabelTranslations(object):
def gettext(self, string):
t = get_translations()
if t is None:
return string
return t.ugettext(string)
def ngettext(self, singular, plural, num):
variables = {"num": num}
t = get_translations()
if t is None:
return (singular if num == 1 else plural) % variables
return t.ungettext(singular, plural, num) % variables
class BabelTranslationsFormMixin(object):
def _get_translations(self):
return BabelTranslations()
_textarea_length_validator = Length(max=1000, message=u"Field cannot contain more than %(max)d characters")
class FeedbackForm(Honeypot, BabelTranslationsFormMixin, Form):
referrer = StringField(widget=widgets.HiddenInput())
difficulty = TextAreaField(
label=_(u"Did you have any difficulty with this service?"), validators=[_textarea_length_validator]
)
ideas = TextAreaField(
label=_(u"Do you have any ideas for how it could be improved?"), validators=[_textarea_length_validator]
)
feel_about_service = RadioField(
_(u"Overall, how did you feel about the service you received today?"),
choices=FEEL_ABOUT_SERVICE,
validators=[InputRequired()],
)
help_filling_in_form = RadioField(
_(u"Did you have any help filling in this form?"), choices=HELP_FILLING_IN_FORM, validators=[InputRequired()]
)
def api_payload(self):
user_agent = request.headers.get("User-Agent")
comment_body = render_template("emails/zendesk-feedback.txt", form=self, user_agent=user_agent)
environment = current_app.config["CLA_ENV"]
subject = "CLA Public Feedback"
if environment != "prod":
subject = "[TEST] - " + subject
ticket = {
"requester_id": current_app.config["ZENDESK_DEFAULT_REQUESTER"],
"subject": subject,
"comment": {"body": comment_body},
"group_id": 23832817, # CLA Public
"tags": ["feedback", "civil_legal_advice_public"],
"custom_fields": [
{"id": 23791776, "value": user_agent}, # Browser field
{"id": 26047167, "value": self.referrer.data}, # Referrer URL field
],
}
return {"ticket": ticket}
class ReasonsForContactingForm(Honeypot, BabelTranslationsFormMixin, Form):
"""
Interstitial form to ascertain why users are dropping out of
the checker service
"""
referrer = StringField(widget=widgets.HiddenInput())
reasons = SelectMultipleField(
label=_(u"You can select more than one option"),
choices=REASONS_FOR_CONTACTING_CHOICES,
widget=widgets.ListWidget(prefix_label=False),
option_widget=widgets.CheckboxInput(),
)
other_reasons = TextAreaField(label=_(u"Please specify"), validators=[_textarea_length_validator])
REASONS_FOR_CONTACTING_OTHER = REASONS_FOR_CONTACTING.OTHER
def api_payload(self):
return {
"reasons": [{"category": category} for category in self.reasons.data],
"other_reasons": self.other_reasons.data or "",
"user_agent": request.headers.get("User-Agent") or "Unknown",
"referrer": self.referrer.data or "Unknown",
}
|
Python
| 0
|
@@ -1390,12 +1390,13 @@
lty
-with
+using
thi
@@ -1405,16 +1405,43 @@
service?
+ Tell us about the problem.
%22), vali
|
2934f0a9a7696262032fed08f1e0a7c2e6be009c
|
use `host` variable
|
api-server.py
|
api-server.py
|
#!/usr/bin/env python3
"""
This api server runs both a json-rpc api and a notification rest api.
(neo.api.JSONRPC.JsonRpcApi and neo.api.REST.NotificationRestApi)
Example systemd service config: TODO
"""
import os
import argparse
import threading
from time import sleep
from logzero import logger
from twisted.internet import reactor, task, endpoints
from twisted.web.server import Site
from neo import __version__
from neo.Core.Blockchain import Blockchain
from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain
from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB
from neo.api.JSONRPC.JsonRpcApi import JsonRpcApi
from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB
from neo.api.REST.NotificationRestApi import NotificationRestApi
from neo.Network.NodeLeader import NodeLeader
from neo.Settings import settings, DIR_PROJECT_ROOT
from neo.UserPreferences import preferences
# Logfile settings & setup
LOGFILE_FN = os.path.join(DIR_PROJECT_ROOT, 'api-server.log')
LOGFILE_MAX_BYTES = 5e7 # 50 MB
LOGFILE_BACKUP_COUNT = 3 # 3 logfiles history
settings.set_logfile(LOGFILE_FN, LOGFILE_MAX_BYTES, LOGFILE_BACKUP_COUNT)
def write_pid_file():
    """ Write a pid file, to easily kill the service """
    with open('/tmp/json-rpc-api-server.pid', 'w') as pid_file:
        pid_file.write(str(os.getpid()))
def custom_background_code():
    """ Custom code run in a background thread.

    This function is run in a daemonized thread, which means it can be instantly killed at any
    moment, whenever the main thread quits. If you need more safety, don't use a daemonized
    thread and handle exiting this thread in another way (eg. with signals and events).
    """
    while True:
        # Log sync progress (current block vs. header height) every 15 seconds.
        logger.info("[%s] Block %s / %s", settings.net_name, str(Blockchain.Default().Height), str(Blockchain.Default().HeaderHeight))
        sleep(15)
def main():
    """Parse CLI options, start the chain and node, and serve both APIs."""
    parser = argparse.ArgumentParser()

    # Network selection (mutually exclusive; TestNet is the default).
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-m", "--mainnet", action="store_true", default=False,
                       help="Use MainNet instead of the default TestNet")
    group.add_argument("-p", "--privnet", action="store_true", default=False,
                       help="Use PrivNet instead of the default TestNet")
    group.add_argument("--coznet", action="store_true", default=False,
                       help="Use the CoZ network instead of the default TestNet")
    group.add_argument("-c", "--config", action="store", help="Use a specific config file")

    parser.add_argument("--port-rpc", type=int, help="port to use for the server", required=True)
    parser.add_argument("--port-rest", type=int, help="port to use for the server", required=True)

    args = parser.parse_args()

    # Setup depending on command line arguments. By default, the testnet settings are already loaded.
    if args.config:
        settings.setup(args.config)
    elif args.mainnet:
        settings.setup_mainnet()
    elif args.privnet:
        settings.setup_privnet()
    elif args.coznet:
        settings.setup_coznet()

    # Write a PID file to easily quit the service
    write_pid_file()

    # Instantiate the blockchain and subscribe to notifications
    blockchain = LevelDBBlockchain(settings.LEVELDB_PATH)
    Blockchain.RegisterBlockchain(blockchain)
    dbloop = task.LoopingCall(Blockchain.Default().PersistBlocks)
    dbloop.start(.1)

    # Disable logging smart contract events
    settings.set_log_smart_contract_events(False)

    # Start the notification db instance
    ndb = NotificationDB.instance()
    ndb.start()

    # Start a thread with custom code
    d = threading.Thread(target=custom_background_code)
    d.setDaemon(True)  # daemonizing the thread will kill it when the main thread is quit
    d.start()

    # Run
    reactor.suggestThreadPoolSize(15)
    NodeLeader.Instance().Start()

    host = "0.0.0.0"
    logger.info("Starting json-rpc api server on http://%s:%s" % (host, args.port_rpc))
    logger.info("Starting notification api server on http://%s:%s" % (host, args.port_rest))

    # Setup Klein apps
    api_server_rpc = JsonRpcApi(args.port_rpc)
    api_server_rest = NotificationRestApi()

    # Setup endpoints. One port per api.
    # BUG FIX: bind to `host` instead of a second hard-coded '0.0.0.0' so the
    # listen interface always matches what the log lines above advertise.
    endpoint_rpc = "tcp:port={0}:interface={1}".format(args.port_rpc, host)
    endpoint_rest = "tcp:port={0}:interface={1}".format(args.port_rest, host)
    endpoints.serverFromString(reactor, endpoint_rpc).listen(Site(api_server_rpc.app.resource()))
    endpoints.serverFromString(reactor, endpoint_rest).listen(Site(api_server_rest.app.resource()))

    # Run the server, needs a dummy port
    api_server_rest.app.run(host, 9999)
if __name__ == "__main__":
main()
|
Python
| 0.000055
|
@@ -4358,25 +4358,20 @@
rt_rpc,
-'0.0.0.0'
+host
)%0A en
@@ -4440,17 +4440,12 @@
st,
-'0.0.0.0'
+host
)%0A
|
c71730cf4f3d8937f0ef1608bf670c28ec44eb0b
|
Complete and prettified
|
Challenge3.py
|
Challenge3.py
|
# Exercise 3 (and Solution)
# Take a list, say for example this one:
# a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
# and write a program that prints out all the elements of the list that are less than 5.
# Extras:
# Instead of printing the elements one by one, make a new list that has all the elements less than 5 from this list in it and print out this new list.
# Write this in one line of Python.
# Ask the user for a number and return a list that contains only elements from the original list a that are smaller than that number given by the user.
# OUR PLAN: ask user to give 10 numbers (check to see if it is an actual interger) - for each additional number at it to a list, then check the list to see if any of the numbers are less than 5
# Collect exactly 10 integer-validated inputs from the user.
# NOTE(review): `list` shadows the builtin of the same name, and values are
# appended as *strings* -- int() is used only to validate, so any numeric
# comparison downstream must convert again.
print ("\nLet's play a game!\n I'll ask for 10 numbers.")

list = []
counter = 0

while counter < 10:
	user_number = raw_input("Please give us a number:\n")
	try:
		val = int(user_number)  # validation only; `val` is otherwise unused
		list.append(user_number)  # stored as str, not int
		counter = counter + 1
	except ValueError:
		print("That's not an int! Pleases use real numbers only. That is REAL numbers from math class.")

print list
def get_less_than_value():
	"""Ask for a threshold and report which collected numbers are smaller.

	Reads the module-level `list` (entries are strings -- see the input
	loop above), converts them to ints, and prints those below the
	user-supplied threshold.  Re-prompts recursively on bad input.
	"""
	start_value = raw_input("\nNow tell me a new number and I will tell you which one in your list are smaller. ")
	try:
		threshold = int(start_value)
	except ValueError:
		print("Soooo, remember we need real numbers here. Try again!")
		get_less_than_value()
		return

	# BUG FIX: the comparison logic was a TODO placeholder; filter the
	# collected (string) numbers numerically against the threshold.
	smaller = [int(number) for number in list if int(number) < threshold]
	if smaller:
		print("Numbers in your list smaller than %s: %s" % (threshold, smaller))
	else:
		print("No numbers in your list are smaller than %s." % threshold)

get_less_than_value()
|
Python
| 0
|
@@ -811,29 +811,146 @@
%22)%0A%0A
-list = %5B%5D%0Acounter = 0
+# create a list and start a counter to get desired numbers%0A%0Alist = %5B%5D%0Acounter = 0%0A%0A# ask for 10 numbers and add each one to the empty list
%0A%0Awh
@@ -1006,19 +1006,16 @@
se give
-us
a number
@@ -1075,16 +1075,20 @@
.append(
+int(
user_num
@@ -1087,24 +1087,25 @@
user_number)
+)
%0A %09%09%09count
@@ -1122,16 +1122,88 @@
er + 1%0A%0A
+ %09# if the value entered is not a number remind people about MATH :)%0A%0A
%09except
@@ -1227,16 +1227,18 @@
%09print(%22
+%5Cn
That's n
@@ -1325,48 +1325,305 @@
%22)%0A%0A
-print list%0A%0Adef get_less_than_value():%0A%09
+# Parot back the number list to the user%0A%0Aprint (%22%5CnThanks! Here are your numbers:%25s%22 %25 (list))%0A%0A# make a function that can judge which numbers in the list are less than a new value that the user gives%0A%0Adef get_less_than_value():%0A%09%0A%09# get the new value to determine which numbers are less than it%0A
%0A%09st
@@ -1655,14 +1655,11 @@
Now
-tell m
+giv
e a
@@ -1695,19 +1695,33 @@
u which
-one
+number or numbers
in your
@@ -1748,28 +1748,95 @@
)%0A%0A%09
-try:%0A%09%09is_an_integer
+# to evaluate the numbers they have to be integers so make them so%0A%0A%09try:%0A%09%09start_value
= i
@@ -1856,88 +1856,113 @@
e)%0A%09
-%09
%0A%09
-%09###MAKE A NEW FUNCTION HERE TO SPLIT list based on less than - us a for loop
+# if the number is not a interger let the user know to try again and recall the function to start again
%0A%0A%09e
@@ -1991,16 +1991,18 @@
%09print(%22
+%5Cn
Soooo, r
@@ -2076,16 +2076,623 @@
alue()%0A%0A
+%09# create the list to hold the values less than%0A%0A %09list_less_than = %5B%5D%0A%0A %09# evaluate each number and add it to the empty list%0A%0A %09for values in list:%0A%0A %09%09if values %3C start_value:%0A %09%09%09list_less_than.append(values)%0A %09%0A %09# print (list_less_than) - use to check if list built correctly%0A%0A %09# using the length of list let the user know what numbers are less or if there are no numbers that are less than%0A%0A%09if len(list_less_than) %3C 1 :%0A%09%09print(%22%5CnThere are no numbers in your list less than %25s %22 %25 (start_value))%0A%0A%09elif len(list_less_than) %3E 1 :%0A%09%09print ((%22%5CnThe numbers less than are: %25s%22) %25 (list_less_than))%0A %09%09%0A
get_less
@@ -2684,28 +2684,29 @@
))%0A %09%09%0Aget_less_than_value()
+%0A
|
d6f4d9b76d6f12cc9eae1614a33ebb9fa6aa1724
|
Fix error handling on missing dest with unarchive
|
lib/ansible/runner/action_plugins/unarchive.py
|
lib/ansible/runner/action_plugins/unarchive.py
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2013, Dylan Martin <dmartin@seattlecentral.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
from ansible import utils
import ansible.utils.template as template
from ansible import errors
from ansible.runner.return_data import ReturnData
## fixes https://github.com/ansible/ansible/issues/3518
# http://mypy.pythonblogs.com/12_mypy/archive/1253_workaround_for_python_bug_ascii_codec_cant_encode_character_uxa0_in_position_111_ordinal_not_in_range128.html
import sys
reload(sys)
sys.setdefaultencoding("utf8")
import pipes
class ActionModule(object):
    """Action plugin for unarchive: ships the archive to the target when
    copy=yes, then delegates extraction to the remote unarchive module."""

    TRANSFERS_FILES = True

    def __init__(self, runner):
        self.runner = runner

    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        ''' handler for file transfer operations '''

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))
        source = options.get('src', None)
        dest = options.get('dest', None)
        copy = utils.boolean(options.get('copy', 'yes'))

        # BUG FIX: validate before expanding -- os.path.expanduser(None)
        # raises instead of producing this friendly error message.
        if source is None or dest is None:
            result = dict(failed=True, msg="src (or content) and dest are required")
            return ReturnData(conn=conn, result=result)

        dest = os.path.expanduser(dest)
        source = template.template(self.runner.basedir, os.path.expanduser(source), inject)
        if copy:
            if '_original_file' in inject:
                source = utils.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
            else:
                source = utils.path_dwim(self.runner.basedir, source)

        # '3' appears to be the sentinel _remote_md5 returns for "path is a
        # directory" -- TODO confirm against runner._remote_md5.
        remote_md5 = self.runner._remote_md5(conn, tmp, dest)
        if remote_md5 != '3':
            result = dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
            return ReturnData(conn=conn, result=result)

        if copy:
            # transfer the file to a remote tmp location
            tmp_src = tmp + 'source'
            conn.put_file(source, tmp_src)

        # handle diff mode client side
        # handle check mode client side

        # fix file permissions when the copy is done as a different user
        if copy:
            if self.runner.sudo and self.runner.sudo_user != 'root':
                self.runner._low_level_exec_command(conn, "chmod a+r %s" % tmp_src, tmp)
            module_args = "%s src=%s original_basename=%s" % (module_args, pipes.quote(tmp_src), pipes.quote(os.path.basename(source)))
        else:
            module_args = "%s original_basename=%s" % (module_args, pipes.quote(os.path.basename(source)))
        return self.runner._execute_module(conn, tmp, 'unarchive', module_args, inject=inject, complex_args=complex_args)
|
Python
| 0
|
@@ -1648,35 +1648,16 @@
urce =
-os.path.expanduser(
options.
@@ -1664,33 +1664,32 @@
get('src', None)
-)
%0A dest
@@ -1695,27 +1695,8 @@
=
-os.path.expanduser(
opti
@@ -1716,17 +1716,16 @@
', None)
-)
%0A
@@ -1955,32 +1955,72 @@
result=result)%0A%0A
+ dest = os.path.expanduser(dest)%0A
source =
@@ -2059,22 +2059,42 @@
asedir,
+os.path.expanduser(
source
+)
, inject
|
d4ee599fe9cd88315d129e036fb034111bfc2272
|
Add types to common url parameters (#50000)
|
lib/ansible/utils/module_docs_fragments/url.py
|
lib/ansible/utils/module_docs_fragments/url.py
|
# (c) 2018, John Barker<gundalow@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
url:
description:
- HTTP, HTTPS, or FTP URL in the form (http|https|ftp)://[user[:pass]]@host.domain[:port]/path
force:
description:
- If C(yes) do not get a cached copy.
aliases:
- thirsty
type: bool
default: no
http_agent:
description:
- Header to identify as, generally appears in web server logs.
default: ansible-httpget
use_proxy:
description:
- If C(no), it will not use a proxy, even if one is defined in an environment variable on the target hosts.
type: bool
default: yes
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
default: yes
type: bool
url_username:
description:
- The username for use in HTTP basic authentication.
- This parameter can be used without I(url_password) for sites that allow empty passwords
url_password:
description:
- The password for use in HTTP basic authentication.
- If the I(url_username) parameter is not specified, the I(url_password) parameter will not be used.
force_basic_auth:
description:
- Credentials specified with I(url_username) and I(url_password) should be passed in HTTP Header.
default: no
type: bool
client_cert:
description:
- PEM formatted certificate chain file to be used for SSL client
authentication. This file can also include the key as well, and if
the key is included, C(client_key) is not required.
client_key:
description:
- PEM formatted file that contains your private key to be used for SSL
client authentication. If C(client_cert) contains both the certificate
and key, this option is not required.
"""
|
Python
| 0
|
@@ -1,9 +1,45 @@
#
+ -*- coding: utf-8 -*-%0A%0A# Copyright:
(c) 201
@@ -52,16 +52,17 @@
n Barker
+
%3Cgundalo
@@ -80,634 +80,63 @@
m%3E%0A#
-%0A# This file is part of Ansible%0A#%0A# Ansible is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# Ansible is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with Ansible. If not, see %3Chttp
+ GNU General Public License v3.0+ (see COPYING or https
://w
@@ -159,10 +159,20 @@
ses/
-%3E.
+gpl-3.0.txt)
%0A%0A%0Ac
@@ -268,19 +268,20 @@
ATION =
-%22%22%22
+r'''
%0Aoptions
@@ -407,16 +407,30 @@
t%5D/path%0A
+ type: str%0A
force:
@@ -651,16 +651,30 @@
r logs.%0A
+ type: str%0A
defa
@@ -962,16 +962,24 @@
lidated.
+%0A -
This sh
@@ -995,24 +995,16 @@
be used
-%0A
on pers
@@ -1067,35 +1067,35 @@
-default: yes%0A type: bool
+type: bool%0A default: yes
%0A u
@@ -1279,16 +1279,30 @@
sswords%0A
+ type: str%0A
url_pa
@@ -1492,16 +1492,30 @@
e used.%0A
+ type: str%0A
force_
@@ -1655,34 +1655,34 @@
-default: no%0A type: bool
+type: bool%0A default: no
%0A c
@@ -1780,24 +1780,16 @@
L client
-%0A
authent
@@ -1796,16 +1796,24 @@
ication.
+%0A -
This fi
@@ -1855,24 +1855,16 @@
, and if
-%0A
the key
@@ -1908,16 +1908,30 @@
quired.%0A
+ type: str%0A
client
@@ -2029,24 +2029,16 @@
for SSL
-%0A
client
@@ -2052,16 +2052,24 @@
ication.
+%0A -
If C(cl
@@ -2112,16 +2112,8 @@
cate
-%0A
and
@@ -2151,8 +2151,22 @@
ed.%0A
-%22%22%22
+ type: str%0A'''
%0A
|
e9df15b0f084ed9e026a5de129b109a3c546f99c
|
Handle comments in parse tree.
|
src/libeeyore/parse_tree_to_cpp.py
|
src/libeeyore/parse_tree_to_cpp.py
|
import builtins
from cpp.cpprenderer import EeyCppRenderer
from environment import EeyEnvironment
from values import *
def parse_tree_string_to_values( string ):
return eval( string )
def non_empty_line( ln ):
return ( ln.strip() != "" )
def parse_tree_to_cpp( parse_tree_in_fl, cpp_out_fl ):
env = EeyEnvironment( EeyCppRenderer() )
builtins.add_builtins( self )
values = ( parse_tree_string_to_values( ln ) for ln in
filter( non_empty_line, parse_tree_in_fl ) )
cpp_out_fl.write( env.render_exe( values ) )
|
Python
| 0
|
@@ -1,8 +1,35 @@
+from itertools import imap%0A
%0Aimport
@@ -120,16 +120,74 @@
ronment%0A
+from functionvalues import *%0Afrom languagevalues import *%0A
from val
@@ -263,24 +263,120 @@
( string )%0A%0A
+def remove_comments( ln ):%0A%09i = ln.find( %22#%22 )%0A%09if i != -1:%0A%09%09return ln%5B:i%5D%0A%09else:%0A%09%09return ln%0A%0A
def non_empt
@@ -544,12 +544,11 @@
ns(
-self
+env
)%0A%0A
@@ -628,16 +628,39 @@
ty_line,
+ imap( remove_comments,
parse_t
@@ -672,16 +672,18 @@
n_fl ) )
+ )
%0A%0A%09cpp_o
|
7d09f713b929f60cd62ce48de2e2a8f27aa4de45
|
Fix unit tests.
|
Orange/tests/test_random_forest.py
|
Orange/tests/test_random_forest.py
|
import unittest
import Orange.data
import Orange.classification.random_forest as rf
from Orange.evaluation import scoring, testing
class RandomForestTest(unittest.TestCase):
def test_RandomForest(self):
table = Orange.data.Table('titanic')
forest = rf.RandomForestLearner()
results = testing.CrossValidation(table[::20], [forest], k=10)
ca = scoring.CA(results)
self.assertGreater(ca, 0.7)
self.assertLess(ca, 0.9)
def test_predict_single_instance(self):
table = Orange.data.Table('titanic')
forest = rf.RandomForestLearner()
c = forest(table)
for ins in table[::20]:
c(ins)
val, prob = c(ins, c.ValueProbs)
def test_predict_table(self):
table = Orange.data.Table('titanic')
forest = rf.RandomForestLearner()
c = forest(table)
table = table[::20]
c(table)
vals, probs = c(table, c.ValueProbs)
def test_predict_numpy(self):
table = Orange.data.Table('titanic')
forest = rf.RandomForestLearner()
c = forest(table)
X = table.X[::20]
c(X)
vals, probs = c(X, c.ValueProbs)
|
Python
| 0
|
@@ -230,39 +230,36 @@
nge.data.Table('
-titanic
+iris
')%0A fores
@@ -334,22 +334,16 @@
on(table
-%5B::20%5D
, %5Bfores
@@ -422,9 +422,9 @@
, 0.
-7
+9
)%0A
@@ -452,16 +452,17 @@
(ca, 0.9
+9
)%0A%0A d
@@ -527,39 +527,36 @@
nge.data.Table('
-titanic
+iris
')%0A fores
@@ -634,22 +634,16 @@
in table
-%5B::20%5D
:%0A
@@ -762,39 +762,36 @@
nge.data.Table('
-titanic
+iris
')%0A fores
@@ -849,36 +849,8 @@
le)%0A
- table = table%5B::20%5D%0A
@@ -981,15 +981,12 @@
le('
-titanic
+iris
')%0A
@@ -1064,37 +1064,17 @@
-X =
+c(
table.X
-%5B::20%5D%0A c(X
)%0A
@@ -1095,16 +1095,22 @@
obs = c(
+table.
X, c.Val
@@ -1105,20 +1105,21 @@
ble.X, c.ValueProbs)
+%0A
|
7041d2649b08d961cf5c7c4c663282e55526f2eb
|
Update pictures.py
|
cogs/pictures.py
|
cogs/pictures.py
|
from discord.ext import commands
import copy
import requests
class Pic:
"""Мемасики и просто картинки."""
def __init__(self, bot):
self.bot = bot
self.pic_dir = 'pictures/'
self.pic_dict = {}
self.update_pics()
def update_pics(self):
file_list = self.bot.pycopy.list_files(self.pic_dir)
for file_name in file_list:
self.pic_dict[file_name.split('.')[0]] = file_name
self.pic.aliases = list(self.pic_dict.values())
@commands.group(pass_context=True, aliases=[])
async def pic(self, ctx):
"""База картинок, мемесов etc."""
if ctx.invoked_with in self.pic_dict:
url = self.bot.pycopy.direct_link(self.pic_path + self.pic_dict[ctx.invoked_with])
r = requests.get(url, stream=True)
if r.status_code == 200:
r.raw.decode_content = True
await self.bot.upload(r.raw, self.pic_dict[ctx.invoked_with])
elif ctx.invoked_subcommand is None:
msg = copy.copy(ctx.message)
msg.content = ctx.prefix + 'help pic'
await self.bot.process_commands(msg)
@pic.command()
async def update(self):
"""Обновить список картиночек."""
self.update_pics()
await self.bot.say("Найдено {} картиночек.".format(len(self.pic_dict)))
@pic.command()
async def list(self):
"""Вывести список картиночек."""
pic_list = ''
id = 1
for pic in self.pic_dict:
pic_list += "{}. {}\n".format(id, pic)
id += 1
if len(pic_list) > 1800:
await self.bot.say(pic_list)
pic_list = ''
await self.bot.say(pic_list)
def setup(bot):
bot.add_cog(Pic(bot))
|
Python
| 0
|
@@ -707,19 +707,20 @@
-url
+file
= self.
@@ -734,19 +734,16 @@
opy.
-direct_link
+get_file
(sel
@@ -793,143 +793,8 @@
%5D)%0D%0A
- r = requests.get(url, stream=True)%0D%0A if r.status_code == 200:%0D%0A r.raw.decode_content = True%0D%0A
@@ -827,13 +827,12 @@
oad(
-r.raw
+file
, se
|
25f3da8409a6fe31eded302cd14a78b575ff2399
|
Please lxml
|
cogs/saucenao.py
|
cogs/saucenao.py
|
from discord.ext import commands
from discord.ext.commands import Cog
from lxml import etree
from bot import BeattieBot
from context import BContext
class SauceNao(Cog):
sauce_url = "https://saucenao.com/search.php"
def __init__(self, bot: BeattieBot):
self.session = bot.session
self.parser = etree.HTMLParser()
@commands.command(aliases=["sauce"])
async def saucenao(self, ctx: BContext, *, link: str = "") -> None:
"""Find the source of a linked or attached image using saucenao."""
async with ctx.typing():
if not link:
if len(ctx.message.attachments) == 1:
link = ctx.message.attachments[0].url
else:
raise commands.BadArgument
elif ctx.message.attachments:
raise commands.BadArgument
link = link.strip("<>")
payload = {"url": link}
async with self.session.post(self.sauce_url, data=payload) as resp:
root = etree.fromstring(await resp.text(), self.parser)
results = root.xpath('.//div[@class="result"]')
sim_percent = 0.0
if results:
similarity = root.find(".//div[@class='resultsimilarityinfo']").text
sim_percent = float(similarity[:-1])
if not results or sim_percent <= 60:
await ctx.send("No sauce found.")
else:
result = results[0]
if (
booru_link := result.find('.//div[@class="resultmiscinfo"]/a')
) is not None:
link = f"<{booru_link.get('href')}>"
elif (
source_link := result.find('.//div[@class="resultcontentcolumn"]/a')
) is not None:
link = f"<{source_link.get('href')}>"
else:
link = "with no author information."
await ctx.send(f"Sauce found ({similarity}) {link}")
@saucenao.error
async def saucenao_error(self, ctx: BContext, e: Exception) -> None:
if isinstance(e, commands.BadArgument):
await ctx.send("Please include a link or attach a single image.")
else:
await ctx.bot.handle_error(ctx, e)
def setup(bot: BeattieBot) -> None:
bot.add_cog(SauceNao(bot))
|
Python
| 0.998141
|
@@ -1180,23 +1180,28 @@
if
+len(
results
+)
:%0A
|
ddc0180e46588290ec53450ebff0a2cdf2adfeb4
|
rename the modrole command to setmodrole
|
cogs/settings.py
|
cogs/settings.py
|
import asyncio
from discord.ext import commands
import discord
from db.dbase import DBase
from cogs.utils.messages import MessageManager
from cogs.utils.format import format_role_name
class Settings:
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def setprefix(self, ctx, new_prefix):
"""
Change the server's command prefix (admin only)
Ex. '!setprefix $'
"""
manager = MessageManager(self.bot, ctx.author, ctx.channel, [ctx.message])
if len(new_prefix) > 5:
await manager.say("Prefix must be less than 6 characters.")
return await manager.clear()
with DBase() as db:
db.set_prefix(ctx.guild.id, new_prefix)
await manager.say("Command prefix has been changed to " + new_prefix)
return await manager.clear()
@setprefix.error
async def setprefix_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
manager = MessageManager(self.bot, ctx.author, ctx.channel, [ctx.message])
await manager.say("Oops! You didn't provide a new prefix.")
await manager.clear()
@commands.command()
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def modrole(self, ctx, *, mod_role):
"""Set the server's mod role (admin only)
Certain commands, such as creating events, require the user to have a certain privelge.
By default, the default privilege is Administrator. If a mod role is set, then any user
that is of the mod role or higher may invoke the previously Administrator only commands.
**Note:** Mentioning the role directly with this command will not work. You must the name
of the role without mentioning it.
"""
manager = MessageManager(self.bot, ctx.author, ctx.channel, [ctx.message])
guild_mod_role = None
for role in ctx.guild.roles:
if role.name in (mod_role, "@{}".format(mod_role)):
guild_mod_role = role
if not guild_mod_role:
await manager.say("I couldn't find a role called **{}** on this server. ".format(mod_role)
+ "Note that you must provide only the name of the role. "
+ "Mention it with the @ sign won't work.")
return await manager.clear()
with DBase() as db:
db.set_mod_role_id(ctx.guild.id, guild_mod_role.id)
await manager.say("The mod role has been set to: **{}**".format(format_role_name(guild_mod_role)))
return await manager.clear()
@modrole.error
async def modrole_error(self, ctx, error):
if isinstance(error, commands.MissingRequiredArgument):
manager = MessageManager(self.bot, ctx.author, ctx.channel, [ctx.message])
with DBase() as db:
rows = db.get_mod_role_id(ctx.guild.id)
mod_role = None
if len(rows[0]):
for role in ctx.guild.roles:
if role.id == rows[0][0]:
mod_role = role
if not mod_role:
role_display = 'None (Administrator)'
else:
role_display = format_role_name(mod_role)
await manager.say("The current mod role is: **{}**\n\nTo change the mod role, ".format(role_display)
+ "use `{}modrole <role_name>`".format(ctx.prefix))
await manager.clear()
@commands.command()
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def togglecleanup(self, ctx):
"""
Toggle command message cleanup on/off (admin only)
When enabled, command message spam will be deleted a few seconds
after a command has been invoked. This feature is designed to
keep bot related spam to a minimum. Only non important messages will
be deleted if this is enabled; messages like the help message or the
roster, for example, will not be removed.
"""
manager = MessageManager(self.bot, ctx.author, ctx.channel, [ctx.message])
with DBase() as db:
db.toggle_cleanup(ctx.guild.id)
cleanup = db.get_cleanup(ctx.guild.id)
status = 'enabled' if cleanup else 'disabled'
await manager.say("Command message cleanup is now *{}*".format(status))
return await manager.clear()
|
Python
| 0.00001
|
@@ -1376,24 +1376,27 @@
async def
+set
modrole(self
|
719c8d37824a890ec0044b5a83a750e9dfa56329
|
Use 'get_tags_for_user' wherever possible
|
contacts/views/contact_views.py
|
contacts/views/contact_views.py
|
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.shortcuts import (
get_object_or_404,
)
from django.views.generic import (
CreateView,
DeleteView,
ListView,
UpdateView,
FormView,
)
from django.utils import timezone
from contacts.models import (
Contact,
BookOwner,
Tag,
)
from contacts import forms
from contacts.views import BookOwnerMixin
class ContactListView(BookOwnerMixin, ListView):
model = Contact
template_name = 'contact_list.html'
def get_queryset(self):
qs = super(ContactListView, self).get_queryset()
return qs.order_by('name')
def get_context_data(self, **kwargs):
context = super(ContactListView, self).get_context_data(**kwargs)
context['tags'] = Tag.objects.filter(book__bookowner__user=self.request.user)
return context
class ContactView(BookOwnerMixin, FormView):
template_name = 'contact.html'
form_class = forms.LogEntryForm
def dispatch(self, request, **kwargs):
self.contact = get_object_or_404(
Contact.objects,
pk=self.kwargs.get('pk'),
book__bookowner__user=self.request.user,
)
return super(ContactView, self).dispatch(request, **kwargs)
def get_success_url(self):
return reverse(
'contacts-view',
kwargs={'pk': self.kwargs.get('pk')},
)
def get_context_data(self, **kwargs):
context = super(ContactView, self).get_context_data(**kwargs)
context['contact'] = self.contact
context['logs'] = self.contact.logentry_set.all().order_by('-created')
return context
def form_valid(self, form):
new_log = form.save(commit=False)
new_log.contact = self.contact
new_log.logged_by = self.request.user
if not form.cleaned_data.get('time'):
form.cleaned_data['time'] = timezone.now()
form.save()
messages.success(
self.request,
"Log added",
)
return super(ContactView, self).form_valid(form)
class CreateContactView(BookOwnerMixin, CreateView):
model = Contact
template_name = 'edit_contact.html'
form_class = forms.ContactForm
def get_success_url(self):
return reverse('contacts-view', kwargs={'pk': self.object.id})
def get_form_kwargs(self):
kwargs = super(CreateContactView, self).get_form_kwargs()
kwargs['book'] = BookOwner.objects.get(user=self.request.user).book
return kwargs
def get_context_data(self, **kwargs):
context = super(CreateContactView, self).get_context_data(**kwargs)
context['action'] = reverse('contacts-new')
return context
def form_valid(self, form):
messages.success(
self.request,
"Contact added",
)
return super(CreateContactView, self).form_valid(form)
class EditContactView(BookOwnerMixin, UpdateView):
model = Contact
template_name = 'edit_contact.html'
form_class = forms.ContactForm
def get_success_url(self):
return reverse(
'contacts-view',
kwargs={'pk': self.get_object().id},
)
def get_form_kwargs(self):
kwargs = super(EditContactView, self).get_form_kwargs()
kwargs['book'] = BookOwner.objects.get(user=self.request.user).book
return kwargs
def get_context_data(self, **kwargs):
context = super(EditContactView, self).get_context_data(**kwargs)
context['action'] = reverse(
'contacts-edit',
kwargs={'pk': self.get_object().id},
)
return context
def form_valid(self, form):
messages.success(
self.request,
"Contact updated",
)
return super(EditContactView, self).form_valid(form)
class DeleteContactView(BookOwnerMixin, DeleteView):
model = Contact
template_name = 'delete_contact.html'
def get_success_url(self):
return reverse('contacts-list')
def form_valid(self, form):
messages.success(
self.request,
"Contact deleted",
)
return super(DeleteContactView, self).form_valid(form)
class CreateTagView(BookOwnerMixin, CreateView):
model = Tag
template_name = 'edit_tag.html'
form_class = forms.TagForm
def get_success_url(self):
return reverse('contacts-list')
def get_context_data(self, **kwargs):
context = super(CreateTagView, self).get_context_data(**kwargs)
context['action'] = reverse('tags-new')
return context
def form_valid(self, form):
messages.success(
self.request,
"Tag created",
)
return super(CreateTagView, self).form_valid(form)
class TaggedContactListView(BookOwnerMixin, ListView):
model = Contact
template_name = 'contact_list.html'
def get_queryset(self):
return Contact.objects.get_contacts_for_user(self.request.user).filter(
tags__id=self.kwargs.get('pk'),
).order_by('name')
def get_context_data(self, **kwargs):
context = super(TaggedContactListView, self).get_context_data(**kwargs)
self.tag = get_object_or_404(
Tag.objects,
pk=self.kwargs.get('pk'),
book__bookowner__user=self.request.user,
)
context['tag'] = self.tag
context['tags'] = Tag.objects.get_tags_for_user(self.request.user)
return context
|
Python
| 0.00002
|
@@ -808,37 +808,26 @@
cts.
-filter(book__bookowne
+get_tags_fo
r_
-_
user
-=
+(
self
|
8a0e231dcf1cd16a0075cca6c78a7996144eb6d2
|
Include the Python version in the platform-specific build directories: with the recent change in 'get_platform()', we now have directory names like "build/lib-1.5-linux-i586". Idea and original patch by Rene Liebscher.
|
command/build.py
|
command/build.py
|
"""distutils.command.build
Implements the Distutils 'build' command."""
# created 1999/03/08, Greg Ward
__revision__ = "$Id$"
import sys, os
from distutils.core import Command
from distutils.util import get_platform
def show_compilers ():
from distutils.ccompiler import show_compilers
show_compilers()
class build (Command):
description = "build everything needed to install"
user_options = [
('build-base=', 'b',
"base directory for build library"),
('build-purelib=', None,
"build directory for platform-neutral distributions"),
('build-platlib=', None,
"build directory for platform-specific distributions"),
('build-lib=', None,
"build directory for all distribution (defaults to either " +
"build-purelib or build-platlib"),
('build-scripts=', None,
"build directory for scripts"),
('build-temp=', 't',
"temporary build directory"),
('compiler=', 'c',
"specify the compiler type"),
('debug', 'g',
"compile extensions and libraries with debugging information"),
('force', 'f',
"forcibly build everything (ignore file timestamps)"),
]
help_options = [
('help-compiler', None,
"list available compilers", show_compilers),
]
def initialize_options (self):
self.build_base = 'build'
# these are decided only after 'build_base' has its final value
# (unless overridden by the user or client)
self.build_purelib = None
self.build_platlib = None
self.build_lib = None
self.build_temp = None
self.build_scripts = None
self.compiler = None
self.debug = None
self.force = 0
def finalize_options (self):
# Need this to name platform-specific directories, but sys.platform
# is not enough -- it only names the OS and version, not the
# hardware architecture!
self.plat = get_platform ()
# 'build_purelib' and 'build_platlib' just default to 'lib' and
# 'lib.<plat>' under the base build directory. We only use one of
# them for a given distribution, though --
if self.build_purelib is None:
self.build_purelib = os.path.join (self.build_base, 'lib')
if self.build_platlib is None:
self.build_platlib = os.path.join (self.build_base,
'lib.' + self.plat)
# 'build_lib' is the actual directory that we will use for this
# particular module distribution -- if user didn't supply it, pick
# one of 'build_purelib' or 'build_platlib'.
if self.build_lib is None:
if self.distribution.ext_modules:
self.build_lib = self.build_platlib
else:
self.build_lib = self.build_purelib
# 'build_temp' -- temporary directory for compiler turds,
# "build/temp.<plat>"
if self.build_temp is None:
self.build_temp = os.path.join (self.build_base,
'temp.' + self.plat)
if self.build_scripts is None:
self.build_scripts = os.path.join (self.build_base, 'scripts')
# finalize_options ()
def run (self):
# For now, "build" means "build_py" then "build_ext". (Eventually
# it should also build documentation.)
# Invoke the 'build_py' command to "build" pure Python modules
# (ie. copy 'em into the build tree)
if self.distribution.has_pure_modules():
self.run_command ('build_py')
# Build any standalone C libraries next -- they're most likely to
# be needed by extension modules, so obviously have to be done
# first!
if self.distribution.has_c_libraries():
self.run_command ('build_clib')
# And now 'build_ext' -- compile extension modules and put them
# into the build tree
if self.distribution.has_ext_modules():
self.run_command ('build_ext')
if self.distribution.has_scripts():
self.run_command ('build_scripts')
# class build
|
Python
| 0
|
@@ -2024,16 +2024,77 @@
orm ()%0A%0A
+ plat_specifier = sys.version%5B0:3%5D + '-' + self.plat%0A%0A
@@ -2547,30 +2547,35 @@
'lib
-.
+-
' +
-self.plat
+plat_specifier
)%0A%0A
@@ -3219,22 +3219,27 @@
temp
-.
+-
' +
-self.plat
+plat_specifier
)%0A
|
d2abcb071d4c7035c1bf6818f89c2514be04c1d8
|
fix typo
|
test/test_main.py
|
test/test_main.py
|
from unittest.mock import patch, mock_open, MagicMock
import asyncio
import asynctest
import discord
import random
import unittest
import src.main
class TestMain(unittest.TestCase):
@patch('src.main.main')
def test__init_calls_main_once(self, mock_main):
with patch.object(src.main, '__name__', '__main__'):
src.main.init()
mock_main.assert_called_once_with()
@patch('builtins.open', new_callable=mock_open, read_data="{}")
def test__get_minecraft_object_can_read_empty_json(self, mock_open):
mc = src.main.get_minecraft_object_for_server_channel(42, 5)
assert not mc
@patch('builtins.open', new_callable=mock_open,
read_data="""{"42": {"5": {"host": "fake_host", "port": 1234}}}""")
def test__get_minecraft_object_can_read_host_and_port(self, mock_open):
mc = src.main.get_minecraft_object_for_server_channel(42, 5)
assert mc.mc_server.host == "fake_host"
assert mc.mc_server.port == 1234
class TestBot(asynctest.TestCase):
def setUp(self):
self.mock_server_id = str(random.randrange(999999))
self.mock_channel_id = str(random.randrange(999999))
self.patch_get_mc = patch(
'src.main.get_minecraft_object_for_server_channel',
return_value=MagicMock(spec=src.main.Minecraft),
)
self.mock_mc = self.patch_get_mc.start()()
self.bot = src.main.Bot()
self.bot.user = self._get_mock_user(bot=True)
self.patch_run = asynctest.patch.object(self.bot, 'run')
self.patch_run.start()
self.patch_send = asynctest.patch.object(self.bot, 'send_message')
self.mock_send = self.patch_send.start()
def tearDown(self):
self.patch_send.stop()
self.patch_run.stop()
self.patch_get_mc.stop()
yield from self.bot.close()
async def test__ip_command_respons_with_host_and_port(self):
self.mock_mc.mc_server = MagicMock()
mock_message = self._get_mock_command_message('!ip')
await self.bot.on_message(mock_message)
await asyncio.sleep(0.1)
self.mock_send.assert_called_once_with(
mock_message.channel,
f'{self.mock_mc.mc_server.host}:{self.mock_mc.mc_server.port}',
)
async def test__status_command_responds_even_with_connection_errors(self):
self.mock_mc.get_formatted_status_message.side_effect = \
ConnectionRefusedError
mock_message = self._get_mock_command_message('!status')
await self.bot.on_message(mock_message)
await asyncio.sleep(0.1)
self.mock_mc.get_formatted_status_message.assert_called_once()
self.mock_send.assert_called_once_with(
mock_message.channel,
'The server is not accepting connections at this time.',
)
async def test__status_command_responds_with_status_message(
self):
mock_message = self._get_mock_command_message('!status')
await self.bot.on_message(mock_message)
await asyncio.sleep(0.1)
self.mock_mc.get_formatted_status_message.assert_called_once()
self.mock_send.assert_called_once_with(
mock_message.channel,
self.mock_mc.get_formatted_status_message(),
)
def _get_mock_command_message(self, command):
return self._get_mock_message(command, channel=self.mock_channel_id)
def _get_mock_channel(self, **kwargs):
id = kwargs.pop('id', str(random.randrange(999999)))
return asynctest.MagicMock(
spec=discord.Channel,
id=id,
)
def _get_mock_server(self):
return asynctest.MagicMock(
spec=discord.Server,
id=self.mock_server_id,
me=self.bot.user,
)
def _get_mock_message(self, content, **kwargs):
channel = kwargs.pop('channel', self._get_mock_channel())
server = kwargs.pop('server', self._get_mock_server())
if type(channel) is str:
channel = self._get_mock_channel(id=channel)
return asynctest.MagicMock(
spec=discord.Message,
author=self._get_mock_user(),
channel=channel,
server=server,
content=content,
mentions=[],
)
def _get_mock_user(self, bot=None):
return asynctest.MagicMock(
spec=discord.User,
id=str(random.randrange(999999)),
name='mock_user',
bot=bot,
)
|
Python
| 0.999991
|
@@ -1895,16 +1895,17 @@
d_respon
+d
s_with_h
|
a6a78260b47f3a632564e7a80ce25b3b75e242e9
|
Add sample code for API key authentication
|
examples/authentication.py
|
examples/authentication.py
|
'''A basic example of authentication requests within a hug API'''
import hug
# Several authenticators are included in hug/authentication.py. These functions
# accept a verify_user function, which can be either an included function (such
# as the basic username/bassword function demonstrated below), or logic of your
# own. Verification functions return an object to store in the request context
# on successful authentication. Naturally, this is a trivial demo, and a much
# more robust verification function is recommended. This is for strictly
# illustrative purposes.
authentication = hug.authentication.basic(hug.authentication.verify('User1', 'mypassword'))
# Note that the logged in user can be accessed via a built-in directive.
# Directives can provide computed input parameters via an abstraction
# layer so as not to clutter your API functions with access to the raw
# request object.
@hug.get('/authenticated', requires=authentication)
def api_call1(user: hug.directives.user):
return "Successfully authenticated with user: {0}".format(user)
@hug.get('/public')
def api_call2():
return "Needs no authentication"
|
Python
| 0
|
@@ -659,16 +659,96 @@
ord'))%0A%0A
+@hug.get('/public')%0Adef public_api_call():%0A return %22Needs no authentication%22%0A
%0A# Note
@@ -1028,24 +1028,35 @@
on)%0Adef
+basic_auth_
api_call
1(user:
@@ -1047,17 +1047,16 @@
api_call
-1
(user: h
@@ -1087,17 +1087,17 @@
return
-%22
+'
Successf
@@ -1133,95 +1133,936 @@
%7B0%7D
-%22.format(user)%0A%0A%0A@hug.get('/public')%0Adef api_call2():%0A return %22Needs no authentication%22
+'.format(user)%0A%0A%0A# Here is a slightly less trivial example of how authentication might%0A# look in an API that uses keys.%0A%0A# First, the user object stored in the context need not be a string,%0A# but can be any Python object.%0Aclass APIUser(object):%0A %22%22%22A minimal example of a rich User object%22%22%22%0A def __init__(self, user_id, api_key):%0A self.user_id = user_id%0A self.api_key = api_key%0A%0Adef api_key_verify(api_key):%0A magic_key = '5F00832B-DE24-4CAF-9638-C10D1C642C6C' # Obviously, this would hit your database%0A if api_key == magic_key:%0A # Success!%0A return APIUser('user_foo', api_key)%0A else:%0A # Invalid key%0A return None%0A%0Aapi_key_authentication = hug.authentication.api_key(api_key_verify)%0A%0A@hug.get('/key_authenticated', requires=api_key_authentication)%0Adef basic_auth_api_call(user: hug.directives.user):%0A return 'Successfully authenticated with user: %7B0%7D'.format(user.user_id)
%0A
|
73fc80dd8ece1f5ecb1fb529412cd97a804ffccd
|
Test fetch_emails_from_wiki command
|
remo/profiles/tests/test_commands.py
|
remo/profiles/tests/test_commands.py
|
import os
import tempfile
from django.conf import settings
from django.core import management, mail
from django.contrib.auth.models import User
from nose.tools import eq_
from test_utils import TestCase
class CreateUserTest(TestCase):
    """
    Tests for the create_users management command.

    Builds a temporary file of candidate e-mail addresses (two valid,
    one bogus) and checks user creation plus optional notification mail.
    """
    def setUp(self):
        # check if actual email sending is enabled and if yes do not run
        # (a real backend would spam the addresses below on every run)
        if settings.EMAIL_BACKEND != 'django.core.mail.backends.locmem.EmailBackend':
            raise ValueError("Please change local.py to avoid "
                             "sending testing emails")
        # create a temporary file with emails, one address per line;
        # "bogusemail.com" has no "@", so only two entries are valid
        self.TEST_EMAILS = ["foo@example.com", "bar@example.com",
                            "bogusemail.com"]
        self.NO_VALID_EMAILS = 2
        # delete=False so the command can reopen the file by name;
        # tearDown removes it explicitly
        self.temp_file = tempfile.NamedTemporaryFile(delete=False)
        for email in self.TEST_EMAILS:
            self.temp_file.write(email)
            self.temp_file.write('\n')
        self.temp_file.close()
    def test_command_without_input_file(self):
        # the command must exit when invoked without an input file
        args = []
        opts = {}
        self.assertRaises(SystemExit, management.call_command,
                          'create_users', *args, **opts)
    def test_command_input_file_no_email(self):
        # email=False: users are created but no mail is sent
        args = [self.temp_file.name]
        opts = {'email':False}
        management.call_command('create_users', *args, **opts)
        eq_(len(mail.outbox), 0)
        eq_(User.objects.count(), self.NO_VALID_EMAILS)
    def test_command_input_file_send_email(self):
        # email=True: one message per valid address
        args = [self.temp_file.name]
        opts = {'email':True}
        management.call_command('create_users', *args, **opts)
        eq_(len(mail.outbox), self.NO_VALID_EMAILS)
        eq_(User.objects.count(), self.NO_VALID_EMAILS)
    def tearDown(self):
        # clean up the temp file created in setUp
        os.unlink(self.temp_file.name)
|
Python
| 0.000004
|
@@ -18,16 +18,58 @@
tempfile
+%0Aimport json%0A%0Aimport requests%0Aimport fudge
%0A%0Afrom d
@@ -206,16 +206,24 @@
port eq_
+, raises
%0Afrom te
@@ -297,16 +297,9 @@
-Create t
+T
ests
@@ -1834,32 +1834,32 @@
tearDown(self):%0A
-
os.unlin
@@ -1881,8 +1881,2202 @@
e.name)%0A
+%0A%0Aclass FetchEmailsFromWikiTest(TestCase):%0A %22%22%22%0A Tests for fetch_emails_from_wiki management command%0A %22%22%22%0A%0A @raises(SystemExit)%0A @fudge.patch('requests.get')%0A def test_command_with_connection_error(self, fake_requests_obj):%0A (fake_requests_obj.expects_call().raises(requests.ConnectionError))%0A management.call_command('fetch_emails_from_wiki')%0A%0A%0A @raises(SystemExit)%0A @fudge.patch('requests.get')%0A def test_command_with_invalid_code(self, fake_requests_obj):%0A request = requests.Request()%0A request.status_code=404%0A request.text='foo'%0A%0A (fake_requests_obj.expects_call().returns(request))%0A management.call_command('fetch_emails_from_wiki')%0A%0A%0A @raises(SystemExit)%0A @fudge.patch('requests.get')%0A def test_command_with_bogus_data(self, fake_requests_obj):%0A request = requests.Request()%0A request.status_code = 200%0A request.text='foo'%0A%0A (fake_requests_obj.expects_call().returns(request))%0A management.call_command('fetch_emails_from_wiki')%0A%0A%0A @fudge.patch('requests.get')%0A def test_command_with_valid_data(self, fake_requests_obj):%0A request = requests.Request()%0A request.status_code = 200%0A request.text = json.dumps(%0A %7B'ask': %7B%0A 'query': %7B%0A %7D,%0A 'results': %7B%0A 'items': %5B%0A %7B%0A 'properties':%7B%0A 'bugzillamail':'foo@example.com',%0A %7D,%0A %22uri%22: %22https:%5C/%5C/wiki.mozilla.org%5C/index.php?title=User:fooexample%22,%0A %7D,%0A %7B%0A 'properties':%7B%0A 'bugzillamail':'test@example.com',%0A %7D,%0A %22uri%22: %22https:%5C/%5C/wiki.mozilla.org%5C/index.php?title=User:testexample%22,%0A %7D,%0A %5D,%0A %7D%0A %7D%0A %7D)%0A%0A (fake_requests_obj.expects_call().returns(request))%0A management.call_command('fetch_emails_from_wiki')%0A%0A%0A
|
5d8929986d278d97e33d425ae10bee0d29631886
|
Encode the hostname to a str
|
mopidy/frontends/http/__init__.py
|
mopidy/frontends/http/__init__.py
|
from __future__ import absolute_import
import logging
import pykka
from mopidy import exceptions, settings
try:
import cherrypy
except ImportError as import_error:
raise exceptions.OptionalDependencyError(import_error)
logger = logging.getLogger('mopidy.frontends.http')
class HttpFrontend(pykka.ThreadingActor):
    """Mopidy frontend exposing the core API over HTTP via CherryPy."""

    def __init__(self, core):
        super(HttpFrontend, self).__init__()
        self.core = core
        cherrypy.config.update({
            # CherryPy expects a byte string for the socket host; the
            # setting may be a unicode string, so encode it explicitly.
            'server.socket_host':
                settings.HTTP_SERVER_HOSTNAME.encode('utf-8'),
            'server.socket_port': settings.HTTP_SERVER_PORT,
        })
        app = cherrypy.tree.mount(Root(self.core), '/')
        self._setup_logging(app)

    def _setup_logging(self, app):
        """Silence CherryPy's own handlers; mopidy configures logging."""
        cherrypy.log.access_log.setLevel(logging.NOTSET)
        cherrypy.log.error_log.setLevel(logging.NOTSET)
        cherrypy.log.screen = False
        app.log.access_log.setLevel(logging.NOTSET)
        app.log.error_log.setLevel(logging.NOTSET)

    def on_start(self):
        """Actor hook: start the embedded HTTP server."""
        logger.debug(u'Starting HTTP server')
        cherrypy.server.start()
        logger.info(u'HTTP server running at %s',
            cherrypy.server.base())

    def on_stop(self):
        """Actor hook: stop the embedded HTTP server."""
        cherrypy.server.stop()
class Root(object):
    """CherryPy root controller exposing playback state as JSON."""

    def __init__(self, core):
        self.core = core

    @cherrypy.expose
    @cherrypy.tools.json_out()
    def index(self):
        """Return the current playback state and track (None if idle)."""
        state = self.core.playback.state.get()
        current = self.core.playback.current_track.get()
        serialized = current.serialize() if current else current
        return {
            'playback_state': state,
            'current_track': serialized,
        }
|
Python
| 0.999999
|
@@ -488,16 +488,32 @@
t_host':
+%0A
setting
@@ -534,16 +534,32 @@
HOSTNAME
+.encode('utf-8')
,%0A
|
6e660da290db674eebb0c353662e5400bc735397
|
Update backplane demo to be py3 only
|
examples/backplane_demo.py
|
examples/backplane_demo.py
|
#!/usr/bin/python
import time
from guild.actor import *
from guild.components import Backplane, PublishTo, SubscribeTo, Printer
class Producer(Actor):
    # Guild actor that emits the string "hello" on its ``output`` binding.
    @process_method
    def process(self):
        self.output("hello")
    @late_bind_safe
    def output(self, value):
        # Placeholder: guild rebinds this to a downstream actor's input
        # via pipeline() — presumably at wiring time; verify in guild docs.
        pass
# Wire a producer and a printer through a named backplane, run briefly,
# then shut everything down.
Backplane("HELLO").start()
p = Producer()
pr = Printer()
time.sleep(1)
pub = PublishTo("HELLO")
sub = SubscribeTo("HELLO")
# print() function call: the bare py2 print statement is a SyntaxError
# on Python 3 (this demo is py3-only).
print("pub", pub, repr(pub), pub.input)
pipeline(p, pub)
pipeline(sub, pr)
start(p, pr, sub)
time.sleep(1.0)
stop(p, pr, sub)
wait_for(p, pr, sub)
|
Python
| 0
|
@@ -416,17 +416,17 @@
)%0A%0Aprint
-
+(
%22pub%22, p
@@ -449,16 +449,17 @@
ub.input
+)
%0A%0Apipeli
|
78130574a38db3a79f7c669a62bf3b372db39967
|
add the capability to receive absolute and relative paths
|
msg/tools/uorb_rtps_classifier.py
|
msg/tools/uorb_rtps_classifier.py
|
#!/usr/bin/env python
################################################################################
#
# Copyright 2018 PX4 Pro Development Team. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
import argparse
import errno
import os
import sys

import yaml
class Classifier():
    """
    Classify RTPS msgs as senders, receivers or to be ignored.

    The mapping comes from a yaml file whose 'rtps' key lists, per
    message, its id plus optional 'send'/'receive' flags.
    """

    def __init__(self, yaml_file, msg_folder):
        self.msg_id_map = self.parse_yaml_msg_id_file(yaml_file)
        self.msg_folder = msg_folder
        self.msgs_to_send = self.set_msgs_to_send()
        self.msgs_to_receive = self.set_msgs_to_receive()
        self.msgs_to_ignore = self.set_msgs_to_ignore()
        self.msg_files_send = self.set_msg_files_send()
        self.msg_files_receive = self.set_msg_files_receive()
        self.msg_files_ignore = self.set_msg_files_ignore()

    # setters (for class init)
    def set_msgs_to_send(self):
        """Map msg name -> id for every entry flagged 'send'."""
        send = {}
        # 'entry' instead of the original 'dict' (shadowed the builtin)
        for entry in self.msg_id_map['rtps']:
            if 'send' in entry.keys():
                send.update({entry['msg']: entry['id']})
        return send

    def set_msgs_to_receive(self):
        """Map msg name -> id for every entry flagged 'receive'."""
        receive = {}
        for entry in self.msg_id_map['rtps']:
            if 'receive' in entry.keys():
                receive.update({entry['msg']: entry['id']})
        return receive

    def set_msgs_to_ignore(self):
        """Map msg name -> id for entries carrying neither flag."""
        ignore = {}
        for entry in self.msg_id_map['rtps']:
            if ('send' not in entry.keys()) and ('receive' not in entry.keys()):
                ignore.update({entry['msg']: entry['id']})
        return ignore

    def set_msg_files_send(self):
        """Full .msg file paths for the 'send' messages."""
        return [os.path.join(self.msg_folder, msg + ".msg")
                for msg in self.msgs_to_send.keys()]

    def set_msg_files_receive(self):
        """Full .msg file paths for the 'receive' messages."""
        return [os.path.join(self.msg_folder, msg + ".msg")
                for msg in self.msgs_to_receive.keys()]

    def set_msg_files_ignore(self):
        """Full .msg file paths for the ignored messages."""
        return [os.path.join(self.msg_folder, msg + ".msg")
                for msg in self.msgs_to_ignore.keys()]

    @staticmethod
    def parse_yaml_msg_id_file(yaml_file):
        """
        Parses a yaml file into a dict.

        Raises IOError carrying the filename when the file is missing.
        """
        try:
            with open(yaml_file, 'r') as f:
                # safe_load: the ids file is plain data, no python objects
                return yaml.safe_load(f)
        # IOError too: py2 open() raises IOError, not OSError
        except (IOError, OSError) as e:
            # 'errno' is now imported at module level; it was referenced
            # here without any import, raising NameError on this path
            if e.errno == errno.ENOENT:
                raise IOError(errno.ENOENT, os.strerror(
                    errno.ENOENT), yaml_file)
            else:
                raise
if __name__ == "__main__":
    # CLI: print send/receive/ignore message names (or their .msg paths)
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--send", dest='send',
                        action="store_true", help="Get topics to be sent")
    parser.add_argument("-r", "--receive", dest='receive',
                        action="store_true", help="Get topics to be received")
    parser.add_argument("-i", "--ignore", dest='ignore',
                        action="store_true", help="Get topics to be ignored")
    parser.add_argument("-p", "--path", dest='path',
                        action="store_true", help="Get msgs and its paths")
    parser.add_argument("-m", "--topic-msg-dir", dest='msgdir', type=str,
                        help="Topics message dir, by default msg/", default="msg")
    parser.add_argument("-y", "--rtps-ids-file", dest='yaml_file', type=str,
                        help="RTPS msg IDs definition file absolute path, by default use relative path to msg, tools/uorb_rtps_message_ids.yaml",
                        default='tools/uorb_rtps_message_ids.yaml')

    # Parse arguments
    args = parser.parse_args()

    msg_folder = args.msgdir
    if args.msgdir == 'msg':
        # default: resolve the repo's msg/ dir relative to this script
        msg_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    if args.yaml_file != 'tools/uorb_rtps_message_ids.yaml':
        # an explicit ids file is taken as given (absolute or cwd-relative)
        classifier = Classifier(os.path.abspath(args.yaml_file), msg_folder)
    else:
        classifier = Classifier(os.path.join(
            msg_folder, args.yaml_file), msg_folder)

    # BUG FIX: the attributes are named 'msg_files_*' (the original read
    # 'msgs_files_*', which raised AttributeError with --path)
    if args.send:
        if args.path:
            print ('send files: ' + ', '.join(str(msg_file)
                   for msg_file in classifier.msg_files_send) + '\n')
        else:
            print (', '.join(str(msg)
                   for msg in classifier.msgs_to_send.keys()) + '\n')
    if args.receive:
        if args.path:
            print ('receive files: ' + ', '.join(str(msg_file)
                   for msg_file in classifier.msg_files_receive) + '\n')
        else:
            print (', '.join(str(msg)
                   for msg in classifier.msgs_to_receive.keys()) + '\n')
    if args.ignore:
        if args.path:
            print ('ignore files: ' + ', '.join(str(msg_file)
                   for msg_file in classifier.msg_files_ignore) + '\n')
        else:
            print (', '.join(str(msg)
                   for msg in classifier.msgs_to_ignore.keys()) + '\n')
|
Python
| 0
|
@@ -5089,29 +5089,26 @@
()%0A%0A msg_
-folde
+di
r = args.msg
@@ -5148,29 +5148,26 @@
msg_
-folde
+di
r = os.path.
@@ -5226,69 +5226,61 @@
-if args.yaml_file != 'tools/uorb_rtps_message_ids.yaml':%0A
+else:%0A msg_dir = os.path.abspath(args.msgdir)%0A
@@ -5284,32 +5284,33 @@
classifier =
+(
Classifier(os.pa
@@ -5345,46 +5345,60 @@
msg_
-folder)%0A else:%0A classifier =
+dir) if os.path.isabs(args.yaml_file) %5C%0A else
Cla
@@ -5422,30 +5422,14 @@
oin(
-%0A msg_folde
+msg_di
r, a
@@ -5448,22 +5448,20 @@
e), msg_
-folder
+dir)
)%0A%0A i
|
33354b12b16e5fab8db7fd7fa1d0defdb6e65a4e
|
update query to remove prefix
|
examples/flask-echo/app.py
|
examples/flask-echo/app.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
import os
import sys
import requests, json
from argparse import ArgumentParser
from flask import Flask, request, abort
from linebot import (
LineBotApi, WebhookParser
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage, TemplateSendMessage, ImageSendMessage, ImagemapSendMessage,
ButtonsTemplate, ConfirmTemplate, CarouselTemplate, CarouselColumn,
TemplateAction, PostbackTemplateAction, MessageTemplateAction, URITemplateAction,
BaseSize, URIImagemapAction, MessageImagemapAction, ImagemapArea
)
app = Flask(__name__)
# get channel_secret and channel_access_token from your environment variable
channel_secret = os.getenv('LINE_CHANNEL_SECRET', None)
channel_access_token = os.getenv('LINE_CHANNEL_ACCESS_TOKEN', None)
if channel_secret is None:
print('Specify LINE_CHANNEL_SECRET as environment variable.')
sys.exit(1)
if channel_access_token is None:
print('Specify LINE_CHANNEL_ACCESS_TOKEN as environment variable.')
sys.exit(1)
line_bot_api = LineBotApi(channel_access_token)
parser = WebhookParser(channel_secret)
@app.route("/callback", methods=['POST'])
def callback():
    """LINE webhook: verify the signature, then dispatch on message prefix."""
    signature = request.headers['X-Line-Signature']

    # get request body as text
    body = request.get_data(as_text=True)
    #app.logger.info("Request body: " + body)
    #app.logger.info("Signature: " + signature)

    # parse webhook body; a bad signature is the caller's fault -> 400
    try:
        events = parser.parse(body, signature)
    except InvalidSignatureError:
        abort(400)

    # if event is MessageEvent and message is TextMessage, then check prefix
    for event in events:
        # only touch .message.text after the type checks: non-message
        # events (follow, postback, ...) have no .message attribute
        if not isinstance(event, MessageEvent):
            continue
        if not isinstance(event.message, TextMessage):
            continue
        text_message = event.message.text

        # if prefix is @so, check StackOverflow
        if text_message.lower().startswith('@so'):
            send_message = queryStackOverflow(text_message)
        # if prefix is @go, check
        elif text_message.lower().startswith('@go'):
            # do nothing first
            send_message = None
        else:
            continue

        # skip handlers that produced no reply instead of replying None
        if send_message is None:
            continue
        line_bot_api.reply_message(
            event.reply_token, send_message
        )

    return 'OK'
def queryStackOverflow(query):
    """Search StackOverflow for *query* (its '@so' prefix stripped) and
    build a LINE carousel of up to 4 matching questions.

    Returns an ImagemapSendMessage fallback when nothing matched.
    """
    query = query[3:]  # strip the '@so' routing prefix
    url = 'https://api.stackexchange.com/2.2/search/advanced?'
    payload = {
        'site': 'stackoverflow',
        'views':'200',
        'answers':'1',
        'order':'desc',
        'sort':'relevance',
        'pagesize':'4',
        'q':query,
        'body':query
    }
    response = requests.get(url=url, params=payload)
    data = response.json()

    # decide on the actual result list: 'has_more' only says whether
    # another page exists, so the last page of results would be dropped
    if data.get('items'):
        columns2 = []
        for index, item in enumerate(data['items']):
            # print() call: the bare py2 print statement breaks on py3
            print(str(index) + ':' + str(item))
            temp = CarouselColumn(
                thumbnail_image_url='https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-icon.png',
                title=item['title'][:36] + '...',
                # join at most two tags; avoids IndexError on 1-tag posts
                text='Tags: ' + ', '.join(item['tags'][:2]),
                actions=[
                    URITemplateAction(
                        label='Go to Article',
                        uri=item['link']
                    ),
                    PostbackTemplateAction(
                        label='Useful',
                        text='Article ' + str(index) + ' is useful',
                        data='action=buy&itemid=1'
                    ),
                    MessageTemplateAction(
                        label='Not useful',
                        text='Article ' + str(index) + ' is not useful'
                    )
                ]
            )
            columns2.append(temp)
        carousel_template_message = TemplateSendMessage(
            alt_text='Test',
            template=CarouselTemplate(
                columns=columns2
            )
        )
        return carousel_template_message
    else:
        imagemap_message = ImagemapSendMessage(
            base_url='https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-icon.png?v=c78bd457575a',
            alt_text='This is an imagemap',
            base_size=BaseSize(height=1040, width=1040),
            actions=[
                URIImagemapAction(
                    link_uri='http://www.theodora.com/maps/new9/time_zones_4.jpg',
                    area=ImagemapArea(
                        x=0, y=0, width=520, height=1040
                    )
                ),
                MessageImagemapAction(
                    text='hello',
                    area=ImagemapArea(
                        x=520, y=0, width=520, height=1040
                    )
                )
            ]
        )
        return imagemap_message
def sendText(text):
    """Wrap *text* in a TextSendMessage and return it.

    The original built the message and discarded it (implicitly
    returning None); returning it makes the helper usable.
    """
    return TextSendMessage(text=text)
def querySearchEngine(data, type):
    """Placeholder search handler: log the requested type, return a
    fixed sample image message.

    NOTE(review): 'data' is currently unused and 'type' shadows the
    builtin; both names are kept to preserve the public signature.
    """
    # lazy %-formatting; the original also set an unused local 'index'
    app.logger.info("type:%s", type)
    template = ImageSendMessage(
        original_content_url='https://upload.wikimedia.org/wikipedia/commons/b/b4/JPEG_example_JPG_RIP_100.jpg',
        preview_image_url='https://upload.wikimedia.org/wikipedia/commons/b/b4/JPEG_example_JPG_RIP_100.jpg'
    )
    return template
if __name__ == "__main__":
    # CLI entry: optional --port/--debug flags, then serve on all interfaces
    arg_parser = ArgumentParser(
        usage='Usage: python ' + __file__ + ' [--port <port>] [--help]'
    )
    arg_parser.add_argument('-p', '--port', default=8000, help='port')
    arg_parser.add_argument('-d', '--debug', default=False, help='debug')
    options = arg_parser.parse_args()
    app.run(debug=options.debug, host='0.0.0.0', port=int(options.port))
|
Python
| 0
|
@@ -3013,16 +3013,17 @@
y):%0A
+#
query =
@@ -4706,24 +4706,20 @@
-imagemap
+text
_message
@@ -4721,24 +4721,20 @@
ssage =
-Imagemap
+Text
SendMess
@@ -4741,736 +4741,52 @@
age(
-%0A base_url='https://cdn.sstatic.net/Sites/stackoverflow/company/img/logos/so/so-icon.png?v=c78bd457575a',%0A alt_text='This is an imagemap',%0A base_size=BaseSize(height=1040, width=1040),%0A actions=%5B%0A URIImagemapAction(%0A link_uri='http://www.theodora.com/maps/new9/time_zones_4.jpg',%0A area=ImagemapArea(%0A x=0, y=0, width=520, height=1040%0A )%0A ),%0A MessageImagemapAction(%0A text='hello',%0A area=ImagemapArea(%0A x=520, y=0, width=520, height=1040%0A )%0A )%0A %5D%0A
+text='Not found. Please try other keywords.'
)%0A
@@ -4802,16 +4802,12 @@
urn
-imagemap
+text
_mes
@@ -4829,75 +4829,8 @@
%0A
-def sendText(text):%0A text_message = TextSendMessage(text=text)%0A%0A
def
|
0b37c0f1cba1a6e89a63f9597d61383b81b1a2d9
|
Fix typo
|
haas/client/network.py
|
haas/client/network.py
|
import json
from haas.client.base import ClientBase
class Network(ClientBase):
    """Query and manipulate HIL network objects and their relations."""

    def list(self):
        """Return all networks registered with HIL."""
        return self.check_response(
            self.httpClient.request("GET", self.object_url('networks')))

    def show(self, network):
        """Return the attributes of <network>."""
        return self.check_response(
            self.httpClient.request(
                "GET", self.object_url('network', network)))

    def create(self, network, owner, access, net_id):
        """Create a link-layer <network>.

        See docs/networks.md for details.
        """
        body = json.dumps({
            'owner': owner, 'access': access,
            'net_id': net_id
        })
        return self.check_response(
            self.httpClient.request(
                "PUT", self.object_url('network', network), data=body))

    def delete(self, network):
        """Delete <network>."""
        return self.check_response(
            self.httpClient.request(
                "DELETE", self.object_url('network', network)))

    def grant_access(self, project, network):
        """Grant <project> access to <network>."""
        return self.check_response(
            self.httpClient.request(
                "PUT",
                self.object_url('network', network, 'access', project)))

    def revoke_access(self, project, network):
        """Revoke <project>'s access to <network>."""
        return self.check_response(
            self.httpClient.request(
                "DELETE",
                self.object_url('network', network, 'access', project)))
|
Python
| 0.999999
|
@@ -237,23 +237,23 @@
sts all
-project
+network
s under
|
19d8443eb3e1e45225e8dc2648b5d72d2c75548d
|
Fix underlining
|
clowder/model/clowder_yaml.py
|
clowder/model/clowder_yaml.py
|
"""clowder.yaml parsing and functionality"""
import os, subprocess
from termcolor import colored
import yaml
from clowder.utility.git_utilities import git_litter, git_validate_repo_state
from clowder.utility.print_utilities import print_group, print_project_status
from clowder.model.group import Group
from clowder.model.remote import Remote
class ClowderYAML(object):
    """Encapsulates defaults, remotes and groups parsed from clowder.yaml."""

    def __init__(self, rootDirectory):
        self.root_directory = rootDirectory
        self.default_ref = None
        self.default_remote = None
        self.groups = []
        self.remotes = []
        self.load_yaml()
        self.clowder_path = os.path.join(self.root_directory, 'clowder')

    def load_yaml(self):
        """Load the clowder model from <root>/clowder.yaml (if present)."""
        yaml_file = os.path.join(self.root_directory, 'clowder.yaml')
        if os.path.exists(yaml_file):
            # renamed from 'file' to avoid shadowing the builtin
            with open(yaml_file) as yaml_stream:
                parsed_yaml = yaml.safe_load(yaml_stream)
                self.default_ref = parsed_yaml['defaults']['ref']
                self.default_remote = parsed_yaml['defaults']['remote']
                for remote in parsed_yaml['remotes']:
                    self.remotes.append(Remote(remote))
                defaults = {'ref': self.default_ref, 'remote': self.default_remote}
                for group in parsed_yaml['groups']:
                    self.groups.append(Group(self.root_directory,
                                             group,
                                             defaults,
                                             self.remotes))
                self.groups.sort(key=lambda group: group.name)

    def forall(self, command):
        """Runs command in all projects"""
        for group in self.groups:
            print_group(group.name)
            for project in group.projects:
                if os.path.isdir(project.full_path):
                    print_project_status(self.root_directory, project.path, project.name)
                    running_output = colored('Running command: ', attrs=['underline'])
                    command_output = colored(command, attrs=['bold'])
                    print(running_output + command_output)
                    subprocess.call(command.split(),
                                    cwd=project.full_path)
                    print('')

    def get_all_group_names(self):
        """Returns all group names for current clowder.yaml"""
        # BUG FIX: self.groups holds Group objects (load_yaml sorts on
        # group.name), so attribute access is required, not group['name']
        return [group.name for group in self.groups]

    def get_all_project_names(self):
        """Returns all project names for current clowder.yaml"""
        names = []
        for group in self.groups:
            names.extend(group.get_all_project_names())
        return names

    def litter(self):
        """Discard changes for all projects"""
        git_litter(self.clowder_path)
        for group in self.groups:
            for project in group.projects:
                git_litter(project.full_path)

    def herd_all(self):
        """Sync all projects with latest upstream changes"""
        self.validate_all()
        for group in self.groups:
            print_group(group.name)
            for project in group.projects:
                print_project_status(self.root_directory, project.path, project.name)
                project.herd()

    def herd_version_all(self, version):
        """Sync all projects to fixed versions"""
        self.validate_all()
        for group in self.groups:
            print_group(group.name)
            for project in group.projects:
                print_project_status(self.root_directory, project.path, project.name)
                project.herd_version(version)

    def status(self):
        """Print git status for all projects"""
        print_project_status(self.root_directory, 'clowder', 'clowder')
        print('')
        for group in self.groups:
            print_group(group.name)
            for project in group.projects:
                print_project_status(self.root_directory, project.path, project.name)

    def fix_version(self, version):
        """Fix current commits to versioned clowder.yaml"""
        self.validate_all()
        versions_dir = os.path.join(self.root_directory, 'clowder/versions')
        version_dir = os.path.join(versions_dir, version)
        if not os.path.exists(version_dir):
            os.makedirs(version_dir)
        yaml_file = os.path.join(version_dir, 'clowder.yaml')
        if not os.path.exists(yaml_file):
            # renamed from 'file' to avoid shadowing the builtin
            with open(yaml_file, 'w') as yaml_stream:
                yaml.dump(self.get_yaml(), yaml_stream, default_flow_style=False)

    def get_yaml(self):
        """Return python object representation for saving yaml"""
        groups_yaml = [group.get_yaml() for group in self.groups]
        remotes_yaml = [remote.get_yaml() for remote in self.remotes]
        defaults_yaml = {'ref': self.default_ref, 'remote': self.default_remote}
        return {'defaults': defaults_yaml,
                'remotes': remotes_yaml,
                'groups': groups_yaml}

    def get_fixed_version_names(self):
        """Return list of all fixed versions"""
        versions_dir = os.path.join(self.root_directory, 'clowder/versions')
        if os.path.exists(versions_dir):
            return os.listdir(versions_dir)
        return None

    def validate_all(self):
        """Validate status of all projects"""
        for group in self.groups:
            for project in group.projects:
                git_validate_repo_state(project.full_path)
|
Python
| 0.000419
|
@@ -2077,17 +2077,16 @@
command:
-
', attrs
@@ -2213,16 +2213,22 @@
output +
+ ' ' +
command
|
a655372275026e23327ef8f3bf3136779f9c74d1
|
fix test to account for last_refresh in site dicts
|
test/usgs_test.py
|
test/usgs_test.py
|
import datetime
import os
import isodate
import tables
import pytest
import pyhis
TEST_FILE_PATH = '/tmp/pyhis_test.h5'
def test_init():
    # init_h5 must create the HDF5 store at TEST_FILE_PATH from scratch
    _remove_test_file()
    assert not os.path.exists(TEST_FILE_PATH)
    pyhis.usgs.pytables.init_h5(TEST_FILE_PATH)
    assert os.path.exists(TEST_FILE_PATH)
def test_parse_get_sites():
    # parse both RI fixture files; combined they hold 63 distinct sites.
    # Returns the dict so other tests can reuse the parsed sites.
    site_files = ['RI_daily.xml', 'RI_instantaneous.xml']
    sites = {}
    for site_file in site_files:
        with open(site_file, 'r') as f:
            sites.update(pyhis.usgs.core._parse_sites(f))
    assert len(sites) == 63
    return sites
def test_update_site_table():
    # NOTE(review): depends on test execution order — assumes test_init
    # ran first so the sites table starts empty
    assert _count_rows('/usgs/sites') == 0
    sites = test_parse_get_sites()
    pyhis.usgs.pytables._update_site_table(sites, TEST_FILE_PATH)
    assert _count_rows('/usgs/sites') == 63
def test_pytables_get_sites():
    # all 63 sites written by the previous test are readable back
    sites = pyhis.usgs.pytables.get_sites(TEST_FILE_PATH)
    assert len(sites) == 63
def test_pytables_get_site():
    pyhis.usgs.pytables.get_sites(TEST_FILE_PATH)
    site = pyhis.usgs.pytables.get_site('01115100', TEST_FILE_PATH)
    # cached site dicts carry 11 fields (includes last_refresh)
    assert len(site) == 11
def test_pytables_get_site_fallback_to_core():
    # a site absent from the HDF5 cache is fetched via the core module
    site_code = '08068500'
    sites = pyhis.usgs.pytables.get_sites(TEST_FILE_PATH)
    assert site_code not in sites
    site = pyhis.usgs.pytables.get_site(site_code, TEST_FILE_PATH)
    # 11 fields, consistent with the cached-site assertion above: site
    # dicts include last_refresh (the original expected 10)
    assert len(site) == 11
def test_pytables_get_site_raises_lookup():
    # unknown site codes raise LookupError rather than returning None
    with pytest.raises(LookupError):
        pyhis.usgs.pytables.get_site('98068500', TEST_FILE_PATH)
def test_update_or_append():
    # exercise _update_or_append: rows whose datetime matches are updated
    # in place, unmatched rows are appended
    h5file = tables.openFile(TEST_FILE_PATH, mode="r+")
    test_table = _create_test_table(h5file, 'update_or_append', pyhis.usgs.pytables.USGSValue)
    where_filter = '(datetime == "%(datetime)s")'
    # 1000 rows for consecutive days starting 2000-01-01 (days 0..999)
    initial_values = [
        {'datetime': isodate.datetime_isoformat(datetime.datetime(2000, 1, 1) + \
                     datetime.timedelta(days=i)),
         'value': 'initial',
         'qualifiers': ''}
        for i in range(1000)]
    # 7 update rows: days 10/20/30/399/999 already exist (updated in
    # place); days 1000 and 2000 are new (appended)
    update_values = [
        {'datetime': isodate.datetime_isoformat(datetime.datetime(2000, 1, 1) + \
                     datetime.timedelta(days=i)),
         'value': 'updated',
         'qualifiers': ''}
        for i in [20, 30, 10, 999, 1000, 2000, 399]]
    pyhis.usgs.pytables._update_or_append(test_table, initial_values, where_filter)
    h5file.close()
    assert _count_rows('/test/update_or_append') == 1000
    h5file = tables.openFile(TEST_FILE_PATH, mode="r+")
    test_table = h5file.getNode('/test/update_or_append')
    pyhis.usgs.pytables._update_or_append(test_table, update_values, where_filter)
    h5file.close()
    # 1000 originals + 2 appended
    assert _count_rows('/test/update_or_append') == 1002
def test_update_site_list():
    """update_site_list must populate all 63 RI sites into a fresh store."""
    test_init()
    assert _count_rows('/usgs/sites') == 0
    # sanity-check the fixtures parse (return value intentionally unused;
    # the original bound it to a dead local)
    test_parse_get_sites()
    pyhis.usgs.pytables.update_site_list(state_code='RI', path=TEST_FILE_PATH)
    assert _count_rows('/usgs/sites') == 63
def test_core_get_sites_by_state_code():
    # live query by state code returns the full RI site set
    sites = pyhis.usgs.core.get_sites(state_code='RI')
    assert len(sites) == 63
def test_core_get_sites_single_site():
    # a single site code yields exactly one entry
    sites = pyhis.usgs.core.get_sites(sites='08068500')
    assert len(sites) == 1
def test_core_get_sites_multiple_sites():
    # a list of site codes yields one entry per code
    sites = pyhis.usgs.core.get_sites(sites=['08068500', '08041500'])
    assert len(sites) == 2
def _count_rows(path):
    """Return the number of rows in the PyTables node at *path*."""
    h5file = tables.openFile(TEST_FILE_PATH, mode="r")
    try:
        table = h5file.getNode(path)
        # count lazily instead of materializing a throwaway list
        number_of_rows = sum(1 for _ in table.iterrows())
    finally:
        # release the file handle even if getNode/iteration fails
        h5file.close()
    return number_of_rows
def _create_test_table(h5file, table_name, description):
    # create a scratch table under /test (parents created as needed)
    test_table = h5file.createTable('/test', table_name, description,
                                    createparents=True)
    return test_table
def _remove_test_file():
    """Delete the on-disk test HDF5 store if it exists (no-op otherwise)."""
    path = TEST_FILE_PATH
    if os.path.exists(path):
        os.remove(path)
|
Python
| 0
|
@@ -1352,17 +1352,17 @@
te) == 1
-0
+1
%0A%0A%0Adef t
|
49da9cb5b9c39baf153d71047536a3c3a2789a24
|
Add user profile and authentication backend settings
|
Rynda/settings.py
|
Rynda/settings.py
|
# Django settings for Rynda project.
import os
SITE_ROOT = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Moscow'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'ru'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(SITE_ROOT, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'l(39jm())q%720m2@p%wz)bep@lrfb*7k+66%+epnwes!pcbh2'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'Rynda.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'Rynda.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(SITE_ROOT, 'templates')
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'south',
'users',
'message',
'core',
'feed',
'api',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
LOCAL_SETTINGS
except NameError:
try:
from local_settings import *
except:
pass
|
Python
| 0
|
@@ -4732,16 +4732,106 @@
cs',%0A)%0A%0A
+AUTHENTICATION_BACKENDS = ('core.backends.IonAuth',)%0AAUTH_PROFILE_MODULE = 'users.users'%0A%0A
# A samp
|
56f6339401fe5f792915279d98f553f3415e2c62
|
Fix module docstring (#163)
|
netdisco/discoverables/harmony.py
|
netdisco/discoverables/harmony.py
|
"""Discover Netgear routers."""
from . import SSDPDiscoverable
class Discoverable(SSDPDiscoverable):
"""Add support for discovering Harmony Hub remotes"""
def get_entries(self):
"""Get all the Harmony uPnP entries."""
return self.find_by_device_description({
"manufacturer": "Logitech",
"deviceType": "urn:myharmony-com:device:harmony:1"
})
|
Python
| 0
|
@@ -9,22 +9,26 @@
ver
-Netgear rou
+Harmony Hub remo
te
-r
s.%22%22
|
d5e909f6f9158ee3d5546c55e10a772c8952adf4
|
Update bounty.py
|
common/bounty.py
|
common/bounty.py
|
import os, pickle, re
from common.safeprint import safeprint
from multiprocessing import Lock
bountyList = []
bountyLock = Lock()
class Bounty:
ip = ""
btc = ""
reward = 0
data = []
def __init__(self, ipAddress, btcAddress, rewardAmount, dataList=[]):
self.ip = ipAddress
self.btc = btcAddress
self.reward = rewardAmount
self.data = dataList
def isValid(self):
try:
safeprint("Testing IP address")
#is IP valid
b = int(self.ip.split(":")[1]) in range(1024,49152)
b = int(self.ip.split(":")[0].split(".")[0]) in range(0,256) and b
b = int(self.ip.split(":")[0].split(".")[1]) in range(0,256) and b
b = int(self.ip.split(":")[0].split(".")[2]) in range(0,256) and b
b = int(self.ip.split(":")[0].split(".")[3]) in range(0,256) and b
if not b:
return False
#ping IP
#is Bitcoin address valid
safeprint("Testing Bitcoin address")
address = str(self.btc)
#The following is a soft check
#A deeper check will need to be done in order to assure this is correct
if not re.match(re.compile("^[a-zA-Z1-9]{27,35}$"),address):
return False
#is reward valid
safeprint("Testing reward")
b = int(self.reward)
return (b >= 0)
except:
return False
def isPayable(self):
#check if address has enough
return False
def verify(string):
test = pickle.loads(string)
try:
safeprint("Testing IP address")
#is IP valid
b = int(test.ip.split(":")[1]) in range(1024,49152)
b = int(test.ip.split(":")[0].split(".")[0]) in range(0,256) and b
b = int(test.ip.split(":")[0].split(".")[1]) in range(0,256) and b
b = int(test.ip.split(":")[0].split(".")[2]) in range(0,256) and b
b = int(test.ip.split(":")[0].split(".")[3]) in range(0,256) and b
if not b:
return False
#ping IP
#is Bitcoin address valid
safeprint("Testing Bitcoin address")
address = str(test.btc)
#The following is a soft check
#A deeper check will need to be done in order to assure this is correct
if not re.match(re.compile("^[a-zA-Z1-9]{27,35}$"),address):
return False
#is reward valid
safeprint("Testing reward")
b = int(test.reward)
return (b >= 0)
except:
return False
def saveToFile():
if os.path.exists("bounties.pickle"):
pickle.dump(boutyList,"bounties.pickle")
return True
return False
def loadFromFile():
if os.path.exists("settings.conf"):
bountyList = pickle.load("bounties.pickle")
return True
return False
def loadBounties():
loadFromFile()
if len(bountyList) is 0:
requestBounties()
return len(bountyList) is not 0
def requestBounties(peerList):
for peer in peerList:
bountyList.extend(requestBounty(peer))
def requestBounty(peer):
safeprint("currently unsupported")
def sendBounty(peer):
safeprint("currently unsupported")
if len(bountyList) is 0:
loadBounties()
#send bounties
dumpBounties()
def addBounty(bounty):
a = False
if type(bounty) == type("aaa"):
bounty = boundy.encode('utf-8')
if type(bounty) == type("a".encode('utf-8')):
a = verify(bounty)
bounty = pickle.loads(bounty)
else:
c = pickle.dumps(bounty,1)
if type(c) == type("aaa"):
c = c.encode('utf-8')
a = verify(c)
b = bounty.isValid()
if a and b:
with bountyLock:
bountyList.append(bounty)
def getBounty(charity, factor):
for bounty in bountyList:
if best is None:
best = bounty
elif best.rewardAmount < bounty.rewardAmount and bounty.isValid() and (isPayable(factor) or charity):
best = bounty
return best
|
Python
| 0.000001
|
@@ -3086,17 +3086,17 @@
y = boun
-d
+t
y.encode
|
b5c5f9f0e97c9273c18936ea57ad866b4865fc68
|
Revert 47344b
|
install/build.py
|
install/build.py
|
import distutils
import os
import shutil
import subprocess
import tempfile
import setuptools
from install import utils
minimum_cuda_version = 6050
minimum_cudnn_version = 2000
def check_cuda_version(compiler, settings):
out = build_and_run(compiler, '''
#include <cuda.h>
#include <stdio.h>
int main(int argc, char* argv[]) {
printf("%d", CUDA_VERSION);
return 0;
}
''', include_dirs=settings['include_dirs'])
if out is None:
utils.print_warning('Cannot check CUDA version')
return False
cuda_version = int(out)
if cuda_version < minimum_cuda_version:
utils.print_warning(
'CUDA version is too old: %d' % cuda_version,
'CUDA v6.5 or newer is required')
return False
return True
def check_cudnn_version(compiler, settings):
out = build_and_run(compiler, '''
#include <cudnn.h>
#include <stdio.h>
int main(int argc, char* argv[]) {
printf("%d", CUDNN_VERSION);
return 0;
}
''', include_dirs=settings['include_dirs'])
if out is None:
utils.print_warning('Cannot check cuDNN version')
return False
cudnn_version = int(out)
if cudnn_version < minimum_cudnn_version:
utils.print_warning(
'cuDNN version is too old: %d' % cudnn_version,
'cuDNN v2 or newer is required')
return False
return True
def build_and_run(compiler, source, libraries=[],
include_dirs=[], library_dirs=[]):
temp_dir = tempfile.mkdtemp()
try:
fname = os.path.join(temp_dir, 'a.cpp')
with open(fname, 'w') as f:
f.write(source)
try:
objects = compiler.compile([fname], output_dir=temp_dir,
include_dirs=include_dirs)
except distutils.errors.CompileError:
return None
try:
compiler.link_executable(objects,
os.path.join(temp_dir, 'a'),
libraries=libraries,
library_dirs=library_dirs)
except (distutils.errors.LinkError, TypeError):
return None
try:
out = subprocess.check_output(os.path.join(temp_dir, 'a'))
return out
except Exception:
return None
finally:
shutil.rmtree(temp_dir, ignore_errors=True)
|
Python
| 0.000001
|
@@ -1899,32 +1899,104 @@
e%0A%0A try:%0A
+ postargs = %5B'/MANIFEST'%5D if sys.platform == 'win32' else %5B%5D%0A
comp
@@ -2207,24 +2207,86 @@
library_dirs
+,%0A extra_postargs=postargs
)%0A ex
|
9e9bc5223c49bc51dbfd266f5dc89f2874016622
|
install rpm macros if RPM bin in system. Closes #56
|
install_meson.py
|
install_meson.py
|
#!/usr/bin/env python3
# Copyright 2013-2014 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script installs Meson. We can't use Meson to install itself
# because of the bootstrap problem. We can't use any other build system
# either becaust that would be just silly.
import os, sys, glob, shutil, gzip
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--prefix', default='/usr/local', dest='prefix',
help='the installation prefix (default: %(default)s)')
parser.add_argument('--destdir', default='', dest='destdir',
help='the destdir (default: %(default)s)')
options = parser.parse_args()
if options.prefix[0] != '/':
print('Error, prefix must be an absolute path.')
sys.exit(1)
if options.destdir == '':
install_root = options.prefix
else:
install_root = os.path.join(options.destdir, options.prefix[1:])
script_dir = os.path.join(install_root, 'share/meson')
module_dir = os.path.join(script_dir, 'modules')
bin_dir = os.path.join(install_root, 'bin')
bin_script = os.path.join(script_dir, 'meson.py')
gui_script = os.path.join(script_dir, 'mesongui.py')
conf_script = os.path.join(script_dir, 'mesonconf.py')
bin_name = os.path.join(bin_dir, 'meson')
gui_name = os.path.join(bin_dir, 'mesongui')
conf_name = os.path.join(bin_dir, 'mesonconf')
man_dir = os.path.join(install_root, 'share/man/man1')
in_manfile = 'man/meson.1'
out_manfile = os.path.join(man_dir, 'meson.1.gz')
in_guimanfile = 'man/mesongui.1'
out_guimanfile = os.path.join(man_dir, 'mesongui.1.gz')
in_confmanfile = 'man/mesonconf.1'
out_confmanfile = os.path.join(man_dir, 'mesonconf.1.gz')
rpmmacros_dir = os.path.join(install_root, 'lib/rpm/macros.d')
symlink_value = os.path.relpath(bin_script, os.path.dirname(bin_name))
guisymlink_value = os.path.relpath(gui_script, os.path.dirname(gui_name))
confsymlink_value = os.path.relpath(conf_script, os.path.dirname(conf_name))
files = glob.glob('*.py')
files += glob.glob('*.ui')
noinstall = ['compile_meson.py', 'install_meson.py', 'run_tests.py', 'run_cross_test.py']
files = [x for x in files if x not in noinstall]
os.makedirs(script_dir, exist_ok=True)
os.makedirs(bin_dir, exist_ok=True)
os.makedirs(man_dir, exist_ok=True)
for f in files:
print('Installing %s to %s.' %(f, script_dir))
outfilename = os.path.join(script_dir, f)
shutil.copyfile(f, outfilename)
shutil.copystat(f, outfilename)
try:
os.remove(bin_name)
except OSError:
pass
print('Creating symlinks %s and %s.' % (bin_name, gui_name))
try:
os.unlink(bin_name)
except FileNotFoundError:
pass
try:
os.unlink(gui_name)
except FileNotFoundError:
pass
try:
os.unlink(conf_name)
except FileNotFoundError:
pass
os.symlink(symlink_value, bin_name)
os.symlink(guisymlink_value, gui_name)
os.symlink(confsymlink_value, conf_name)
print('Installing manfiles to %s.' % man_dir)
open(out_manfile, 'wb').write(gzip.compress(open(in_manfile, 'rb').read()))
open(out_confmanfile, 'wb').write(gzip.compress(open(in_confmanfile, 'rb').read()))
open(out_guimanfile, 'wb').write(gzip.compress(open(in_guimanfile, 'rb').read()))
print('Installing modules to %s.' % module_dir)
if os.path.exists('modules/__pycache__'):
shutil.rmtree('modules/__pycache__')
if os.path.exists(module_dir):
shutil.rmtree(module_dir)
shutil.copytree('modules', module_dir)
if os.path.exists(os.path.join('/usr', rpmmacros_dir)):
print('Installing RPM macros to %s.' % rpmmacros_dir)
outfilename = os.path.join(rpmmacros_dir, 'macros.meson')
os.makedirs(rpmmacros_dir, exist_ok=True)
shutil.copyfile('macros.meson', outfilename)
shutil.copystat('macros.meson', outfilename)
|
Python
| 0
|
@@ -3910,43 +3910,22 @@
sts(
-os.path.join('/usr', rpmmacros_dir)
+'/usr/bin/rpm'
):%0A
|
d3425693d245c9dfa5350017903fc02a11ecd881
|
use width/height as percent base for x/y
|
compiler/lang.py
|
compiler/lang.py
|
import re
def value_is_trivial(value):
if value is None or not isinstance(value, str):
return False
if value[0] == '(' and value[-1] == ')':
value = value[1:-1]
if value == 'true' or value == 'false':
return True
try:
float(value)
return True
except:
pass
if value[0] == '"' and value[-1] == '"':
if value.count('"') == value.count('\\"') + 2:
return True
#print "?trivial", value
return False
class DocumentationString(object):
def __init__(self, text):
self.text = text
class Entity(object):
def __init__(self):
self.doc = None
class Component(Entity):
def __init__(self, name, children):
super(Component, self).__init__()
self.name = name
self.children = children
class Property(Entity):
def __init__(self, type, name, value = None):
super(Property, self).__init__()
self.type = type
self.name = name
self.value = value
def is_trivial(self):
return value_is_trivial(self.value)
class AliasProperty(Entity):
def __init__(self, name, target):
super(AliasProperty, self).__init__()
self.name = name
self.target = target
class EnumProperty(Entity):
def __init__(self, name, values, default):
super(EnumProperty, self).__init__()
self.name = name
self.values = values
self.default = default
class Constructor(Entity):
def __init__(self, args, code):
super(Constructor, self).__init__()
if len(args) != 0:
raise Exception("no arguments for constructor allowed")
self.code = code
class Method(Entity):
def __init__(self, name, args, code, event):
super(Method, self).__init__()
self.name = name
self.args = args
self.code = code
self.event = event
class IdAssignment(Entity):
def __init__(self, name):
super(IdAssignment, self).__init__()
self.name = name
class Assignment(Entity):
re_name = re.compile('<property-name>')
def __init__(self, target, value):
super(Assignment, self).__init__()
self.target = target
def replace_name(m):
dot = target.rfind('.')
name = target.substr(dot + 1) if dot >= 0 else target
return name
self.value = Assignment.re_name.sub(replace_name, value) if isinstance(value, str) else value
def is_trivial(self):
return value_is_trivial(self.value)
class AssignmentScope(Entity):
def __init__(self, target, values):
super(AssignmentScope, self).__init__()
self.target = target
self.values = values
class Behavior(Entity):
def __init__(self, target, animation):
super(Behavior, self).__init__()
self.target = target
self.animation = animation
class Signal(Entity):
def __init__(self, name):
super(Signal, self).__init__()
self.name = name
class ListElement(Entity):
def __init__(self, data):
super(ListElement, self).__init__()
self.data = data
|
Python
| 0.000003
|
@@ -1918,32 +1918,8 @@
et%0A%0A
-%09%09def replace_name(m):%0A%09
%09%09do
@@ -1942,17 +1942,25 @@
('.')%0A%09%09
-%09
+property_
name = t
@@ -1968,24 +1968,18 @@
rget
-.substr(
+%5B
dot + 1
-)
+:%5D
if
@@ -2005,20 +2005,116 @@
t%0A%09%09
-%09return name
+if property_name == 'x':%0A%09%09%09property_name = 'width'%0A%09%09elif property_name == 'y':%0A%09%09%09property_name = 'height'
%0A%0A%09%09
@@ -2153,15 +2153,16 @@
sub(
-replace
+property
_nam
|
08d11dc308db007750fe06ea906264a6ab9f44cd
|
Add logging when cloning repository
|
instance/repo.py
|
instance/repo.py
|
# -*- coding: utf-8 -*-
#
# OpenCraft -- tools to aid developing and hosting free software projects
# Copyright (C) 2015 OpenCraft <xavier@opencraft.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Git repository - Helper functions
"""
# Imports #####################################################################
import git
import tempfile
import shutil
from contextlib import contextmanager
# Functions ###################################################################
@contextmanager
def open_repository(repo_url, ref='master'):
"""
Get a `Git` object for a repository URL and switch it to the branch `ref`
Note that this clones the repository locally
"""
repo_dir_path = tempfile.mkdtemp()
git.repo.base.Repo.clone_from(repo_url, repo_dir_path)
g = git.Git(repo_dir_path)
g.checkout(ref)
yield g
shutil.rmtree(repo_dir_path)
|
Python
| 0.000001
|
@@ -1012,16 +1012,151 @@
nager%0A%0A%0A
+# Logging #####################################################################%0A%0Aimport logging%0Alogger = logging.getLogger(__name__)%0A%0A%0A
# Functi
@@ -1472,16 +1472,106 @@
dtemp()%0A
+ logger.info('Cloning repository %25s (ref=%25s) in %25s...', repo_url, ref, repo_dir_path)%0A%0A
git.
|
a96d22974a160edcceda657522ffab8e61b18dd8
|
Fix for older python
|
chainer/functions/noise/dropconnect.py
|
chainer/functions/noise/dropconnect.py
|
import numpy
import chainer
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _as_mat(x):
if x.ndim == 2:
return x
return x.reshape(len(x), -1)
class Dropconnect(function.Function):
"""Linear unit regularized by dropconnect."""
def __init__(self, ratio, mask=None):
self.ratio = ratio
self.mask = mask
def check_type_forward(self, in_types):
n_in = in_types.size()
type_check.expect(2 <= n_in, n_in <= 3)
x_type, w_type = in_types[:2]
type_check.expect(
x_type.dtype.kind == 'f',
w_type.dtype.kind == 'f',
x_type.ndim >= 2,
w_type.ndim == 2,
type_check.prod(x_type.shape[1:]) == w_type.shape[1],
)
if n_in.eval() == 3:
b_type = in_types[2]
type_check.expect(
b_type.dtype == x_type.dtype,
b_type.ndim == 1,
b_type.shape[0] == w_type.shape[0],
)
def forward(self, inputs):
scale = inputs[1].dtype.type(1. / (1 - self.ratio))
xp = cuda.get_array_module(*inputs)
mask_shape = (inputs[0].shape[0], *inputs[1].shape)
if self.mask is None:
if xp == numpy:
self.mask = xp.random.rand(*mask_shape) >= self.ratio
else:
self.mask = xp.random.rand(*mask_shape,
dtype=numpy.float32) >= self.ratio
elif isinstance(self.mask, chainer.Variable):
self.mask = self.mask.data
x = _as_mat(inputs[0])
W = inputs[1] * scale * self.mask
# ijk,ik->ij
y = xp.matmul(W, x[:, :, None])
y = y.reshape(y.shape[0], y.shape[1]).astype(x.dtype, copy=False)
if len(inputs) == 3:
b = inputs[2]
y += b
return y,
def backward(self, inputs, grad_outputs):
scale = inputs[1].dtype.type(1. / (1 - self.ratio))
x = _as_mat(inputs[0])
W = inputs[1] * scale * self.mask
gy = grad_outputs[0]
xp = cuda.get_array_module(*inputs)
# ij,ijk->ik
gx = xp.matmul(gy[:, None, :], W).reshape(inputs[0].shape)
gx = gx.astype(x.dtype, copy=False)
# ij,ik,ijk->jk
gW = (gy[:, :, None] * x[:, None, :] * self.mask).sum(0) * scale
gW = gW.astype(W.dtype, copy=False)
if len(inputs) == 3:
gb = gy.sum(0)
return gx, gW, gb
else:
return gx, gW
def dropconnect(x, W, b=None, ratio=.5, train=True, mask=None):
"""Linear unit regularized by dropconnect.
Dropconnect drops weight matrix elements randomly with probability
``ratio`` and scales the remaining elements by factor ``1 / (1 - ratio)``.
It accepts two or three arguments: an input minibatch ``x``, a weight
matrix ``W``, and optionally a bias vector ``b``. It computes
:math:`Y = xW^\\top + b`.
In testing mode, zero will be used as dropconnect ratio instead of
``ratio``.
Notice:
This implementation cannot be used for reproduction of the paper.
There is a differences between the current implementation and the
original version dropconnect.
The original version uses sampling with gaussian distribution before
passing activation function, the current implementation averages
before activation.
Args:
x (chainer.Variable or :class:`numpy.ndarray` or cupy.ndarray):
Input variable. Its first dimension is assumed
to be the *minibatch dimension*. The other dimensions are treated
as concatenated one dimension whose size must be ``N``.
W (~chainer.Variable): Weight variable of shape ``(M, N)``.
b (~chainer.Variable): Bias variable (optional) of shape ``(M,)``.
ratio (float):
Dropconnect ratio.
If ``mask`` is not ``None``, this value is ignored.
train (bool):
If ``True``, executes dropconnect.
Otherwise, dropconnect function works as a linear function.
mask (chainer.Variable or :class:`numpy.ndarray` or cupy.ndarray):
If ``None``, randomized dropconnect mask is generated.
If not ``None``, this value is used as a dropconnect mask.
The mask shape must be ``(M, N)``.
Main purpose of the latter option is debugging.
Returns:
~chainer.Variable: Output variable.
.. seealso:: :class:`~chainer.links.Dropconnect`
"""
if not train:
ratio = 0
if b is None:
return Dropconnect(ratio, mask)(x, W)
else:
return Dropconnect(ratio, mask)(x, W, b)
|
Python
| 0.000006
|
@@ -1206,17 +1206,16 @@
ape%5B0%5D,
-*
inputs%5B1
@@ -1217,24 +1217,69 @@
uts%5B1%5D.shape
+%5B0%5D,%0A inputs%5B1%5D.shape%5B1%5D
)%0A if
@@ -3961,72 +3961,8 @@
io.%0A
- If %60%60mask%60%60 is not %60%60None%60%60, this value is ignored.%0A
|
a2a4a8e4636051fa84a5cfbaf7f4ff796c59171a
|
Add build.parent to api response
|
changes/api/serializer/models/build.py
|
changes/api/serializer/models/build.py
|
from changes.api.serializer import Serializer, register
from changes.constants import Result, Status
from changes.models.build import Build
@register(Build)
class BuildSerializer(Serializer):
def serialize(self, instance):
# TODO(dcramer): this shouldnt be calculated at runtime
last_5_builds = list(Build.query.filter_by(
result=Result.passed,
status=Status.finished,
project=instance.project,
).order_by(Build.date_finished.desc())[:3])
if last_5_builds:
avg_build_time = sum(
b.duration for b in last_5_builds
if b.duration
) / len(last_5_builds)
else:
avg_build_time = None
data = instance.data or {}
backend_details = data.get('backend')
if backend_details:
external = {
'link': backend_details['uri'],
'label': backend_details['label'],
}
else:
external = None
return {
'id': instance.id.hex,
'name': instance.label,
'result': instance.result,
'status': instance.status,
'project': instance.project,
'cause': instance.cause,
'author': instance.author,
'parent_revision': {
'sha': instance.parent_revision_sha,
},
'message': instance.message,
'duration': instance.duration,
'estimatedDuration': avg_build_time,
'link': '/builds/%s/' % (instance.id.hex,),
'external': external,
'dateCreated': instance.date_created.isoformat(),
'dateModified': instance.date_modified.isoformat() if instance.date_modified else None,
'dateStarted': instance.date_started.isoformat() if instance.date_started else None,
'dateFinished': instance.date_finished.isoformat() if instance.date_finished else None,
}
|
Python
| 0.000001
|
@@ -1005,32 +1005,254 @@
xternal = None%0A%0A
+ if instance.parent_id:%0A parent = %7B%0A 'id': instance.parent_id.hex,%0A 'link': '/builds/%25s/' %25 (instance.parent_id.hex,),%0A %7D%0A else:%0A parent = None%0A%0A
return %7B
@@ -1619,16 +1619,46 @@
%7D,%0A
+ 'parent': parent,%0A
|
5a0edac271645f17e6989020aac77e65f29f0749
|
Fix typo.
|
webpy_helpers.py
|
webpy_helpers.py
|
'''
web.py Custom HTTP Error Response Helpers
Helper classes extending web.py's HTTPError class to set custom error
response bodies and headers, including JSON bodies and headers appropriate
for use in JSON based APIs.
'''
import web
try:
import simplejson as json
except ImportError:
import json
#### Util function and metaclass to conver HTML error classes to JSON
def _make_json_init(init_f):
"""Wrap an error class __init__ function and prep to return JSON, not HTML."""
def __init__(
self,
message=None,
headers={"Content-type":"application/json"}
):
if not message:
message = {"message":self.status[1]}
if isinstance(message, basestring):
message = {"message":unicode(message)}
init_f(self, json.dumps(message), headers)
return __init__
class _JsonHttpErrorMeta(type):
'''Metaclass to create error subclass with JSON response body from HTTPError subclass.'''
def __new__(self, name, bases, dir):
overrides = {
'__init__': _make_json_init(bases[0].__init__),
'__doc__': "%s with JSON response body" % bases[0].__doc__,
}
# override __init__ and __doc__ of base class, but only if not explicitly set in subclass
overrides.update(dir)
return type.__new__(self, name, bases, overrides)
#### API
class HTTPError(web.HTTPError):
"""Base class for customized error message bodies"""
message = None
def __init__(self,message=None,headers={"Content-type":"text/html"}):
if message == None:
message = self.message if self.message else self.status[1]
web.HTTPError.__init__(self, " ".join(self.status), headers, message)
class BadRequest(HTTPError):
'''Allow customized messages on 400 errors'''
status = "400", "Bad Request"
class JsonBadRequest(BadRequest):
__metaclass__ = _JsonHttpErrorMeta
class Unauthorized(HTTPError):
'''Allow customized messages on 401 errors'''
status = "401", "Unauthorized"
class JsonUnauthorized(Unauthorized):
__metaclass__ = _JsonHttpErrorMeta
class Forbidden(HTTPError):
'''Allow customized messages on 403 errors'''
status = "403", "Forbidden"
class JsonForbidden(Forbidden):
__metaclass__ = _JsonHttpErrorMeta
class NotFound(HTTPError):
'''Allow customized messages on 404 errors'''
status = "404", "Not Found"
class JsonNotFound(NotFound):
__metaclass__ = _JsonHttpErrorMeta
class MethodNotAllowed(HTTPError):
'''Allow customized messages on 405 errors'''
status = "405", "Method Not Allowed"
class JsonMethodNotAllowed(MethodNotAllowed):
__metaclass__ = _JsonHttpErrorMeta
# for web.py compat
NoMethod = MethodNotAllowed
JsonNoMethod = JsonMethodNotAllowed
class Gone(HTTPError):
'''Allow customized messages on 410 errors'''
status = "410", "Gone"
class JsonGone(Gone):
__metaclass__ = _JsonHttpErrorMeta
class Conflict(HTTPError):
'''Allow customized messages on 409 errors'''
status = "409", "Conflict"
class JsonConflict(Conflict):
__metaclass__ = _JsonHttpErrorMeta
class UnsupportedMediaType(HTTPError):
'''Allow customized messages on 415 errors'''
status = "415", "Unsupported Media Type"
class JsonUnsupportedMediaType(UnsupportedMediaType):
__metaclass__ = _JsonHttpErrorMeta
class RequestedRangeNotSatisfiable(HTTPError):
'''Allow customized messages on 415 errors'''
status = "416", "Requested Range Not Satisfiable"
class JsonRequestedRangeNotSatisfiable(RequestedRangeNotSatisfiable):
__metaclass__ = _JsonHttpErrorMeta
class InternalServerError(HTTPError):
'''Allow customized messages on 500 errors'''
status = "500", "Internal Server Error"
class JsonInternalServerError(InternalServerError):
__metaclass__ = _JsonHttpErrorMeta
# web.py compat
InternalError = InternalServerError
JsonInternalError = JsonInternalServerError
|
Python
| 0.001604
|
@@ -339,16 +339,17 @@
o conver
+t
HTML er
|
74cf1930f5c5df03f7d30657eb41ae0c2fc74f8e
|
Change Indent level
|
SentenceParser.py
|
SentenceParser.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import pandas as pd
import os.path
import logging
from sklearn.feature_extraction.text import CountVectorizer
from nltk.corpus import stopwords
from bs4 import BeautifulSoup
import sys
default_stdout = sys.stdout
default_stderr = sys.stderr
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = default_stdout
sys.stderr = default_stderr
logger = logging.getLogger(__name__)
def printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█'):
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
sys.stdout.write('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix))
sys.stdout.flush()
class SentenceParser:
regex_str = [
r'<[^>]+>', # HTML tags
r'(?:@[\w_]+)', # @-mentions
r"(?:\#+[\w_]+[\w\'_\-]*[\w_]+)", # hash-tags
r'http[s]?://(?:[a-z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-f][0-9a-f]))+', # URLs
r'(?:(?:\d+,?)+(?:\.?\d+)?)', # numbers
r"(?:[a-z][a-z'\-_]+[a-z])", # words with - and '
r'(?:[\w_]+)', # other words
r'(?:\S)' # anything else
]
def __init__(self, loggingLevel = 20):
self.data = None
logging.basicConfig(level=loggingLevel)
pass
def readfile(self, filepath, filetype, encod ='ISO-8859-1', header =None):
logger.info('Start reading File')
if not os.path.isfile(filepath):
logger.error("File Not Exist!")
sys.exit()
if filetype == 'csv':
df = pd.read_csv(filepath, encoding=encod, header =header)
elif filetype == 'json':
df = pd.read_json(filepath, encoding=encod, lines=True)
elif filetype == 'xlsx':
df = pd.read_excel(filepath, encoding=encod, header =header)
else:
logger.error("Extension Type not Accepted!")
sys.exit()
logger.debug(df)
self.data = df
def importdata(self, data):
logger.info('Import DataFrame')
if isinstance(data, pd.core.frame.DataFrame):
self.data = data
else:
logger.error("Data Type not Accepted! Please use pandas.core.frame.DataFrame")
sys.exit()
def dfmerge(self, columns, name):
logger.info('Merge headers %s to %s', str(columns), name)
self.data[name] = ''
for header in columns:
self.data[name] += ' ' + self.data[header]
def splitbycolumn(self,column, reset_index = False):
logger.info("Start Spliting data through the column values")
mylist = self.data[column].unique()
print "Unique Values: " + str(mylist)
result = {}
printProgressBar(0, mylist.shape[0], prefix='Progress:', suffix='Complete', length=50)
idx =0
for row in mylist:
if reset_index:
result[row] = self.data.loc[self.data[column] == row].reset_index(drop = True)
else:
result[row] = self.data.loc[self.data[column] == row]
printProgressBar(idx+1, mylist.shape[0], prefix='Progress:', suffix='Complete', length=50)
idx += 1
return result
def get_all_headers(self):
return list(self.data.columns.values)
def get_column(self,column):
return self.data[column].values.tolist()
def processtext(self, column, removeSymbol = True, remove_stopwords=False):
logger.info("Start Data Cleaning...")
self.data[column] = self.data[column].str.replace(r'[\n\r\t]+', ' ')
# Remove URLs
self.data[column] = self.data[column].str.replace(self.regex_str[3],' ')
tempcol = self.data[column].values.tolist()
stops = set(stopwords.words("english"))
# This part takes a lot of times
printProgressBar(0, len(tempcol), prefix='Progress:', suffix='Complete', length=50)
for i in range(len(tempcol)):
row = BeautifulSoup(tempcol[i],'html.parser').get_text()
if removeSymbol:
row = re.sub('[^a-zA-Z0-9]', ' ', row)
words = row.split()
if remove_stopwords:
words = [w for w in words if not w in stops and not w.replace('.', '', 1).isdigit()]
row = ' '.join(words)
tempcol[i] = row.lower()
printProgressBar(i+1, len(tempcol), prefix='Progress:', suffix='Complete', length=50)
print "\n"
return tempcol
def create_vectorizer(self, text, max_features = 1000):
logger.info("Creating Counting Vectorizer...")
self.vectorizer = CountVectorizer(analyzer = "word",
tokenizer = None,
preprocessor = None,
stop_words = None,
max_features = max_features)
data_vector = self.vectorizer.fit_transform(text)
data_vector = data_vector.toarray()
vocab = self.vectorizer.get_feature_names()
self.data_df = pd.DataFrame(data=data_vector, columns=vocab)
return self.data_df
def get_top(self):
return self.data_df.sum().sort_values(ascending=False)
if __name__ == '__main__':
SP = SentenceParser(10)
SP.readfile('../NVIDIA_TEMP/dataset/nvbugs.json','json')
SP.importdata(SP.data)
SP.dfmerge(['Module','Description','Synopsis'],'X')
# print SP.processtext('X', True, False)[0]
text = SP.processtext('X', True, True)
print SP.create_vectorizer(text)
print SP.get_top()[0:20]
print SP.splitbycolumn('Module').values()[0]
|
Python
| 0.000002
|
@@ -3028,17 +3028,20 @@
-%09
+
if reset
@@ -3053,33 +3053,36 @@
x:%0D%0A
-%09
+
result%5Brow%5D = se
@@ -3180,9 +3180,12 @@
+
-%09
+
resu
@@ -3331,32 +3331,106 @@
e', length=50)%0D%0A
+ print %22%5CnThe Shape of %22+ row + %22 is %22+str(result%5Brow%5D.shape)%0D%0A
idx
|
39154c1c5dcb192079060083ecc0a97e776cee3d
|
Fix bug in redirect
|
website/views.py
|
website/views.py
|
import bcrypt
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AuthenticationForm, UserCreationForm
from django.contrib.auth.models import User
from django.shortcuts import render, redirect
from website.models import UndergroundComptoir
from .forms import AnonymousForm, RegisteredForm
def index(req):
# Homepage of the website
# If a form was posted
if "form_type" in req.POST.keys():
username = req.POST.get("username")
# Anonymous registration
if req.POST["form_type"] == "anonymous":
password = getattr(settings, "ANONYMOUS_PASSWORD", None)
# If username does not exist,
# create it with default anonymous password
user, created = User.objects.get_or_create(username=username)
if created:
print("user created")
user.set_password(password)
user.save()
elif req.POST["form_type"] == "registered":
password = req.POST.get("password")
print(username, password)
user = authenticate(username=username, password=password)
if user is not None and user.is_authenticated():
login(req, user)
return render(req, "website/index.html", {
'anonymousForm': AnonymousForm(),
'registeredForm': RegisteredForm(),
})
@login_required(login_url="index")
def underground_comptoir(req, label):
    """Display (or lazily create) the comptoir named *label*.

    The optional key, taken from POST or GET, must hash to the stored
    keyprint for access to be granted; a comptoir that does not exist yet
    is created with a keyprint derived from the supplied key (or None).
    """
    # First, key provided or not?
    if "key" in req.POST.keys():
        key = req.POST["key"]
    elif "key" in req.GET.keys():
        key = req.GET["key"]
    else:
        key = None
    # First, see if comptoir exists
    try:
        comptoir = UndergroundComptoir.objects.get(label=label)
        # bcrypt embeds the salt in the stored hash, so hashing the
        # candidate key with the stored value reproduces the stored value
        # exactly when the key is correct.
        if key is not None:
            salt = comptoir.keyprint
            keyprint = bcrypt.hashpw(key.encode("utf-8"), salt.encode("utf-8"))
        else:
            keyprint = None
        # NOTE(review): hashpw returns bytes while keyprint is presumably
        # stored as text — confirm encoding is consistent on save/compare.
        if keyprint != comptoir.keyprint:
            # TODO error message
            # TODO handle error correctly
            # (redirect to home is NOT a reliable way to do)
            return redirect("index")
    # If not
    except ObjectDoesNotExist:
        # We create it
        if key is None:
            keyprint = None
        else:
            # BUG fix: the original called the module itself (`bcrypt(...)`)
            # and a trailing comma turned the result into a 1-tuple; hash
            # the key properly with a fresh salt instead.
            keyprint = bcrypt.hashpw(key.encode("utf-8"), bcrypt.gensalt())
        comptoir = UndergroundComptoir(
            label=label,
            keyprint=keyprint,
        )
        comptoir.save()
    # We want to show the last 50 messages, ordered most-recent-last
    messages = reversed(comptoir.messages.order_by('-timestamp')[:50])
    return render(req, "website/comptoir.html", {
        'comptoir': comptoir,
        'messages': messages,
    })
# Authentication views
def register(req):
    """Handle sign-up: validate the form, create the user, log them in."""
    reg_form = UserCreationForm(req.POST or None, label_suffix='')
    if not reg_form.is_valid():
        # TODO better
        print(reg_form.errors)
        return redirect("index")
    # Register the new user.
    new_user = reg_form.save()
    # Re-fetch through authenticate() so the auth backend is attached.
    new_user = authenticate(username=new_user.username,
                            password=req.POST["password1"])
    # Log the new user in.
    login(req, new_user)
    return redirect("index")
|
Python
| 0
|
@@ -1365,24 +1365,146 @@
(req, user)%0A
+ next_page = req.POST.get(%22next%22)%0A if next_page is not None and next_page != %22%22:%0A return redirect(next_page)%0A
return r
|
540dedd3f64687aeedd57280408e452da4034ace
|
remove unused import.
|
website/views.py
|
website/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
from django.views.generic import View
from django.db import transaction
from django.contrib import messages
from dash.models import Author
from rest.models import RemoteNode
from .forms import ProfileForm
from .forms import UserRegisterForm
import requests
import json
from requests.auth import HTTPBasicAuth
import uuid
# Create your views here.
@login_required(login_url="login/")
def home(request):
    # Landing page; only reachable by authenticated users.
    return render(request, "home.html")
def register_success(request):
    # Static confirmation page shown after a successful registration.
    return render(request, "register_success.html")
@login_required(login_url="login/")
def friend_requests(request):
    """Show the pending friend-requests page; only GET is allowed."""
    if request.method != 'GET':
        return HttpResponse(status=405)
    return render(request, 'requests.html')
@login_required(login_url="login/")
def friends(request):
    """Show the friends page; only GET is allowed."""
    if request.method != 'GET':
        return HttpResponse(status=405)
    return render(request, 'friends.html')
class UserRegisterForm(View):
    # NOTE(review): this view class shadows the imported form class of the
    # same name.  `form_class` below still resolves to the imported form,
    # because the class body executes before this name is rebound, but the
    # collision is fragile — consider renaming the view.
    form_class = UserRegisterForm
    template_name = 'register.html'

    def get(self, request):
        # Display an empty registration form.
        form = self.form_class(None)
        return render(request, self.template_name, {'form': form})

    def post(self, request):
        # Validate the submitted form; on success create a deactivated
        # User and its Author profile, then show the success page.
        form = self.form_class(request.POST)
        if form.is_valid():
            user = User()
            user.username = form.cleaned_data['username']
            user.set_password(form.cleaned_data['password'])
            user.is_active = False  # Need admin to activate
            user.save()
            author = Author()
            author.user = user
            author.host = 'http://' + request.get_host()
            # The id is the objects URI
            author.id = 'http://' + request.get_host() + '/author/' +\
                uuid.uuid4().hex
            # URL is the same as the id -- So says the Hindle
            author.url = author.id
            author.save()
            return render(request, "register_success.html")
        # Invalid form: redisplay with errors.
        return render(request, self.template_name, {'form': form})
@login_required(login_url="login/")
@transaction.atomic
def update_profile(request):
    """Edit the logged-in user's Author profile (GET shows, POST saves)."""
    author = request.user.author
    if request.method == 'POST':
        profile_form = ProfileForm(request.POST, instance=author)
        if profile_form.is_valid():
            profile_form.save()
            # TODO: send some verification message
            # TODO: should have an else: send some failure message. possibly
            # not needed.
    else:
        profile_form = ProfileForm(instance=author)
    return render(request, 'profile.html', {'profile_form': profile_form})
@login_required(login_url="login/")
def view_profile(request, id):
    # Show an author's profile page.  Local authors come from the database;
    # authors hosted on a remote node are fetched over HTTP.
    if (request.method == 'GET'):
        host = request.GET.get('host', '')
        # NOTE(review): the literal below presumably identifies the local
        # host; anything else is treated as a remote node — confirm.
        if (host != 'https://cmput404t06.herokuapp.com/dash/'):
            nodes = RemoteNode.objects.all()
            json_profile = {}
            for node in nodes:
                if (node.url == host):
                    if (node.useauth):
                        response = requests.get(node.url + "author/" + str(id), auth=HTTPBasicAuth(node.username, node.password))
                    else:
                        # NOTE(review): this branch drops the author id from
                        # the URL yet still sends credentials — looks
                        # inconsistent with the branch above; verify intent.
                        response = requests.get(node.url + "author/", auth=HTTPBasicAuth(node.username, node.password))
                    json_profile = json.loads(response.content)
                    break
            user = request.user
            request_id = user.author.id
            if not json_profile:
                # No node matched the host, or the payload was empty.
                return HttpResponse(status=404)
            else:
                json_profile['url'] = json_profile['host'] + 'author/' + str(json_profile['id'])
                return render(request, 'author.html', {'author': json_profile, 'user_id': request_id, 'request_user': user, 'profile_user': json_profile})
        else:
            # Local author: read straight from the database.
            author = Author.objects.get(id=id)
            display = Author.objects.get(id=id)
            user = request.user
            request_id = user.author.id
            author.url = author.host + 'author/' + str(author.id)
            return render(request, 'author.html', {'author':author, 'user_id':request_id,'request_user':user, 'profile_user':display })
    return HttpResponse(status=405)
|
Python
| 0
|
@@ -295,44 +295,8 @@
tion
-%0Afrom django.contrib import messages
%0A%0Afr
|
469fc6ff805e845ac922a7334612f67f194eb93b
|
Fix flake8 errors in late_command script
|
deployment/puppet/cobbler/templates/scripts/late_command.py
|
deployment/puppet/cobbler/templates/scripts/late_command.py
|
#!/usr/bin/python
#
# Copyright (C) 2011 Mirantis Inc.
#
# Authors: Vladimir Kozhukalov <vkozhukalov@mirantis.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# flake8: noqa
from base64 import b64encode
from cStringIO import StringIO
from gzip import GzipFile
import commands, os
# Shell one-liners formatted into a preseed "late_command".  Each takes a
# base64 payload (gzipped or plain) and either writes it to a destination
# file with the given mode, or pipes it into a shell.
# NOTE(review): "$(unknown)" below looks like it was mangled from a variable
# reference (presumably "$filename") by whatever produced this copy — the
# literal text is preserved byte-for-byte here; confirm against upstream.
TEMPLATE_FILE = (
    "sh -c 'filename=${1}; shift; echo ${0} | base64 --decode | "
    "gunzip -c > $(unknown) && chmod %(mode)s $(unknown)' "
    "%(content64)s %(destfile)s"
)
# Run a gzipped, base64-encoded script through sh.
TEMPLATE_COMMAND = (
    "sh -c 'echo ${0} | base64 --decode | gunzip -c | sh -' %(content64)s"
)
# As TEMPLATE_FILE, but the payload is base64 only (no gzip).
TEMPLATE_FILE_PLAIN = (
    "sh -c 'filename=${1}; shift; echo ${0} | base64 --decode "
    "> $(unknown) && chmod %(mode)s $(unknown)' "
    "%(content64)s %(destfile)s"
)
# As TEMPLATE_COMMAND, but the payload is base64 only (no gzip).
TEMPLATE_COMMAND_PLAIN = (
    "sh -c 'echo ${0} | base64 --decode | sh -' %(content64)s"
)
def base64_gzip(content, gzip=True):
    """Return *content* base64-encoded, optionally gzip-compressed first.

    The resulting single line is suitable for embedding in a preseed file.
    """
    if not gzip:
        return b64encode(content)
    buf = StringIO()
    compressor = GzipFile(fileobj=buf, mode="wb", compresslevel=9)
    compressor.write(content)
    compressor.close()
    return b64encode(buf.getvalue())
def get_content(source, source_method):
    """Return the payload for *source*.

    If source_method is 'file', *source* is a path whose contents are read
    and returned (empty string if the file cannot be read); otherwise
    *source* itself is returned unchanged.
    """
    if source_method == 'file':
        try:
            # `with` guarantees the handle is closed even if read() fails.
            with open(source, 'r') as f:
                return f.read()
        except (IOError, OSError):
            # Missing/unreadable file degrades to an empty payload, matching
            # the original best-effort behaviour (was a bare `except:`).
            # IOError is listed for Python 2 compatibility.
            return ""
    return source
def get_content64(source, source_method, gzip=True):
    """Return the source payload encoded via base64_gzip, stripped."""
    payload = get_content(source, source_method)
    return base64_gzip(payload, gzip).strip()
def late_file(source, destfile, source_method='file', mode='0644', gzip=True):
    """Build the preseed late-command snippet that installs a file.

    Picks the gzipped or plain template and formats it with the encoded
    payload, the destination path and the chmod mode.
    """
    template = TEMPLATE_FILE if gzip else TEMPLATE_FILE_PLAIN
    return template % {
        'mode': mode,
        'content64': get_content64(source, source_method, gzip),
        'destfile': destfile,
    }
def late_command(source, source_method='file', gzip=True):
    """Build the preseed snippet that pipes the encoded source into sh."""
    template = TEMPLATE_COMMAND if gzip else TEMPLATE_COMMAND_PLAIN
    return template % {
        'content64': get_content64(source, source_method, gzip)
    }
|
Python
| 0.00004
|
@@ -732,23 +732,8 @@
s/%3E.
-%0A# flake8: noqa
%0A%0Afr
@@ -820,28 +820,8 @@
ile%0A
-import commands, os%0A
%0A%0ATE
@@ -1412,24 +1412,64 @@
ue):%0A %22%22%22
+Gzip and enconde bas64 provided content%0A
%0A This me
@@ -2027,32 +2027,32 @@
f.close()%0A
-
except:%0A
@@ -2049,16 +2049,26 @@
except
+ Exception
:%0A
|
2413a2042745a00b5a220a753aa46177065f3793
|
bump version to 0.0.3
|
nose_warnings_filters/__init__.py
|
nose_warnings_filters/__init__.py
|
"""
Nose plugin to add warnings filters (turn them into error) using nose.cfg file.
"""
__version__ = '0.0.2'
from nose.plugins import Plugin
import warnings
import sys
# The "builtins" module only exists on Python 3; Python 2 names it
# __builtin__.  The original condition was inverted, attempting to import
# "builtins" on Python 2 (where the import fails) and falling back to the
# unreliable __builtins__ alias on Python 3.
if sys.version_info < (3,):
    import __builtin__ as builtins
else:
    import builtins
class WarningFilter(Plugin):
    """Nose plugin that installs warning filters from the command line.

    Filters are given one per line as pipe-separated fields matching the
    positional arguments of warnings.filterwarnings; the third field names
    a warning category that is resolved on the builtins module.
    """

    def options(self, parser, env):
        """
        Add options to command line.
        """
        super(WarningFilter, self).options(parser, env)
        parser.add_option("--warningfilters",
                          default=None,
                          help="Treat warnings that occur WITHIN tests as errors.")

    def configure(self, options, conf):
        """
        Configure plugin.
        """
        # Guard added: the option defaults to None, and None.split('\n')
        # raised before any filter could be installed.
        if options.warningfilters:
            for opt in options.warningfilters.split('\n'):
                vs = [s.strip() for s in opt.split('|')]
                # Resolve the category name to the actual warning class.
                vs[2] = getattr(builtins, vs[2])
                warnings.filterwarnings(*vs)
        super(WarningFilter, self).configure(options, conf)

    def prepareTestRunner(self, runner):
        """
        Treat warnings as errors.
        """
        return WarningFilterRunner(runner)
class WarningFilterRunner(object):
    """Thin proxy around a nose test runner; delegates run() unchanged."""

    def __init__(self, runner):
        # Keep a reference to the wrapped runner.
        self.runner = runner

    def run(self, test):
        """Run *test* with the wrapped runner and return its result."""
        return self.runner.run(test)
|
Python
| 0.000001
|
@@ -103,17 +103,17 @@
= '0.0.
-2
+3
'%0A%0A%0Afrom
|
f8685d8ca3d4d18ca5895d765185993ed2d5bcd7
|
Fix citizen subscription to report : DatabaseError: current transaction is aborted, commands ignored until end of transaction block
|
django_fixmystreet/fixmystreet/views/reports/subscribers.py
|
django_fixmystreet/fixmystreet/views/reports/subscribers.py
|
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext as _
from django.contrib import messages
from django.db import IntegrityError
from django_fixmystreet.fixmystreet.models import FMSUser
from django_fixmystreet.fixmystreet.models import Report, ReportSubscription
def create(request, report_id):
    """Subscribe a citizen (identified by e-mail) to updates for a report.

    Creates an anonymous FMSUser for the e-mail address if none exists.
    """
    report = get_object_or_404(Report, id=report_id)
    # CREATE USER CITIZEN IF NECESSARY
    try:
        user = FMSUser.objects.get(email=request.REQUEST.get('citizen_email'))
    except FMSUser.DoesNotExist:
        # Add information about the citizen connected if it does not exist
        user = FMSUser.objects.create(username=request.REQUEST.get('citizen_email'), email=request.REQUEST.get('citizen_email'), first_name='ANONYMOUS', last_name='ANONYMOUS', agent=False, contractor=False, manager=False, leader=False)
    # Only create the subscription if one does not exist yet.  Relying on
    # IntegrityError here aborts the surrounding PostgreSQL transaction
    # ("current transaction is aborted, commands ignored..."), so test for
    # existence explicitly instead of catching the constraint violation.
    if not ReportSubscription.objects.filter(subscriber=user, report=report).exists():
        subscriber = ReportSubscription(subscriber=user, report=report)
        subscriber.save()
    messages.add_message(request, messages.SUCCESS, _("You have subscribed from updates successfully"))
    return HttpResponseRedirect(report.get_absolute_url())
def remove(request, report_id):
    """Unsubscribe the citizen identified by e-mail from a report."""
    report = get_object_or_404(Report, id=report_id)
    try:
        user = FMSUser.objects.get(email=request.REQUEST.get('citizen_email'))
    except FMSUser.DoesNotExist:
        # BUG fix: the redirect was built but never returned, so execution
        # fell through with `user` unbound and crashed below.
        return HttpResponseRedirect(report.get_absolute_url())
    try:
        subscription = ReportSubscription.objects.get(subscriber=user, report=report)
        subscription.delete()
        messages.add_message(request, messages.SUCCESS, _("You have unsubscribed from updates successfully"))
    except ReportSubscription.DoesNotExist:
        # No subscription existed; still report success to the user.
        messages.add_message(request, messages.SUCCESS, _("You have unsubscribed from updates successfully"))
    return HttpResponseRedirect(report.get_absolute_url())
|
Python
| 0.000001
|
@@ -955,35 +955,113 @@
READY EXIST%0A
-try
+if not ReportSubscription.objects.filter(subscriber=user, report=report).exists()
:%0A subscr
@@ -1148,215 +1148,9 @@
e()%0A
- messages.add_message(request, messages.SUCCESS, _(%22You have subscribed from updates successfully%22))%0A except IntegrityError:%0A #Do nothing. A subscription for this user already exists...%0A
+%0A
|
9a87f83c7060b66f7f95f2823db11b5e86a4fd67
|
fix #210
|
src/you_get/downloader/dailymotion.py
|
src/you_get/downloader/dailymotion.py
|
#!/usr/bin/env python
__all__ = ['dailymotion_download']
from ..common import *
def dailymotion_download(url, output_dir = '.', merge = True, info_only = False):
    """Download a Dailymotion video given its page URL.

    Scrapes the page for the title and the best available stream URL,
    prints the media info, and downloads unless info_only is set.
    """
    html = get_html(url)
    html = parse.unquote(html).replace('\/', '/')

    title = r1(r'meta property="og:title" content="([^"]+)"', html)
    title = escape_file_path(title)

    # Probe qualities from best to worst; r1 returns a falsy value when the
    # pattern does not match.
    real_url = None
    for quality in ['hd720URL', 'hqURL', 'sdURL']:
        real_url = r1(r',\"' + quality + '\"\:\"([^\"]+?)\",', html)
        if real_url:
            break
    if not real_url:
        # BUG fix: previously a page with no matching quality passed
        # None (or an unbound name) on to url_info(); fail explicitly.
        raise IOError('Dailymotion: no video URL found in page')

    type, ext, size = url_info(real_url)
    print_info(site_info, title, type, size)
    if not info_only:
        download_urls([real_url], title, ext, size, output_dir, merge = merge)
site_info = "Dailymotion.com"
# Module-level entry points expected by the you-get downloader dispatcher.
download = dailymotion_download
download_playlist = playlist_not_supported('dailymotion')
|
Python
| 0
|
@@ -166,187 +166,303 @@
-html = get_html(url)%0A html = parse.unquote(html).replace('%5C/', '/')%0A %0A title = r1(r'meta property=%22og:title%22 content=%22(%5B%5E%22%5D+)%22', html)%0A title = escape_file_path(
+%22%22%22Downloads Dailymotion videos by URL.%0A %22%22%22%0A %0A id = match1(url, r'/video/(%5B%5E%5C?%5D+)')%0A embed_url = 'http://www.dailymotion.com/embed/video/%25s' %25 id%0A html = get_content(embed_url)%0A %0A info = json.loads(match1(html, r'var%5Cs*info%5Cs*=%5Cs*(%7B.+%7D),%5Cn'))%0A %0A title = info%5B'
title
-)
+'%5D
%0A
@@ -488,106 +488,151 @@
n %5B'
-hd720URL', 'hqURL', 'sdURL'%5D:%0A real_url = r1(r',%5C%22' + quality + '%5C%22%5C:%5C%22(%5B%5E%5C%22%5D+?)%5C%22,', html)
+stream_h264_hd1080_url', 'stream_h264_hd_url', 'stream_h264_hq_url', 'stream_h264_url', 'stream_h264_ld_url'%5D:%0A real_url = info%5Bquality%5D
%0A
@@ -667,16 +667,21 @@
break%0A
+ %0A
type
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.