| commit (string, 40 chars) | subject (string, 1-3.25k chars) | old_file (string, 4-311 chars) | new_file (string, 4-311 chars) | old_contents (string, 0-26.3k chars) | lang (string, 3 classes) | proba (float64, 0-1) | diff (string, 0-7.82k chars) |
|---|---|---|---|---|---|---|---|
66056c97972011831fb36ce0ae37cc9bd490ddba
|
Swap In New Function
|
web/impact/impact/v1/helpers/program_helper.py
|
web/impact/impact/v1/helpers/program_helper.py
|
from impact.models import Program
from impact.v1.helpers.model_helper import (
FLOAT_FIELD,
INTEGER_FIELD,
ModelHelper,
PK_FIELD,
STRING_FIELD,
)
PROGRAM_FIELDS = {
"id": PK_FIELD,
"name": STRING_FIELD,
"program_family_id": INTEGER_FIELD,
"program_family_name": STRING_FIELD,
"cycle_id": INTEGER_FIELD,
"cycle_name": STRING_FIELD,
"description": STRING_FIELD,
"start_date": STRING_FIELD,
"end_date": STRING_FIELD,
"location": STRING_FIELD,
"program_status": STRING_FIELD,
"currency_code": STRING_FIELD,
"regular_application_fee": FLOAT_FIELD,
"url_slug": STRING_FIELD,
"overview_start_date": STRING_FIELD,
"overview_deadline_date": STRING_FIELD,
}
class ProgramHelper(ModelHelper):
model = Program
@classmethod
def fields(cls):
return PROGRAM_FIELDS
@property
def cycle_name(self):
return self.get_field_value("cycle").name
@property
def program_family_name(self):
return self.get_field_value("program_family").name
|
Python
| 0
|
@@ -916,31 +916,29 @@
elf.
-get_
field_
-value
+element
("cycle"
).na
@@ -933,22 +933,25 @@
("cycle"
-).
+, "
name
+")
\n\n @p
@@ -1017,23 +1017,21 @@
elf.
-get_
field_
-value
+element
("pr
@@ -1047,11 +1047,14 @@
ily"
-).
+, "
name
+")
\n
|
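Decoded, the character-level diff above rewrites both properties to go through a single accessor: `get_field_value(...).name` becomes `field_element(..., "name")`. A minimal sketch of the post-commit property bodies, reconstructed from the diff fragments alone (`field_element` is inferred from the fragments, not shown elsewhere in this row):

```python
# Reconstructed from the diff: "get_field_value(...).name" becomes
# "field_element(..., 'name')" in both properties.
@property
def cycle_name(self):
    return self.field_element("cycle", "name")

@property
def program_family_name(self):
    return self.field_element("program_family", "name")
```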
0b46aed8c55ecf0b6757e6bcc4a73e5e6d2fe77d
|
Fix active field initialization on vlan.
|
neutron/plugins/ml2/drivers/datacom/dcclient/xml_manager/data_structures.py
|
neutron/plugins/ml2/drivers/datacom/dcclient/xml_manager/data_structures.py
|
""" Data structures used to build the XML
"""
import xml.etree.ElementTree as ET
import neutron.plugins.ml2.drivers.datacom.utils as utils
class Pbits(object):
""" Class pbits represents bitmasks (usually from ports)
This class has one property:
bits: Internally this property is implemented as an integer.
It can be set by using either an integer or a list that
will be converted to the corresponding integer.
For example using it with class.bits = [1,3,4] will
result in an integer with the bits 1, 3, 4 set (1101)=13.
To instantiate this class you need the following parameters:
bits (required): int or list.
"""
def __init__(self, bits):
self.bits = bits
def __add__(self, other):
if isinstance(other, Pbits):
self.add_bits(other.bits)
else:
self.add_bits(other)
return self
def __sub__(self, other):
if isinstance(other, Pbits):
self.remove_bits(other.bits)
else:
self.remove_bits(other)
return self
@property
def bits(self):
return self._bits
@bits.setter
def bits(self, bits):
assert type(bits) is int or type(bits) is list
if type(bits) is int:
self._bits = bits
else:
self._bits = sum([1 << (i-1) for i in set(bits)])
@bits.deleter
def bits(self):
self._bits = 0
def as_xml(self):
""" Method that returns the xml form of the object
"""
xml = ET.Element("pbits")
xml.attrib["id0"] = "0"
xml.text = str(self.bits)
return xml
def as_xml_text(self):
return ET.tostring(self.as_xml())
def add_bits(self, bits):
assert type(bits) is int or type(bits) is list
if type(bits) is int:
new_bits = bits
else:
new_bits = sum([1 << (i-1) for i in set(bits)])
self.bits = self.bits | new_bits
def remove_bits(self, bits):
assert type(bits) is int or type(bits) is list
if type(bits) is int:
new_bits = bits
else:
new_bits = sum([1 << (i-1) for i in set(bits)])
self.bits = self.bits & ~ new_bits
class Vlan_global(object):
""" Class vlanglobal represents a VLan.
This class has three properties:
vid: This property is an integer. It is used as the id of the vlan.
ports: This property is a pbits. Ultimatly is used as a binary,
this binary is what defines wich ports asociated with the
vlan. the add_bits and remove_birs methods are used to
change this property hence changing the ports.
name: This property is a string. It is used to refer to the vlan
in a more friendly way, rather then using the vid.
To instantiate this class you need the following parameters:
vid (required): int.
name (optional): string.
ports (optional): Pbits.
"""
# TODO: adicionar checagens de limites nas properties
def __init__(self, vid, name='', ports=None, active=None):
self.vid = vid
if ports:
self.ports = ports
else:
self.ports = Pbits(0)
self.name = name
self.active = True
@property
def active(self):
return self._active
@active.setter
def active(self, active):
assert type(active) is bool
self._active = active
@active.deleter
def active(self):
self._active = False
@property
def name(self):
return self._name
@name.setter
def name(self, name):
assert type(name) is str
self._name = name
@name.deleter
def name(self):
self._name = ''
@property
def vid(self):
return self._vid
@vid.setter
def vid(self, vid):
assert type(vid) is int
assert vid >= utils.MIN_VLAN and vid <= utils.MAX_VLAN
self._vid = vid
@vid.deleter
def vid(self):
self._vid = 0
@property
def ports(self):
return self._ports
@ports.setter
def ports(self, ports):
assert isinstance(ports, Pbits)
self._ports = ports
@ports.deleter
def ports(self, ports):
del self.ports
def as_xml(self):
""" Method that returns the xml form of the object
"""
xml = ET.Element("vlan_global")
xml.attrib["id0"] = str(self.vid)
ET.SubElement(xml, "vid").text = str(self.vid)
if self.active == True:
ET.SubElement(xml, "active").text = "1"
else:
ET.SubElement(xml, "active").text = "0"
if self.name:
ET.SubElement(xml, "name").text = self.name
if self.ports:
pbmp_untagged = ET.SubElement(xml, "pbmp_untagged", {"id0": "0"})
pbmp_untagged.append(self.ports.as_xml())
return xml
def as_xml_text(self):
return ET.tostring(self.as_xml())
class Cfg_data(object):
""" One class to contain them all
This class has one property:
vlans: This property is a vlan_global. The cfg_data is the main
class to the xml, so it will receive everything that is
needed to the xml.
To instantiate this class you need the following parameters:
vlans (optional): list (elements of type Vlan_global).
"""
def __init__(self, vlans=None):
if vlans:
self.vlans = vlans
else:
self.vlans = []
@property
def vlans(self):
return self._vlans
@vlans.setter
def vlans(self, vlans):
assert type(vlans) is list
# first check if every member of the list is a vlan
for vlan in vlans:
assert isinstance(vlan, Vlan_global)
# now create the list and add each vlan
self._vlans = []
for vlan in vlans:
self._vlans.append(vlan)
@vlans.deleter
def vlans(self):
for vlan in self.vlans:
del vlan.active
def as_xml(self):
xml = ET.Element("cfg_data")
for vlan in self.vlans:
xml.append(vlan.as_xml())
return xml
def as_xml_text(self):
return ET.tostring(self.as_xml())
class Interface(object):
""" Class interface represents a switch interface
"""
pass
if __name__ == '__main__':
vlan = Vlan_global(42)
ports = Pbits([2, 3, 6])
vlan.active = True
vlan.ports = ports
c = Cfg_data()
c.vlans = [vlan]
|
Python
| 0
|
@@ -3242,19 +3242,19 @@
active=
-Non
+Tru
e):\n
@@ -3416,19 +3416,21 @@
ctive =
-Tru
+activ
e\n\n @
|
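The `Pbits` docstring's example is easy to verify: `sum(1 << (i - 1) for i in set(bits))` sets one bit per 1-based port index, so `[1, 3, 4]` yields binary 1101. A standalone check of that arithmetic:

```python
# Ports 1, 3 and 4 set bits 0, 2 and 3: 1 + 4 + 8 == 0b1101 == 13,
# matching the "(1101)=13" example in the docstring.
bits = [1, 3, 4]
mask = sum(1 << (i - 1) for i in set(bits))
assert mask == 0b1101 == 13
```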
0ab8381aeefc4492cce6101260d080e603357ae0
|
Use secretmanager v1 API instead of v1beta1
|
event_handler/sources.py
|
event_handler/sources.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hmac
from hashlib import sha1
import os
from google.cloud import secretmanager_v1beta1
PROJECT_NAME = os.environ.get("PROJECT_NAME")
class EventSource(object):
"""
A source of event data being delivered to the webhook
"""
def __init__(self, name, signature_header, verification_func):
self.name = name
self.signature = signature_header
self.verification = verification_func
def github_verification(signature, body):
"""
Verifies that the signature received from the github event is accurate
"""
if not signature:
raise Exception("Github signature is empty")
expected_signature = "sha1="
try:
# Get secret from Cloud Secret Manager
secret = get_secret(PROJECT_NAME, "event-handler", "1")
# Compute the hashed signature
hashed = hmac.new(secret, body, sha1)
expected_signature += hashed.hexdigest()
except Exception as e:
print(e)
return hmac.compare_digest(signature, expected_signature)
def simple_token_verification(token, body):
"""
Verifies that the token received from the event is accurate
"""
if not token:
raise Exception("Token is empty")
secret = get_secret(PROJECT_NAME, "event-handler", "1")
return secret.decode() == token
def get_secret(project_name, secret_name, version_num):
"""
Returns secret payload from Cloud Secret Manager
"""
try:
client = secretmanager_v1beta1.SecretManagerServiceClient()
name = client.secret_version_path(
project_name, secret_name, version_num
)
secret = client.access_secret_version(name)
return secret.payload.data
except Exception as e:
print(e)
def get_source(headers):
"""
Gets the source from the User-Agent header
"""
if "X-Gitlab-Event" in headers:
return "Gitlab"
if "Ce-Type" in headers and "tekton" in headers["Ce-Type"]:
return "Tekton"
if "User-Agent" in headers:
if "/" in headers["User-Agent"]:
return headers["User-Agent"].split("/")[0]
return headers["User-Agent"]
return None
AUTHORIZED_SOURCES = {
"GitHub-Hookshot": EventSource(
"github", "X-Hub-Signature", github_verification
),
"Gitlab": EventSource(
"gitlab", "X-Gitlab-Token", simple_token_verification
),
"Tekton": EventSource(
"tekton", "tekton-secret", simple_token_verification
)
}
|
Python
| 0.000001
|
@@ -655,24 +655,16 @@
tmanager
-_v1beta1
\n\nPROJEC
|
74c3095e553759b05ccc57b7c4ffd291ee9568a2
|
Update get_main_movies_base_data function
|
webs/douban/tasks/get_main_movies_base_data.py
|
webs/douban/tasks/get_main_movies_base_data.py
|
# -*- coding: utf-8 -*-
import requests
import gevent
import models
from gevent.pool import Pool
from helpers import random_str, get_video_douban_ids
from webs.douban import parsers
from config import sqla
types = ['movie', 'tv']
sorts = ['recommend', 'time', 'rank']
tags_dict = {
'tv': ['热门', '美剧', '英剧', '韩剧', '日剧', '国产剧', '港剧', '日本动画'],
'movie': ['热门', '最新', '经典', '可播放', '豆瓣高分', '冷门佳片',
'华语', '欧美', '韩国', '日本', '动作', '喜剧', '爱情',
'科幻', '悬疑', '恐怖', '动画']
}
douban_movie_api_url = 'http://movie.douban.com/j/search_subjects/'
cookies = {
'bid': ''
}
def create_requests_and_save_datas(type, tag, sort):
session = sqla['session']
cookies['bid'] = random_str(11)
params = {
'type': type,
'tag': tag,
'sort': sort,
'page_limit': 2000,
'page_start': 0
}
r = requests.get(
douban_movie_api_url,
params=params,
cookies=cookies,
timeout=20
)
if r.status_code != 200:
return
datas = parsers.douban_api.start_parser(r.text)
for data in datas:
douban_id = data.get('douban_id')
if douban_id in video_douban_ids:
continue
data['subtype'] = type
data['crawler_tag'] = tag
data['crawler_sort'] = sort
if type == 'movie':
video = models.Movie(**data)
elif type == 'tv' and tag == '日本动画':
video = models.Animation(**data)
else:
video = models.TV(**data)
session.add(video)
session.commit()
video_douban_ids.add(douban_id)
print(','.join(
[douban_id, data.get('title')]
))
def task(pool_number):
video_douban_ids = set(get_video_douban_ids())
global video_douban_ids
pool = Pool(pool_number)
for type in types:
for tag in tags_dict[type]:
for sort in sorts:
pool.spawn(
create_requests_and_save_datas,
type=type,
tag=tag,
sort=sort
)
pool.join()
return list(video_douban_ids)
|
Python
| 0.000001
|
@@ -1732,16 +1732,63 @@
l_number
+, types=types, tags_dict=tags_dict, sorts=sorts
):\n v
|
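The diff turns the module globals `types`, `tags_dict` and `sorts` into keyword defaults of `task`. Default values are evaluated once, when `def` executes, which pins the objects regardless of later rebinding of the global names; a small illustration of that rule (all names hypothetical):

```python
config = {"retries": 3}

def worker(job, config=config):
    # The default captured the dict object at definition time.
    return job, config["retries"]

config = {"retries": 5}   # rebinding the global name...
print(worker("sync"))     # ...does not affect the default: ('sync', 3)
```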
fe6d37efa59cbf222dd703a52456de2aa628fecf
|
Update random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
# Time: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
Python
| 0
|
@@ -819,16 +819,30 @@
random\n
+import bisect\n
\n\nclass
|
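This solution draws a uniform target below the total weight and maps it to an index with a binary search over the running prefix sums, which is why the diff adds the missing `import bisect`. A worked trace for w = [1, 3]:

```python
import bisect
import random

w = [1, 3]
prefix = []
total = 0
for weight in w:
    total += weight
    prefix.append(total)                     # prefix == [1, 4]

target = random.randint(0, prefix[-1] - 1)   # uniform over 0..3
index = bisect.bisect_right(prefix, target)  # 0 -> index 0; 1, 2, 3 -> index 1
assert index in (0, 1)                       # index 1 is picked 3x as often
```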
c261c85fb3f3858222577974e97fde2f3713cdc5
|
Allow Editors to update People
|
src/ggrc_basic_permissions/roles/Editor.py
|
src/ggrc_basic_permissions/roles/Editor.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
scope = "System"
description = """
This role grants a user basic object creation and editing permission.
"""
permissions = {
"read": [
"Audit",
"Categorization",
"Category",
"ControlCategory",
"ControlAssertion",
"Control",
"ControlAssessment",
"CustomAttributeDefinition",
"CustomAttributeValue",
"Issue",
"DataAsset",
"Directive",
"Contract",
"Policy",
"Regulation",
"Standard",
"Document",
"Facility",
"Help",
"Market",
"Objective",
"ObjectDocument",
"ObjectOwner",
"ObjectPerson",
"Option",
"OrgGroup",
"Vendor",
"PopulationSample",
"Product",
"Project",
"Relationship",
"RelationshipType",
"SectionBase",
"Section",
"Clause",
"SystemOrProcess",
"System",
"Process",
"Person",
"Program",
"Role",
"Request",
"Response",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Context",
{
"type": "BackgroundTask",
"terms": {
"property_name": "modified_by",
"value": "$current_user"
},
"condition": "is"
},
],
"create": [
"Audit",
"Workflow"
"Categorization",
"Category",
"ControlCategory",
"ControlAssertion",
"Control",
"ControlAssessment",
"Issue",
"DataAsset",
"Directive",
"Contract",
"Policy",
"Regulation",
"Standard",
"Document",
"Facility",
"Help",
"Market",
"Objective",
"ObjectDocument",
"ObjectPerson",
"Option",
"OrgGroup",
"Vendor",
"PopulationSample",
"Product",
"Project",
"Relationship",
"RelationshipType",
"SectionBase",
"Section",
"Clause",
"SystemOrProcess",
"System",
"Process",
"ObjectOwner",
"Person",
"Program",
"Role",
"UserRole",
"Request",
"Response",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Context",
{
"type": "BackgroundTask",
"terms": {
"property_name": "modified_by",
"value": "$current_user"
},
"condition": "is"
},
],
"view_object_page": [
"__GGRC_ALL__"
],
"update": [
"Audit",
"Workflow"
"Categorization",
"Category",
"ControlCategory",
"ControlAssertion",
"Control",
"ControlAssessment",
"Issue",
"DataAsset",
"Directive",
"Contract",
"Policy",
"Regulation",
"Standard",
"Document",
"Facility",
"Help",
"Market",
"Objective",
"ObjectDocument",
"ObjectPerson",
"Option",
"OrgGroup",
"Vendor",
"PopulationSample",
"Product",
"Project",
"Relationship",
"RelationshipType",
"SectionBase",
"Section",
"Clause",
"SystemOrProcess",
"System",
"Process",
"ObjectOwner",
"Program",
"Role",
"UserRole",
"Request",
"Response",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Context",
{
"type": "BackgroundTask",
"terms": {
"property_name": "modified_by",
"value": "$current_user"
},
"condition": "is"
},
],
"delete": [
"Audit",
"Workflow"
"Categorization",
"Category",
"ControlCategory",
"ControlAssertion",
"Control",
"ControlAssessment",
"Issue",
"DataAsset",
"Directive",
"Contract",
"Policy",
"Regulation",
"Standard",
"Document",
"Facility",
"Help",
"Market",
"Objective",
"ObjectDocument",
"ObjectPerson",
"Option",
"OrgGroup",
"Vendor",
"PopulationSample",
"Product",
"Project",
"Relationship",
"RelationshipType",
"SectionBase",
"Section",
"Clause",
"SystemOrProcess",
"System",
"Process",
"ObjectOwner",
"Program",
"Role",
"UserRole",
"Request",
"Response",
"DocumentationResponse",
"InterviewResponse",
"PopulationSampleResponse",
"Context",
{
"type": "BackgroundTask",
"terms": {
"property_name": "modified_by",
"value": "$current_user"
},
"condition": "is"
},
]
}
|
Python
| 0
|
@@ -3439,32 +3439,50 @@
"ObjectPerson",\n
+ "Person",\n
"Option"
|
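Separately from this diff, note that the `create`, `update` and `delete` lists each contain `"Workflow"` without a trailing comma, so Python's implicit string-literal concatenation silently fuses it with the next entry, which is presumably unintended. A demonstration of the pitfall:

```python
permissions = [
    "Audit",
    "Workflow"           # missing comma: the two literals concatenate
    "Categorization",
]
assert permissions == ["Audit", "WorkflowCategorization"]
```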
ea660e370b05cfe34dc819211b2f28992a924194
|
Update random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
Python/random-pick-with-weight.py
|
# Time: O(logn)
# Space: O(n)
# Given an array w of positive integers,
# where w[i] describes the weight of index i,
# write a function pickIndex which randomly picks an index in proportion to its weight.
#
# Note:
#
# 1 <= w.length <= 10000
# 1 <= w[i] <= 10^5
# pickIndex will be called at most 10000 times.
# Example 1:
#
# Input:
# ["Solution","pickIndex"]
# [[[1]],[]]
# Output: [null,0]
# Example 2:
#
# Input:
# ["Solution","pickIndex","pickIndex","pickIndex","pickIndex","pickIndex"]
# [[[1,3]],[],[],[],[],[]]
# Output: [null,0,1,1,1,0]
# Explanation of Input Syntax:
#
# The input is two lists: the subroutines called and their arguments.
# Solution's constructor has one argument, the array w. pickIndex has no arguments.
# Arguments are always wrapped with a list, even if there aren't any.
import random
import bisect
class Solution(object):
def __init__(self, w):
"""
:type w: List[int]
"""
self.__prefix_sum = list(w)
for i in xrange(1, len(w)):
self.__prefix_sum[i] += self.__prefix_sum[i-1]
def pickIndex(self):
"""
:rtype: int
"""
target = random.randint(0, self.__prefix_sum[-1]-1)
return bisect.bisect_right(self.__prefix_sum, target)
# Your Solution object will be instantiated and called as such:
# obj = Solution(w)
# param_1 = obj.pickIndex()
|
Python
| 0
|
@@ -1,16 +1,47 @@
# Time:
+ ctor: O(n)\n# pickIndex:
O(logn)
|
056a1b769db7f05402b41ffdcb565585db06bf97
|
Update top-k-frequent-elements.py
|
Python/top-k-frequent-elements.py
|
Python/top-k-frequent-elements.py
|
# Time: O(n)
# Space: O(n)
# Given a non-empty array of integers,
# return the k most frequent elements.
#
# For example,
# Given [1,1,1,2,2,3] and k = 2, return [1,2].
#
# Note:
# You may assume k is always valid,
# 1 <= k <= number of unique elements.
# Your algorithm's time complexity must be better
# than O(n log n), where n is the array's size.
from random import randint
class Solution(object):
def topKFrequent(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: List[int]
"""
counts = collections.defaultdict(int)
for i in nums:
counts[i] += 1
p = []
for key, val in counts.iteritems():
p.append((val, key))
self.kthElement(p, k);
result = []
for i in xrange(k):
result.append(p[i][1])
return result
def kthElement(self, nums, k):
def PartitionAroundPivot(left, right, pivot_idx, nums):
pivot_value = nums[pivot_idx][0]
new_pivot_idx = left
nums[pivot_idx], nums[right] = nums[right], nums[pivot_idx]
for i in xrange(left, right):
if nums[i][0] > pivot_value:
nums[i], nums[new_pivot_idx] = nums[new_pivot_idx], nums[i]
new_pivot_idx += 1
nums[right], nums[new_pivot_idx] = nums[new_pivot_idx], nums[right]
return new_pivot_idx
left, right = 0, len(nums) - 1
while left <= right:
pivot_idx = randint(left, right)
new_pivot_idx = PartitionAroundPivot(left, right, pivot_idx, nums)
if new_pivot_idx == k - 1:
return
elif new_pivot_idx > k - 1:
right = new_pivot_idx - 1
else: # new_pivot_idx < k - 1.
left = new_pivot_idx + 1
|
Python
| 0
|
@@ -728,17 +728,16 @@
, key))\n
-\n
@@ -843,17 +843,16 @@
[i][1])\n
-\n
|
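The quickselect in `kthElement` partially orders `p` so that its first k pairs are the k highest counts in average O(n) time (note the original file also relies on `collections` without importing it, and uses Python 2's `xrange`/`iteritems`). The O(n log n) sort below is only a readable cross-check of the expected answer for the example in the header comment:

```python
import collections

nums, k = [1, 1, 1, 2, 2, 3], 2
counts = collections.Counter(nums)                     # {1: 3, 2: 2, 3: 1}
pairs = sorted(((v, key) for key, v in counts.items()), reverse=True)
top_k = [key for _, key in pairs[:k]]
assert top_k == [1, 2]                                 # matches the example
```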
c7c6b27c0678ba1da224d466e690f9b052079d15
|
Update __init__.py
|
pyramid_igniter/__init__.py
|
pyramid_igniter/__init__.py
|
# encoding: utf-8
import inspect
from functools import wraps
from pyramid.renderers import render_to_response
from re import sub
def route(rule='/', **options):
def decorator(f):
if not hasattr(f, 'rules'):
f.rules = []
f.rules.append((rule, options))
return f
return decorator
def wrap_view(f):
@wraps(f)
def inner(request):
abort = inner._handle_view(request)
if abort is not True:
return abort
return f(request)
return inner
def prettify_name(name):
return sub(r'(?<=.)([A-Z])', r' \1', name)
def get_members(base, current):
base_members = dir(base)
all_members = inspect.getmembers(current, predicate=inspect.ismethod)
return [member for member in all_members
if not member[0] in base_members and not member[0].startswith('_')]
class Inner(object):
__slots__ = ('igniter', 'name')
class Igniter(object):
url = ''
brand = ''
def __init__(self, config, view, debug=False, footer_class='col-md-3'):
self.config = config
self.route_base = view.route_base.rstrip('/')
self.menu = []
self.menu_categories = {}
self.footer = []
self.footer_categories = {}
self.debug = debug
self.footer_class = footer_class
self.add_view(view)
def add_view(self, view, visible=True, category=None):
if isinstance(view, tuple) and visible: # who can choose False?
name, url = view
self.add_to_menu(name, url, [], category, True)
return
base = self.route_base if view.route_base != self.route_base else ''
name, url, views = view.get_views(self)
if not self.brand:
self.brand = name
if not self.url:
self.url = url
if visible and self.url != url:
self.add_to_menu(name, self.build_url(base, url), views, category)
for current_view in views:
route_name, url, function, options = current_view
url = self.build_url(base, url)
function.func_dict['igniter'] = self
function.func_dict['name'] = name
self.config.add_route(route_name, url)
self.config.add_view(function, route_name=route_name, **options)
if self.debug:
print "%s => '%s'" % (url, route_name)
def add_to_menu(self, name, url, views, category=None, is_link=False):
if category:
if category not in self.menu_categories:
self.menu_categories[category] = []
self.menu.append(dict(name=category, url=None, is_link=is_link))
self.menu_categories[category].append(dict(name=name, url=url))
return
children = {view[0] for view in views} # very fast set
self.menu.append(dict(name=name, url=url, is_link=is_link,
children=children))
def add_footer(self, view, category=None):
if not isinstance(view, tuple):
raise TypeError('The footer item should be a tuple (name, url)')
name, url = view
if category:
if category not in self.footer_categories:
self.footer_categories[category] = []
self.footer.append(dict(name=category, url=None))
self.footer_categories[category].append(dict(name=name, url=url))
return
self.footer.append(dict(name=name, url=url))
@staticmethod
def build_url(base, url):
return '%s%s' % (base, url)
def is_active(self, request, item):
if item['is_link']: # check only giving url
return request.matched_route.path == self.route_base + item['url']
return request.matched_route.name in item['children']
class IgniterView(object):
route_base = '/'
igniter = None #
__view_defaults__ = {}
def __init__(self, name=None, route_base=None):
n = self.__class__.__name__
self.name = name or prettify_name(n)
prefix = n.lower()
self.prefix = prefix
if not route_base:
self.route_base = '' if prefix == 'index' else '/%s' % prefix
elif route_base == '/':
self.route_base = ''
else:
self.route_base = '/%s' % route_base.strip('/')
self.prefix = route_base.strip('/')
def get_views(self, igniter):
if not self.igniter:
self.igniter = igniter
views = []
is_handle_view = hasattr(self, '_handle_view')
for name, _ in get_members(IgniterView, self):
view_defaults = self.__view_defaults__.copy()
route_name = '%s.%s' % (self.prefix, name)
options = view_defaults
url = self.build_url(name, name)
attr = getattr(self, name)
if hasattr(attr, 'rules'):
for idx, (url, options) in enumerate(attr.rules):
url = self.build_url(name, url)
route_name = options.pop('route_name', None)
view_defaults = self.__view_defaults__.copy()
view_defaults.update(options or {})
options = view_defaults
if not route_name:
route_name = '%s.%s' % (self.prefix, name)
if len(attr.rules) > 1:
route_name = '%s_%d' % (route_name, idx)
if is_handle_view:
attr = wrap_view(attr)
attr.func_dict['_handle_view'] = self._handle_view
views.append((route_name, url, attr, options))
else:
if is_handle_view:
attr = wrap_view(attr)
attr.func_dict['_handle_view'] = self._handle_view
views.append((route_name, url, attr, options))
return self.name, self.route_base + '/', views
def build_url(self, name, url=''):
if name == 'index' and url == name:
url = ''
return '%s/%s' % (self.route_base, url.lstrip('/'))
def render(self, request, template, args=None):
Inner.igniter = self.igniter
Inner.name = self.name
args = args or {}
args['view'] = Inner
return render_to_response(template, args, request=request)
|
Python
| 0.000072
|
@@ -3982,19 +3982,16 @@
r = None
- #
\r\n __
|
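The `route` decorator above only appends `(rule, options)` tuples to the wrapped function; `get_views` later registers one Pyramid route per recorded tuple. A hypothetical view showing how stacked decorators accumulate (decorators apply bottom-up; the template path is made up):

```python
class Pages(IgniterView):
    @route('/', route_name='pages.home')
    @route('/index.html')
    def index(self, request):
        return self.render(request, 'pages/index.mako')

# After class creation:
#   Pages.index.rules == [('/index.html', {}),
#                         ('/', {'route_name': 'pages.home'})]
# get_views() then adds one route per rule, suffixing '_<idx>' to the
# route name when a view has several rules and no explicit name.
```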
3a156a11cd7b8a9bfc40b515a2f1d1351969ce3a
|
Simplify loading config for instagram middleware
|
me_api/middleware/instagram.py
|
me_api/middleware/instagram.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import requests
from flask import Blueprint, jsonify, request, redirect
from me_api.configs import Config
from me_api.cache import cache
config = Config.modules['modules']['instagram']
path = config['path']
client_secret, access_token, client_id = (
config['data']['client_secret'],
config['data']['access_token'],
config['data']['client_id']
)
instagram_api = Blueprint('instagram', __name__, url_prefix=path)
@instagram_api.route('/')
@cache.cached(timeout=3600)
def instagram():
if not access_token:
return 'Need access token, please authenticate your app first.'
response = requests.get(
("https://api.instagram.com/v1/users/"
"self/media/recent/?access_token={0}").format(access_token)
)
return jsonify(instagram=response.json())
@instagram_api.route('/login')
def authorization():
if access_token:
return "You've already had an access token in the config file."
authorization_url = 'https://api.instagram.com/oauth/authorize'
return redirect(
'{0}?client_id={1}&redirect_uri={2}&response_type=code'.format(
authorization_url, client_id,
os.path.join(request.url, 'redirect')
)
)
@instagram_api.route('/login/redirect')
def get_access_token():
authorization_code = request.args.get('code', '')
token_url = 'https://api.instagram.com/oauth/access_token'
post_data = {
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': request.base_url,
'grant_type': 'authorization_code',
'code': authorization_code
}
response = requests.post(token_url, data=post_data)
return response.text
|
Python
| 0.000001
|
@@ -180,22 +180,20 @@
pi.c
-onfigs
+ache
import
Conf
@@ -188,22 +188,21 @@
import
-Config
+cache
\nfrom me
@@ -210,247 +210,88 @@
api.
-cache import cache\n\n\nconfig = Config.modules['modules']['instagram']\npath = config['path']\nclient_secret, access_token, client_id = (\n config['data']['client_secret'],\n config['data']['access_token'],\n config['data']['client_id']\n
+middleware.utils import MiddlewareConfig\n\n\nconfig = MiddlewareConfig('instagram'
)\nin
@@ -348,16 +348,23 @@
_prefix=
+config.
path)\n\n\n
@@ -445,16 +445,23 @@
if not
+config.
access_t
@@ -669,16 +669,23 @@
.format(
+config.
access_t
@@ -803,16 +803,23 @@
\n if
+config.
access_t
@@ -1088,16 +1088,23 @@
on_url,
+config.
client_i
@@ -1394,16 +1394,23 @@
nt_id':
+config.
client_i
@@ -1437,16 +1437,23 @@
ecret':
+config.
client_s
|
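The diff swaps the hand-unpacked dict for a `MiddlewareConfig('instagram')` object read through attributes (`config.path`, `config.client_id`, and so on). Its implementation lives in `me_api.middleware.utils` and is not shown in this row; a plausible sketch consistent with the old dict layout, offered only as an assumption:

```python
from me_api.configs import Config

class MiddlewareConfig(object):
    """Hypothetical attribute-style view over one middleware's config."""

    def __init__(self, name):
        module = Config.modules['modules'][name]
        self.path = module['path']
        # Exposes client_id, client_secret and access_token as attributes.
        for key, value in module['data'].items():
            setattr(self, key, value)
```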
aeeb62f47a7211d945aafd294edb3d39d5d5cf6e
|
Modify error message
|
pytablereader/_validator.py
|
pytablereader/_validator.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
import abc
import os.path
import dataproperty
import pathvalidate as pv
import six
from six.moves.urllib.parse import urlparse
from ._constant import SourceType
from .error import EmptyDataError
from .error import InvalidFilePathError
from .error import InvalidUrlError
@six.add_metaclass(abc.ABCMeta)
class ValidatorInterface(object):
@abc.abstractproperty
def source_type(self):
pass
@abc.abstractmethod
def validate(self):
pass
class BaseValidator(ValidatorInterface):
@property
def source(self):
return self.__source
def __init__(self, source):
self.__source = source
class FileValidator(BaseValidator):
@property
def source_type(self):
return SourceType.FILE
def validate(self):
try:
pv.validate_file_path(self.source)
except pv.NullNameError:
raise IOError("file path is empty")
except (pv.InvalidCharError, pv.InvalidLengthError) as e:
raise InvalidFilePathError(e)
if not os.path.isfile(self.source):
raise IOError("file not found")
class TextValidator(BaseValidator):
@property
def source_type(self):
return SourceType.TEXT
def validate(self):
if dataproperty.is_empty_string(self.source):
raise EmptyDataError("data source is empty")
class UrlValidator(BaseValidator):
@property
def source_type(self):
return SourceType.URL
def validate(self):
if dataproperty.is_empty_string(self.source):
raise InvalidUrlError("url is empty")
scheme = urlparse(self.source).scheme
if scheme not in ["http", "https"]:
raise InvalidUrlError("invalid schema: {:s}".format(scheme))
|
Python
| 0.000001
|
@@ -1842,16 +1842,33 @@
rlError(
+\n
"invalid
@@ -1877,24 +1877,71 @@
chem
-a: {:s}".format(
+e: expected=http/https, actual={}".format(\n
sche
|
6c095c0e14c084666b9417b4bd269f396804bfab
|
Update interface with the latest changes in functionality.
|
src/ensign/_interfaces.py
|
src/ensign/_interfaces.py
|
# pylint: skip-file
from zope.interface import Attribute, Interface
class IFlag(Interface):
"""
Flag Interface.
Any kind of flag must implement this interface.
"""
TYPE = Attribute("""Flag type""")
store = Attribute("""Flag storage backend""")
name = Attribute("""Flag name""")
value = Attribute("""Flag value""")
active = Attribute("""Flag activity indicator""")
info = Attribute("""Flag descriptive information""")
def create(name, store, **kwargs):
"""
Create a new flag with the given name and, optionally, extra data,
persisted in the given store.
"""
def _check():
"""
Check whether the flag current value means the feature is active.
"""
class IStorage(Interface):
"""
Storage Interface.
Any kind of backing storage for flags must implement this interface.
"""
def create(name, type, **kwargs):
"""Create a new flag."""
def exists(name):
"""Check if the flag exists in the store."""
def load(name, type):
"""Load a value."""
def store(name, value, type):
"""Store a value."""
def used(name):
"""Get last used date."""
def info(name):
"""Get flag descriptive information."""
|
Python
| 0
|
@@ -625,32 +625,118 @@
e.\n """\n\n
+ def all(store):\n """\n Retrieve all flags in the store.\n """\n\n
def _check()
@@ -1364,12 +1364,57 @@
rmation."""\n
+\n def all():\n """Get all flags."""\n
|
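The commit extends both interfaces with `all` methods. zope.interface contracts are fulfilled by decorating a class with `@implementer`; a toy in-memory backend sketching the updated `IStorage` contract (the class itself is hypothetical, not project code):

```python
from zope.interface import implementer
from ensign._interfaces import IStorage

@implementer(IStorage)
class MemoryStorage(object):
    """Hypothetical in-memory backend for the updated IStorage."""

    def __init__(self):
        self._flags = {}

    def create(self, name, type, **kwargs):
        self._flags.setdefault(name, None)

    def exists(self, name):
        return name in self._flags

    def load(self, name, type):
        return self._flags[name]

    def store(self, name, value, type):
        self._flags[name] = value

    def used(self, name):
        return None

    def info(self, name):
        return ''

    def all(self):
        return list(self._flags)   # the method this commit adds
```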
249fa5a158cb554b2963eeed443e8d10197c267a
|
Revert "One hour is now 20 minutes"
|
ebs_snapshots/snapshot_manager.py
|
ebs_snapshots/snapshot_manager.py
|
""" Module handling the snapshots """
import datetime
import yaml
from boto.exception import EC2ResponseError
import kayvee
import logging
""" Configure the valid backup intervals """
VALID_INTERVALS = [
u'hourly',
u'daily',
u'weekly',
u'monthly',
u'yearly']
def run(connection, volume_id, interval='daily', max_snapshots=0, name=''):
""" Ensure that we have snapshots for a given volume
:type connection: boto.ec2.connection.EC2Connection
:param connection: EC2 connection object
:type volume_id: str
:param volume_id: identifier for boto.ec2.volume.Volume
:type max_snapshots: int
:param max_snapshots: number of snapshots to keep (0 means infinite)
:returns: None
"""
try:
volumes = connection.get_all_volumes([volume_id])
except EC2ResponseError as error:
logging.error(kayvee.formatLog("ebs-snapshots", "error", "failed to connect to AWS", {"msg": error.message}))
return
for volume in volumes:
_ensure_snapshot(connection, volume, interval, name)
_remove_old_snapshots(connection, volume, max_snapshots)
def _create_snapshot(connection, volume, name=''):
""" Create a new snapshot
:type volume: boto.ec2.volume.Volume
:param volume: Volume to snapshot
:returns: boto.ec2.snapshot.Snapshot -- The new snapshot
"""
logging.info(kayvee.formatLog("ebs-snapshots", "info", "creating new snapshot", {"volume": volume.id}))
snapshot = volume.create_snapshot(
description="automatic snapshot by ebs-snapshots")
if not name:
name = '{}-snapshot'.format(volume.id)
connection.create_tags(
[snapshot.id], dict(Name=name, creator='ebs-snapshots'))
logging.info(kayvee.formatLog("ebs-snapshots", "info", "created snapshot successfully", {
"name": name,
"volume": volume.id,
"snapshot": snapshot.id
}))
return snapshot
def _ensure_snapshot(connection, volume, interval, name):
""" Ensure that a given volume has an appropriate snapshot
:type connection: boto.ec2.connection.EC2Connection
:param connection: EC2 connection object
:type volume: boto.ec2.volume.Volume
:param volume: Volume to check
:returns: None
"""
if interval not in VALID_INTERVALS:
logging.warning(kayvee.formatLog("ebs-snapshots", "warning", "invalid snapshotting interval", {
"volume": volume.id,
"interval": interval
}))
return
snapshots = connection.get_all_snapshots(filters={'volume-id': volume.id})
# Create a snapshot if we don't have any
if not snapshots:
_create_snapshot(connection, volume, name)
return
min_delta = 3600 * 24 * 365 * 10 # 10 years :)
for snapshot in snapshots:
timestamp = datetime.datetime.strptime(
snapshot.start_time,
'%Y-%m-%dT%H:%M:%S.000Z')
delta_seconds = int(
(datetime.datetime.utcnow() - timestamp).total_seconds())
if delta_seconds < min_delta:
min_delta = delta_seconds
logging.info(kayvee.formatLog("ebs-snapshots", "info", 'The newest snapshot for {} is {} seconds old'.format(volume.id, min_delta)))
if interval == 'hourly' and min_delta > 1200:
_create_snapshot(connection, volume, name)
elif interval == 'daily' and min_delta > 3600*24:
_create_snapshot(connection, volume, name)
elif interval == 'weekly' and min_delta > 3600*24*7:
_create_snapshot(connection, volume, name)
elif interval == 'monthly' and min_delta > 3600*24*30:
_create_snapshot(connection, volume, name)
elif interval == 'yearly' and min_delta > 3600*24*365:
_create_snapshot(connection, volume, name)
else:
logging.info(kayvee.formatLog("ebs-snapshots", "info", "no snapshot needed", {"volume": volume.id}))
def _remove_old_snapshots(connection, volume, max_snapshots):
""" Remove old snapshots
:type connection: boto.ec2.connection.EC2Connection
:param connection: EC2 connection object
:type volume: boto.ec2.volume.Volume
:param volume: Volume to check
:returns: None
"""
retention = max_snapshots
if not type(retention) is int and retention >= 0:
logging.warning(kayvee.formatLog("ebs-snapshots", "warning", "invalid max_snapshots value", {
"volume": volume.id,
"max_snapshots": retention
}))
return
snapshots = connection.get_all_snapshots(filters={'volume-id': volume.id})
# Sort the list based on the start time
snapshots.sort(key=lambda x: x.start_time)
# Remove snapshots we want to keep
snapshots = snapshots[:-int(retention)]
if not snapshots:
logging.info(kayvee.formatLog("ebs-snapshots", "info", "no old snapshots to remove"))
return
for snapshot in snapshots:
logging.info(kayvee.formatLog("ebs-snapshots", "info", "deleting snapshot", {"snapshot": snapshot.id}))
try:
snapshot.delete()
except EC2ResponseError as error:
logging.warning(kayvee.formatLog("ebs-snapshots", "warning", "could not remove snapshot", {
"snapshot": snapshot.id,
"msg": error.message
}))
logging.info(kayvee.formatLog("ebs-snapshots", "info", "done deleting snapshots"))
|
Python
| 0.004639
|
@@ -3261,10 +3261,10 @@
a >
-12
+36
00:\n
|
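The revert restores the hourly threshold to 3600 seconds; each branch of `_ensure_snapshot` compares the newest snapshot's age against the interval length in seconds. The thresholds, restated as a dict for reference (not code from the module):

```python
INTERVAL_SECONDS = {
    'hourly':  3600,             # restored by this revert (was 1200)
    'daily':   3600 * 24,
    'weekly':  3600 * 24 * 7,
    'monthly': 3600 * 24 * 30,
    'yearly':  3600 * 24 * 365,
}
# A new snapshot is created when min_delta exceeds the configured interval.
```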
4484bee2c018a4db3951193c6615a34b76880fe3
|
add tree migrate anonymous check
|
python/federatedml/protobuf/model_migrate/converter/tree_model_converter.py
|
python/federatedml/protobuf/model_migrate/converter/tree_model_converter.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from federatedml.util import consts
from federatedml.protobuf.generated.boosting_tree_model_meta_pb2 import BoostingTreeModelMeta
from federatedml.protobuf.generated.boosting_tree_model_param_pb2 import BoostingTreeModelParam
from federatedml.protobuf.model_migrate.converter.converter_base import AutoReplace
from federatedml.protobuf.model_migrate.converter.converter_base import ProtoConverterBase
class HeteroSBTConverter(ProtoConverterBase):
def convert(self, param: BoostingTreeModelParam, meta: BoostingTreeModelMeta,
guest_id_mapping: Dict,
host_id_mapping: Dict,
arbiter_id_mapping: Dict,
tree_plan_delimiter='_'
):
feat_importance_list = list(param.feature_importances)
tree_list = list(param.trees_)
tree_plan = list(param.tree_plan)
replacer = AutoReplace(guest_id_mapping, host_id_mapping, arbiter_id_mapping)
# fp == feature importance
for fp in feat_importance_list:
fp.sitename = replacer.replace(fp.sitename)
fp.fullname = replacer.replace(fp.fullname)
for tree in tree_list:
tree_nodes = list(tree.tree_)
for node in tree_nodes:
node.sitename = replacer.replace(node.sitename)
new_tree_plan = []
for str_tuple in tree_plan:
param.tree_plan.remove(str_tuple)
tree_mode, party_id = str_tuple.split(tree_plan_delimiter)
if int(party_id) != -1:
new_party_id = replacer.plain_replace(party_id, role=consts.HOST)
else:
new_party_id = party_id
new_tree_plan.append(tree_mode+tree_plan_delimiter+new_party_id)
param.tree_plan.extend(new_tree_plan)
return param, meta
|
Python
| 0
|
@@ -1455,16 +1455,160 @@
tances)\n
+ fid_feature_mapping = dict(param.feature_name_fid_mapping)\n feature_fid_mapping = {v: k for k, v in fid_feature_mapping.items()}\n
@@ -1898,16 +1898,75 @@
tename)\n
+ if fp.fullname not in feature_fid_mapping:\n
@@ -2694,9 +2694,8 @@
am, meta
-\n
|
a9253d6382c8eeb4261d0fc533d943046b51d109
|
Remove unused variable
|
account_tax_analysis/account_tax_analysis.py
|
account_tax_analysis/account_tax_analysis.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville. Copyright 2013-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
class AccountTaxDeclarationAnalysis(models.TransientModel):
_name = 'account.vat.declaration.analysis'
_description = 'Account Vat Declaration'
fiscalyear_id = fields.Many2one(
comodel_name='account.fiscalyear',
string='Fiscalyear',
help='Fiscalyear to look on',
required=True,
)
period_list = fields.Many2many(
comodel_name='account.period',
relation='account_tax_period_rel',
column1='tax_analysis',
column2='period_id',
string='Periods',
required=True,
)
@api.multi
def show_vat(self):
action_obj = self.env['ir.actions.act_window']
if not self.period_list:
raise exceptions.Warning(_("You must select periods"))
domain = [('period_id', 'in', self.period_list.ids)]
action = self.env.ref('account_tax_analysis.action_view_tax_analysis')
action_fields = action.read()[0]
action_fields['domain'] = domain
return action_fields
|
Python
| 0.000015
|
@@ -1596,63 +1596,8 @@
f):\n
- action_obj = self.env['ir.actions.act_window']\n
|
d2f36dda1e6585a2c59bdf8c8290c34e573e4829
|
Remove $? from exit status for Fish Shell compatibility
|
src/output.py
|
src/output.py
|
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
import os
import pickle
import re
import logger
import stateFiles
DEBUG = '~/.fbPager.debug.text'
RED_COLOR = u'\033[0;31m'
NO_COLOR = u'\033[0m'
INVALID_FILE_WARNING = '''
Warning! Some invalid or unresolvable files were detected.
'''
GIT_ABBREVIATION_WARNING = '''
It looks like one of these is a git abbreviated file with
a triple dot path (.../). Try to turn off git's abbreviation
with --numstat so we get actual paths (not abbreviated
versions which cannot be resolved.
'''
CONTINUE_WARNING = 'Are you sure you want to continue? Ctrl-C to quit'
# The two main entry points into this module:
#
def execComposedCommand(command, lineObjs):
if not len(command):
editFiles(lineObjs)
return
logger.addEvent('command_on_num_files', len(lineObjs))
command = composeCommand(command, lineObjs)
appendAliasExpansion()
appendIfInvalid(lineObjs)
appendFriendlyCommand(command)
appendExit()
def editFiles(lineObjs):
partialCommands = []
logger.addEvent('editing_num_files', len(lineObjs))
for lineObj in lineObjs:
(file, num) = (lineObj.getFile(), lineObj.getLineNum())
partialCommands.append(getEditFileCommand(file, num))
command = joinEditCommands(partialCommands)
appendIfInvalid(lineObjs)
appendToFile(command)
appendExit()
# Private helpers
def appendIfInvalid(lineObjs):
# lastly lets check validity and actually output an
# error if any files are invalid
invalidLines = [line for line in lineObjs if not line.isResolvable()]
if not invalidLines:
return
appendError(INVALID_FILE_WARNING)
if len([line for line in invalidLines if line.isGitAbbreviatedPath()]):
appendError(GIT_ABBREVIATION_WARNING)
appendToFile('read -p "%s" -r' % CONTINUE_WARNING)
def debug(*args):
for arg in args:
appendToFile('echo "DEBUG: ' + str(arg) + '"')
def outputSelection(lineObjs):
filePath = stateFiles.getSelectionFilePath()
indices = [l.index for l in lineObjs]
file = open(filePath, 'wb')
pickle.dump(indices, file)
file.close()
def getEditorAndPath():
editor_path = os.environ.get('FPP_EDITOR') or os.environ.get('VISUAL') or \
os.environ.get('EDITOR')
if editor_path:
editor = os.path.basename(editor_path)
logger.addEvent('using_editor_' + editor)
return editor, editor_path
return 'vim', 'vim'
def getEditFileCommand(filePath, lineNum):
editor, _editor_path = getEditorAndPath()
if editor == 'vim' and lineNum != 0:
return '\'%s\' +%d' % (filePath, lineNum)
elif editor in ['vi', 'nvim', 'nano', 'joe', 'emacs',
'emacsclient'] and lineNum != 0:
return '+%d \'%s\'' % (lineNum, filePath)
elif editor in ['subl', 'sublime', 'atom'] and lineNum != 0:
return '\'%s:%d\'' % (filePath, lineNum)
else:
return "'%s'" % filePath
def expandPath(filePath):
# expand ~/ paths
filePath = os.path.expanduser(filePath)
# and in case of grep, expand ./ as well
return os.path.abspath(filePath)
def joinEditCommands(partialCommands):
editor, editor_path = getEditorAndPath()
if editor in ['vim', 'mvim']:
return editor_path + ' -O ' + ' '.join(partialCommands)
# Assume that all other editors behave like emacs
return editor_path + ' ' + ' '.join(partialCommands)
def composeCdCommand(command, lineObjs):
filePath = os.path.expanduser(lineObjs[0].getDir())
filePath = os.path.abspath(filePath)
# now copy it into clipboard for cdp-ing
# TODO -- this is pretty specific to
# pcottles workflow
command = 'echo "' + filePath + '" > ~/.dircopy'
return command
def isCdCommand(command):
return command[0:3] in ['cd ', 'cd']
def composeCommand(command, lineObjs):
if isCdCommand(command):
return composeCdCommand(command, lineObjs)
else:
return composeFileCommand(command, lineObjs)
def composeFileCommand(command, lineObjs):
command = command.decode('utf-8')
files = ["'%s'" % lineObj.getFile() for lineObj in lineObjs]
file_str = ' '.join(files)
if '$F' in command:
command = command.replace('$F', file_str)
else:
command = command + ' ' + file_str
return command
def outputNothing():
appendToFile('echo "nothing to do!" && exit 1')
def clearFile():
writeToFile('')
def appendAliasExpansion():
# zsh by default expands aliases when running in interactive mode
# (see ../fpp). bash (on this author's Yosemite box) seems to have
# alias expansion off when run with -i present and -c absent,
# despite documentation hinting otherwise.
#
# so here we must ask bash to turn on alias expansion.
appendToFile("""
if type shopt > /dev/null; then
shopt -s expand_aliases
fi
""")
def appendFriendlyCommand(command):
header = 'echo "executing command:"\n' + \
'echo "' + command.replace('"', '\\"') + '"'
appendToFile(header)
appendToFile(command)
def appendError(text):
appendToFile('printf "%s%s%s\n"' % (RED_COLOR, text, NO_COLOR))
def appendToFile(command):
file = open(stateFiles.getScriptOutputFilePath(), 'a')
file.write(command + '\n')
file.close()
logger.output()
def appendExit():
appendToFile('exit $?;')
def writeToFile(command):
file = open(stateFiles.getScriptOutputFilePath(), 'w')
file.write(command + '\n')
file.close()
logger.output()
|
Python
| 0.000004
|
@@ -5608,11 +5608,8 @@
exit
- $?
;')\n
|
c433c649a9a4b32095a170f75c7e4aae9382089b
|
use absolute imports
|
em_examples/__init__.py
|
em_examples/__init__.py
|
import Attenuation
import BiotSavart
import CondUtils
import DC_cylinder
import DCLayers
import DCsphere
import DCWidget
import DCWidgetPlate2_5D
import DCWidgetPlate_2D
import DCWidgetResLayer2_5D
import DCWidgetResLayer2D
import DipoleWidget1D
import DipoleWidgetFD
import DipoleWidgetTD
import EMcircuit
import FDEMDipolarfields
import FDEMPlanewave
import FreqtoTime
import HarmonicVMDCylWidget
import InductionLoop
import InductionSphereFEM
import Loop
import MT
import PlanewaveWidgetFD
import Reflection
import sphereElectrostatic_example
import TransientVMDCylWidget
import View
import VolumeWidget
import VolumeWidgetPlane
__version__ = '0.0.8'
__author__ = 'GeoScixyz developers'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017 GeoScixyz developers'
|
Python
| 0.000146
|
@@ -1,15 +1,16 @@
import
+.
Attenuat
@@ -20,16 +20,17 @@
\nimport
+.
BiotSava
@@ -39,16 +39,17 @@
\nimport
+.
CondUtil
@@ -57,16 +57,17 @@
\nimport
+.
DC_cylin
@@ -77,16 +77,17 @@
\nimport
+.
DCLayers
@@ -94,16 +94,17 @@
\nimport
+.
DCsphere
@@ -103,32 +103,33 @@
DCsphere\nimport
+.
DCWidget\nimport
@@ -120,32 +120,33 @@
DCWidget\nimport
+.
DCWidgetPlate2_5
@@ -146,32 +146,33 @@
late2_5D\nimport
+.
DCWidgetPlate_2D
@@ -171,32 +171,33 @@
Plate_2D\nimport
+.
DCWidgetResLayer
@@ -208,16 +208,17 @@
\nimport
+.
DCWidget
@@ -227,32 +227,33 @@
sLayer2D\nimport
+.
DipoleWidget1D\ni
@@ -250,32 +250,33 @@
Widget1D\nimport
+.
DipoleWidgetFD\ni
@@ -281,16 +281,17 @@
\nimport
+.
DipoleWi
@@ -304,16 +304,17 @@
\nimport
+.
EMcircui
@@ -318,24 +318,25 @@
cuit\nimport
+.
FDEMDipolarf
@@ -348,16 +348,17 @@
\nimport
+.
FDEMPlan
@@ -370,16 +370,17 @@
\nimport
+.
FreqtoTi
@@ -389,16 +389,17 @@
\nimport
+.
Harmonic
@@ -410,32 +410,33 @@
ylWidget\nimport
+.
InductionLoop\nim
@@ -440,16 +440,17 @@
\nimport
+.
Inductio
@@ -467,16 +467,17 @@
\nimport
+.
Loop\nimp
@@ -480,16 +480,17 @@
\nimport
+.
MT\nimpor
@@ -491,16 +491,17 @@
\nimport
+.
Planewav
@@ -517,16 +517,17 @@
\nimport
+.
Reflecti
@@ -536,16 +536,17 @@
\nimport
+.
sphereEl
@@ -572,16 +572,17 @@
\nimport
+.
Transien
@@ -602,16 +602,17 @@
\nimport
+.
View\nimp
@@ -607,32 +607,33 @@
rt .View\nimport
+.
VolumeWidget\nimp
@@ -636,16 +636,17 @@
\nimport
+.
VolumeWi
|
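Decoded, the diff inserts a dot after each `import `, producing lines like `import .Attenuation`. That spelling is a SyntaxError in every Python version; the relative-import form the interpreter accepts is `from . import name`. Whether the upstream commit used the valid form is not recoverable from this row, but the working equivalent would be:

```python
# Relative imports must use the "from" form; "import .Attenuation"
# does not parse.
from . import Attenuation
from . import BiotSavart
# ...and so on for the remaining modules.
```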
ae0f08294d52a5c32eccfc8b63b950bd2e336669
|
fix argparse error printing
|
email_actions/server.py
|
email_actions/server.py
|
import logging
import asyncio
import argparse
import socket
from aiosmtpd.handlers import Message
from aiosmtpd.controller import Controller
from functools import partial
from email_actions.constants import VERSION
from email_actions.filters import Filter
from email_actions.config import check_config
def bind(family, type, proto):
"""Create (or recreate) the actual socket object."""
sock = socket.socket(family, type, proto)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
# If listening on IPv6, activate dual-stack.
if family == socket.AF_INET6:
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, False)
return sock
class EAController(Controller):
def make_socket(self):
host = self.hostname
port = self.port
try:
# First try to determine the socket type.
info = socket.getaddrinfo(
host, port,
socket.AF_UNSPEC,
socket.SOCK_STREAM,
0,
socket.AI_PASSIVE,
)
except socket.gaierror:
# Infer the type from the host.
addr = host, port
if ':' in host:
addr += 0, 0
type_ = socket.AF_INET6
else:
type_ = socket.AF_INET
info_0 = type_, socket.SOCK_STREAM, 0, '', addr
info = info_0,
family, type, proto, canonname, addr = next(iter(info))
sock = bind(family, type, proto)
return sock
class MessageHandler(Message):
filter_obj = None
def __init__(self, message_class=None):
super().__init__(message_class)
self.filter_obj = Filter()
def handle_message(self, message):
logging.debug(message)
loop = asyncio.get_event_loop()
filter_action = partial(
self.filter_obj.filter, message['From'], message['To'],
message['Subject'], message.get_payload()
)
loop.run_in_executor(None, filter_action)
class EASMTPServer():
host = 'localhost'
port = 8025
def __init__(self, host, port):
self.host = host
self.port = port
logging.debug("Using host: %s and port %d for smtp server"
% (host, port))
@asyncio.coroutine
def serve(self, loop):
controller = EAController(MessageHandler(), hostname=self.host,
port=self.port)
controller.start()
@asyncio.coroutine
def stop(self):
Controller.stop()
def main():
parser = argparse.ArgumentParser(prog='email-actions')
parser.add_argument('-v', '--version', action='version',
version='%(prog)s version ' + VERSION)
parser.add_argument('-H', '--hostname', action='store',
help='Host IP or name to bind the server to',
default='localhost')
parser.add_argument('-p', '--port', type=int, action='store',
help='Port number to bind the server to',
default=8025)
parser.add_argument('-l', '--log', type=int, action='store',
help='Set log level. 0=> Warning, 1=>Info, 2=>Debug',
default=0)
req_args = parser.add_argument_group('required arguments')
req_args.add_argument('-c', '--config', required=True,
help='Specify config file (yaml format) to be used. '
'If it doesn\'t exist, we\'ll try to create it')
args = parser.parse_args()
if args.log >= 2:
log_level = logging.DEBUG
elif args.log == 1:
log_level = logging.INFO
else:
log_level = logging.WARNING
logging.basicConfig(level=log_level,
format='%(asctime)s: [EA] %(filename)s '
'- %(message)s')
cfg_status = check_config(args.config)
if not cfg_status:
exit(1)
server = EASMTPServer(args.hostname, args.port)
loop = asyncio.get_event_loop()
loop.create_task(server.serve(loop))
try:
logging.info("Starting server")
loop.run_forever()
except KeyboardInterrupt:
logging.info("Stopping server")
server.stop()
loop.stop()
if __name__ == "__main__":
main()
|
Python
| 0.000005
|
@@ -3301,16 +3301,25 @@
te it')\n
+ try:\n
args =
@@ -3338,16 +3338,38 @@
e_args()
+\n except:\n exit(1)
\n\n if a
|
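`argparse` reports bad arguments by printing to stderr and raising `SystemExit` (with status 2), so the bare `except:` this diff adds catches that exit and replaces it with `exit(1)` after the message has already been printed. Catching `SystemExit` explicitly would express the same intent more narrowly; a sketch:

```python
import argparse

parser = argparse.ArgumentParser(prog='email-actions')
parser.add_argument('-c', '--config', required=True)
try:
    args = parser.parse_args()
except SystemExit:           # argparse already printed its error message
    raise SystemExit(1)      # normalize the exit status
```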
7d9cec11479ce4fee821e5943fa1f1ef65938416
|
monitor default:2
|
src/gofer/agent/config.py
|
src/gofer/agent/config.py
|
#
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU Lesser General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (LGPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of LGPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/lgpl-2.0.txt.
#
# Jeff Ortel <jortel@redhat.com>
#
from gofer import NAME, Singleton
from gofer.config import Config, Graph
from gofer.config import REQUIRED, OPTIONAL, ANY, BOOL, NUMBER
#
# [main]
#
# monitor
# Plugin monitoring delay (seconds). (0=disabled).
#
# [logging]
# <module>
# Logging level
#
# [pam]
# service
# The default PAM service for authentication. Default:passwd
#
AGENT_SCHEMA = (
('main', REQUIRED,
(
('monitor', OPTIONAL, NUMBER),
)
),
('logging', REQUIRED,
[]
),
('pam', REQUIRED,
(
('service', OPTIONAL, ANY),
)
),
)
#
# [main]
#
# enabled
# Plugin enabled/disabled (0|1)
# name
# The (optional) plugin name. The basename of the descriptor is used when not specified.
# plugin
# The (optional) fully qualified module to be loaded from the PYTHON path.
# threads
# The (optional) number of threads for the RMI dispatcher.
# accept
# Accept forwarding from. A comma (,) separated list of plugin names (,=none|*=all).
# forward
# Forward to. A comma (,) separated list of plugin names (,=none|*=all).
#
# [messaging]
#
# uuid
# The (optional) agent identity. This value also specifies the queue name.
# url
# The (optional) broker connection URL.
# cacert
# The (optional) SSL CA certificate used to validate the server certificate.
# clientcert
# The (optional) SSL client certificate. PEM encoded and contains both key and certificate.
# host_validation
# The (optional) flag indicates SSL host validation should be performed.
# authenticator
# The (optional) fully qualified Authenticator to be loaded from the PYTHON path.
#
# [model]
#
# managed
# The (optional) level of broker model management. Default: 2.
# - 0 = none
# - 1 = declare and bind queue.
# - 2 = declare and bind queue; drain and delete queue on explicit detach.
# queue
# The (optional) AMQP queue name. This overrides the uuid.
# expiration
# The (optional) auto-deleted queue expiration (seconds).
# exchange
# The (optional) AMQP exchange.
#
#
PLUGIN_SCHEMA = (
('main', REQUIRED,
(
('enabled', REQUIRED, BOOL),
('name', OPTIONAL, ANY),
('plugin', OPTIONAL, ANY),
('threads', OPTIONAL, NUMBER),
('accept', OPTIONAL, ANY),
('forward', OPTIONAL, ANY),
)
),
('messaging', REQUIRED,
(
('url', OPTIONAL, ANY),
('uuid', OPTIONAL, ANY),
('cacert', OPTIONAL, ANY),
('clientcert', OPTIONAL, ANY),
('clientkey', OPTIONAL, ANY),
('host_validation', OPTIONAL, BOOL),
('authenticator', OPTIONAL, ANY),
)
),
('model', OPTIONAL,
(
('managed', OPTIONAL, '(0|1|2)'),
('queue', OPTIONAL, ANY),
('exchange', OPTIONAL, ANY),
('expiration', OPTIONAL, NUMBER)
)
),
)
AGENT_DEFAULTS = {
'main': {
'monitor': '0'
},
'logging': {
},
'pam': {
'service': 'passwd'
}
}
PLUGIN_DEFAULTS = {
'main': {
'enabled': '0',
'threads': '1',
'accept': ',',
'forward': ','
},
'model': {
'managed': '2'
}
}
class AgentConfig(Graph):
"""
The gofer agent configuration.
:cvar PATH: The absolute path to the config directory.
:type PATH: str
"""
__metaclass__ = Singleton
PATH = '/etc/%s/agent.conf' % NAME
def __init__(self, path=None):
"""
Read the configuration.
"""
conf = Config(AGENT_DEFAULTS, path or AgentConfig.PATH)
conf.validate(AGENT_SCHEMA)
Graph.__init__(self, conf)
|
Python
| 0.000001
|
@@ -3667,17 +3667,17 @@
itor': '
-0
+2
'\n },
|
19a78d29742f1f32e9e2502f066f16d0d2b5663c
|
Apply patch from PyMYSQL PR 592
|
tap_mysql/connection.py
|
tap_mysql/connection.py
|
#!/usr/bin/env python3
import backoff
import pymysql
from pymysql.constants import CLIENT
import singer
import ssl
LOGGER = singer.get_logger()
CONNECT_TIMEOUT_SECONDS = 30
READ_TIMEOUT_SECONDS = 3600
# We need to hold onto this for self-signed SSL
match_hostname = ssl.match_hostname
@backoff.on_exception(backoff.expo,
(pymysql.err.OperationalError),
max_tries=5,
factor=2)
def connect_with_backoff(connection):
connection.connect()
warnings = []
with connection.cursor() as cur:
try:
cur.execute('SET @@session.time_zone="+0:00"')
except pymysql.err.InternalError as e:
warnings.append('Could not set session.time_zone. Error: ({}) {}'.format(*e.args))
try:
cur.execute('SET @@session.wait_timeout=2700')
except pymysql.err.InternalError as e:
warnings.append('Could not set session.wait_timeout. Error: ({}) {}'.format(*e.args))
try:
cur.execute("SET @@session.net_read_timeout={}".format(READ_TIMEOUT_SECONDS))
except pymysql.err.InternalError as e:
warnings.append('Could not set session.net_read_timeout. Error: ({}) {}'.format(*e.args))
try:
cur.execute('SET @@session.innodb_lock_wait_timeout=2700')
except pymysql.err.InternalError as e:
warnings.append(
'Could not set session.innodb_lock_wait_timeout. Error: ({}) {}'.format(*e.args)
)
if warnings:
LOGGER.info(("Encountered non-fatal errors when configuring MySQL session that could "
"impact performance:"))
for w in warnings:
LOGGER.warning(w)
return connection
def parse_internal_hostname(hostname):
# special handling for google cloud
if ":" in hostname:
parts = hostname.split(":")
if len(parts) == 3:
return parts[0] + ":" + parts[2]
return parts[0] + ":" + parts[1]
return hostname
class MySQLConnection(pymysql.connections.Connection):
def __init__(self, config):
# Google Cloud's SSL involves a self-signed certificate. This certificate's
# hostname matches the form {instance}:{box}. The hostname displayed in the
# Google Cloud UI is of the form {instance}:{region}:{box} which
# necessitates the "parse_internal_hostname" function to get the correct
# hostname to match.
# The "internal_hostname" config variable allows for matching the SSL
# against a host that doesn't match the host we are connecting to. In the
# case of Google Cloud, we will be connecting to an IP, not the hostname
# the SSL certificate expects.
# The "ssl.match_hostname" function is patched to check against the
# internal hostname rather than the host of the connection. In the event
# that the connection fails, the patch is reverted by reassigning the
        # patched-out method to its original spot.
args = {
"user": config["user"],
"password": config["password"],
"host": config["host"],
"port": int(config["port"]),
"cursorclass": config.get("cursorclass") or pymysql.cursors.SSCursor,
"connect_timeout": CONNECT_TIMEOUT_SECONDS,
"read_timeout": READ_TIMEOUT_SECONDS,
"charset": "utf8",
}
ssl_arg = None
if config.get("database"):
args["database"] = config["database"]
# Attempt self-signed SSL if config vars are present
use_self_signed_ssl = config.get("ssl_ca")
if use_self_signed_ssl:
LOGGER.info("Using custom certificate authority")
# Config values MUST be paths to files for the SSL module to read them correctly.
ssl_arg = {
"ca": config["ssl_ca"],
"check_hostname": config.get("check_hostname", "true") == "true"
}
# If using client authentication, cert and key are required
if config.get("ssl_cert") and config.get("ssl_key"):
ssl_arg["cert"] = config["ssl_cert"]
ssl_arg["key"] = config["ssl_key"]
# override match hostname for google cloud
if config.get("internal_hostname"):
parsed_hostname = parse_internal_hostname(config["internal_hostname"])
ssl.match_hostname = lambda cert, hostname: match_hostname(cert, parsed_hostname)
super().__init__(defer_connect=True, ssl=ssl_arg, **args)
# Configure SSL without custom CA
# Manually create context to override default behavior of
# CERT_NONE without a CA supplied
if config.get("ssl") == 'true' and not use_self_signed_ssl:
LOGGER.info("Attempting SSL connection")
# For compatibility with previous version, verify mode is off by default
verify_mode = config.get("verify_mode", "false") == 'true'
if not verify_mode:
LOGGER.warn("Not verifying server certificate. The connection is encrypted, but the server hasn't been verified. Please provide a root CA certificate to enable verification.")
self.ssl = True
self.ctx = ssl.create_default_context()
check_hostname = config.get("check_hostname", "false") == 'true'
self.ctx.check_hostname = check_hostname
self.ctx.verify_mode = ssl.CERT_REQUIRED if verify_mode else ssl.CERT_NONE
self.client_flag |= CLIENT.SSL
def __enter__(self):
return self
def __exit__(self, *exc_info):
del exc_info
self.close()
def make_connection_wrapper(config):
class ConnectionWrapper(MySQLConnection):
def __init__(self, *args, **kwargs):
config["cursorclass"] = kwargs.get('cursorclass')
super().__init__(config)
connect_with_backoff(self)
return ConnectionWrapper
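# A minimal sketch of the expected config shape (illustrative values only):
#
#   conn = MySQLConnection({"user": "tap", "password": "s3cr3t",
#                           "host": "db.example.com", "port": "3306"})
#   with connect_with_backoff(conn) as open_conn:
#       with open_conn.cursor() as cur:
#           cur.execute("SELECT 1")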
|
Python
| 0
|
@@ -285,16 +285,726 @@
stname%0A%0A
+# MySQL 8.0 Patch:%0A# Workaround to support MySQL 8.0 without upgrading the PyMySQL version%0A# since there are breaking changes between these versions, this should suffice to allow%0A# new character sets to be used with MySQL 8.0 instances.%0A# FIXME: Remove when PyMYSQL upgrade behavior has been evaluated.%0A# Patch Originally Found Here: https://github.com/PyMySQL/PyMySQL/pull/592%0Aoriginal_charset_by_id = pymysql.charset.charset_by_id%0Adef charset_wrapper(*args, **kwargs):%0A unknown_charset = pymysql.charset.Charset(None, None, None, None)%0A try:%0A return original_charset_by_id(*args, **kwargs)%0A except KeyError:%0A return unknown_charset%0Apymysql.connections.charset_by_id = charset_wrapper%0A%0A
@backoff
|
13af44ee804508afc85711d2f0a0f4c9a09b131e
|
add logging to rpc connection
|
src/payout.py
|
src/payout.py
|
import json
import socket
import time
from httplib import CannotSendRequest
from threading import Timer
from bitcoinrpc.authproxy import JSONRPCException
from src import database
from src.utils import get_rpc
__author__ = 'sammoth'
def pay(app, log):
"""
Pay all users who have a balance greater than the minimum payout
:param log:
:param rpc:
:param app:
:return:
"""
log.info('payout started')
# get the credit details from the database
conn = database.get_db(app)
db = conn.cursor()
db.execute("SELECT c.id,c.key,c.reward,u.address FROM credits AS c INNER JOIN "
"users AS u on u.key=c.key WHERE c.paid=0")
rewards = db.fetchall()
# Calculate the total credit for each unique address
user_rewards = {}
for reward in rewards:
if reward[3] not in user_rewards:
user_rewards[reward[3]] = 0.00
user_rewards[reward[3]] += float(reward[2])
# remove those which don't meet the minimum payout threshold
# and round to 6dp
user_payouts = user_rewards.copy()
for address in user_rewards:
if user_rewards[address] < float(app.config['pool.minimum_payout']):
del(user_payouts[address])
continue
user_payouts[address] = round(float(user_payouts[address]), 6)
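    # e.g. with minimum_payout 1.0, user_rewards {'addrA': 2.3456789,
    # 'addrB': 0.5} reduces to user_payouts {'addrA': 2.345679}
    # (illustrative addresses and amounts)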
if not user_payouts:
log.info('no-one to payout to: %s', user_rewards)
timer_time = 86400.0
else:
# SendMany from nud. Report any error to log output
try:
# get an rpc connection
rpc = get_rpc(app)
rpc.sendmany("", user_payouts)
log.info('payout successful: \'%s\'', json.dumps(user_payouts))
# mark credits to paid addresses as paid
for reward in rewards:
if reward[3] in user_payouts:
db.execute('UPDATE credits SET paid=1 WHERE id=%s', (reward[0],))
# set the timer for the next payout
timer_time = 86400.0
except JSONRPCException as e:
log.error('Payout failed - %s: \'%s\'', e.message, json.dumps(user_payouts))
timer_time = 120.0
except (socket.error, CannotSendRequest, ValueError):
log.error('Payout failed - no connection with nud: \'%s\'', json.dumps(
user_payouts))
timer_time = 120.0
# reset timer
payout_timer = Timer(timer_time, pay,
kwargs={'app': app, 'log': log})
payout_timer.name = 'payout_timer'
payout_timer.daemon = True
payout_timer.start()
# update the next payout time
db.execute('UPDATE info SET value=%s WHERE key=%s', (int(time.time() + timer_time),
'next_payout_time'))
conn.commit()
conn.close()
|
Python
| 0
|
@@ -1565,24 +1565,29 @@
get_rpc(app
+, log
)%0A
|
881c745646f3f638527d07bd1cfab9a443950f23
|
add get_one method
|
tempoiq/protocol/row.py
|
tempoiq/protocol/row.py
|
from tempoiq.temporal.validate import convert_iso_stamp
from query.selection import AndClause, Compound, OrClause, ScalarSelector
class Row(object):
"""Data from one or more sensors at a single timestamp. Returned when
reading sensor data.
Example values dict of a row with a single sensor, *temperature*\ , on a
single device, *test1*\ ::
{'test1': {'temperature': 500.0} }
:var timestamp: DateTime of the sensor data
:var values: dict mapping device key to a dict of sensor keys to values
"""
def __init__(self, row_json):
self.timestamp = convert_iso_stamp(row_json['t'])
self.values = row_json['data']
def __getitem__(self, key):
return self.values[key]
def __iter__(self):
for device in self.values:
for sensor in self.values[device]:
yield ((device, sensor), self.values[device][sensor])
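# Iterating the Row docstring example above yields a single pair:
# (('test1', 'temperature'), 500.0)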
class StreamInfo(object):
def __init__(self, headers):
self.headers = headers
def filter(self, selection):
evaluator = SelectionEvaluator(selection)
return evaluator.filter(self.headers)
class SelectionEvaluator(object):
def __init__(self, selection):
self.selection = selection
def filter(self, headers):
for header in headers:
result = self._evaluate_selector(self.selection.selection, header)
if result:
yield header
def _evaluate_compound_clause(self, clause, header):
if isinstance(clause, AndClause):
return self._evaluate_and_clause(clause, header)
elif isinstance(clause, OrClause):
return self._evaluate_or_clause(clause, header)
else:
raise ValueError('Invalid compound clause in selection')
def _evaluate_and_clause(self, clause, header):
for selector in clause.selectors:
matches = self._evaluate_selector(selector, header)
if not matches:
return False
return True
def _evaluate_or_clause(self, clause, header):
all_matches = []
for selector in clause.selectors:
matches = self._evaluate_selector(selector, header)
all_matches.append(matches)
if any(all_matches):
return True
return False
def _evaluate_selector_on_object(self, selector, header, object_type):
if selector.key == 'key':
return selector.value == header[object_type]['key']
elif selector.key == 'name':
return selector.value == header[object_type]['name']
else:
key = selector.value.keys()[0]
header_value = header[object_type]['attributes'].get(key)
return selector.value[key] == header_value
def _evaluate_device_selector(self, selector, header):
return self._evaluate_selector_on_object(selector, header, 'device')
def _evaluate_sensor_selector(self, selector, header):
return self._evaluate_selector_on_object(selector, header, 'sensor')
def _evaluate_scalar_selector(self, selector, header):
if selector.selection_type == 'devices':
return self._evaluate_device_selector(selector, header)
elif selector.selection_type == 'sensors':
return self._evaluate_sensor_selector(selector, header)
else:
raise ValueError('Invalid selection type in selection')
def _evaluate_selector(self, selector, header):
if isinstance(selector, Compound):
return self._evaluate_compound_clause(selector, header)
elif isinstance(selector, ScalarSelector):
return self._evaluate_scalar_selector(selector, header)
else:
raise ValueError('Invalid selector in selection')
|
Python
| 0.000002
|
@@ -1125,16 +1125,409 @@
aders)%0A%0A
+ def get_one(self, selection):%0A evaluator = SelectionEvaluator(selection)%0A results = %5Br for r in evaluator.filter(self.headers)%5D%0A if len(results) %3C 1:%0A raise ValueError('Selection would return no results')%0A elif len(results) %3E 1:%0A raise ValueError('Selection would return more than one result')%0A else:%0A return results%5B0%5D%0A%0A
%0Aclass S
|
2b8b0ea4c87f550dea60ad9dc769c822f181e397
|
raise error on unknown top level element
|
src/pyrate.py
|
src/pyrate.py
|
#!/usr/bin/env python3
#
# Copyright (C) 2015 BMW Car IT GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import sys
import datetime
import argparse
import yaml
from exception import ParseException
from model.common import needs_token
from model.teststep import TestStep
from output.terminal import *
from util import duration
__version__ = "0.1"
KEY_TESTCASE = 'testcase'
def create_step(yaml_tree, name, shared_steps):
if type(yaml_tree) is str:
# the step refers to a shared step
if yaml_tree not in shared_steps:
raise ParseException(
"%s '%s': undefined reference to %s '%s'" %
(KEY_TESTCASE, name, TestStep.KEY, yaml_tree))
return shared_steps[yaml_tree]
elif type(yaml_tree) is dict:
for key, value in yaml_tree.items():
if key != TestStep.KEY:
raise ParseException("%s '%s': unexpected token '%s'" %
(KEY_TESTCASE, name, key))
return TestStep(value)
else:
raise ParseException("%s '%s': unexpected type %s" %
(KEY_TESTCASE, name, type(yaml_tree)))
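# Illustrative YAML for the two step forms create_step accepts (names
# hypothetical): either a bare string referencing a shared step, or a
# one-key mapping under TestStep.KEY holding the inline step definition:
#
#   steps:
#     - some_shared_step
#     - <TestStep.KEY>: {...}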
class TestCase:
KEY = 'testcase'
KEY_NAME = 'name'
KEY_STEPS = 'steps'
KEY_FATAL = 'fatal'
def __init__(self, yaml_tree, shared_steps):
self.name = None
self.steps = None
self.fatal = False
self.failed = False
for key, value in yaml_tree.items():
if key == self.KEY_NAME:
self.name = value
elif key == self.KEY_STEPS:
self.steps = []
for yamlStep in value:
self.steps.append(create_step(yamlStep, self.name,
shared_steps))
elif key == self.KEY_FATAL:
if type(value) is not bool:
raise ParseException("%s '%s': error parsing %s (%s) : "
"must be a bool" %
(self.KEY,
self.name,
self.KEY_FATAL,
value))
self.fatal = value
else:
raise ParseException("%s (%s): Unknown token '%s'" % (
self.KEY, self.name, key))
needs_token(self.name, self.KEY, self.KEY_NAME, self.name)
needs_token(self.steps, self.KEY, self.KEY_STEPS, self.name)
def run(self, summary):
start = datetime.datetime.now()
print("%s %s" % (STATUS_SEP, self.name))
summary.start_test_case()
for step in self.steps:
# run returns false if a fatal test step failed
if not step.run(self, summary):
break
print("%s %s : %d tests (%d ms total)\n" %
(STATUS_SEP, self.name, len(self.steps), duration(start)))
# check if fatal and at least one failure
failed = [step for step in self.steps if step.failed]
return not (len(failed) > 0 and self.fatal)
class TestSummary:
def __init__(self):
self.failedTestCases = 0
self.failedTestSteps = 0
self.testCaseSummaries = []
self.testCasesRun = 0
self.testStepsRun = 0
def start_test_case(self):
self.testCasesRun += 1
def start_test_step(self):
self.testStepsRun += 1
def main():
parser = argparse.ArgumentParser()
parser.add_argument("file", help="the test specification")
parser.add_argument("-d", "--dry",
help="dry run (only parse the test specification)",
action="store_true")
args = parser.parse_args()
start = datetime.datetime.now()
testspec = yaml.safe_load(open(args.file, "r"))
steps = {}
cases = []
try:
# first find all shared test steps
for item in testspec:
for key, value in item.items():
if key == TestStep.KEY:
new_step = TestStep(value)
steps[new_step.name] = new_step
# now we can parse all test cases
for item in testspec:
for key, value in item.items():
if key == KEY_TESTCASE:
new_case = TestCase(value, steps)
cases.append(new_case)
except ParseException as e:
print("Parse error: %s" % e, file=sys.stderr)
sys.exit(1)
# skip remaining stuff when doing a dry run
if args.dry:
print("Parsing the test specification succeeded. "
"Skip tests due to dry run.")
sys.exit(0)
summary = TestSummary()
for testcase in cases:
if not testcase.run(summary):
# break on fatal failure
break
print(STATUS_SEP)
print("%s %d tests from %d test cases run. (%d ms total" % (
STATUS_END, summary.testStepsRun,
summary.testCasesRun, duration(start)))
if summary.failedTestCases > 0:
print("failed")
else:
print(STATUS_PASSED)
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -4958,16 +4958,177 @@
w_case)%0A
+ elif key == TestStep.KEY:%0A pass%0A else:%0A raise ParseException(%22unexpected token '%25s'%22 %25 key)%0A
exce
|
3c2887d5589cf634faf9b4466b4fb3711581a2b6
|
rename cache to shops
|
src/webapp.py
|
src/webapp.py
|
#!/usr/bin/env python
import os
import json
import uuid
import tornado.ioloop
import tornado.web
import tornado.options
tornado.options.define('cookie_secret', default='sssecccc', help='Change this to a real secret')
tornado.options.define('favicon', default='static/favicon.ico', help='Path to favicon.ico')
tornado.options.define('static_path', default='static/', help='Path to static items')
tornado.options.define('port', default=8888, help='Port to run webservice')
tornado.options.define('config', default='server.conf', help='Config file location')
JSON_CONTENT = 'application/vnd.api+json'
cache = {}
class ProductHandler(tornado.web.RequestHandler):
def get(self, shopid):
if shopid not in cache:
inventory = {'pizzas':
[ {'id': 1, 'name': 'Pizza lagano', 'price': 45},
{'id': 2, 'name': 'Pizza vegan', 'price': 50},
{'id': 3, 'name': 'Pizza %s' % shopid, 'price': 55},
],
'toppings': [
{'id': 1, 'name': 'garlic', 'price': 1},
{'id': 2, 'name': 'extra cheese', 'price': 5},
{'id': 3, 'name': 'pepperoni', 'price': 2}
],
'sizes': [
{'id': 28, 'name': '28 cm', 'price': -5},
{'id': 32, 'name': '32 cm', 'price': 0},
{'id': 36, 'name': '36 cm', 'price': 5}
]
}
cache[shopid] = json.dumps(inventory)
self.set_header('Content-Type', JSON_CONTENT)
if not self.get_cookie('user'):
self.set_cookie('user', str(uuid.uuid1()))
self.write(cache[shopid])
def post(self, shopid):
if shopid not in cache:
raise tornado.web.HTTPError(404)
if self._check_header('Content-Type') and self._check_header('Accept'):
self.set_header('Content-Type', JSON_CONTENT)
try:
price = self.validate_content(shopid)
if price > 0:
self.write(str(price))
else:
raise tornado.web.HTTPError(400)
except ValueError:
raise tornado.web.HTTPError(400)
else:
raise tornado.web.HTTPError(406)
def _validate_content(self, shopid):
content = json.loads(self.request.body)
try:
inventory = json.loads(cache[shopid])
pizzas = dict([(x['id'], x) for x in inventory['pizzas']])
sizes = dict([(x['id'], x) for x in inventory['sizes']])
toppings = dict([(x['id'], x) for x in inventory['toppings']])
if not isinstance(content, list):
return -1
price = 0
for piece in content:
if not isinstance(piece, dict):
return -1
if piece['id'] not in pizzas or piece['size'] not in sizes:
return -1
price += pizzas[piece['id']]['price']
price += sizes[piece['size']]['price']
if 'toppings' in piece:
for t in piece['toppings']:
price += toppings[t['id']]['price']
return price
except Exception:
return -1
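    # An order body this method accepts, priced against the inventory built
    # in get() above (illustrative):
    #   [{"id": 1, "size": 32, "toppings": [{"id": 2}]}] -> 45 + 0 + 5 = 50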
def _check_header(self, key, value=None):
return key in self.request.headers and self.request.headers.get(key).lower() == (value or JSON_CONTENT).lower()
def main():
tornado.options.parse_command_line()
options = tornado.options.options
if os.path.exists(options.config):
tornado.options.parse_config_file(options.config)
handlers = [
(r'/api/products/([^/]+)/', ProductHandler)
]
settings = {
'static_path': os.path.join(os.path.dirname(__file__), '..', options.static_path),
'cookie_secret': options.cookie_secret,
'login_url': '/login',
'xsrf_cookies': False,
'autoreload': True
}
application = tornado.web.Application(handlers, **settings)
application.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
Python
| 0.000002
|
@@ -592,21 +592,21 @@
+json'%0A%0A
-cache
+shops
= %7B%7D%0A%0Ac
@@ -698,37 +698,37 @@
f shopid not in
-cache
+shops
:%0A in
@@ -1530,21 +1530,21 @@
-cache
+shops
%5Bshopid%5D
@@ -1736,21 +1736,21 @@
f.write(
-cache
+shops
%5Bshopid%5D
@@ -1805,21 +1805,21 @@
not in
-cache
+shops
:%0A
@@ -2041,16 +2041,17 @@
= self.
+_
validate
@@ -2496,13 +2496,13 @@
ads(
-cache
+shops
%5Bsho
|
e433bf1c6b8777921cdec4f84f7fc875a8f4677b
|
Fix DataProxy.partial()
|
umongo/data_proxy.py
|
umongo/data_proxy.py
|
from marshmallow import ValidationError, missing
from .abstract import BaseDataObject
from .exceptions import FieldNotLoadedError
__all__ = ('DataProxy', 'missing')
class DataProxy:
__slots__ = ('not_loaded_fields', '_schema', '_fields', '_data',
'_modified_data', '_fields_from_mongo_key')
def __init__(self, schema, data=None):
self.not_loaded_fields = ()
self._schema = schema
self._fields = schema.fields
self._data = {}
self._modified_data = set()
fields_from_mongo_key = {}
for k, v in self._fields.items():
if v.attribute:
k = v.attribute
fields_from_mongo_key[k] = v
self._fields_from_mongo_key = fields_from_mongo_key
self.load(data if data else {})
@property
def partial(self):
return bool(self.not_loaded_fields)
def to_mongo(self, update=False):
if update:
return self._to_mongo_update()
else:
return self._to_mongo()
def _to_mongo(self):
mongo_data = {}
for k, v in self._data.items():
field = self._fields_from_mongo_key[k]
v = field.serialize_to_mongo(v)
if v is not missing:
mongo_data[k] = v
return mongo_data
def _to_mongo_update(self):
mongo_data = {}
set_data = {}
unset_data = []
for name, field in self._fields.items():
name = field.attribute or name
v = self._data[name]
if name in self._modified_data or (
isinstance(v, BaseDataObject) and v.is_modified()):
v = field.serialize_to_mongo(v)
if v is missing:
unset_data.append(name)
else:
set_data[name] = v
if set_data:
mongo_data['$set'] = set_data
if unset_data:
mongo_data['$unset'] = {k: "" for k in unset_data}
return mongo_data or None
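    # e.g. _to_mongo_update() -> {'$set': {'name': 'foo'}, '$unset': {'age': ''}}
    # when both kinds of change are present, or None if nothing was modified
    # (field names illustrative)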
def from_mongo(self, data, partial=False):
self._data = {}
for k, v in data.items():
field = self._fields_from_mongo_key[k]
self._data[k] = field.deserialize_from_mongo(v)
if partial:
self._collect_partial_fields(data.keys(), as_mongo_fields=True)
self._add_missing_fields()
self.clear_modified()
def dump(self, schema=None):
schema = schema or self._schema
data, err = schema.dump(self._data)
if err:
raise ValidationError(err)
return data
def _mark_as_modified(self, key):
self._modified_data.add(key)
def update(self, data, schema=None):
schema = schema or self._schema
# Always use marshmallow partial load to skip required checks
loaded_data, err = schema.load(data, partial=True)
if err:
raise ValidationError(err)
self._data.update(loaded_data)
for key in loaded_data:
self._mark_as_modified(key)
def load(self, data, partial=False, schema=None):
schema = schema or self._schema
# Always use marshmallow partial load to skip required checks
loaded_data, err = schema.load(data, partial=True)
if err:
raise ValidationError(err)
self._data = loaded_data
if partial:
self._collect_partial_fields(data)
self._add_missing_fields()
self.clear_modified()
def get_by_mongo_name(self, name):
value = self._data[name]
if self._fields_from_mongo_key[name] in self.not_loaded_fields:
raise FieldNotLoadedError(name)
return value
def set_by_mongo_name(self, name, value):
self._data[name] = value
if self._fields_from_mongo_key[name] in self.not_loaded_fields:
raise FieldNotLoadedError(name)
self._mark_as_modified(name)
def delete_by_mongo_name(self, name):
self.set_by_mongo_name(name, missing)
def _get_field(self, name, to_raise):
if name not in self._fields:
raise to_raise(name)
field = self._fields[name]
if field in self.not_loaded_fields:
raise FieldNotLoadedError(name)
name = field.attribute or name
return name, field
def get(self, name, to_raise=KeyError):
name, field = self._get_field(name, to_raise)
value = self._data[name]
if value is missing and field.default is not missing:
return field.default
return value
def set(self, name, value, to_raise=KeyError):
name, field = self._get_field(name, to_raise)
value = field._deserialize(value, name, None)
field._validate(value)
self._data[name] = value
self._mark_as_modified(name)
def delete(self, name, to_raise=KeyError):
name, _ = self._get_field(name, to_raise)
self._data[name] = missing
self._mark_as_modified(name)
def __repr__(self):
return "<DataProxy(%s)>" % self._data
def __eq__(self, other):
if isinstance(other, dict):
return self._data == other
else:
return self._data == other._data
def get_modified_fields_by_mongo_name(self):
return self._modified_data
def get_modified_fields(self):
modified = []
for name, field in self._fields.items():
value_name = field.attribute or name
if value_name in self._modified_data:
modified.append(name)
return modified
def clear_modified(self):
self._modified_data.clear()
for v in self._data.values():
if isinstance(v, BaseDataObject):
v.clear_modified()
def is_modified(self):
return (bool(self._modified_data) or
any(isinstance(v, BaseDataObject) and v.is_modified()
for v in self._data.values()))
def _collect_partial_fields(self, loaded_fields, as_mongo_fields=False):
if as_mongo_fields:
self.not_loaded_fields = set(
self._fields_from_mongo_key[k]
for k in self._fields_from_mongo_key.keys() - set(loaded_fields))
else:
self.not_loaded_fields = set(
self._fields[k] for k in self._fields.keys() - set(loaded_fields))
def _add_missing_fields(self):
# TODO: we should be able to do that by configuring marshmallow...
for name, field in self._fields.items():
mongo_name = field.attribute or name
if mongo_name not in self._data:
if callable(field.missing):
self._data[mongo_name] = field.missing()
else:
self._data[mongo_name] = field.missing
|
Python
| 0.000001
|
@@ -2333,24 +2333,78 @@
ields=True)%0A
+ else:%0A self.not_loaded_fields = ()%0A
self
@@ -3477,24 +3477,78 @@
ields(data)%0A
+ else:%0A self.not_loaded_fields = ()%0A
self
|
c55e9136ee9c86dcd4088ba416043dbff7e65eac
|
Fix Fast.com autoupdate (#57552)
|
homeassistant/components/fastdotcom/__init__.py
|
homeassistant/components/fastdotcom/__init__.py
|
"""Support for testing internet speed via Fast.com."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from fastdotcom import fast_com
import voluptuous as vol
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant, ServiceCall
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType
DOMAIN = "fastdotcom"
DATA_UPDATED = f"{DOMAIN}_data_updated"
_LOGGER = logging.getLogger(__name__)
CONF_MANUAL = "manual"
DEFAULT_INTERVAL = timedelta(hours=1)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL): vol.All(
cv.time_period, cv.positive_timedelta
),
vol.Optional(CONF_MANUAL, default=False): cv.boolean,
}
)
},
extra=vol.ALLOW_EXTRA,
)
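# An illustrative configuration.yaml entry matched by CONFIG_SCHEMA
# (values hypothetical):
#
#   fastdotcom:
#     scan_interval: "01:00:00"
#     manual: false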
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Fast.com component."""
conf = config[DOMAIN]
data = hass.data[DOMAIN] = SpeedtestData(hass)
if not conf[CONF_MANUAL]:
async_track_time_interval(hass, data.update, conf[CONF_SCAN_INTERVAL])
def update(service_call: ServiceCall | None = None) -> None:
"""Service call to manually update the data."""
data.update()
hass.services.async_register(DOMAIN, "speedtest", update)
hass.async_create_task(async_load_platform(hass, "sensor", DOMAIN, {}, config))
return True
class SpeedtestData:
"""Get the latest data from fast.com."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the data object."""
self.data: dict[str, Any] | None = None
self._hass = hass
def update(self) -> None:
"""Get the latest data from fast.com."""
_LOGGER.debug("Executing fast.com speedtest")
self.data = {"download": fast_com()}
dispatcher_send(self._hass, DATA_UPDATED)
|
Python
| 0
|
@@ -104,16 +104,26 @@
e import
+ datetime,
timedel
@@ -2030,16 +2030,45 @@
ate(self
+, now: datetime %7C None = None
) -%3E Non
|
2c8151bb9a4300f4f7c1314aa80c3f612998c58b
|
Fix bad attribute in feeder handler.
|
src/main/resources/vertigo/_feeder.py
|
src/main/resources/vertigo/_feeder.py
|
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import org.vertx.java.core.Handler
import org.vertx.java.core.json.JsonObject
from core.javautils import map_from_java, map_to_java
from ._component import Component
class Feeder(Component):
"""A data feeder."""
type = 'feeder'
RETRY_UNLIMITED = -1
_start_handler = None
_ack_handler = None
def __init__(self, feeder):
super(Feeder, self).__init__(feeder)
self._feeder = feeder
def set_feed_queue_max_size(self, queue_size):
"""The maximum number of messages processing at any given time."""
self._feeder.setFeedQueueMaxSize(queue_size)
def get_feed_queue_max_size(self):
"""The maximum number of messages processing at any given time."""
return self._feeder.getFeedQueueMaxSize()
max_queue_size = property(get_feed_queue_max_size, set_feed_queue_max_size)
def set_auto_retry(self, retry):
"""Indicates whether to automatically retry sending failed messages."""
self._feeder.setAutoRetry(retry)
def is_auto_retry(self):
"""Indicates whether to automatically retry sending failed messages."""
return self._feeder.isAutoRetry()
auto_retry = property(is_auto_retry, set_auto_retry)
def set_auto_retry_attempts(self, attempts):
"""Indicates how many times to retry sending failed messages."""
self._feeder.setAutoRetryAttempts(attempts)
def get_auto_retry_attempts(self):
"""Indicates how many times to retry sending failed messages."""
return self._feeder.getAutoRetryAttempts()
auto_retry_attempts = property(get_auto_retry_attempts, set_auto_retry_attempts)
def set_feed_interval(self, interval):
"""Indicates the interval at which to poll for new messages."""
self._executor.setFeedInterval(interval)
return self
def get_feed_interval(self):
"""Indicates the interval at which to poll for new messages."""
return self._executor.getFeedInterval()
feed_interval = property(get_feed_interval, set_feed_interval)
def feed_queue_full(self):
"""Indicates whether the feeder queue is full."""
return self._feeder.feedQueueFull()
def ack_handler(self, handler):
"""Sets a default ack handler on the feeder.
Keyword arguments:
@param handler: A default ack handler to be used when no other ack handler
is present.
@return: The added handler
"""
self._ack_handler = handler
return handler
def feed_handler(self, handler):
"""Sets a feed handler on the feeder.
Keyword arguments:
@param handler: A handler to be called with the feeder as its only argument.
@return: The feeder instance.
"""
self._feeder.feedHandler(_FeedHandler(handler, self))
return self
def drain_handler(self, handler):
"""Sets a drain handler on the feeder.
Keyword arguments:
@param handler: A handler to be called when the feeder is prepared to
        accept new messages.
@return: self
"""
self._feeder.drainHandler(_VoidHandler(handler))
return self
def _convert_data(self, data):
return org.vertx.java.core.json.JsonObject(map_to_java(data))
def emit(self, body, stream=None, handler=None):
"""Emits a message from the feeder.
Keyword arguments:
@param body: A dictionary of data to emit.
@param stream: An optional stream to which to emit the data. If no stream
is provided then the data will be emitted to the default stream.
@param handler: An optional asynchronous handler to be called once the
message has been fully processed. Feeders implement a special type of
ack handler. Whether the message is successfully processed or fails,
the second argument to the ack handler will always be the unique message
correlation identifier.
@return: The unique emitted message correlation identifier.
"""
if handler is None and self._ack_handler is not None:
handler = self._ack_handler
if stream is not None:
if handler is not None:
return self._feeder.emit(stream, self._convert_data(body), _AckHandler(handler)).correlationId()
else:
return self._feeder.emit(stream, self._convert_data(body)).correlationId()
else:
if handler is not None:
return self._feeder.emit(self._convert_data(body), _AckHandler(handler)).correlationId()
else:
return self._feeder.emit(self._convert_data(body)).correlationId()
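# A hedged usage sketch (names illustrative): the ack handler passed to
# Feeder.emit receives (error, correlation_id), matching _AckHandler below.
#
#   feeder.emit({'count': 1}, handler=lambda error, message_id: None)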
class _FeedHandler(org.vertx.java.core.Handler):
"""A feed handler."""
def __init__(self, handler, feeder):
self._handler = handler
self._feeder = feeder
def handle(self, feeder):
self._handler(self.feeder)
class _AckHandler(org.vertx.java.core.AsyncResultHandler):
"""An ack handler."""
def __init__(self, handler):
self._handler = handler
def handle(self, result):
self._handler(result.cause(), result.result().correlationId())
class _VoidHandler(org.vertx.java.core.Handler):
"""A void handler."""
def __init__(self, handler):
self.handler = handler
def handle(self, void):
self.handler()
|
Python
| 0
|
@@ -5546,16 +5546,17 @@
er(self.
+_
feeder)%0A
|
d66995e313e99bff213cdc4eabe42d9f46e3599a
|
Add docstring
|
src/organizations/backends/modeled.py
|
src/organizations/backends/modeled.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2020, Ben Lopatin and contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer. Redistributions in binary
# form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with
# the distribution
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Invitations that use an invitation model
"""
import email.utils
from typing import List # noqa
from typing import Optional # noqa
from typing import Text # noqa
from typing import Tuple # noqa
from django.conf import settings
from django.contrib.auth.models import AbstractUser # noqa
from django.core.mail import EmailMessage
from django.http import HttpRequest # noqa
from django.http import HttpResponse # noqa
from django.http import HttpResponseForbidden
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.shortcuts import render
from django.template import loader
from django.urls import path
from django.utils.translation import gettext_lazy as _
from organizations.backends.defaults import InvitationBackend
from organizations.backends.forms import UserRegistrationForm
from organizations.base import AbstractBaseOrganization # noqa
from organizations.base import OrganizationInvitationBase # noqa
class ModelInvitation(InvitationBackend):
"""
"""
notification_subject = "organizations/email/notification_subject.txt"
notification_body = "organizations/email/notification_body.html"
invitation_subject = "organizations/email/modeled_invitation_subject.txt"
invitation_body = "organizations/email/modeled_invitation_body.html"
reminder_subject = "organizations/email/modeled_reminder_subject.txt"
reminder_body = "organizations/email/modeled_reminder_body.html"
invitation_join_template = "organizations/invitation_join.html"
form_class = UserRegistrationForm
def __init__(self, org_model=None, namespace=None):
super().__init__(org_model=org_model, namespace=namespace)
self.invitation_model = (
self.org_model.invitation_model
) # type: OrganizationInvitationBase
def get_invitation_queryset(self):
"""Return this to use a custom queryset that checks for expiration, for example"""
return self.invitation_model.objects.all()
def get_invitation_accepted_url(self):
"""Returns the redirect URL after user accepts invitation"""
return "/"
def get_invitation_accepted_registered_url(self):
"""Returns the redirect URL after new user accepts invitation"""
return self.get_invitation_accepted_url()
def activation_router(self, request, guid):
""""""
invitation = get_object_or_404(self.get_invitation_queryset(), guid=guid)
if invitation.invitee:
return redirect(self.get_invitation_accepted_url())
if request.user.is_authenticated:
return self.activate_existing_user_view(request, invitation)
else:
return self.activate_new_user_view(request, invitation)
def activate_existing_user_view(self, request, invitation):
# type: (HttpRequest, OrganizationInvitationBase) -> HttpResponse
""""""
if request.user == invitation.invited_by:
return HttpResponseForbidden(_("This is not your invitation"))
if request.method == "POST":
invitation.activate(request.user)
return redirect(self.get_invitation_accepted_url())
return render(
request, self.invitation_join_template, {"invitation": invitation}
)
def activate_new_user_view(self, request, invitation):
# type: (HttpRequest, OrganizationInvitationBase) -> HttpResponse
""""""
form = self.get_form(data=request.POST or None)
if request.method == "POST" and form.is_valid():
new_user = form.save() # type: AbstractUser
invitation.activate(new_user)
return redirect(self.get_invitation_accepted_registered_url())
return render(
request,
self.registration_form_template,
{"invitation": invitation, "form": form},
)
def get_urls(self):
# type: () -> List[path]
return [
path(
"<uuid:guid>/", view=self.activation_router, name="invitations_register"
)
]
@property
def urls(self):
# type: () -> Tuple[List[path], Text]
return self.get_urls(), self.namespace or "registration"
def invite_by_email(self, email, user, organization, **kwargs):
"""
Primary interface method by which one user invites another to join
Args:
email:
            user:
            organization:
**kwargs:
Returns:
an invitation instance
Raises:
MultipleObjectsReturned if multiple matching users are found
"""
# TODO(bennylope): verify no such user already?
# try:
# invitee = self.user_model.objects.get(email__iexact=email)
# except self.user_model.DoesNotExist:
# invitee = None
# TODO allow sending just the OrganizationUser instance
user_invitation = self.invitation_model.objects.create(
invitee_identifier=email.lower(),
invited_by=user,
organization=organization,
)
self.send_invitation(user_invitation)
return user_invitation
def send_invitation(self, invitation, **kwargs):
"""
Sends an invitation message for a specific invitation.
This could be overridden to do other things, such as sending a confirmation
email to the sender.
Args:
invitation:
Returns:
"""
return self.email_message(
invitation.invitee_identifier,
self.invitation_subject,
self.invitation_body,
invitation.invited_by,
**kwargs
).send()
def email_message(
self,
recipient, # type: Text
subject_template, # type: Text
body_template, # type: Text
sender=None, # type: Optional[AbstractUser]
message_class=EmailMessage,
**kwargs
):
"""
Returns an invitation email message. This can be easily overridden.
For instance, to send an HTML message, use the EmailMultiAlternatives message_class
        and attach the additional content.
"""
from_email = "%s %s <%s>" % (
sender.first_name,
sender.last_name,
email.utils.parseaddr(settings.DEFAULT_FROM_EMAIL)[1],
)
reply_to = "%s %s <%s>" % (sender.first_name, sender.last_name, sender.email)
headers = {"Reply-To": reply_to}
kwargs.update({"sender": sender, "recipient": recipient})
subject_template = loader.get_template(subject_template)
body_template = loader.get_template(body_template)
subject = subject_template.render(
kwargs
).strip() # Remove stray newline characters
body = body_template.render(kwargs)
return message_class(subject, body, from_email, [recipient], headers=headers)
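    # e.g. with a sender named 'Ada Lovelace' and DEFAULT_FROM_EMAIL set to
    # 'Site <noreply@example.com>' (both illustrative), from_email becomes
    # 'Ada Lovelace <noreply@example.com>'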
|
Python
| 0.000005
|
@@ -2383,22 +2383,64 @@
%0A %22%22%22
-%0A%0A
+Invitation backend for model-tracked invitations
%22%22%22%0A%0A
|
57cd7ea1500827942e042b4aabf2c44d489c430f
|
remove unneeed assert
|
ceph_installer/tests/controllers/test_mon.py
|
ceph_installer/tests/controllers/test_mon.py
|
from ceph_installer.controllers import mon
class TestMonController(object):
def setup(self):
self.configure_data = dict(
monitor_secret="secret",
cluster_network="0.0.0.0/24",
public_network="0.0.0.0/24",
host="node1",
monitor_interface="eth0",
fsid="1720107309134",
)
def test_index_get(self, session):
result = session.app.get("/api/mon/")
assert result.status_int == 200
def test_install_missing_hosts(self, session):
result = session.app.post_json("/api/mon/install/", params=dict(),
expect_errors=True)
assert result.status_int == 400
def test_install_bogus_field(self, session):
data = dict(hosts=["google.com"], bogus="foo")
result = session.app.post_json("/api/mon/install/", params=data,
expect_errors=True)
assert result.status_int == 400
def test_install_hosts_not_a_list(self, session):
data = dict(hosts="google.com")
result = session.app.post_json("/api/mon/install/", params=data,
expect_errors=True)
assert result.status_int == 400
def test_install_hosts(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
data = dict(hosts=["node1"])
result = session.app.post_json("/api/mon/install/", params=data,
expect_errors=True)
assert result.json['endpoint'] == '/api/mon/install/'
assert result.json['identifier'] is not None
def test_configure_missing_fields(self, session):
data = dict()
result = session.app.post_json("/api/mon/configure/", params=data,
expect_errors=True)
assert result.status_int == 400
def test_configure_hosts(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
result = session.app.post_json("/api/mon/configure/", params=self.configure_data)
assert result.json['endpoint'] == '/api/mon/configure/'
assert result.json['identifier'] is not None
def test_configure_monitors_not_a_list(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
data = self.configure_data.copy()
data["monitors"] = "invalid"
result = session.app.post_json("/api/mon/configure/", params=data,
expect_errors=True)
assert result.status_int == 400
assert "monitors" in result.json["message"]
def test_configure_monitors_not_a_list_of_objects(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
data = self.configure_data.copy()
data["monitors"] = "['mon1', 'mon2']"
result = session.app.post_json("/api/mon/configure/", params=data,
expect_errors=True)
assert result.status_int == 400
assert "monitors" in result.json["message"]
def test_configure_monitors_missing_host_key(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
mons = [{"foo": "bar"}]
data = self.configure_data.copy()
data['monitors'] = mons
result = session.app.post_json("/api/mon/configure/", params=data,
expect_errors=True)
assert result.status_int == 400
assert "monitors" in result.json["message"]
def test_configure_monitors_missing_interface_key(self, session, monkeypatch):
monkeypatch.setattr(mon.call_ansible, 'apply_async', lambda args, kwargs: None)
mons = [{"host": "mon0.host"}]
data = self.configure_data.copy()
data['monitors'] = mons
result = session.app.post_json("/api/mon/configure/", params=data,
expect_errors=True)
assert result.status_int == 400
assert "monitors" in result.json["message"]
def test_configure_valid_monitors(self, session, monkeypatch):
def check(args, kwargs):
inventory = args[0]
hosts = inventory[0][1]
assert "node1 monitor_interface=eth0" in hosts
assert "mon1.host monitor_interface=eth1" in hosts
monkeypatch.setattr(mon.call_ansible, 'apply_async', check)
mons = [{"host": "mon1.host", "interface": "eth1"}]
data = self.configure_data.copy()
data["monitors"] = mons
result = session.app.post_json("/api/mon/configure/", params=data)
assert result.status_int == 200
assert result.json['endpoint'] == '/api/mon/configure/'
assert result.json['identifier'] is not None
|
Python
| 0
|
@@ -4822,48 +4822,8 @@
ta)%0A
- assert result.status_int == 200%0A
|
57841be69b9952a638f917923aa7d7acdb8bd8ba
|
Fix sync_current_joint
|
src/poppy_inverse_kinematics/model.py
|
src/poppy_inverse_kinematics/model.py
|
# coding= utf8
"""
.. module:: model
"""
import numpy as np
from . import forward_kinematics as fk
from . import inverse_kinematic as ik
from . import robot_utils
class Model():
"""Base model class
:param configuration: The configuration of the robot
:type configuration: model_config
:param computation_method: Method for the computation of the Forward Kinematic
:type computation_method: string
:param simplify: Simplify symbolic expressions (hybrid and symbolic computation methods only)
:type simplify: bool
"""
def __init__(self, configuration, pypot_object=None, computation_method="default", simplify=False):
# Configuration 2D
self.config = configuration
self.arm_length = self.get_robot_length()
self.computation_method = computation_method
self.pypot_object = pypot_object
self.simplify = simplify
self.transformation_lambda = fk.compute_transformation(self.config.parameters, method=self.computation_method, representation=self.config.representation, model_type=self.config.model_type, simplify=self.simplify)
# initialize starting configuration
self.current_joints = np.zeros(self.config.joints_number)
self.current_pose = self.forward_kinematic(self.current_joints)
self.target = self.current_pose
def forward_kinematic(self, q=None):
"""Renvoie la position du end effector en fonction de la configuration des joints"""
if q is None:
q = self.current_joints
# calculate the forward kinematic
if self.computation_method == "default":
# Special args for the default method
X = fk.get_end_effector(nodes_angles=q, method=self.computation_method, transformation_lambda=self.transformation_lambda, representation=self.config.representation, model_type=self.config.model_type, robot_parameters=self.config.parameters)
else:
X = fk.get_end_effector(nodes_angles=q, method=self.computation_method, transformation_lambda=self.transformation_lambda)
return X
def inverse_kinematic(self, target=None, initial_position=None):
"""Computes the IK for given target"""
# If absolute_target is not given, use self.target
if target is None:
target = self.target
if initial_position is None:
initial_position = self.current_joints
# Choose computation method
if self.computation_method == "default":
return ik.inverse_kinematic(target, self.transformation_lambda, initial_position, fk_method=self.computation_method, model_type=self.config.model_type, representation=self.config.representation, robot_parameters=self.config.parameters, bounds=self.config.bounds, first_active_joint=self.config.first_active_joint)
else:
return ik.inverse_kinematic(target, self.transformation_lambda, initial_position, fk_method=self.computation_method, bounds=self.config.bounds, first_active_joint=self.config.first_active_joint)
def goto_target(self):
"""Déplace le robot vers la target donnée"""
# Compute goal joints
self.goal_joints = self.inverse_kinematic()
# Go to goal joints
self.goto_joints()
def goto_joints(self):
"""Move the robot according to the goal joints"""
self.sync_goal_joints()
self.sync_current_joints()
def sync_goal_joints(self):
"""Synchronize goal_joints value with goto_position value of Pypot object"""
if self.pypot_object is not None:
for index, joint in enumerate(self.config.parameters):
if joint["name"] != "last_joint":
                    # If the joint is not the last (virtual) joint:
angle = robot_utils.convert_angle_to_pypot(self.goal_joints[index], joint)
# print(joint["name"], self.goal_joints[index] * 180 / np.pi, angle)
# Use the name of the joint to map to the motor name
getattr(self.pypot_object, joint["name"]).goal_position = angle
def sync_current_joints(self, pypot_sync=True):
"""Get current joints value from robot"""
if self.pypot_object is not None and pypot_sync:
# If there is an attached robot, read the joint values from the robot
for index, joint in enumerate(self.config.parameters):
if joint["name"] != "last_joint":
angle = robot_utils.convert_angle_from_pypot(getattr(self.pypot_object, joint["name"]).goal_position, joint)
else:
angle = 0
self.current_joints[index] = angle
else:
            # Place the model directly in the desired position
self.current_joints = self.goal_joints
def plot_model(self, q=None, ax=None, show=True):
"""Plot the model"""
from . import plot_utils as pl
if q is None:
q = self.current_joints
if ax is None:
# If ax is not given, create one
ax = pl.init_3d_figure()
pl.plot_robot(self.config.parameters, q, ax, representation=self.config.representation, model_type=self.config.model_type)
pl.plot_basis(self.config.parameters, ax, self.arm_length)
# Plot the goal position
if self.target is not None:
pl.plot_target(self.target, ax)
if(show):
pl.show_figure()
def animate_model(self, targets_x, targets_y, targets_z):
"""Animate the model moving along the trajectory"""
from . import plot_utils as pl
import matplotlib.pyplot
fig = matplotlib.pyplot.figure()
ax = fig.add_subplot(111, projection='3d')
        # Create a line object
line = ax.plot([0, 0], [0, 0], [0, 0])[0]
        # Plot the trajectory and the basis frame
pl.plot_target_trajectory(targets_x, targets_y, targets_z, ax)
pl.plot_basis(self.config.parameters, ax)
IK_angles = []
nodes_angles = self.current_joints
for target in zip(targets_x, targets_y, targets_z):
IK_angles.append(self.inverse_kinematic(target, initial_position=nodes_angles))
nodes_angles = IK_angles[-1]
animation = matplotlib.animation.FuncAnimation(fig, pl.update_line, len(IK_angles), fargs=(self.config.parameters, IK_angles, line, self.config.representation, self.config.model_type), interval=50)
matplotlib.pyplot.show()
return animation
def get_robot_length(self):
"""Calcule la longueur du robot (tendu)"""
translations_vectors = [x["translation"] for x in self.config.parameters]
joints_lengths = [np.sqrt(sum([x**2 for x in vector]))
for vector in translations_vectors]
return sum(joints_lengths)
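        # e.g. a translation vector of [3, 4, 0] contributes a segment of
        # length 5 (its Euclidean norm) to the total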
def set_compliance(self, compliance=False):
"""Set the compliance of the underlying PyPot object of the model"""
if self.pypot_object is not None:
for motor in self.pypot_object.motors:
# For every joint of the model, set the PyPot compliance
if motor.name in self.config.joint_names[self.config.first_active_joint:]:
motor.compliant = compliance
|
Python
| 0.00014
|
@@ -4556,36 +4556,39 @@
joint%5B%22name%22%5D).
-goal
+present
_position, joint
|
1ebe34b5ae889ed5f78eead61abd7f0d3b71678f
|
Fix docstring
|
byceps/services/board/topic_query_service.py
|
byceps/services/board/topic_query_service.py
|
"""
byceps.services.board.topic_query_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from __future__ import annotations
from datetime import datetime
from typing import Optional
from sqlalchemy import select
from sqlalchemy.sql import Select
from ...database import db, paginate, Pagination
from .dbmodels.category import Category as DbCategory
from .dbmodels.posting import Posting as DbPosting
from .dbmodels.topic import Topic as DbTopic
from .transfer.models import BoardID, CategoryID, TopicID
def count_topics_for_board(board_id: BoardID) -> int:
"""Return the number of topics for that board."""
return db.session \
.query(DbTopic) \
.join(DbCategory) \
.filter(DbCategory.board_id == board_id) \
.count()
def find_topic_by_id(topic_id: TopicID) -> Optional[DbTopic]:
"""Return the topic with that id, or `None` if not found."""
return db.session.get(DbTopic, topic_id)
def get_topic(topic_id: TopicID) -> DbTopic:
"""Return the topic with that id."""
topic = find_topic_by_id(topic_id)
if topic is None:
raise ValueError(f'Unknown topic ID "{topic_id}"')
return topic
def find_topic_visible_for_user(
topic_id: TopicID, include_hidden: bool
) -> Optional[DbTopic]:
"""Return the topic with that id, or `None` if not found or
invisible for the user.
"""
query = db.session \
.query(DbTopic) \
.options(
db.joinedload(DbTopic.category),
)
if not include_hidden:
query = query.filter_by(hidden=False)
return query \
.filter_by(id=topic_id) \
.first()
def get_recent_topics(
board_id: BoardID, include_hidden: bool, limit: int
) -> list[DbTopic]:
"""Paginate topics in that board."""
query = _query_topics(include_hidden) \
.join(DbCategory) \
.filter(DbCategory.board_id == board_id) \
.filter(DbCategory.hidden == False) \
.order_by(DbTopic.last_updated_at.desc()) \
.limit(limit)
return db.session.execute(query).scalars().all()
def paginate_topics(
board_id: BoardID, include_hidden: bool, page: int, per_page: int
) -> Pagination:
"""Paginate topics in that board."""
items_query = _query_topics(include_hidden) \
.join(DbCategory) \
.filter(DbCategory.board_id == board_id) \
.filter(DbCategory.hidden == False) \
.order_by(DbTopic.last_updated_at.desc())
count_query = select(db.func.count(DbTopic.id))
if not include_hidden:
count_query = count_query.filter_by(hidden=False)
return paginate(
items_query, count_query, page, per_page, scalar_result=True
)
def get_all_topic_ids_in_category(category_id: CategoryID) -> set[TopicID]:
"""Return the IDs of all topics in the category."""
rows = db.session \
.query(DbTopic.id) \
.filter(DbTopic.category_id == category_id) \
.all()
return {row[0] for row in rows}
def paginate_topics_of_category(
category_id: CategoryID,
include_hidden: bool,
page: int,
per_page: int,
) -> Pagination:
"""Paginate topics in that category, as visible for the user.
Pinned topics are returned first.
"""
items_query = _query_topics(include_hidden) \
.filter_by(category_id=category_id) \
.order_by(DbTopic.pinned.desc(), DbTopic.last_updated_at.desc())
count_query = select(db.func.count(DbTopic.id))
if not include_hidden:
count_query = count_query.filter_by(hidden=False)
return paginate(
items_query, count_query, page, per_page, scalar_result=True
)
def _query_topics(include_hidden: bool) -> Select:
query = select(DbTopic) \
.options(
db.joinedload(DbTopic.category),
db.joinedload(DbTopic.last_updated_by),
db.joinedload(DbTopic.hidden_by),
db.joinedload(DbTopic.locked_by),
db.joinedload(DbTopic.pinned_by),
)
if not include_hidden:
query = query.filter_by(hidden=False)
return query
def find_default_posting_to_jump_to(
topic_id: TopicID, include_hidden: bool, last_viewed_at: datetime
) -> Optional[DbPosting]:
"""Return the posting of the topic to show by default, or `None`."""
postings_query = db.session \
.query(DbPosting) \
.filter_by(topic_id=topic_id)
if not include_hidden:
postings_query = postings_query.filter_by(hidden=False)
first_new_posting = postings_query \
.filter(DbPosting.created_at > last_viewed_at) \
.order_by(DbPosting.created_at.asc()) \
.first()
if first_new_posting is None:
# Current user has seen all postings so far, so show the last one.
return postings_query \
.order_by(DbPosting.created_at.desc()) \
.first()
return first_new_posting
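    # i.e. jump to the first posting newer than last_viewed_at, or fall back
    # to the newest posting once everything has already been seen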
|
Python
| 0.00003
|
@@ -1839,32 +1839,37 @@
ic%5D:%0A %22%22%22
-Paginate
+Return recent
topics in t
|
717db7509b586e59c06d06ad60be3ca5671e1c35
|
add support for circleci
|
src/pyquickhelper/pycode/ci_helper.py
|
src/pyquickhelper/pycode/ci_helper.py
|
"""
@file
@brief Helpers for CI
.. versionadded:: 1.3
"""
def is_travis_or_appveyor():
"""
    tells if this is a travis environment or appveyor
@return ``'travis'``, ``'appveyor'`` or ``None``
    The function should rely more on environment variables
``CI``, ``TRAVIS``, ``APPVEYOR``.
.. versionadded:: 1.3
"""
import sys
if "travis" in sys.executable:
return "travis"
import os
if os.environ.get("USERNAME", os.environ.get("USER", None)) == "appveyor" or \
os.environ.get("APPVEYOR", "").lower() in ("true", "1"):
return "appveyor"
return None
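    # Illustrative outcomes (environment-dependent): on Travis CI,
    # sys.executable typically contains 'travis' -> returns 'travis';
    # on a developer machine none of the markers match -> returns None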
|
Python
| 0
|
@@ -326,16 +326,84 @@
ed:: 1.3
+%0A%0A .. versionchanged:: 1.5%0A Takes into account *circleci*.
%0A %22%22%22
@@ -664,16 +664,105 @@
pveyor%22%0A
+ if os.environ.get('CIRCLECI', %22undefined%22) != %22undefined%22:%0A return %22circleci%22%0A
retu
|
06513cf504c7da5cc912743d2b8ad2b39320557d
|
update help menu link to sphinx
|
src/python/director/actionhandlers.py
|
src/python/director/actionhandlers.py
|
import os
from PythonQt import QtCore, QtGui
import director.applogic as app
import director.objectmodel as om
import director.ioUtils as io
import director.visualization as vis
from director import roboturdf
from director import otdfmodel
_lastDir = None
def getDefaultDirectory():
return _lastDir or os.getcwd()
def storeDefaultDirectory(filename):
global _lastDir
if os.path.isfile(filename):
filename = os.path.dirname(filename)
if os.path.isdir(filename):
_lastDir = filename
def onFileOpenDialog():
mainWindow = app.getMainWindow()
fileFilters = "Data Files (*.obj *.pcd *.ply *.stl *.vtk *.vtp *.wrl *.urdf *.otdf)";
filename = QtGui.QFileDialog.getOpenFileName(mainWindow, "Open...", getDefaultDirectory(), fileFilters)
if not filename:
return
storeDefaultDirectory(filename)
onOpenFile(filename)
def onOpenFile(filename):
if filename.lower().endswith('urdf'):
onOpenUrdf(filename)
elif filename.lower().endswith('otdf'):
onOpenOtdf(filename)
else:
onOpenGeometry(filename)
def onOpenGeometry(filename):
if filename.lower().endswith('wrl'):
onOpenVrml(filename)
return
polyData = io.readPolyData(filename)
if not polyData or not polyData.GetNumberOfPoints():
app.showErrorMessage('Failed to read any data from file: %s' % filename, title='Reader error')
return
vis.showPolyData(polyData, os.path.basename(filename), parent='files')
def onOpenVrml(filename):
meshes, color = io.readVrml(filename)
folder = om.getOrCreateContainer(os.path.basename(filename), parentObj=om.getOrCreateContainer('files'))
for i, pair in enumerate(zip(meshes, color)):
mesh, color = pair
vis.showPolyData(mesh, 'mesh %d' % i, color=color, parent=folder)
def onOpenUrdf(filename):
model = roboturdf.openUrdf(filename, app.getCurrentRenderView())
if not model:
app.showErrorMessage('Failed to read urdf file: %s' % filename, title='Read urdf error')
def onOpenOtdf(filename):
model = otdfmodel.openOtdf(filename, app.getCurrentRenderView())
def onFileExportUrdf():
obj = om.getActiveObject()
if not obj or not isinstance(obj, otdfmodel.OtdfModelItem):
app.showErrorMessage('Please select an OTDF object', title='OTDF object not selected')
return
mainWindow = app.getMainWindow()
filename = QtGui.QFileDialog.getSaveFileName(mainWindow, "Save Data...", getDefaultDirectory(), 'URDF (*.urdf)', 'URDF (*.urdf)')
if not os.path.splitext(filename)[1]:
filename += '.urdf'
storeDefaultDirectory(filename)
urdfString = obj.parser.getUrdfFromOtdf()
urdfFile = open(filename, 'w')
urdfFile.write(urdfString)
urdfFile.close()
def onFileSaveData():
obj = om.getActiveObject()
if not obj:
app.showErrorMessage('Please select an object', title='No object selected')
return
if isinstance(obj, otdfmodel.OtdfModelItem):
mainWindow = app.getMainWindow()
filename = QtGui.QFileDialog.getSaveFileName(mainWindow, "Save Data...", getDefaultDirectory(), 'OTDF (*.otdf)', 'OTDF (*.otdf)')
if not os.path.splitext(filename)[1]:
filename += '.otdf'
storeDefaultDirectory(filename)
otdfString = obj.parser.getUpdatedOtdf()
otdfFile = open(filename, 'w')
otdfFile.write(otdfString)
otdfFile.close()
elif hasattr(obj, 'polyData'):
mainWindow = app.getMainWindow()
fileFilters = "PLY (*.ply);;STL (*.stl);;VTP (*.vtp);;VTK (*.vtk)";
defaultFilter = 'VTP (*.vtp)';
filename = QtGui.QFileDialog.getSaveFileName(mainWindow, "Save Data...", getDefaultDirectory(), fileFilters, defaultFilter)
if not filename:
return
if not os.path.splitext(filename)[1]:
filename += '.vtp'
polyData = io.writePolyData(obj.polyData, filename)
else:
app.showErrorMessage('Please select an object that contains geometry data or an OTDF object', title='Invalid object selected')
return
storeDefaultDirectory(filename)
def onOpenOnlineHelp():
QtGui.QDesktopServices.openUrl(QtCore.QUrl('https://github.com/RobotLocomotion/director/blob/master/docs/director/README.md'))
def init():
mainWindow = app.getMainWindow()
mainWindow.connect('fileOpen()', onFileOpenDialog)
mainWindow.connect('fileSaveData()', onFileSaveData)
mainWindow.connect('fileExportUrdf()', onFileExportUrdf)
mainWindow.connect('openOnlineHelp()', onOpenOnlineHelp)
|
Python
| 0
|
@@ -4242,60 +4242,31 @@
s://
-github.com/RobotLocomotion/director/blob/master/docs
+openhumanoids.github.io
/dir
@@ -4275,17 +4275,8 @@
tor/
-README.md
'))%0A
|
e6e68143e39dcc14833065b388f65879f2aa81f2
|
Update import export TestCase
|
src/tests/ggrc/converters/__init__.py
|
src/tests/ggrc/converters/__init__.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
from flask import json
from os.path import abspath
from os.path import dirname
from os.path import join
from tests import ggrc
THIS_ABS_PATH = abspath(dirname(__file__))
CSV_DIR = join(THIS_ABS_PATH, "test_csvs/")
class TestCase(ggrc.TestCase):
def import_file(self, filename, dry_run=False):
data = {"file": (open(join(CSV_DIR, filename)), filename)}
headers = {
"X-test-only": "true" if dry_run else "false",
"X-requested-by": "gGRC",
}
response = self.client.post("/_service/import_csv",
data=data, headers=headers)
self.assert200(response)
return json.loads(response.data)
|
Python
| 0
|
@@ -406,16 +406,52 @@
ile__))%0A
+%0A%0Aclass TestCase(ggrc.TestCase):%0A%0A
CSV_DIR
@@ -491,41 +491,8 @@
%22)%0A%0A
-%0Aclass TestCase(ggrc.TestCase):%0A%0A
de
@@ -568,16 +568,21 @@
en(join(
+self.
CSV_DIR,
|
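A hedged usage sketch of the import_file helper above; the subclass name and CSV filename are illustrative, not part of the original suite:

class TestPeopleImport(TestCase):  # hypothetical subclass
    def test_dry_run_makes_no_changes(self):
        # "X-test-only: true" asks the service to validate without persisting.
        result = self.import_file("people.csv", dry_run=True)
        self.assertTrue(result)  # parsed JSON returned by /_service/import_csv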
d00f63c4403cc1c6bc6fa3abeff4e81b30e4640b
|
put proper return type
|
examples/polymorph/polymorph2.py
|
examples/polymorph/polymorph2.py
|
from sqlalchemy import *
import sys
# this example illustrates a polymorphic load of two classes, where each class has a very
# different set of properties
db = create_engine('sqlite://', echo=True, echo_uow=False)
# a table to store companies
companies = Table('companies', db,
Column('company_id', Integer, primary_key=True),
Column('name', String(50))).create()
# we will define an inheritance relationship between the table "people" and "engineers",
# and a second inheritance relationship between the table "people" and "managers"
people = Table('people', db,
Column('person_id', Integer, primary_key=True),
Column('company_id', Integer, ForeignKey('companies.company_id')),
Column('name', String(50))).create()
engineers = Table('engineers', db,
Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
Column('status', String(30)),
Column('engineer_name', String(50)),
Column('primary_language', String(50)),
).create()
managers = Table('managers', db,
Column('person_id', Integer, ForeignKey('people.person_id'), primary_key=True),
Column('status', String(30)),
Column('manager_name', String(50))
).create()
# create our classes. The Engineer and Manager classes extend from Person.
class Person(object):
def __repr__(self):
return "Ordinary person %s" % self.name
class Engineer(Person):
def __repr__(self):
return "Engineer %s, status %s, engineer_name %s, primary_language %s" % (self.name, self.status, self.engineer_name, self.primary_language)
class Manager(Person):
def __repr__(self):
return "Manager %s, status %s, manager_name %s" % (self.name, self.status, self.manager_name)
class Company(object):
def __repr__(self):
return "Company %s" % self.name
# assign plain vanilla mappers
assign_mapper(Person, people)
assign_mapper(Engineer, engineers, inherits=Person.mapper)
assign_mapper(Manager, managers, inherits=Person.mapper)
# create a union that represents both types of joins. we have to use
# nulls to pad out the disparate columns.
person_join = select(
[
people,
managers.c.status,
managers.c.manager_name,
null().label('engineer_name'),
null().label('primary_language'),
column("'manager'").label('type')
],
people.c.person_id==managers.c.person_id).union_all(
select(
[
people,
engineers.c.status,
null().label('').label('manager_name'),
engineers.c.engineer_name,
engineers.c.primary_language,
column("'engineer'").label('type')
],
people.c.person_id==engineers.c.person_id)).alias('pjoin')
print [c for c in person_join.c]
# MapperExtension object.
class PersonLoader(MapperExtension):
def create_instance(self, mapper, row, imap, class_):
if row[person_join.c.type] =='engineer':
return Engineer()
elif row[person_join.c.type] =='manager':
return Manager()
else:
return Person()
def populate_instance(self, mapper, instance, row, identitykey, imap, isnew):
if row[person_join.c.type] =='engineer':
Engineer.mapper.populate_instance(instance, row, identitykey, imap, isnew, frommapper=mapper)
return False
elif row[person_join.c.type] =='manager':
Manager.mapper.populate_instance(instance, row, identitykey, imap, isnew, frommapper=mapper)
return False
else:
return True
people_mapper = mapper(Person, person_join, extension=PersonLoader())
assign_mapper(Company, companies, properties={
'employees': relation(people_mapper, lazy=False, private=True)
})
c = Company(name='company1')
c.employees.append(Manager(name='pointy haired boss', status='AAB', manager_name='manager1'))
c.employees.append(Engineer(name='dilbert', status='BBA', engineer_name='engineer1', primary_language='java'))
c.employees.append(Engineer(name='wally', status='CGG', engineer_name='engineer2', primary_language='python'))
c.employees.append(Manager(name='jsmith', status='ABA', manager_name='manager2'))
objectstore.commit()
objectstore.clear()
c = Company.get(1)
for e in c.employees:
print e, e._instance_key
print "\n"
dilbert = Engineer.mapper.get_by(name='dilbert')
dilbert.engineer_name = 'hes dibert!'
objectstore.commit()
objectstore.clear()
c = Company.get(1)
for e in c.employees:
print e, e._instance_key
objectstore.delete(c)
objectstore.commit()
managers.drop()
engineers.drop()
people.drop()
companies.drop()
|
Python
| 0.020196
|
@@ -3761,20 +3761,43 @@
return
-True
+sqlalchemy.mapping.EXT_PASS
%0A%0Apeople
|
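The example above predates SQLAlchemy's declarative inheritance support. For orientation only, a sketch of the same joined-table setup in modern (1.4+) declarative style, where a discriminator column replaces the hand-built union and MapperExtension:

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Person(Base):
    __tablename__ = 'people'
    person_id = Column(Integer, primary_key=True)
    name = Column(String(50))
    type = Column(String(20))  # discriminator column
    __mapper_args__ = {'polymorphic_on': type,
                       'polymorphic_identity': 'person'}

class Engineer(Person):
    __tablename__ = 'engineers'
    person_id = Column(Integer, ForeignKey('people.person_id'),
                       primary_key=True)
    primary_language = Column(String(50))
    __mapper_args__ = {'polymorphic_identity': 'engineer'}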
671e877bc14eb2034bc4ff735c56c2d3aeb2e43d
|
Update a test
|
examples/raw_parameter_script.py
|
examples/raw_parameter_script.py
|
""" The main purpose of this file is to demonstrate running SeleniumBase
scripts without the use of Pytest by calling the script directly
with Python or from a Python interactive interpreter. Based on
whether relative imports work or don't, the script can autodetect
how this file was run. With pure Python, it will initialize
all the variables that would've been automatically initialized
by the Pytest plugin. The setUp() and tearDown() methods are also
now called from the script itself.
One big advantage to running tests with Pytest is that most of this
is done for you automatically, with the option to update any of the
parameters through command line parsing. Pytest also provides you
with other plugins, such as ones for generating test reports,
handling multithreading, and parametrized tests. Depending on your
specific needs, you may need to call SeleniumBase commands without
using Pytest, and this example shows you how. """
try:
# Running with Pytest / (Finds test methods to run using autodiscovery)
# Example run command: "pytest raw_parameter_script.py"
from .my_first_test import MyTestClass # (relative imports work: ".~")
except (ImportError, ValueError):
# Running with pure Python OR from a Python interactive interpreter
# Example run command: "python raw_parameter_script.py"
from my_first_test import MyTestClass # (relative imports DON'T work)
b = MyTestClass("test_basic")
b.browser = "chrome"
b.headless = False
b.servername = "localhost"
b.port = 4444
b.data = None
b.environment = "test"
b.user_agent = None
b.database_env = "test"
b.log_path = "latest_logs/"
b.archive_logs = False
b.disable_csp = False
b.visual_baseline = False
b.save_screenshot_after_test = False
b.timeout_multiplier = None
b.pytest_html_report = None
b.report_on = False
b.with_db_reporting = False
b.with_s3_logging = False
b.js_checking_on = False
b.is_pytest = False
b.demo_mode = False
b.demo_sleep = 1
b.message_duration = 2
b.proxy_string = None
b.ad_block_on = False
b.highlights = None
b.check_js = False
b.cap_file = None
b.setUp()
try:
b.test_basic()
finally:
b.tearDown()
del b
|
Python
| 0.000001
|
@@ -1648,24 +1648,78 @@
gent = None%0A
+ b.extension_zip = None%0A b.extension_dir = None%0A
b.databa
@@ -1815,24 +1815,50 @@
csp = False%0A
+ b.enable_sync = False%0A
b.visual
@@ -2191,16 +2191,43 @@
ion = 2%0A
+ b.user_data_dir = None%0A
b.pr
|
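The long run of attribute assignments above is easy to table-drive. A sketch reusing MyTestClass from the script, under the assumption that every listed attribute is a plain instance attribute with no side effects on assignment:

defaults = {
    "browser": "chrome", "headless": False, "servername": "localhost",
    "port": 4444, "data": None, "environment": "test", "demo_mode": False,
}
b = MyTestClass("test_basic")
for name, value in defaults.items():
    setattr(b, name, value)  # equivalent to b.browser = "chrome", etc.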
870c69f6d8c0f0b9dbe60a053e839ae25283f8ba
|
map each step
|
pytest_bdd/cucumber_json.py
|
pytest_bdd/cucumber_json.py
|
"""Cucumber json output formatter."""
import os
import time
import json
import py
def pytest_addoption(parser):
group = parser.getgroup('pytest-bdd')
group.addoption(
'--cucumberjson', '--cucumber-json', action='store',
dest='cucumber_json_path', metavar='path', default=None,
help='create cucumber json style report file at given path.')
def pytest_configure(config):
cucumber_json_path = config.option.cucumber_json_path
# prevent opening json log on slave nodes (xdist)
if cucumber_json_path and not hasattr(config, 'slaveinput'):
config._bddcucumberjson = LogBDDCucumberJSON(cucumber_json_path)
config.pluginmanager.register(config._bddcucumberjson)
def pytest_unconfigure(config):
xml = getattr(config, '_bddcucumberjson', None)
if xml:
del config._bddcucumberjson
config.pluginmanager.unregister(xml)
def mangle_testnames(names):
names = [x.replace('.py', '') for x in names if x != '()']
names[0] = names[0].replace('/', '.')
return names
class LogBDDCucumberJSON(object):
"""Log plugin for cucumber like json output."""
def __init__(self, logfile):
logfile = os.path.expanduser(os.path.expandvars(logfile))
self.logfile = os.path.normpath(os.path.abspath(logfile))
self.tests = []
self.features = []
# def _write_captured_output(self, report):
# for capname in ('out', 'err'):
# allcontent = ''
# for name, content in report.get_sections('Captured std%s' % capname):
# allcontent += content
# if allcontent:
# tag = getattr(Junit, 'system-'+capname)
# self.append(tag(bin_xml_escape(allcontent)))
def append(self, obj):
self.tests[-1].append(obj)
def _get_result(self, report):
"""Get scenario test run result."""
if report.passed:
if report.when == 'call': # ignore setup/teardown
return {'status': 'passed'}
elif report.failed:
return {
'status': 'failed',
'error_message': report.longrepr}
elif report.skipped:
return {'status': 'skipped'}
def pytest_runtest_logreport(self, report):
names = mangle_testnames(report.nodeid.split('::'))
classnames = names[:-1]
test_id = '.'.join(classnames)
try:
scenario = report.item.obj.__scenario__
except AttributeError:
# skip reporting for non-bdd tests
return
result = self._get_result(report) or {}
self.tests.append(
{
"keyword": "Scenario",
"id": test_id,
"name": scenario.name,
"line": scenario.line_number,
"description": '',
"tags": [],
"type": "scenario",
"time": getattr(report, 'duration', 0),
"steps": [
{
"keyword": "Given ",
"name": "a failing step",
"line": 10,
"match": {
"location": "features/step_definitions/steps.rb:5"
},
"result": result.get("status", None)
}
]
}
)
def pytest_sessionstart(self):
self.suite_start_time = time.time()
def pytest_sessionfinish(self):
if py.std.sys.version_info[0] < 3:
logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8')
else:
logfile = open(self.logfile, 'w', encoding='utf-8')
logfile.write(json.dumps(self.tests))
logfile.close()
def pytest_terminal_summary(self, terminalreporter):
terminalreporter.write_sep('-', 'generated json file: %s' % (self.logfile))
|
Python
| 0.999938
|
@@ -2629,26 +2629,26 @@
-self.tests.append(
+def stepMap(step):
%0A
@@ -2648,32 +2648,39 @@
p):%0A
+return
%7B%0A
@@ -2673,32 +2673,40 @@
+
%22keyword%22: %22Scen
@@ -2704,51 +2704,28 @@
d%22:
-%22Scenario%22,%0A %22id%22: test_id,%0A
+step.type,%0A
@@ -2727,33 +2727,32 @@
-
%22name%22: scenario
@@ -2744,24 +2744,21 @@
name%22: s
-cenario.
+tep._
name,%0A
@@ -2763,32 +2763,40 @@
+
%22line%22: scenario
@@ -2788,23 +2788,19 @@
line%22: s
-cenario
+tep
.line_nu
@@ -2825,174 +2825,110 @@
-%22description%22: '',%0A %22tags%22: %5B%5D,%0A %22type%22: %22scenario%22,%0A %22time%22: getattr(report, 'duration', 0),%0A %22steps%22: %5B%0A
+ %22match%22: %7B%0A %22location%22: %22features/step_definitions/steps.rb:5%22%0A
@@ -2943,17 +2943,18 @@
-%7B
+%7D,
%0A
@@ -2975,77 +2975,139 @@
%22
-keyword%22: %22Given %22,%0A %22name%22: %22a failing step%22,
+result%22: result.get(%22status%22, None)%0A %7D%0A%0A steps = map(stepMap, scenario.steps)%0A%0A self.tests.append(
%0A
@@ -3106,32 +3106,35 @@
end(%0A
+ %7B%0A
%22li
@@ -3134,169 +3134,200 @@
-%22line%22: 10,%0A %22match%22: %7B%0A %22location%22: %22features/step_definitions/steps.rb:5%22%0A
+ %22keyword%22: %22Scenario%22,%0A %22id%22: test_id,%0A %22name%22: scenario.name,%0A %22line%22: scenario.line_number,%0A %22description%22: '',%0A
-%7D,%0A
@@ -3314,32 +3314,45 @@
%0A
+ %22tags%22: %5B%5D,%0A
@@ -3356,65 +3356,82 @@
%22
-result%22: result.get(%22status%22, None)%0A %7D
+type%22: %22scenario%22,%0A %22time%22: getattr(report, 'duration', 0),
%0A
@@ -3443,17 +3443,30 @@
-%5D
+%22steps%22: steps
%0A
|
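Decoded from the %-escaped diff above, the per-step mapping this commit introduces looks roughly like this. The step attribute names (type, _name, line_number) come from the diff and are assumptions about pytest-bdd internals; in the commit the function closes over result from pytest_runtest_logreport, here it is passed explicitly:

def step_to_dict(step, result):
    # Shape of each entry in the cucumber-json "steps" list.
    return {
        "keyword": step.type,
        "name": step._name,
        "line": step.line_number,
        "match": {"location": "features/step_definitions/steps.rb:5"},
        "result": result.get("status", None),
    }

steps = [step_to_dict(s, result) for s in scenario.steps]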
7e98bf41b605c08c0cd04c9edf4479b9ac3961f8
|
fix import wordnet
|
pythainlp/corpus/wordnet.py
|
pythainlp/corpus/wordnet.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals,print_function,absolute_import
import nltk
try:
nltk.data.find("corpora/omw")
except:
nltk.download('wordnet')
nltk.download('omw')
try:
from nltk.corpus import wordnet
except:
nltk.download('wordnet')
'''
Old API
'''
import sqlite3
import pythainlp
import os
from collections import namedtuple
templates_dir = os.path.join(os.path.dirname(pythainlp.__file__), 'corpus')
template_file = os.path.join(templates_dir, 'tha-wn.db')
conn = sqlite3.connect(template_file)
Word = namedtuple('Word', 'synsetid li')
Synset = namedtuple('Synset', 'synset li')
def getWords(wordid):
"""เป็นคำสั่ง ใช้รับคำจาก ID รับค่า str ส่งออกเป็น tuple ('Word', 'synsetid li')"""
print("แจ้งเตือน !!! API ตัวนี้จะยกเลิกการใช้งานใน PyThaiNLP 1.5")
words = []
cur = conn.execute("select * from word_synset where synsetid=?", (wordid,))
row = cur.fetchone()
return Word(*cur.fetchone())
def getSynset(synset):
"""เป็นคำสั่ง ใช้รับ Synset รับค่า str ส่งออกเป็น tuple ('Synset', 'synset li')"""
print("แจ้งเตือน !!! API ตัวนี้จะยกเลิกการใช้งานใน PyThaiNLP 1.5")
cursor=conn.execute("select * from word_synset where li=?",(synset,))
row=cursor.fetchone()
if row:
return Synset(*row)
else:
return None
'''
New API, in use from PyThaiNLP 1.4 onwards
'''
def synsets(word, pos=None, lang="tha"):
return wordnet.synsets(lemma=word,pos=pos,lang=lang)
def synset(name_synsets):
return wordnet.synset(name_synsets)
def all_lemma_names(pos=None, lang="tha"):
return wordnet.all_lemma_names(pos=pos, lang=lang)
def all_synsets(pos=None):
return wordnet.all_synsets(pos=pos)
def langs():
return wordnet.langs()
def lemmas(word,pos=None,lang="tha"):
return wordnet.lemmas(word,pos=pos,lang=lang)
def lemma(name_synsets):
return wordnet.lemma(name_synsets)
def lemma_from_key(key):
return wordnet.lemma_from_key(key)
def path_similarity(synsets1,synsets2):
return wordnet.path_similarity(synsets1,synsets2)
def lch_similarity(synsets1,synsets2):
return wordnet.lch_similarity(synsets1,synsets2)
def wup_similarity(synsets1,synsets2):
return wordnet.wup_similarity(synsets1,synsets2)
def morphy(form, pos=None):
    return wordnet.morphy(form, pos=pos)
def custom_lemmas(tab_file, lang):
return wordnet.custom_lemmas(tab_file, lang)
|
Python
| 0.000005
|
@@ -136,16 +136,51 @@
a/omw%22)%0A
+%09nltk.data.find(%22corpora/wordnet%22)%0A
except:%0A
@@ -227,22 +227,16 @@
('omw')%0A
-try:%0A%09
from nlt
@@ -262,43 +262,8 @@
dnet
- %0Aexcept:%0A%09nltk.download('wordnet')
%0A'''
|
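The download-on-demand pattern above generalizes into a small helper. A sketch using only documented NLTK behavior (nltk.data.find raises LookupError for a missing resource); ensure_corpus is a new name, not part of pythainlp:

import nltk

def ensure_corpus(resource, package=None):
    """Download an NLTK corpus only if it is not already installed."""
    try:
        nltk.data.find("corpora/%s" % resource)
    except LookupError:
        nltk.download(package or resource)

ensure_corpus("wordnet")
ensure_corpus("omw")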
9a22781888c88b66d4438094159380f8e6d5075e
|
Use absolute times for S3 presignature, not deltas
|
euca2ools/commands/s3/__init__.py
|
euca2ools/commands/s3/__init__.py
|
# Copyright 2013-2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import string
import sys
import urlparse
from requestbuilder import Arg
import requestbuilder.auth
import requestbuilder.request
import requestbuilder.service
import requests
from euca2ools.commands import Euca2ools
from euca2ools.exceptions import AWSError
from euca2ools.util import substitute_euca_region
class S3(requestbuilder.service.BaseService):
NAME = 's3'
DESCRIPTION = 'Object storage service'
REGION_ENVVAR = 'AWS_DEFAULT_REGION'
URL_ENVVAR = 'S3_URL'
ARGS = [Arg('-U', '--url', metavar='URL',
help='storage service endpoint URL')]
def configure(self):
substitute_euca_region(self)
requestbuilder.service.BaseService.configure(self)
def handle_http_error(self, response):
raise AWSError(response)
def build_presigned_url(self, method='GET', path=None, params=None,
auth=None, auth_args=None):
if path:
# We can't simply use urljoin because a path might start with '/'
# like it could for keys that start with that character.
if self.endpoint.endswith('/'):
url = self.endpoint + path
else:
url = self.endpoint + '/' + path
else:
url = self.endpoint
request = requests.Request(method=method, url=url, params=params)
if auth is not None:
auth.apply_to_request_params(request, self, **(auth_args or {}))
p_request = request.prepare()
return p_request.url
class S3Request(requestbuilder.request.BaseRequest):
SUITE = Euca2ools
SERVICE_CLASS = S3
AUTH_CLASS = requestbuilder.auth.S3RestAuth
def __init__(self, **kwargs):
requestbuilder.request.BaseRequest.__init__(self, **kwargs)
self.redirects_left = 3
def get_presigned_url(self, validity_timedelta):
self.preprocess()
return self.service.build_presigned_url(
method=self.method, path=self.path, params=self.params,
auth=self.auth,
auth_args={'validity_timedelta': validity_timedelta})
def handle_server_error(self, err):
if 300 <= err.status_code < 400 and 'Endpoint' in err.elements:
# When S3 does an inter-region redirect it doesn't supply the new
# location in the usual header, but rather supplies a new endpoint
# in the error's XML. This forces us to handle it manually.
self.log.debug('-- response content --\n',
extra={'append': True})
self.log.debug(self.response.text, extra={'append': True})
self.log.debug('-- end of response content --')
self.log.info('result: redirect')
if self.redirects_left > 0:
self.redirects_left -= 1
parsed = list(urlparse.urlparse(self.service.endpoint))
parsed[1] = err.elements['Endpoint']
new_url = urlparse.urlunparse(parsed)
self.log.debug('redirecting to %s (%i redirects remaining)',
new_url, self.redirects_left)
self.service.endpoint = new_url
# TODO: change region_name if possible
if isinstance(self.body, file):
self.log.debug('re-seeking body to beginning of file')
# pylint: disable=E1101
# noinspection PyUnresolvedReferences
self.body.seek(0)
# pylint: enable=E1101
return self.send()
else:
self.log.warn('too many redirects; giving up')
raise
else:
return requestbuilder.request.BaseRequest.handle_server_error(
self, err)
def validate_generic_bucket_name(bucket):
if len(bucket) == 0:
raise ValueError('name is too short')
if len(bucket) > 255:
raise ValueError('name is too long')
for char in bucket:
if char not in string.ascii_letters + string.digits + '.-_':
raise ValueError('invalid character \'{0}\''.format(char))
def validate_dns_bucket_name(bucket):
if len(bucket) < 3:
raise ValueError('name is too short')
if len(bucket) > 63:
raise ValueError('name is too long')
if bucket.startswith('.'):
raise ValueError('name may not start with \'.\'')
if bucket.endswith('.'):
raise ValueError('name may not end with \'.\'')
labels = bucket.split('.')
for label in labels:
if len(label) == 0:
raise ValueError('name may not contain \'..\'')
for char in label:
if char not in string.ascii_lowercase + string.digits + '-':
raise ValueError('invalid character \'{0}\''.format(char))
if label[0] not in string.ascii_lowercase + string.digits:
raise ValueError(('character \'{0}\' may not begin part of a '
'bucket name').format(label[0]))
if label[-1] not in string.ascii_lowercase + string.digits:
raise ValueError(('character \'{0}\' may not end part of a '
'bucket name').format(label[-1]))
if len(labels) == 4:
try:
[int(chunk) for chunk in bucket.split('.')]
except ValueError:
# This is actually the case we want
pass
else:
raise ValueError('name must not be formatted like an IP address')
|
Python
| 0.000008
|
@@ -3190,34 +3190,35 @@
l(self,
-validity_timedelta
+expiration_datetime
):%0A
@@ -3411,47 +3411,49 @@
s=%7B'
-validity_timedelta': validity_timedelta
+expiration_datetime': expiration_datetime
%7D)%0A%0A
|
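build_presigned_url above follows a reusable pattern: build a requests.Request, let the auth object add signature query parameters, then prepare() it to obtain the final URL. A minimal sketch with a toy signer standing in for the real S3 auth class; the endpoint and parameter values are made up:

import requests

def presign(url, params, signer):
    request = requests.Request(method='GET', url=url, params=params)
    signer(request)                # e.g. adds Signature/Expires params
    return request.prepare().url  # fully encoded URL, query string included

def toy_signer(request):  # stand-in for auth.apply_to_request_params
    request.params['Expires'] = '1700000000'
    request.params['Signature'] = 'deadbeef'

print(presign('http://s3.example.com/bucket/key', {}, toy_signer))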
02a3fb6e1d7bde7b9f9d20089e8dd11040388e80
|
remove testing code
|
python/app/extract_stats.py
|
python/app/extract_stats.py
|
# Copyright (C) 2014 Matthieu Caneill <matthieu.caneill@gmail.com>
#
# This file is part of Debsources.
#
# Debsources is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def extract_stats(filter_suites=None, filename="cache/stats.data"):
"""
Extracts information from the collected stats.
    If filter_suites is None, all the information is extracted.
    Otherwise suites must be an array of suite names (can contain "total").
e.g. extract_stats(filter_suites=["total", "debian_wheezy"])
"""
languages = set()
suites = set()
res = dict()
with open(filename) as f:
for line in f:
try:
(key, value) = line.split()
except:
continue
try:
value = int(value)
except:
pass
# we extract some information (suites, languages)
splits = key.split(".")
if splits[0][:7] == "debian_":
# we extract suites names
suites.add(splits[0])
if len(splits) == 3 and splits[1] == "sloccount":
# we extract language names
languages.add(splits[2])
# if this key/value is in the required suites, we add it
if filter_suites is None or splits[0] in filter_suites:
res[key] = value
# we use lists instead of sets, because they are JSON-serializable
return dict(results=res, suites=list(suites), languages=list(languages))
if __name__ == "__main__":
from pprint import pprint
pprint(extract_stats(filename="stats.data",
filter_suites=["debian_wheezy", "total"]))
|
Python
| 0.000021
|
@@ -2133,197 +2133,4 @@
s))%0A
-%0A%0Aif __name__ == %22__main__%22:%0A from pprint import pprint%0A pprint(extract_stats(filename=%22stats.data%22,%0A filter_suites=%5B%22debian_wheezy%22, %22total%22%5D))%0A%0A %0A
|
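The __main__ block this commit deletes doubles as a usage example. A sketch of calling the parser, with an illustrative input line shown in the comment (the numbers are invented):

# One line of stats.data per metric, key and value whitespace-separated, e.g.
#   debian_wheezy.sloccount.python 200000
stats = extract_stats(filename="stats.data",
                      filter_suites=["debian_wheezy", "total"])
print(stats["suites"])     # suite names seen in the file
print(stats["languages"])  # sloccount language names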
e2fb56d315a08de736d6fcf5d3cf9cb19b5cba75
|
check Available; add --no-clean option
|
cclint.py
|
cclint.py
|
#!/usr/bin/env python
"""
Detects problems in data exported from CoreCommerce.
"""
import ConfigParser
import cctools
import optparse
import sys
def check_string(type_name, item_name, item, key, min_len):
"""Print message if item[key] is empty or shorter than min_len."""
value_len = len(item[key])
if value_len == 0:
print "%s '%s': Value '%s' not defined" % (
type_name,
item_name,
key
)
elif value_len < min_len:
print "%s '%s': Value '%s' == '%s' is too short" % (
type_name,
item_name,
key,
item[key]
)
def check_value_in_set(type_name, item_name, item, key, valid_values):
"""Print message if item[key] not in valid_values."""
if not item[key] in valid_values:
print "%s '%s': Invalid '%s' == '%s' not in %s" % (
type_name,
item_name,
key,
item[key],
valid_values
)
def check_products(products):
"""Check products list for problems."""
for product in products:
display_name = "%s %s" % (product["SKU"], product["Product Name"])
check_string(
"Product",
display_name,
product,
"Teaser",
10
)
check_value_in_set(
"Product",
display_name,
product,
"Discontinued Item",
("Y", "N")
)
def main():
"""main"""
option_parser = optparse.OptionParser(
usage="usage: %prog [options]\n" +
" Detects problems in data exported from CoreCommerce."
)
option_parser.add_option(
"--config",
action="store",
metavar="FILE",
default="cctools.cfg",
help="configuration filename (default=%default)"
)
option_parser.add_option(
"--verbose",
action="store_true",
default=False,
help="display progress messages"
)
# Parse command line arguments.
(options, args) = option_parser.parse_args()
if len(args) != 0:
option_parser.error("invalid argument")
# Read config file.
config = ConfigParser.RawConfigParser()
config.readfp(open(options.config))
# Create a connection to CoreCommerce.
cc_browser = cctools.CCBrowser(
config.get("website", "host"),
config.get("website", "site"),
config.get("website", "username"),
config.get("website", "password"),
verbose=options.verbose,
clean=False # don't cleanup data
)
# Check products list.
products = cc_browser.get_products()
check_products(products)
if options.verbose:
sys.stderr.write("Checks complete\n")
return 0
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -1422,25 +1422,17 @@
%22
-Discontinued Item
+Available
%22,%0A
@@ -1460,24 +1460,391 @@
%0A )%0A%0A
+ check_value_in_set(%0A %22Product%22,%0A display_name,%0A product,%0A %22Discontinued Item%22,%0A (%22Y%22, %22N%22)%0A )%0A%0A if product%5B%22Available%22%5D == %22Y%22 and product%5B%22Discontinued Item%22%5D == %22Y%22:%0A print %22Product '%25s': Is Available and is a Discontinued Item%22 %25 (%0A display_name%0A )%0A%0A
%0Adef main():
@@ -2230,32 +2230,213 @@
ser.add_option(%0A
+ %22--no-clean%22,%0A action=%22store_false%22,%0A dest=%22clean%22,%0A default=True,%0A help=%22do not clean data before checking%22%0A )%0A option_parser.add_option(%0A
%22--verbo
@@ -3100,35 +3100,21 @@
ean=
-False # don't cleanup data
+options.clean
%0A
|
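Decoded from the diff, the cross-field check this commit adds to check_products is essentially:

if product["Available"] == "Y" and product["Discontinued Item"] == "Y":
    print "Product '%s': Is Available and is a Discontinued Item" % (
        display_name
    )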
510a15371cdbb635bc4691eae6fad8070b582814
|
Support for Pandoc-style code blocks (http://johnmacfarlane.net/pandoc/demo/example9/pandocs-markdown.html) and code span elements (http://daringfireball.net/projects/markdown/syntax#code), which should not be rendered to LaTeX
|
MarkdownPP/Modules/LaTeXRender.py
|
MarkdownPP/Modules/LaTeXRender.py
|
# Copyright (C) 2012 Alex Nisnevich
# Licensed under the MIT license
import re
import httplib, urllib
from MarkdownPP.Module import Module
from MarkdownPP.Transform import Transform
singlelinere = re.compile("\$(\$?)..*\$(\$?)") # $...$ (or $$...$$)
startorendre = re.compile("^\$(\$?)|^\S.*\$(\$?)$") # $... or ...$ (or $$... or ...$$)
codere = re.compile("^( |\t)")
fencedcodere = re.compile("^```\w*$")
class LaTeXRender(Module):
"""
Module for rendering LaTeX enclosed between $ dollar signs $.
Rendering is performed using QuickLaTeX via ProblemSetMarmoset.
"""
def transform(self, data):
transforms = []
linenum = 0
in_block = False
current_block = ""
in_fenced_code_block = False
for line in data:
# Handling fenced code blocks (for Github-flavored markdown)
if fencedcodere.search(line):
if in_fenced_code_block:
in_fenced_code_block = False
else:
in_fenced_code_block = True
# Are we in a code block?
if not in_fenced_code_block and not codere.search(line):
# Is this line part of an existing LaTeX block?
if in_block:
transforms.append(Transform(linenum, "drop"))
current_block += "\n" + line
match = singlelinere.search(line)
if match:
# Single LaTeX line
tex = match.group(0)
before_tex = line[0:line.find(tex)]
after_tex = line[(line.find(tex) + len(tex)) : len(line)]
transforms.append(Transform(linenum, "swap", before_tex + self.render(tex) + after_tex))
else:
match = startorendre.search(line)
if match:
# Starting or ending a multi-line LaTeX block
if in_block:
# Ending a LaTeX block
transforms.pop() # undo last drop
transforms.append(Transform(linenum, "swap", self.render(current_block)))
else:
# Starting a LaTeX block
current_block = line
transforms.append(Transform(linenum, "drop"))
in_block = not in_block
linenum += 1
return transforms
def render(self, formula):
# Prepare the formula
formula = formula.replace("$", "")
encoded_formula = formula.replace("%","[comment]").replace("+","%2B")
display_formula = formula.replace("\n","")
print 'Rendering: %s ...' % display_formula
# Prepare POST request to QuickLaTeX via ProblemSetMarmoset (for added processing)
params = urllib.urlencode({'engine': 'quicklatex', 'input': encoded_formula})
headers = {"Content-type": "application/x-www-form-urlencoded",
"Accept": "text/plain"}
conn = httplib.HTTPConnection("www.problemsetmarmoset.com")
# Make the request
conn.request("POST", "/latex/render.php", params, headers)
response = conn.getresponse()
img_url = response.read()
# Display as Markdown image
		rendered_tex = '![{0}]({1})\n'.format(display_formula, img_url)
return rendered_tex
|
Python
| 0
|
@@ -371,42 +371,192 @@
)%22)%0A
-fencedcodere = re.compile(%22%5E%60%60%60%5Cw*
+spancodere = re.compile(r'(%60%5B%5E%60%5D+%5C%60)') # code between backticks%0A%0A# Support for Pandoc style code blocks with attributes%0Afencedcodere = re.compile(%22%5E((%3E *)?%60%60%60%5Cw*%7C(%3E *)?~~~~*(%5Cs*%7B.*%7D)?)
$%22)%0A
@@ -1381,24 +1381,198 @@
%09%09if match:%0A
+%09%09%09%09%09code_pos = %5B%5D%0A%09%09%09%09%09for m in spancodere.finditer(line):%0A%09%09%09%09%09%09code_pos += range(*m.span())%0A%0A%09%09%09%09%09if not (match.start(0) in code_pos or match.end(0) in code_pos):%0A%09%09%09%09%09%09%0A%09
%09%09%09%09%09# Singl
@@ -1580,24 +1580,25 @@
LaTeX line%0A
+%09
%09%09%09%09%09tex = m
@@ -1611,16 +1611,17 @@
roup(0)%0A
+%09
%09%09%09%09%09bef
@@ -1658,16 +1658,17 @@
)%5D%0A%09%09%09%09%09
+%09
after_te
@@ -1717,16 +1717,17 @@
(line)%5D%0A
+%09
%09%09%09%09%09tra
|
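The diff's span-code guard — ignore a $...$ match whose boundaries fall inside backtick-delimited code — can be exercised on its own. A sketch reusing the module's two regexes; only the helper name latex_outside_code is new:

import re

spancodere = re.compile(r'(`[^`]+`)')
singlelinere = re.compile(r"\$(\$?)..*\$(\$?)")

def latex_outside_code(line):
    match = singlelinere.search(line)
    if not match:
        return None
    code_pos = []
    for m in spancodere.finditer(line):
        code_pos += range(*m.span())
    if match.start(0) in code_pos or match.end(0) in code_pos:
        return None  # the $...$ sits inside a code span; leave it alone
    return match.group(0)

print(latex_outside_code("Euler: $e^{i\\pi}+1=0$"))   # the LaTeX span
print(latex_outside_code("code: `price = $5 + $6`"))  # None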
adfb7518b47c36396c14a513f547fd5055a29883
|
add bootstrap3
|
MobileFoodOrderServer/settings.py
|
MobileFoodOrderServer/settings.py
|
"""
Django settings for MobileFoodOrderServer project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$6gqjkg1!57(@o6bs#tki8jt2@0p4z_ed@spnfrb@bh8lxqw$n'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'order',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'MobileFoodOrderServer.urls'
WSGI_APPLICATION = 'MobileFoodOrderServer.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
DEFAULT_CHARSET = 'utf-8'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR), 'templates']
|
Python
| 0.000001
|
@@ -1012,24 +1012,42 @@
aticfiles',%0A
+ 'bootstrap3',%0A
'order',
|
4aa36475ba9346b760d0e8df3b34429dfd9bb008
|
Add options -s and -m (part I)
|
python/scripts/lpod-show.py
|
python/scripts/lpod-show.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2009 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Authors: Hervé Cauwelier <herve@itaapy.com>
# David Versmisse <david.versmisse@itaapy.com>
#
# This file is part of Lpod (see: http://lpod-project.org).
# Lpod is free software; you can redistribute it and/or modify it under
# the terms of either:
#
# a) the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option)
# any later version.
# Lpod is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Lpod. If not, see <http://www.gnu.org/licenses/>.
#
# b) the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Import from the standard library
from optparse import OptionParser
from sys import exit, stdout, stdin
# Import from lpod
from lpod import __version__
from lpod.document import odf_get_document
from lpod.vfs import vfs
from lpod.table import odf_table
def get_target_directory(dirname, container_url):
# Check the name and create the directory
if vfs.exists(dirname):
        message = 'The directory "%s" exists, can I overwrite it? [y/N]'
stdout.write(message % dirname)
stdout.flush()
line = stdin.readline()
line = line.strip().lower()
if line == 'y':
vfs.remove(dirname)
else:
exit(0)
vfs.make_folder(dirname)
return vfs.open(dirname)
def clean_filename(filename):
filename = filename.encode('utf-8')
allowed_characters = set([u'.', u'-', u'_', u'@'])
result = []
for c in filename:
if c not in allowed_characters and not c.isalnum():
result.append('_')
else:
result.append(c)
return ''.join(result)
def text_to_stdout(document):
text = document.get_formated_text()
stdout.write(text.encode(stdout.encoding))
stdout.flush()
def text_to_text(document, target):
text_file = target.open('content.txt', 'w')
text = document.get_formated_text()
text_file.write(text.encode('utf-8'))
text_file.close()
def spreadsheet_to_stdout(document):
body = document.get_body()
for table_element in body.get_table_list():
table = odf_table(odf_element=table_element)
table.export_to_csv(stdout, encoding=stdout.encoding)
stdout.write("\n")
stdout.flush()
def spreadsheet_to_csv(document, target):
body = document.get_body()
for table_element in body.get_table_list():
table = odf_table(odf_element=table_element)
name = table.get_tagname()
filename = clean_filename(name) + '.csv'
csv_file = target.open(filename, 'w')
table.export_to_csv(csv_file)
csv_file.close()
if __name__ == '__main__':
# Options initialisation
usage = "%prog <file>"
description = ("Dump text from an OpenDocument file to the standard "
"output")
parser = OptionParser(usage, version=__version__,
description=description)
# --dirname
parser.add_option('-d', '--dirname', action='store', type='string',
dest='dirname', metavar='DIRNAME',
help="Dump output in files in the given directory.")
# Parse !
opts, args = parser.parse_args()
# Container
if len(args) != 1:
parser.print_help()
exit(1)
container_url = args[0]
# Open it!
document = odf_get_document(container_url)
doc_type = document.get_type()
if opts.dirname:
target = get_target_directory(opts.dirname, container_url)
# text
if doc_type in ('text', 'text-template', 'presentation',
'presentation-template'):
if opts.dirname:
text_to_text(document, target)
else:
text_to_stdout(document)
# spreadsheet
elif doc_type in ('spreadsheet', 'spreadsheet-template'):
if opts.dirname:
spreadsheet_to_csv(document, target)
else:
spreadsheet_to_stdout(document)
else:
print "The OpenDocument format", doc_type, "is not supported yet."
exit(1)
|
Python
| 0
|
@@ -3361,19 +3361,48 @@
%22output
+, optionally styles and meta.
%22)%0A
-
pars
@@ -3637,17 +3637,17 @@
help=%22
-D
+d
ump outp
@@ -3680,24 +3680,420 @@
irectory.%22)%0A
+ # --meta%0A parser.add_option('-m', '--meta', dest='meta', action='store_true',%0A default=False,%0A help='dump metadata (if -d DIR add DIR/meta.txt)')%0A # --styles%0A parser.add_option('-s', '--styles', dest='styles', action='store_true',%0A default=False,%0A help='dump styles (if -d DIR add DIR/styles.txt)')%0A
# Parse
|
47316dcb10d8eaa8be2ee7d9a0b5fcb2d4b562a6
|
Fix Nonetype due to un-materialized runtime (#2389)
|
python/taichi/aot/module.py
|
python/taichi/aot/module.py
|
from taichi.lang import impl, kernel_arguments, kernel_impl
class Module:
"""An AOT module to save and load Taichi kernels.
This module serializes the Taichi kernels for a specific arch. The
serialized module can later be loaded to run on that backend, without the
Python environment.
Example::
m = ti.aot.Module(ti.metal)
m.add_kernel(foo)
m.add_kernel(bar)
m.save('/path/to/module')
# Now the module file '/path/to/module' contains the Metal kernels
# for running ``foo`` and ``bar``.
"""
def __init__(self, arch):
self._arch = arch
self._kernels = []
self._aot_builder = impl.get_runtime().prog.make_aot_module_builder(
arch)
def add_kernel(self, kernel_fn, name=None, template_args=None):
"""Add a taichi kernel to the AOT module.
Args:
kernel_fn (Function): the function decorated by taichi `kernel`.
name (str): Name to identify this kernel in the module. If not
provided, uses the built-in ``__name__`` attribute of `kernel_fn`.
template_args (dict[str: Any]): Used to instantiate the template
            parameters in the passed-in function; this is because the template
parameters must be known at compile time.
Example:
Note that if `kernel_fn` contains at least one template parameter, it
is required that users provide an explicit `name`. In addition, all
the values of these template parameters must be instantiated via
`template_args`.
Usage::
@ti.kernel
def bar(a: ti.template()):
x = a
# do something with `x`
m = ti.aot.Module(arch)
m.add_kernel(bar, name='bar_a=True', template_args={'a': True})
Later on, the ``bar`` kernel instantiated with ``a = True`` can be
found in the module via ``"bar_a=True"``.
TODO:
* Support external array
"""
name = name or kernel_fn.__name__
kernel = kernel_fn._primal
assert isinstance(kernel, kernel_impl.Kernel)
injected_args = []
for i in range(len(kernel.argument_annotations)):
anno = kernel.argument_annotations[i]
if isinstance(anno, kernel_arguments.ArgExtArray):
raise RuntimeError('Arg type `ext_arr` not supported yet')
if isinstance(anno, kernel_arguments.Template):
value = template_args[kernel.argument_names[i]]
injected_args.append(value)
else:
# For primitive types, we can just inject a dummy value.
injected_args.append(0)
kernel.ensure_compiled(*injected_args)
self._aot_builder.add(name, kernel.kernel_cpp)
# kernel AOT
self._kernels.append(kernel)
def save(self, filepath):
self._aot_builder.dump(filepath)
|
Python
| 0
|
@@ -641,24 +641,65 @@
ernels = %5B%5D%0A
+ impl.get_runtime().materialize()%0A
self
|
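Decoded, the one-line fix materializes the runtime before requesting the AOT module builder, so prog is no longer None:

    def __init__(self, arch):
        self._arch = arch
        self._kernels = []
        impl.get_runtime().materialize()  # ensure prog exists before use
        self._aot_builder = impl.get_runtime().prog.make_aot_module_builder(
            arch)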
d06fa1c8bfa5c782a5c28403caf44736620a3706
|
add get_instruction method modified: qaamus/test_angka_parser.py
|
qaamus/test_angka_parser.py
|
qaamus/test_angka_parser.py
|
import unittest
from bs4 import BeautifulSoup
from ind_ara_parser import BaseParser
class AngkaParser(BaseParser):
pass
class AngkaParserTestCase(unittest.TestCase):
with open("../html/angka123", "rb") as f:
f = f.read()
soup = BeautifulSoup(f)
def setUp(self):
self.angka_parser = AngkaParser(self.soup)
def test_get_angka(self):
result = self.angka_parser._get_query()
expected = '123'
self.assertEqual(result, expected)
def test_get_ara(self):
result = self.angka_parser._get_ara()
expected = 'المئة و الثالث و العشرون'
self.assertEqual(result, expected)
def test_get_footer(self):
result = self.angka_parser._get_footer()
expected = ''
self.assertEqual(result, expected)
def test_get_arti_master(self):
result = self.angka_parser.get_arti_master()
expected = {"ind": '123',
"ara": 'المئة و الثالث و العشرون',
"footer": ""}
self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main()
|
Python
| 0.000012
|
@@ -118,12 +118,437 @@
-pass
+%22%22%22Handle terjemah angka page.%22%22%22%0A def get_instruction(self):%0A %22%22%22Return the instruction text.%0A%0A text is returning 'Terjemah angka adalah menterjemahkan angka%0A kedalam bahasa arab, caranya cukup mudah ketik angka%0A (tanpa titik dan koma) yang akan di terjemahkan'.%22%22%22%0A text = self.soup.select(%22.page-header %3E h1%22)%5B0%5D.next_sibling.strip()%0A return text.split(%22,%22)%5B1%5D.strip().capitalize()
%0A%0A%0Ac
@@ -1474,16 +1474,275 @@
ected)%0A%0A
+ def test_get_page_header(self):%0A result = self.angka_parser.get_instruction()%0A expected = (%22Caranya cukup mudah ketik %22%0A %22angka (tanpa titik dan koma) yang akan di terjemahkan%22)%0A self.assertEqual(result, expected)%0A%0A
if __nam
|
a063d15a58cdbaaaac4e63d12080664cef6c02e6
|
Add detonation timeout to submission parameters.
|
modules/Detonation/VxStream.py
|
modules/Detonation/VxStream.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals
import requests
import json
import time
# from common import basename
from collections import Counter
__author__ = 'Austin West'
__license__ = 'MPL 2.0'
TYPE = 'Detonation'
NAME = 'VxStream Sandbox'
DEFAULTCONF = {
'ENABLED': False,
'API URL': 'http://localhost/api/',
'API key': '',
'API secret': '',
'Environment ID': 1,
'Verify': False,
'timeout': 360,
'running timeout': 120,
}
def post_to_vxstream(
f_name, environment_id,
submit_url, apikey, secret, verify):
with open(f_name, 'rb') as f:
files = {'file': f}
data = {'apikey': apikey, 'secret': secret, 'environmentId': environment_id}
try:
user_agent = {'User-agent': 'VxStream Sandbox'}
res = requests.post(submit_url, data=data, headers=user_agent, files=files, verify=verify)
if res.status_code == 200:
return res.json()
else:
print('Error code: {}, returned when uploading: {}'.format(res.status_code, f.name))
except requests.exceptions.HTTPError as err:
print(err)
#traceback.print_exc()
def get_file_status(file_sha256, status_url, environment_id, apikey, secret, verify):
user_agent = {'User-agent': 'VxStream Sandbox'}
params = {'apikey': apikey, 'secret': secret, 'environmentId': environment_id}
resource_url = '%s/%s' % (status_url, file_sha256)
try:
res = requests.get(resource_url, headers=user_agent, params=params, verify=verify)
if res.status_code == 200:
return res.json()
else:
print('Error code: {}, returned when getting file status: {}'.format(res.status_code, file_sha256))
return res
except requests.exceptions.HTTPError as err:
print(err)
def get_file_report(file_sha256, report_url, environment_id, type_, apikey, secret, verify):
user_agent = {'User-agent': 'VxStream Sandbox'}
params = {'apikey': apikey, 'secret': secret, 'environmentId': environment_id, 'type': type_}
resource_url = '%s/%s' % (report_url, file_sha256)
try:
res = requests.get(resource_url, headers=user_agent, params=params, verify=verify)
if res.status_code == 200:
return res.json()
else:
print('Error code: {}, returned when getting report: {}'.format(res.status_code, file_sha256))
return res
except requests.exceptions.HTTPError as err:
print(err)
def check(conf=DEFAULTCONF):
return conf['ENABLED']
def scan(filelist, conf=DEFAULTCONF):
resultlist = []
tasks = []
if conf['API URL'].endswith('/'):
url = conf['API URL']
else:
url = conf['API URL'] + '/'
submit_url = url + 'submit'
status_url = url + 'state'
report_url = url + 'result'
for fname in filelist:
response = post_to_vxstream(
fname, environment_id=conf['Environment ID'],
submit_url=submit_url, apikey=conf['API key'], secret=conf['API secret'],
verify=conf['Verify'])
try:
file_sha256 = response['response']['sha256']
except KeyError as e:
continue
if file_sha256 is not None:
tasks.append((fname, file_sha256))
# Wait for tasks to finish
task_status = {}
while tasks:
for fname, file_sha256 in tasks[:]:
status_dict = get_file_status(
file_sha256, status_url, conf['Environment ID'],
apikey=conf['API key'], secret=conf['API secret'],
verify=conf['Verify']
)
status = status_dict.get('response', {}).get('state', 'ERROR')
# If we have a report
if status == 'SUCCESS':
report = get_file_report(
file_sha256, report_url, conf['Environment ID'],
apikey=conf['API key'], secret=conf['API secret'],
type_='json', verify=conf['Verify']
)
if report:
resultlist.append((fname, report))
tasks.remove((fname, file_sha256))
# Check for dead tasks
elif status == 'IN_PROGRESS':
if file_sha256 not in task_status:
task_status[file_sha256] = time.time() + conf['timeout'] + conf['running timeout']
else:
if time.time() > task_status[file_sha256]:
# TODO Log timeout
tasks.remove((fname, file_sha256))
# If there is an unknown status
elif status == 'ERROR':
# TODO Log errors better
tasks.remove((fname, file_sha256))
time.sleep(15)
metadata = {}
metadata["Name"] = NAME
metadata["Type"] = TYPE
metadata["Include"] = False
return (resultlist, metadata)
|
Python
| 0
|
@@ -790,32 +790,41 @@
apikey, secret,
+ runtime,
verify):%0A wi
@@ -813,24 +813,24 @@
e, verify):%0A
-
with ope
@@ -868,16 +868,17 @@
iles = %7B
+
'file':
@@ -878,16 +878,17 @@
file': f
+
%7D%0A
@@ -893,24 +893,37 @@
data = %7B
+%0A
'apikey': ap
@@ -919,32 +919,44 @@
apikey': apikey,
+%0A
'secret': secre
@@ -949,32 +949,44 @@
secret': secret,
+%0A
'environmentId'
@@ -997,24 +997,72 @@
vironment_id
+,%0A 'customruntime': runtime,%0A
%7D%0A tr
@@ -3387,32 +3387,89 @@
conf%5B'API key'%5D,
+%0A runtime=conf%5B'running timeout'%5D,%0A
secret=conf%5B'AP
@@ -3479,28 +3479,16 @@
ecret'%5D,
-%0A
verify=
|
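The dead-task handling above is a per-task deadline pattern: record a deadline the first time a task is seen IN_PROGRESS, and drop it once the deadline passes. A self-contained sketch with a hypothetical poll callback in place of get_file_status:

import time

def poll_until_done(tasks, poll, timeout, interval=15):
    """poll(task) -> 'SUCCESS' | 'IN_PROGRESS' | 'ERROR' (hypothetical)."""
    deadlines = {}
    while tasks:
        for task in tasks[:]:
            status = poll(task)
            if status in ('SUCCESS', 'ERROR'):
                tasks.remove(task)
            elif status == 'IN_PROGRESS':
                # Record a deadline on first sighting; give up once passed.
                deadlines.setdefault(task, time.time() + timeout)
                if time.time() > deadlines[task]:
                    tasks.remove(task)
        time.sleep(interval)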
07ef4a31337685212b68aebd009e12ea7d4ce14e
|
Set default timestamp to now if not provided.
|
st2common/st2common/models/db/execution.py
|
st2common/st2common/models/db/execution.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import mongoengine as me
from st2common import log as logging
from st2common.models.db import stormbase
from st2common.fields import ComplexDateTimeField
from st2common.util import date as date_utils
from st2common.util.secrets import get_secret_parameters
from st2common.util.secrets import mask_secret_parameters
from st2common.constants.types import ResourceType
__all__ = [
'ActionExecutionDB',
'ActionExecutionStdoutOutputDB',
'ActionExecutionStderrOutputDB',
]
LOG = logging.getLogger(__name__)
class ActionExecutionDB(stormbase.StormFoundationDB):
RESOURCE_TYPE = ResourceType.EXECUTION
UID_FIELDS = ['id']
trigger = stormbase.EscapedDictField()
trigger_type = stormbase.EscapedDictField()
trigger_instance = stormbase.EscapedDictField()
rule = stormbase.EscapedDictField()
action = stormbase.EscapedDictField(required=True)
runner = stormbase.EscapedDictField(required=True)
# Only the diff between the liveaction type and what is replicated
# in the ActionExecutionDB object.
liveaction = stormbase.EscapedDictField(required=True)
status = me.StringField(
required=True,
help_text='The current status of the liveaction.')
start_timestamp = ComplexDateTimeField(
default=date_utils.get_datetime_utc_now,
help_text='The timestamp when the liveaction was created.')
end_timestamp = ComplexDateTimeField(
help_text='The timestamp when the liveaction has finished.')
parameters = stormbase.EscapedDynamicField(
default={},
help_text='The key-value pairs passed as to the action runner & action.')
result = stormbase.EscapedDynamicField(
default={},
help_text='Action defined result.')
context = me.DictField(
default={},
help_text='Contextual information on the action execution.')
parent = me.StringField()
children = me.ListField(field=me.StringField())
log = me.ListField(field=me.DictField())
# Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows.
web_url = me.StringField(required=False)
meta = {
'indexes': [
{'fields': ['rule.ref']},
{'fields': ['action.ref']},
{'fields': ['liveaction.id']},
{'fields': ['start_timestamp']},
{'fields': ['end_timestamp']},
{'fields': ['status']},
{'fields': ['parent']},
{'fields': ['rule.name']},
{'fields': ['runner.name']},
{'fields': ['trigger.name']},
{'fields': ['trigger_type.name']},
{'fields': ['trigger_instance.id']},
{'fields': ['context.user']},
{'fields': ['-start_timestamp', 'action.ref', 'status']}
]
}
def get_uid(self):
# TODO Construct od from non id field:
uid = [self.RESOURCE_TYPE, str(self.id)]
return ':'.join(uid)
def mask_secrets(self, value):
result = copy.deepcopy(value)
liveaction = result['liveaction']
parameters = {}
# pylint: disable=no-member
parameters.update(value.get('action', {}).get('parameters', {}))
parameters.update(value.get('runner', {}).get('runner_parameters', {}))
secret_parameters = get_secret_parameters(parameters=parameters)
result['parameters'] = mask_secret_parameters(parameters=result['parameters'],
secret_parameters=secret_parameters)
if 'parameters' in liveaction:
liveaction['parameters'] = mask_secret_parameters(parameters=liveaction['parameters'],
secret_parameters=secret_parameters)
return result
def get_masked_parameters(self):
"""
Retrieve parameters with the secrets masked.
:rtype: ``dict``
"""
serializable_dict = self.to_serializable_dict(mask_secrets=True)
return serializable_dict['parameters']
class ActionExecutionStdoutOutputDB(stormbase.StormFoundationDB):
"""
Stores stdout output of a particular action.
    A new document is inserted dynamically when a new line is received, which means you can simulate
tail behavior by periodically reading from this collection.
"""
execution_id = me.StringField(required=True)
action_ref = me.StringField(required=True)
timestamp = me.DateTimeField(required=True)
line = me.StringField()
meta = {
'indexes': [
{'fields': ['execution_id']},
{'fields': ['timestamp']},
{'fields': ['action_ref']}
]
}
class ActionExecutionStderrOutputDB(stormbase.StormFoundationDB):
"""
Stores stderr output of a particular action.
    A new document is inserted dynamically when a new line is received, which means you can simulate
tail behavior by periodically reading from this collection.
"""
execution_id = me.StringField(required=True)
action_ref = me.StringField(required=True)
timestamp = me.DateTimeField(required=True)
line = me.StringField()
meta = {
'indexes': [
{'fields': ['execution_id']},
{'fields': ['timestamp']},
{'fields': ['action_ref']}
]
}
MODELS = [ActionExecutionDB, ActionExecutionStdoutOutputDB,
ActionExecutionStderrOutputDB]
|
Python
| 0
|
@@ -5259,32 +5259,73 @@
ld(required=True
+, default=date_utils.get_datetime_utc_now
)%0A%0A line = me
@@ -5948,16 +5948,57 @@
red=True
+, default=date_utils.get_datetime_utc_now
)%0A%0A l
|
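The detail worth noting in this diff: the default is the callable date_utils.get_datetime_utc_now itself, not its result, so mongoengine evaluates it once per document. A sketch contrasting the two forms, reusing the names from the file above:

# Wrong for this purpose: called once at class-definition time,
# so every document would share the same timestamp.
timestamp = me.DateTimeField(required=True,
                             default=date_utils.get_datetime_utc_now())

# What the commit adds: the callable itself, invoked per document.
timestamp = me.DateTimeField(required=True,
                             default=date_utils.get_datetime_utc_now)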
455ec97adb158bf4a1512337ec96ea9d37ebb98a
|
Add support for configurable TEST_MASTER to client
|
client.py
|
client.py
|
#!/usr/bin/env python
import getpass
import logging
import optparse
import os
import sys
import time
import urllib
import urllib2
import simplejson
import time
TEST_MASTER = "http://a1228.halxg.cloudera.com:8081"
RED = "\x1b[31m"
RESET = "\x1b[m"
def generate_job_id():
return "%s.%d.%d" % (getpass.getuser(), int(time.time()), os.getpid())
def do_watch_results(job_id):
watch_url = TEST_MASTER + "/job?" + urllib.urlencode([("job_id", job_id)])
logging.info("Watch your results at %s", watch_url)
url = TEST_MASTER + "/job_status?" + urllib.urlencode([("job_id", job_id)])
start_time = time.time()
first = True
while True:
result_str = urllib2.urlopen(url).read()
result = simplejson.loads(result_str)
# On all but the first iteration, replace the previous line
if not first:
print "\x1b[F\x1b[2K",
first = False
run_time = time.time() - start_time
print "%.1fs\t" % run_time,
print "%d/%d tasks complete" % \
(result['finished_tasks'], result['total_tasks']),
if result['failed_tasks']:
print RED,
print "(%d failed)" % result['failed_tasks'],
print RESET,
print
if result['finished_tasks'] == result['total_tasks']:
break
time.sleep(0.5)
def save_last_job_id(job_id):
with file(os.path.expanduser("~/.dist-test-last-job"), "w") as f:
f.write(job_id)
def load_last_job_id():
try:
with file(os.path.expanduser("~/.dist-test-last-job"), "r") as f:
return f.read()
except:
return None
def submit_job_json(job_prefix, json):
# Verify that it is proper JSON
simplejson.loads(json)
# Prepend the job_prefix if present
if job_prefix is not None and len(job_prefix) > 0:
job_prefix += "."
job_id = job_prefix + generate_job_id()
form_data = urllib.urlencode({'job_id': job_id, 'job_json': json})
result_str = urllib2.urlopen(TEST_MASTER + "/submit_job",
data=form_data).read()
result = simplejson.loads(result_str)
if result.get('status') != 'SUCCESS':
        sys.stderr.write("Unable to submit job: %s\n" % repr(result))
sys.exit(1)
save_last_job_id(job_id)
logging.info("Submitted job %s", job_id)
return job_id
def submit(argv):
p = optparse.OptionParser(
usage="usage: %prog submit [options] <job-json-path>")
p.add_option("-n", "--name",
action="store",
type="string",
dest="name",
default="",
help="Job name prefix, will be mangled for additional uniqueness")
options, args = p.parse_args()
if len(args) != 1:
p.print_help()
sys.exit(1)
job_id = submit_job_json(options.name, file(args[0]).read())
do_watch_results(job_id)
def get_job_id_from_args(command, args):
if len(args) == 1:
job_id = load_last_job_id()
if job_id is not None:
logging.info("Using most recently submitted job id: %s" % job_id)
return job_id
if len(args) != 2:
print >>sys.stderr, "usage: %s %s <job-id>" % (os.path.basename(sys.argv[0]), command)
sys.exit(1)
return args[1]
def watch(argv):
job_id = get_job_id_from_args("watch", argv)
do_watch_results(job_id)
def fetch_failed_tasks(job_id):
url = TEST_MASTER + "/failed_tasks?" + urllib.urlencode([("job_id", job_id)])
results_str = urllib2.urlopen(url).read()
return simplejson.loads(results_str)
def safe_name(s):
return "".join([c.isalnum() and c or "_" for c in s])
def fetch(argv):
p = optparse.OptionParser(
usage="usage: %prog fetch [options] <job-id>")
p.add_option("-d", "--output-dir", dest="out_dir", type="string",
help="directory into which to download logs", metavar="PATH",
default="dist-test-results")
options, args = p.parse_args()
if len(args) == 0:
last_job = load_last_job_id()
if last_job:
args.append(last_job)
if len(args) != 1:
p.error("no job id specified")
job_id = args[0]
failed_tasks = fetch_failed_tasks(job_id)
if len(failed_tasks) == 0:
logging.info("No failed tasks in provided job, or job does not exist")
return
logging.info("Fetching %d failed task logs into %s",
len(failed_tasks),
options.out_dir)
try:
os.makedirs(options.out_dir)
except:
pass
for t in failed_tasks:
filename = safe_name(t['task_id']) + "." + safe_name(t['description'])
path_prefix = os.path.join(options.out_dir, filename)
if 'stdout_link' in t:
path = path_prefix + ".stdout"
if not os.path.exists(path):
logging.info("Fetching stdout for task %s into %s", t['task_id'], path)
urllib.urlretrieve(t['stdout_link'], path)
else:
logging.info("No stdout for task %s" % t['task_id'])
if 'stderr_link' in t:
path = path_prefix + ".stderr"
if not os.path.exists(path):
logging.info("Fetching stderr for task %s into %s", t['task_id'], path)
urllib.urlretrieve(t['stderr_link'], path)
else:
logging.info("No stderr for task %s" % t['task_id'])
def cancel_job(argv):
job_id = get_job_id_from_args("cancel", argv)
url = TEST_MASTER + "/cancel_job?" + urllib.urlencode([("job_id", job_id)])
result_str = urllib2.urlopen(url).read()
logging.info("Cancellation: %s" % result_str)
def usage(argv):
print >>sys.stderr, "usage: %s <command> [<args>]" % os.path.basename(argv[0])
print >>sys.stderr, """Commands:
submit Submit a JSON file listing tasks
cancel Cancel a previously submitted job
watch Watch an already-submitted job ID
fetch Fetch failed test logs from a previous job"""
print >>sys.stderr, "%s <command> --help may provide further info" % argv[0]
def main(argv):
logging.basicConfig(level=logging.INFO)
if len(argv) < 2:
usage(argv)
sys.exit(1)
command = argv[1]
del argv[1]
if command == "submit":
submit(argv)
elif command == "watch":
watch(argv)
elif command == "cancel":
cancel_job(argv)
elif command == "fetch":
fetch(argv)
else:
usage(argv)
sys.exit(1)
if __name__ == "__main__":
main(sys.argv)
|
Python
| 0
|
@@ -168,16 +168,51 @@
MASTER =
+ os.environ.get(%22DIST_TEST_MASTER%22,
%22http:/
@@ -242,16 +242,17 @@
om:8081%22
+)
%0A%0ARED =
|
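Decoded, the diff wraps the hard-coded master URL in an environment lookup, keeping the old address as the fallback, so deployments can point the client at a different master without editing the script:

import os

TEST_MASTER = os.environ.get("DIST_TEST_MASTER",
                             "http://a1228.halxg.cloudera.com:8081")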
6509a1c1e9ee92841378d0b6f546ebf64991bbea
|
add xyz to exportable formats
|
stltovoxel.py
|
stltovoxel.py
|
import argparse
from PIL import Image
import numpy as np
import os.path
import slice
import stl_reader
import perimeter
from util import arrayToPixel
def doExport(inputFilePath, outputFilePath, resolution):
mesh = list(stl_reader.read_stl_verticies(inputFilePath))
(scale, shift, bounding_box) = slice.calculateScaleAndShift(mesh, resolution)
mesh = list(slice.scaleAndShiftMesh(mesh, scale, shift))
outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)
for height in range(bounding_box[2]):
img = Image.new('RGB', (bounding_box[0], bounding_box[1]), "white") # create a new black image
pixels = img.load()
lines = slice.toIntersectingLines(mesh, height)
prepixel = np.zeros((bounding_box[0], bounding_box[1]), dtype=bool)
perimeter.linesToVoxels(lines, prepixel)
arrayToPixel(prepixel, pixels)
path = outputFilePattern + "-" + str(height) + outputFileExtension
print("%d/%d: Saving %s"%(height,bounding_box[2],path))
img.save(path)
def file_choices(choices,fname):
filename, ext = os.path.splitext(fname)
if ext == "" or ext not in choices:
if len(choices) == 1:
parser.error("file doesn't end with {}".format(choices))
else:
parser.error("file doesn't end with one of {}".format(choices))
return fname
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert STL files to images/gifs')
parser.add_argument('input', nargs='?', type=lambda s:file_choices((".stl"),s))
parser.add_argument('output', nargs='?', type=lambda s:file_choices((".png"),s))
args = parser.parse_args()
doExport(args.input, args.output, 256)
|
Python
| 0.000001
|
@@ -416,156 +416,93 @@
-outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)%0A for height in range(bounding_box%5B2%5D):%0A img = Image.new('RGB', (
+#Note: vol should be addressed with vol%5Bz%5D%5Bx%5D%5By%5D%0A vol = np.zeros((bounding_box%5B2%5D,
boun
@@ -505,33 +505,32 @@
bounding_box%5B0%5D,
-
bounding_box%5B1%5D)
@@ -535,71 +535,127 @@
%5D),
-%22white%22) # create a new black image%0A pixels = img.load(
+dtype=bool)%0A for height in range(bounding_box%5B2%5D):%0A print('Processing layer %25d/%25d'%25(height+1,bounding_box%5B2%5D)
)%0A
@@ -845,29 +845,729 @@
-arrayToPixel(prepixel
+vol%5Bheight%5D = prepixel%0A outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)%0A if outputFileExtension == '.png':%0A exportPngs(vol, bounding_box, outputFilePath)%0A elif outputFileExtension == '.xyz':%0A exportXyz(vol, bounding_box, outputFilePath)%0A elif outputFileExtension == '.svx':%0A exportSvx(vol, bounding_box, outputFilePath)%0A%0Adef exportPngs(voxels, bounding_box, outputFilePath):%0A outputFilePattern, outputFileExtension = os.path.splitext(outputFilePath)%0A for height in range(bounding_box%5B2%5D):%0A img = Image.new('RGB', (bounding_box%5B0%5D, bounding_box%5B1%5D), 'white') # create a new black image%0A pixels = img.load()%0A arrayToPixel(voxels%5Bheight%5D
, pi
@@ -1611,11 +1611,11 @@
n +
-%22-%22
+'-'
+ s
@@ -1659,41 +1659,129 @@
-print(%22%25d/%25d: Saving %25s%22%25(height,
+img.save(path)%0A%0Adef exportXyz(voxels, bounding_box, outputFilePath):%0A output = open(outputFilePath, 'w')%0A for z in
boun
@@ -1795,15 +1795,9 @@
x%5B2%5D
-,path))
+:
%0A
@@ -1801,30 +1801,249 @@
-img.save(path)
+for x in bounding_box%5B0%5D:%0A for y in bounding_box%5B1%5D:%0A if vol%5Bz%5D%5Bx%5D%5By%5D:%0A output.write('%25s %25s %25s%5Cn'%25(x,y,z))%0A output.close()%0A%0Adef exportSvx(voxels, bounding_box, outputFilePath):%0A pass
%0A%0A%0Adef f
@@ -2132,10 +2132,10 @@
==
-%22%22
+''
or
@@ -2205,35 +2205,34 @@
arser.error(
-%22file
+'%25s
doesn
+%5C
't end with
@@ -2231,27 +2231,27 @@
nd with
-%7B%7D%22.format(
+%25s'%25(fname,
choices)
@@ -2295,19 +2295,18 @@
ror(
-%22file
+'%25s
doesn
+%5C
't e
@@ -2324,19 +2324,19 @@
of
-%7B%7D%22.format(
+%25s'%25(fname,
choi
@@ -2547,14 +2547,14 @@
es((
-%22
+'
.stl
-%22
+'
),s)
@@ -2632,14 +2632,30 @@
es((
-%22
+'
.png
-%22
+', '.xyz', '.svx'
),s)
|
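This diff is larger: doExport now rasterizes every layer into one boolean volume, then dispatches on the output extension instead of always writing PNGs. A condensed, hand-decoded sketch follows. Note that the committed exportXyz iterates bounding_box[0] (an int) directly and reads a name vol that is not in scope; the version below adds the range() calls and uses the voxels parameter so the sketch actually runs, and exportPngs is stubbed for brevity:

import os
import numpy as np

def doExport(bounding_box, outputFilePath):
    # vol is addressed as vol[z][x][y], per the comment in the diff
    vol = np.zeros((bounding_box[2], bounding_box[0], bounding_box[1]), dtype=bool)
    # ... per-layer slicing fills vol[height] here, as in the original loop ...
    _, ext = os.path.splitext(outputFilePath)
    if ext == '.png':
        exportPngs(vol, bounding_box, outputFilePath)
    elif ext == '.xyz':
        exportXyz(vol, bounding_box, outputFilePath)
    elif ext == '.svx':
        exportSvx(vol, bounding_box, outputFilePath)

def exportPngs(voxels, bounding_box, outputFilePath):
    pass  # one PNG per layer, unchanged in spirit from the original loop

def exportXyz(voxels, bounding_box, outputFilePath):
    # Writes one "x y z" line per filled voxel.
    with open(outputFilePath, 'w') as output:
        for z in range(bounding_box[2]):
            for x in range(bounding_box[0]):
                for y in range(bounding_box[1]):
                    if voxels[z][x][y]:
                        output.write('%s %s %s\n' % (x, y, z))

def exportSvx(voxels, bounding_box, outputFilePath):
    pass  # also left unimplemented by the commit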
21f53bee1bfba8ef82b82898693c2cc09a7873c7
|
add get_weight() to Keras interface
|
syft/interfaces/keras/models/sequential.py
|
syft/interfaces/keras/models/sequential.py
|
import syft
import syft.nn as nn
import sys
from syft.interfaces.keras.layers import Log
class Sequential(object):
def __init__(self):
self.syft = nn.Sequential()
self.layers = list()
self.compiled = False
def add(self, layer):
if(len(self.layers) > 0):
# look to the previous layer to get the input shape for this layer
layer.input_shape = self.layers[-1].output_shape
# if layer doesn't know its output shape - it's probably dynamic
if not hasattr(layer, 'output_shape'):
layer.output_shape = layer.input_shape
layer.create_model()
self.layers.append(layer)
# sometimes keras has single layers that actually correspond
# to multiple syft layers - so they end up getting stored in
# an ordered list called "ordered_syft"
for l in layer.ordered_syft:
self.syft.add(l)
def summary(self):
self.syft.summary()
def compile(self,loss,optimizer,metrics,alpha=0.01):
if(not self.compiled):
self.compiled = True
if(loss == 'categorical_crossentropy'):
self.add(Log())
self.loss = nn.NLLLoss()
self.optimizer = optimizer
self.metrics = metrics
self.optimizer.init(syft_params=self.syft.parameters(),alpha=alpha)
else:
sys.stderr.write("Warning: Model already compiled... please rebuild from scratch if you need to change things")
def fit(self,x_train,y_train,batch_size,epochs,verbose,validation_data):
final_loss = self.syft.fit(input=x_train,
target=y_train,
batch_size=batch_size,
criterion=self.loss,
optim=self.optimizer.syft,
iters=epochs,
log_interval=1)
return final_loss
def predict(self,x):
return self.syft.forward(input=x)
|
Python
| 0
|
@@ -1783,11 +1783,65 @@
ut=x)%0A%0A%09
+def get_weights(self):%0A%09%09return self.syft.parameters()
%0A%0A%0A
|
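Decoded, the addition is a two-line accessor that mirrors the Keras model API by delegating to the underlying syft model (note the commit subject says get_weight(), but the method added is get_weights()):

    def get_weights(self):
        return self.syft.parameters()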
8299b323eee11dbfebb7c97bfcd16281b874be1d
|
add release endpoints for /thirdparty
|
synapse/rest/client/v2_alpha/thirdparty.py
|
synapse/rest/client/v2_alpha/thirdparty.py
|
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from twisted.internet import defer
from synapse.api.constants import ThirdPartyEntityKind
from synapse.http.servlet import RestServlet
from ._base import client_v2_patterns
logger = logging.getLogger(__name__)
class ThirdPartyProtocolsServlet(RestServlet):
PATTERNS = client_v2_patterns("/thirdparty/protocols", releases=())
def __init__(self, hs):
super(ThirdPartyProtocolsServlet, self).__init__()
self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()
@defer.inlineCallbacks
def on_GET(self, request):
yield self.auth.get_user_by_req(request, allow_guest=True)
protocols = yield self.appservice_handler.get_3pe_protocols()
defer.returnValue((200, protocols))
class ThirdPartyProtocolServlet(RestServlet):
PATTERNS = client_v2_patterns("/thirdparty/protocol/(?P<protocol>[^/]+)$",
releases=())
def __init__(self, hs):
super(ThirdPartyProtocolServlet, self).__init__()
self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()
@defer.inlineCallbacks
def on_GET(self, request, protocol):
yield self.auth.get_user_by_req(request, allow_guest=True)
protocols = yield self.appservice_handler.get_3pe_protocols(
only_protocol=protocol,
)
if protocol in protocols:
defer.returnValue((200, protocols[protocol]))
else:
defer.returnValue((404, {"error": "Unknown protocol"}))
class ThirdPartyUserServlet(RestServlet):
PATTERNS = client_v2_patterns("/thirdparty/user(/(?P<protocol>[^/]+))?$",
releases=())
def __init__(self, hs):
super(ThirdPartyUserServlet, self).__init__()
self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()
@defer.inlineCallbacks
def on_GET(self, request, protocol):
yield self.auth.get_user_by_req(request, allow_guest=True)
fields = request.args
fields.pop("access_token", None)
results = yield self.appservice_handler.query_3pe(
ThirdPartyEntityKind.USER, protocol, fields
)
defer.returnValue((200, results))
class ThirdPartyLocationServlet(RestServlet):
PATTERNS = client_v2_patterns("/thirdparty/location(/(?P<protocol>[^/]+))?$",
releases=())
def __init__(self, hs):
super(ThirdPartyLocationServlet, self).__init__()
self.auth = hs.get_auth()
self.appservice_handler = hs.get_application_service_handler()
@defer.inlineCallbacks
def on_GET(self, request, protocol):
yield self.auth.get_user_by_req(request, allow_guest=True)
fields = request.args
fields.pop("access_token", None)
results = yield self.appservice_handler.query_3pe(
ThirdPartyEntityKind.LOCATION, protocol, fields
)
defer.returnValue((200, results))
def register_servlets(hs, http_server):
ThirdPartyProtocolsServlet(hs).register(http_server)
ThirdPartyProtocolServlet(hs).register(http_server)
ThirdPartyUserServlet(hs).register(http_server)
ThirdPartyLocationServlet(hs).register(http_server)
|
Python
| 0
|
@@ -941,23 +941,10 @@
ols%22
-, releases=()
)%0A
+
%0A
@@ -1503,55 +1503,8 @@
+)$%22
-,%0A releases=()
)%0A%0A
@@ -2244,55 +2244,8 @@
)?$%22
-,%0A releases=()
)%0A%0A
@@ -2848,32 +2848,32 @@
t(RestServlet):%0A
+
PATTERNS = c
@@ -2940,55 +2940,8 @@
)?$%22
-,%0A releases=()
)%0A%0A
|
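All four hunks make the same edit: dropping releases=() from client_v2_patterns(), so each servlet is registered on the stable release API paths rather than only the unstable prefix. The first pattern, for example, becomes:

PATTERNS = client_v2_patterns("/thirdparty/protocols")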
1caa24a8eda4a90caeeb41e7daee8b73735f7c2e
|
Add API endpoint returning waterpoint ids for open requests
|
taarifa_waterpoints/taarifa_waterpoints.py
|
taarifa_waterpoints/taarifa_waterpoints.py
|
import json
import requests
from eve.render import send_response
from flask import request, Response, send_from_directory
from werkzeug.contrib.cache import SimpleCache
cache = SimpleCache()
from taarifa_api import api as app, main
def post_waterpoints_get_callback(request, payload):
"""Strip all meta data but id from waterpoint payload if 'strip' is set to
a non-zero value in the query string."""
if request.args.get('strip', 0):
d = json.loads(payload.data)
d['_items'] = [dict((k, v) for k, v in it.items()
if k == '_id' or not k.startswith('_'))
for it in d['_items']]
payload.data = json.dumps(d)
app.name = 'TaarifaWaterpoints'
app.on_post_GET_waterpoints += post_waterpoints_get_callback
# Override the maximum number of results on a single page
# This is needed by the dashboard
# FIXME: this should eventually be replaced by an incremental load
# which is better for responsiveness
app.config['PAGINATION_LIMIT'] = 70000
@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/values/<field>')
def waterpoint_values(field):
"Return the unique values for a given field in the waterpoints collection."
# FIXME: Direct call to the PyMongo driver, should be abstracted
resources = app.data.driver.db['resources']
if request.args:
resources = resources.find(dict(request.args.items()))
return send_response('resources', (resources.distinct(field),))
@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/stats')
def waterpoint_stats():
"Return number of waterpoints grouped by district and status."
# FIXME: Direct call to the PyMongo driver, should be abstracted
resources = app.data.driver.db['resources']
return send_response('resources', (resources.group(
['district', 'status_group'], dict(request.args.items()),
initial={'count': 0},
reduce="function(curr, result) {result.count++;}"),))
@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/status')
def waterpoint_status():
"Return number of waterpoints grouped by status."
# FIXME: Direct call to the PyMongo driver, should be abstracted
resources = app.data.driver.db['resources']
return send_response('resources', (resources.group(
['status_group'], dict(request.args.items()), initial={'count': 0},
reduce="function(curr, result) {result.count++;}"),))
@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/count_by/<field>')
def waterpoint_count_by(field):
"Return number of waterpoints grouped a given field."
# FIXME: Direct call to the PyMongo driver, should be abstracted
resources = app.data.driver.db['resources']
return send_response('resources', (resources.group(
field.split(','), dict(request.args.items()), initial={'count': 0},
reduce="function(curr, result) {result.count++;}"),))
@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/stats_by/<field>')
def waterpoint_stats_by(field):
"""Return number of waterpoints of a given status grouped by a certain
attribute."""
# FIXME: Direct call to the PyMongo driver, should be abstracted
resources = app.data.driver.db['resources']
return send_response('resources', (resources.aggregate([
{"$match": dict(request.args.items())},
{"$group": {"_id": {field: "$" + field,
"status": "$status_group"},
"statusCount": {"$sum": 1},
"populationCount": {"$sum": "$population"}}},
{"$group": {"_id": "$_id." + field,
"waterpoints": {
"$push": {
"status": "$_id.status",
"count": "$statusCount",
"population": "$populationCount",
},
},
"count": {"$sum": "$statusCount"}}},
{"$project": {"_id": 0,
field: "$_id",
"waterpoints": 1,
"population": 1,
"count": 1}},
{"$sort": {field: 1}}])['result'],))
@app.route('/scripts/<path:filename>')
def scripts(filename):
return send_from_directory(app.root_path + '/dist/scripts/', filename)
@app.route('/styles/<path:filename>')
def styles(filename):
return send_from_directory(app.root_path + '/dist/styles/', filename)
@app.route('/images/<path:filename>')
def images(filename):
return send_from_directory(app.root_path + '/dist/images/', filename)
@app.route('/data/<path:filename>.geojson')
def geojson(filename):
url = 'http://162.243.57.235/geoserver/wfs?srsName=EPSG%3A4326&typename=geonode%3A' \
+ filename + '&outputFormat=json&version=1.0.0&service=WFS&request=GetFeature'
resp = cache.get(filename)
if resp is None:
r = requests.get(url)
resp = Response(r.content, status=r.status_code,
content_type=r.headers['content-type'])
cache.set(filename, resp, timeout=24*60*60)
return resp
@app.route('/data/<path:filename>')
def data(filename):
# FIXME: if we ever want to send non-JSON data this needs fixing
return send_from_directory(app.root_path + '/dist/data/', filename,
mimetype="application/json")
@app.route('/views/<path:filename>')
def views(filename):
return send_from_directory(app.root_path + '/dist/views/', filename)
@app.route("/")
def index():
return send_from_directory(app.root_path + '/dist/', 'index.html')
@app.route("/favicon.ico")
def favicon():
return send_from_directory(app.root_path + '/dist/', 'favicon.ico')
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -1020,16 +1020,500 @@
70000%0A%0A%0A
+@app.route('/' + app.config%5B'URL_PREFIX'%5D + '/waterpoints/requests')%0Adef waterpoint_requests():%0A %22Return the unique values for a given field in the waterpoints collection.%22%0A # FIXME: Direct call to the PyMongo driver, should be abstracted%0A requests = app.data.driver.db%5B'requests'%5D.find(%7B'status': 'open'%7D,%0A %5B'attribute.waterpoint_id'%5D)%0A return send_response('requests', (requests.distinct('attribute.waterpoint_id'),))%0A%0A%0A
@app.rou
|
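The added endpoint is complete inside the diff; unescaped, it reads as below. Its docstring was copied verbatim from the values endpoint and does not describe this route, which actually returns the distinct waterpoint ids referenced by open requests:

@app.route('/' + app.config['URL_PREFIX'] + '/waterpoints/requests')
def waterpoint_requests():
    "Return the unique values for a given field in the waterpoints collection."
    # FIXME: Direct call to the PyMongo driver, should be abstracted
    requests = app.data.driver.db['requests'].find({'status': 'open'},
                                                   ['attribute.waterpoint_id'])
    return send_response('requests', (requests.distinct('attribute.waterpoint_id'),))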
423b8ed88cb1387e3b35eb2f3e270568c17ce13b
|
Make substitutions to avoid absolute paths.
|
test/command_line/tst_kapton_correction.py
|
test/command_line/tst_kapton_correction.py
|
from __future__ import absolute_import, division
from dials.array_family import flex # import dependency
class Test(object):
def __init__(self):
from os.path import join
import libtbx.load_env
try:
dials_regression = libtbx.env.dist_path('dials_regression')
except KeyError, e:
print 'SKIP: dials_regression not configured'
exit(0)
self.path = join(dials_regression, "integration_test_data/stills_PSII")
def run(self):
self.test_integrate_with_kapton()
def test_integrate_with_kapton(self):
from os.path import join, exists
from libtbx import easy_run
import os
from uuid import uuid4
    dirname = 'tmp_%s' % uuid4().hex
os.mkdir(dirname)
os.chdir(dirname)
pickle_path = join(self.path, 'idx-20161021225550223_indexed.pickle')
json_path = join(self.path, 'idx-20161021225550223_refined_experiments.json')
assert os.path.exists(pickle_path)
assert os.path.exists(json_path)
templ_phil = """
output {
experiments = 'idx-20161021225550223_integrated_experiments_%s.json'
reflections = 'idx-20161021225550223_integrated_%s.pickle'
}
integration {
lookup.mask = '/Users/idyoung/xfel_dev/modules/dials_regression/integration_test_data/stills_PSII/mask.pickle'
integrator = stills
profile.fitting = False
background.algorithm = simple
debug {
output = True
separate_files = False
split_experiments = False
}
}
profile {
gaussian_rs.min_spots.overall = 0
}
absorption_correction {
apply = %s
algorithm = fuller_kapton
fuller_kapton {
smart_sigmas = True
}
}
"""
without_kapton_phil = templ_phil % ("nokapton", "nokapton", "False")
with_kapton_phil = templ_phil % ("kapton", "kapton", "True")
f = open("integrate_without_kapton.phil", 'wb')
f.write(without_kapton_phil)
f.close()
f = open("integrate_with_kapton.phil", 'wb')
f.write(with_kapton_phil)
f.close()
loc = os.getcwd()
# Call dials.integrate with and without kapton correction
for phil in "integrate_without_kapton.phil", "integrate_with_kapton.phil":
result = easy_run.fully_buffered([
'dials.integrate', pickle_path, json_path, phil
]).raise_if_errors()
result.show_stdout()
import cPickle as pickle
results = []
for mode in "kapton", "nokapton":
result = os.path.join(loc, "idx-20161021225550223_integrated_%s.pickle" % mode)
table = pickle.load(open(result, 'rb'))
millers = table['miller_index']
test_indices = {'zero':(-5, 2, -6), 'low':(-2, -20, 7), 'high':(-1, -10, 4)}
test_rows = {k:millers.first_index(v) for k,v in test_indices.iteritems()}
test_I_sigsqI = {k:(table[v]['intensity.sum.value'], table[v]['intensity.sum.variance'])
for k,v in test_rows.iteritems()}
results.append(test_I_sigsqI)
assert results[0]['zero'][0] == results[1]['zero'][0]
assert results[0]['zero'][1] - results[1]['zero'][1] < 0.0001
assert False not in [results[0]['low'][i] > results[1]['low'][i] for i in (0, 1)]
assert False not in [results[0]['high'][i] > results[1]['high'][i] for i in (0, 1)]
print 'OK'
if __name__ == '__main__':
from dials.test import cd_auto
with cd_auto(__file__):
test = Test()
test.run()
|
Python
| 0.000005
|
@@ -418,33 +418,36 @@
ration_test_data
-/
+%22, %22
stills_PSII%22)%0A%0A
@@ -736,42 +736,48 @@
me)%0A
-%0A pickle_path = join(self.path,
+ loc = os.getcwd()%0A%0A pickle_name =
'id
@@ -811,17 +811,16 @@
.pickle'
-)
%0A jso
@@ -825,30 +825,14 @@
son_
-path = join(self.path,
+name =
'id
@@ -880,84 +880,465 @@
son'
-)%0A assert os.path.exists(pickle_path)%0A assert os.path.exists(json_path
+%0A image_name = '20161021225550223.pickle'%0A pickle_path = join(self.path, pickle_name)%0A json_path = join(self.path, json_name)%0A image_path = join(self.path, image_name)%0A assert os.path.exists(pickle_path)%0A assert os.path.exists(json_path)%0A%0A import shutil%0A shutil.copy(pickle_path, loc)%0A shutil.copy(image_path, loc)%0A%0A w = open(json_name, 'wb')%0A r = open(json_path, 'rb')%0A w.write(r.read() %25 loc)%0A r.close()%0A w.close(
)%0A%0A
@@ -1571,102 +1571,10 @@
= '
-/Users/idyoung/xfel_dev/modules/dials_regression/integration_test_data/stills_PSII/mask.pickle
+%25s
'%0A
@@ -2084,16 +2084,62 @@
kapton%22,
+%0A os.path.join(self.path, %22mask.pickle%22),
%22False%22
@@ -2196,16 +2196,62 @@
kapton%22,
+%0A os.path.join(self.path, %22mask.pickle%22),
%22True%22)
@@ -2446,37 +2446,16 @@
lose()%0A%0A
- loc = os.getcwd()
%0A%0A #
@@ -2668,23 +2668,23 @@
kle_
-path
+name
, json_
-path
+name
, ph
|
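Unescaped, this diff does three things: it copies the input pickle and image into the working directory, rewrites the experiments JSON so a %s placeholder inside it resolves to the local path, and replaces the hard-coded /Users/... mask path in the phil template with a %s filled from dials_regression. A heavily condensed sketch of the substitutions (surrounding test scaffolding omitted, and the phil trimmed to the relevant keys):

loc = os.getcwd()

# the mask path becomes a template slot ...
templ_phil = """
integration {
  lookup.mask = '%s'
}
absorption_correction { apply = %s }
"""

# ... filled per-phil from the regression data directory
with_kapton_phil = templ_phil % (os.path.join(self.path, "mask.pickle"), "True")

# the refined-experiments JSON carries a %s that is rewritten to point at loc
with open(json_name, 'wb') as w, open(json_path, 'rb') as r:
    w.write(r.read() % loc)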
dc6390e58a26a6becedadb8705ef094507fb4281
|
Remove superfluous quotes
|
tests/pytests/unit/states/test_ssh_auth.py
|
tests/pytests/unit/states/test_ssh_auth.py
|
"""
:codeauthor: Jayesh Kariya <jayeshk@saltstack.com>
"""
import pytest
import salt.states.ssh_auth as ssh_auth
from tests.support.mock import MagicMock, patch
@pytest.fixture
def configure_loader_modules():
return {ssh_auth: {}}
def test_present():
"""
Test to verifies that the specified SSH key
is present for the specified user.
"""
name = "sshkeys"
user = "root"
source = "salt://ssh_keys/id_rsa.pub"
ret = {"name": name, "changes": {}, "result": True, "comment": ""}
mock = MagicMock(return_value="exists")
mock_data = MagicMock(side_effect=["replace", "new"])
with patch.dict(
ssh_auth.__salt__, {"ssh.check_key": mock, "ssh.set_auth_key": mock_data}
):
with patch.dict(ssh_auth.__opts__, {"test": True}):
comt = "The authorized host key sshkeys is already " "present for user root"
ret.update({"comment": comt})
assert ssh_auth.present(name, user, source) == ret
with patch.dict(ssh_auth.__opts__, {"test": False}):
comt = "The authorized host key sshkeys " "for user root was updated"
ret.update({"comment": comt, "changes": {name: "Updated"}})
assert ssh_auth.present(name, user, source) == ret
comt = "The authorized host key sshkeys " "for user root was added"
ret.update({"comment": comt, "changes": {name: "New"}})
assert ssh_auth.present(name, user, source) == ret
def test_absent():
"""
Test to verifies that the specified SSH key is absent.
"""
name = "sshkeys"
user = "root"
source = "salt://ssh_keys/id_rsa.pub"
ret = {"name": name, "changes": {}, "result": None, "comment": ""}
mock = MagicMock(
side_effect=["User authorized keys file not present", "Key removed"]
)
mock_up = MagicMock(side_effect=["update", "updated"])
with patch.dict(
ssh_auth.__salt__, {"ssh.rm_auth_key": mock, "ssh.check_key": mock_up}
):
with patch.dict(ssh_auth.__opts__, {"test": True}):
comt = "Key sshkeys for user root is set for removal"
ret.update({"comment": comt})
assert ssh_auth.absent(name, user, source) == ret
comt = "Key is already absent"
ret.update({"comment": comt, "result": True})
assert ssh_auth.absent(name, user, source) == ret
with patch.dict(ssh_auth.__opts__, {"test": False}):
comt = "User authorized keys file not present"
ret.update({"comment": comt, "result": False})
assert ssh_auth.absent(name, user, source) == ret
comt = "Key removed"
ret.update({"comment": comt, "result": True, "changes": {name: "Removed"}})
assert ssh_auth.absent(name, user, source) == ret
def test_manage():
"""
Test to verifies that the specified SSH key is absent.
"""
user = "root"
ret = {"name": "", "changes": {}, "result": None, "comment": ""}
mock_rm = MagicMock(
side_effect=["User authorized keys file not present", "Key removed"]
)
mock_up = MagicMock(side_effect=["update", "updated"])
mock_set = MagicMock(side_effect=["replace", "new"])
mock_keys = MagicMock(
return_value={
"somekey": {
"enc": "ssh-rsa",
"comment": "user@host",
"options": [],
"fingerprint": "b7",
}
}
)
with patch.dict(
ssh_auth.__salt__,
{
"ssh.rm_auth_key": mock_rm,
"ssh.set_auth_key": mock_set,
"ssh.check_key": mock_up,
"ssh.auth_keys": mock_keys,
},
):
with patch("salt.states.ssh_auth.present") as call_mocked_present:
mock_present = {"comment": "", "changes": {}, "result": None}
call_mocked_present.return_value = mock_present
with patch.dict(ssh_auth.__opts__, {"test": True}):
assert ssh_auth.manage("sshid", ["somekey"], user) == ret
comt = "somekey Key set for removal"
ret.update({"comment": comt})
assert ssh_auth.manage("sshid", [], user) == ret
with patch("salt.states.ssh_auth.present") as call_mocked_present:
mock_present = {"comment": "", "changes": {}, "result": True}
call_mocked_present.return_value = mock_present
with patch.dict(ssh_auth.__opts__, {"test": False}):
ret = {"name": "", "changes": {}, "result": True, "comment": ""}
assert ssh_auth.manage("sshid", ["somekey"], user) == ret
with patch("salt.states.ssh_auth.absent") as call_mocked_absent:
mock_absent = {"comment": "Key removed"}
call_mocked_absent.return_value = mock_absent
ret.update(
{
"comment": "",
"result": True,
"changes": {"somekey": "Key removed"},
}
)
assert ssh_auth.manage("sshid", ["addkey"], user) == ret
# add a key
with patch("salt.states.ssh_auth.present") as call_mocked_present:
mock_present = {
"comment": "The authorized host key newkey for user {} was added".format(
user
),
"changes": {"newkey": "New"},
"result": True,
}
call_mocked_present.return_value = mock_present
with patch.dict(ssh_auth.__opts__, {"test": False}):
ret = {
"name": "",
"changes": {"newkey": "New"},
"result": True,
"comment": "",
}
assert ssh_auth.manage("sshid", ["newkey", "somekey"], user) == ret
|
Python
| 0.001816
|
@@ -849,19 +849,16 @@
already
-%22 %22
present
@@ -1083,35 +1083,32 @@
ost key sshkeys
-%22 %22
for user root wa
@@ -1310,11 +1310,8 @@
eys
-%22 %22
for
|
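The "superfluous quotes" are adjacent string literals, which Python concatenates implicitly; the diff simply merges each pair into one literal, leaving every value unchanged. For instance:

# before: two adjacent literals, implicitly concatenated at compile time
comt = "The authorized host key sshkeys is already " "present for user root"
# after: one literal with the identical value
comt = "The authorized host key sshkeys is already present for user root"
assert "a " "b" == "a b"  # the concatenation rule being cleaned up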
2db00337d9179740f7c9ab2e0df5c12db42bd873
|
Fix test
|
tests/test_simpleflow/swf/test_executor.py
|
tests/test_simpleflow/swf/test_executor.py
|
import unittest
import boto
from moto import mock_swf
from sure import expect
from simpleflow import activity, futures
from simpleflow.swf.executor import Executor
from swf.actors import Decider
from swf.models.history import builder
from swf.responses import Response
from tests.data import (
BaseTestWorkflow,
DOMAIN,
increment,
)
@activity.with_attributes(task_priority=32)
def increment_high_priority(self, x):
return x + 1
class ExampleWorkflow(BaseTestWorkflow):
"""
Example workflow definition used in tests below.
"""
@property
def task_priority(self):
"""
Sets a default task priority as a dynamic value. We could also have used
task_priority = <num> on the class directly.
"""
return 12
def run(self):
a = self.submit(increment, 3)
b = self.submit(increment, 3, __priority=5)
c = self.submit(increment, 3, __priority=None)
d = self.submit(increment_high_priority, 3)
e = self.submit(increment_high_priority, 3, __priority=30)
futures.wait(a, b, c, d, e)
@mock_swf
class TestSimpleflowSwfExecutor(unittest.TestCase):
def setUp(self):
self.conn = boto.connect_swf()
self.conn.register_domain("TestDomain", "50")
self.conn.register_workflow_type(
"TestDomain", "test-workflow", "v1.2",
task_list="test-task-list", default_child_policy="TERMINATE",
default_execution_start_to_close_timeout="6",
default_task_start_to_close_timeout="3",
)
self.conn.start_workflow_execution("TestDomain", "wfe-1234",
"test-workflow", "v1.2")
def tearDown(self):
pass
def test_submit_resolves_priority(self):
response = Decider(DOMAIN, "test-task-list").poll()
executor = Executor(DOMAIN, ExampleWorkflow)
decisions, _ = executor.replay(response)
expect(decisions).to.have.length_of(5)
def get_task_priority(decision):
return decision["scheduleActivityTaskDecisionAttributes"].get("taskPriority")
# default priority for the whole workflow
expect(get_task_priority(decisions[0])).to.equal("12")
# priority passed explicitly
expect(get_task_priority(decisions[1])).to.equal("5")
# priority == None
expect(get_task_priority(decisions[2])).to.be.none
# priority set at decorator level
expect(get_task_priority(decisions[3])).to.equal("32")
# priority set at decorator level but overridden in self.submit()
expect(get_task_priority(decisions[4])).to.equal("30")
def test_get_event_details(self):
history = builder.History(ExampleWorkflow, input={})
signal_input = {'x': 42, 'foo': 'bar', '__propagate': False}
marker_details = {'baz': 'bae'}
history.add_signal('a_signal', signal_input)
history.add_marker('a_marker', marker_details)
executor = Executor(DOMAIN, ExampleWorkflow)
decisions, _ = executor.replay(Response(history=history, execution=None))
details = executor.get_event_details('signal', 'a_signal')
del details['timestamp']
expect(details).to.equal({
'type': 'signal',
'state': 'signaled',
'name': 'a_signal',
'input': signal_input,
'event_id': 4,
'external_initiated_event_id': 0,
'external_run_id': None,
'external_workflow_id': None,
})
details = executor.get_event_details('signal', 'another_signal')
expect(details).to.be.none
details = executor.get_event_details('marker', 'a_marker')
del details['timestamp']
expect(details).to.equal({
'type': 'marker',
'state': 'recorded',
'name': 'a_marker',
'details': marker_details,
'event_id': 5,
})
details = executor.get_event_details('marker', 'another_marker')
expect(details).to.be.none
|
Python
| 0.000001
|
@@ -2663,24 +2663,76 @@
qual(%2230%22)%0A%0A
+%0Aclass TestCaseNotNeedingDomain(unittest.TestCase):%0A
def test
|
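Decoded, the fix moves test_get_event_details out of the moto-backed TestSimpleflowSwfExecutor, whose setUp registers a SWF domain and workflow, into a fresh TestCase that needs no such fixture:

class TestCaseNotNeedingDomain(unittest.TestCase):

    def test_get_event_details(self):
        # body as above, unchanged: it replays a builder.History directly,
        # so no mock SWF domain or workflow registration is required
        ...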
29c59225c04760e4670f63eee5ed15a910a8f7ec
|
test update added new parameter to experiment()
|
tests/texture_features_experiments_test.py
|
tests/texture_features_experiments_test.py
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
# import funkcí z jiného adresáře
import sys
import os.path
import logging
path_to_script = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(path_to_script, "../experiments/"))
sys.path.append(os.path.join(path_to_script, "../extern/py3DSeedEditor/"))
sys.path.append(os.path.join(path_to_script, "../src/"))
import unittest
#
import tiled_liver_statistics
class TextureFeaturesExperimentTest(unittest.TestCase):
# @unittest.skip("comment after implementation")
def test_run_experiments(self):
"""
"""
import tiled_liver_statistics as tls
import texture_features as tfeat
from sklearn import svm
from sklearn.naive_bayes import GaussianNB
import classification
self.dcmdir = os.path.join(path_to_script, '../sample_data/jatra_06mm_jenjatraplus/')
yaml_file = os.path.join(path_to_script, '../experiments/20130919_liver_statistics.yaml')
# write_csv(fvall)
gf = tfeat.GaborFeatures()
glcmf = tfeat.GlcmFeatures()
haralick = tfeat.HaralickFeatures()
list_of_feature_fcn = [
[tls.feat_hist, []],
# [gf.feats_gabor, []],
# [glcmf.feats_glcm, []],
# [haralick.feats_haralick, [True]]
]
list_of_classifiers = [
#[GaussianNB, []],
#[svm.SVC, []],
[classification.GMMClassifier, {'n_components':2, 'covariance_type': 'full'}],
]
featrs_plus_classifs = tls.make_product_list(list_of_feature_fcn,
list_of_classifiers)
tile_shape = [50, 50, 50]
tls.experiment(yaml_file, yaml_file,
featrs_plus_classifs, tile_shape=tile_shape,
visualization=False)
#slab = {'none':0, 'bone':8,'lungs':9,'heart':10}
##import pdb; pdb.set_trace()
## SupportStructureSegmentation
#sss = support_structure_segmentation.SupportStructureSegmentation(
#data3d = self.data3d,
#voxelsize_mm = self.metadata['voxelsize_mm'],
#modality = 'CT',
#slab = slab
#)
#sss.lungs_segmentation()
##sss.segmentation[260:270,160:170,1:10] = 2
##sss.visualization()
## total number of voxels segmented as bones in spine
#probebox1 = sss.segmentation [260:270,160:170,1:10]== slab['lungs']
#self.assertGreater(np.sum(probebox1),20)
## total number of voexel segmented as none in upper left corner
#probebox1 = sss.segmentation[10:20,10:20,5:15] == slab['none']
#self.assertGreater(np.sum(probebox1),900)
##import pdb; pdb.set_trace()
if __name__ == "__main__":
logging.basicConfig( stream=sys.stderr )
logging.getLogger().setLevel( logging.DEBUG )
unittest.main()
|
Python
| 0
|
@@ -1820,16 +1820,118 @@
_shape,%0A
+ use_voxelsize_norm=False,%0A working_voxelsize_mm=%5B1,1,1%5D,%0A
|
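Unescaped, the diff threads two new keyword arguments into the tls.experiment() call so the test matches the function's updated signature:

tls.experiment(yaml_file, yaml_file,
               featrs_plus_classifs, tile_shape=tile_shape,
               use_voxelsize_norm=False,
               working_voxelsize_mm=[1, 1, 1],
               visualization=False)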
cb0ba85a56c163436d6a4180413f0228407458d8
|
Correct path specification error
|
tests/workflows/test_component_wrappers.py
|
tests/workflows/test_component_wrappers.py
|
""" Unit tests for json helpers
"""
import os
import unittest
from data_models.parameters import arl_path
from workflows.wrappers.component_wrapper import component_wrapper
class TestComponentWrappers(unittest.TestCase):
def test_run_components(self):
files = ["test_results/test_pipeline.log",
"test_results/test_skymodel.hdf",
"test_results/test_empty_vislist.hdf",
"test_results/test_perfect_vislist.hdf",
"test_results/test_perfect_restored.fits",
"test_results/test_perfect_deconvolved.fits",
"test_results/test_perfect_residual.fits"
]
try:
for f in files:
os.remove(arl_path(f))
except FileNotFoundError:
pass
config_files = ["test_create_vislist.json",
"test_create_skymodel.json",
"test_predict_vislist.json",
"test_continuum_imaging.json"]
for config_file in config_files:
component_wrapper(config_file)
for f in files:
assert os.path.isfile(arl_path(f)), "File %s does not exist" % arl_path(f)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000001
|
@@ -837,24 +837,40 @@
g_files = %5B%22
+tests/workflows/
test_create_
@@ -905,24 +905,40 @@
%22
+tests/workflows/
test_create_
@@ -974,24 +974,40 @@
%22
+tests/workflows/
test_predict
@@ -1043,24 +1043,40 @@
%22
+tests/workflows/
test_continu
@@ -1173,16 +1173,25 @@
wrapper(
+arl_path(
config_f
@@ -1194,16 +1194,17 @@
ig_file)
+)
%0A
|
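Decoded, the correction qualifies each config filename with its tests/workflows/ directory and resolves it through arl_path() before handing it to the wrapper:

config_files = ["tests/workflows/test_create_vislist.json",
                "tests/workflows/test_create_skymodel.json",
                "tests/workflows/test_predict_vislist.json",
                "tests/workflows/test_continuum_imaging.json"]
for config_file in config_files:
    component_wrapper(arl_path(config_file))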
f535228e38f33263289f28d46e910ccb0a98a381
|
Use list comprehension to evaluate PYTZ_TIME_ZONE_CHOICES
|
tournamentcontrol/competition/constants.py
|
tournamentcontrol/competition/constants.py
|
import pytz
from dateutil.rrule import DAILY, WEEKLY
from django.utils.translation import ugettext_lazy as _
GENDER_CHOICES = (
('M', _('Male')),
('F', _('Female')),
('X', _('Unspecified')),
)
SEASON_MODE_CHOICES = (
(WEEKLY, _("Season")),
(DAILY, _("Tournament")),
)
WIN_LOSE = {
'W': _("Winner"),
'L': _("Loser"),
}
###################
# TIME ZONE NAMES #
###################
"""
Ideally this would be a better list for the specific uses of the site in
question. For example, it is perhaps much easier to list just the Australian
time zones for sites deployed for Australian customers.
This is also implemented in touchtechnology.common.forms and should probably
be moved and better leveraged in future release.
See https://bitbucket.org/touchtechnology/common/issue/16/
"""
PYTZ_TIME_ZONE_CHOICES = [('\x20Standard', (('UTC', 'UTC'), ('GMT', 'GMT')))]
for iso, name in pytz.country_names.items():
values = sorted(pytz.country_timezones.get(iso, []))
names = [s.rsplit("/", 1)[1].replace("_", " ") for s in values]
PYTZ_TIME_ZONE_CHOICES.append((name, zip(values, names)))
PYTZ_TIME_ZONE_CHOICES.sort()
|
Python
| 0
|
@@ -1095,16 +1095,34 @@
d((name,
+ %5Beach for each in
zip(val
@@ -1132,16 +1132,17 @@
, names)
+%5D
))%0APYTZ_
|
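The committed comprehension forces zip's result into a list; the likely motivation is Python 3, where zip() returns a one-shot iterator that would be exhausted after its first traversal if stored as-is in the choices. list(zip(...)) would be the more idiomatic spelling of the same fix:

# as committed
PYTZ_TIME_ZONE_CHOICES.append((name, [each for each in zip(values, names)]))
# equivalent, and more idiomatic
PYTZ_TIME_ZONE_CHOICES.append((name, list(zip(values, names))))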
998feb0e9684c05240d1370085e7b83e4f9dd776
|
Fix encoding problem when parsing cairo repository
|
utils/mkconstants.py
|
utils/mkconstants.py
|
# coding: utf-8
import os
import sys
import re
import pycparser.c_generator
def parse_constant(node):
if isinstance(node, pycparser.c_ast.Constant):
return node.value
elif isinstance(node, pycparser.c_ast.UnaryOp) and node.op == '-':
return '-' + parse_constant(node.expr)
else:
raise TypeError(node)
class PrintEnumsVisitor(pycparser.c_ast.NodeVisitor):
def visit_Enum(self, node):
value = 0
for enumerator in node.values.enumerators:
if enumerator.value is not None:
value_string = parse_constant(enumerator.value)
value = int(value_string, 0)
else:
value_string = str(value)
assert enumerator.name.startswith('CAIRO_') # len('CAIRO_') == 6
print('%s = %s' % (enumerator.name[6:], value_string))
value += 1
print('')
def read_cairo_header(cairo_git_dir, suffix):
filename = os.path.join(cairo_git_dir, 'src', 'cairo%s.h' % suffix)
source = open(filename).read()
source = re.sub(
'/\*.*?\*/'
'|CAIRO_(BEGIN|END)_DECLS'
'|cairo_public '
r'|^\s*#.*?[^\\]\n',
'',
source,
flags=re.DOTALL | re.MULTILINE)
source = re.sub('\n{3,}', '\n\n', source)
return source
def generate(cairo_git_dir):
# Remove comments, preprocessor instructions and macros.
source = read_cairo_header(cairo_git_dir, '')
source += read_cairo_header(cairo_git_dir, '-pdf')
source += read_cairo_header(cairo_git_dir, '-ps')
source += read_cairo_header(cairo_git_dir, '-svg')
source += '''
typedef void* HDC;
typedef void* HFONT;
typedef void LOGFONTW;
'''
source += read_cairo_header(cairo_git_dir, '-win32')
source += '''
typedef void* CGContextRef;
typedef void* CGFontRef;
typedef void* ATSUFontID;
'''
source += read_cairo_header(cairo_git_dir, '-quartz')
ast = pycparser.CParser().parse(source)
print('# *** Do not edit this file ***')
print('# Generated by utils/mkconstants.py\n')
PrintEnumsVisitor().visit(ast)
print('_CAIRO_HEADERS = r"""%s"""' % source)
source = read_cairo_header(cairo_git_dir, '-xcb')
print('_CAIRO_XCB_HEADERS = r"""%s"""\n' % source)
if __name__ == '__main__':
if len(sys.argv) >= 2:
generate(sys.argv[1])
else:
print('Usage: %s path/to/cairo_source.git' % sys.argv[0])
|
Python
| 0.000189
|
@@ -1035,16 +1035,39 @@
filename
+, encoding='iso-8859-1'
).read()
|
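The one-line fix passes an explicit encoding when reading the header; cairo's sources presumably contain Latin-1 bytes that fail under the default decoder on Python 3:

source = open(filename, encoding='iso-8859-1').read()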
c8ecbda2b8c4d1a03285527dd11a27db74f746e7
|
change IdP configuration order, enabled first
|
openedx/core/djangoapps/appsembler/tpa_admin/api.py
|
openedx/core/djangoapps/appsembler/tpa_admin/api.py
|
from rest_framework import generics, viewsets
from rest_framework.permissions import IsAuthenticated
from openedx.core.djangoapps.appsembler.sites.permissions import AMCAdminPermission
from openedx.core.lib.api.authentication import (
OAuth2AuthenticationAllowInactiveUser,
)
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig
from .serializers import SAMLConfigurationSerializer, SAMLProviderConfigSerializer
class SAMLConfigurationViewSet(viewsets.ModelViewSet):
queryset = SAMLConfiguration.objects.current_set()
serializer_class = SAMLConfigurationSerializer
authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLConfigurationSiteDetail(generics.RetrieveAPIView):
serializer_class = SAMLConfigurationSerializer
lookup_field = 'site_id'
def get_queryset(self):
site_id = self.kwargs['site_id']
return SAMLConfiguration.objects.current_set().filter(site__id=site_id)
class SAMLProviderConfigViewSet(viewsets.ModelViewSet):
queryset = SAMLProviderConfig.objects.current_set()
serializer_class = SAMLProviderConfigSerializer
authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLProviderSiteDetail(generics.ListAPIView):
serializer_class = SAMLProviderConfigSerializer
lookup_field = 'site_id'
def get_queryset(self):
site_id = self.kwargs['site_id']
return SAMLProviderConfig.objects.current_set().filter(site__id=site_id)
|
Python
| 0
|
@@ -1584,29 +1584,50 @@
t().filter(site__id=site_id)
+.order_by('-enabled')
%0A
|
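Decoded, the queryset gains an order_by so enabled provider configurations sort first (descending on the boolean field):

return SAMLProviderConfig.objects.current_set().filter(
    site__id=site_id).order_by('-enabled')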
67a0bab4da1d31aba150ce5cb7831daaea1523de
|
Increase BQ_DEFAULT_TABLE_EXPIRATION_MS in e2etest settings
|
openprescribing/openprescribing/settings/e2etest.py
|
openprescribing/openprescribing/settings/e2etest.py
|
from __future__ import absolute_import
from .test import *
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': utils.get_env_setting('E2E_DB_NAME'),
'USER': utils.get_env_setting('DB_USER'),
'PASSWORD': utils.get_env_setting('DB_PASS'),
'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1')
}
}
PIPELINE_METADATA_DIR = os.path.join(SITE_ROOT, 'pipeline', 'metadata')
PIPELINE_DATA_BASEDIR = os.path.join(
SITE_ROOT, 'pipeline', 'e2e-test-data', 'data', '')
PIPELINE_IMPORT_LOG_PATH=os.path.join(
SITE_ROOT, 'pipeline', 'e2e-test-data', 'log.json')
SLACK_SENDING_ACTIVE = True
|
Python
| 0.000001
|
@@ -665,8 +665,74 @@
= True%0A
+%0ABQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours%0A
|
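The diff appends a single setting; spelled out, the arithmetic gives 86,400,000 ms:

BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000  # 24 hours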
904c9ff5f98cea7975850bfa994b43d936691da4
|
Fix typos and trailing spaces
|
weboob/applications/galleroob/galleroob.py
|
weboob/applications/galleroob/galleroob.py
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Noé Rubinstein
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import sys
import os
from re import search, sub
from weboob.tools.application.repl import ReplApplication
from weboob.capabilities.base import NotLoaded
from weboob.capabilities.gallery import ICapGallery
from weboob.tools.application.formatters.iformatter import IFormatter
__all__ = ['Galleroob']
class GalleryListFormatter(IFormatter):
MANDATORY_FIELDS = ('id', 'title')
count = 0
def flush(self):
self.count = 0
def format_dict(self, item):
result = u'%s* (%s) %s%s' % (
ReplApplication.BOLD,
item['id'],
item['title'],
ReplApplication.NC)
if item['cardinality'] is not NotLoaded:
result += u' (%d pages)' % item['cardinality']
if item['description'] is not NotLoaded:
result += u'\n %-70s' % item['description']
return result
class Galleroob(ReplApplication):
APPNAME = 'galleroob'
VERSION = '0.8'
    COPYRIGHT = 'Copyright(C) 2011 Noé Rubinstein'
DESCRIPTION = 'galleroob browses and downloads web image galleries'
CAPS = ICapGallery
EXTRA_FORMATTERS = {'gallery_list': GalleryListFormatter}
COMMANDS_FORMATTERS = {'search': 'gallery_list'}
def __init__(self, *args, **kwargs):
ReplApplication.__init__(self, *args, **kwargs)
def do_search(self, pattern=None):
"""
search PATTERN
List galleries matching a PATTERN.
If PATTERN is not given, the command will list all the galleries
"""
self.set_formatter_header(u'Search pattern: %s' %
pattern if pattern else u'Latest galleries')
for backend, gallery in self.do('iter_search_results',
pattern=pattern, max_results=self.options.count):
self.add_object(gallery)
self.format(gallery)
def do_download(self, line):
"""
download ID [FIRST [FOLDER]]
Download a gallery.
Begins at page FIRST (default: 0) and saves to FOLDER (default: title)
"""
_id, first, dest = self.parse_command_args(line, 3, 1)
if first is None:
first = 0
else:
first = int(first)
gallery = None
_id, backend = self.parse_id(_id)
for backend, result in self.do('get_gallery', _id, backends=backend):
if result:
backend = backend
gallery = result
if not gallery:
print 'Gallery not found: %s' % _id
return 1
backend.fillobj(gallery, ('title',))
if dest is None:
dest = sub('/', ' ', gallery.title)
print "Downloading to %s" % dest
try:
os.mkdir(dest)
except OSError:
pass # ignore error on existing directory
os.chdir(dest) # fail here if dest couldn't be created
i = 0
for img in backend.iter_gallery_images(gallery):
i += 1
if i < first:
continue
backend.fillobj(img, ('url','data'))
if img.data is None:
backend.fillobj(img, ('url','data'))
if img.data is None:
print "Couldn't get page %d, exiting" % i
break
ext = search(r"\.([^\.]{1,5})$", img.url)
if ext:
ext = ext.group(1)
else:
ext = "jpg"
name = '%03d.%s' % (i, ext)
print 'Writing file %s' % name
with open(name, 'w') as f:
f.write(img.data)
os.chdir("..")
def do_info(self, line):
"""
info ID
Get information about a gallery.
"""
_id, = self.parse_command_args(line, 1, 1)
gallery = self.get_object(_id, 'get_gallery')
if not gallery:
print >>sys.gallery, 'Gallery not found: %s' % _id
return
self.format(gallery)
self.flush()
|
Python
| 0.998815
|
@@ -734,26 +734,8 @@
%3E.%0A%0A
-import subprocess%0A
impo
@@ -741,16 +741,16 @@
ort sys%0A
+
import o
@@ -1620,25 +1620,9 @@
ult%0A
-
%0A
+
%0Acla
@@ -2061,13 +2061,9 @@
gs)%0A
-
%0A
+
@@ -2440,17 +2440,16 @@
esults',
-
%0A
@@ -4349,12 +4349,22 @@
dir(
-%22..%22
+os.path.pardir
)%0A%0A
@@ -4627,23 +4627,22 @@
t %3E%3Esys.
-gallery
+stderr
, 'Galle
|
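Beyond stripping trailing spaces, the decoded hunks drop the unused subprocess import, replace the bare ".." with os.path.pardir, and fix the sys.gallery typo so the error message actually reaches stderr:

os.chdir(os.path.pardir)                             # was os.chdir("..")
print >>sys.stderr, 'Gallery not found: %s' % _id    # was print >>sys.gallery, ...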
2660096db01f88cd0e71860935862fe969204666
|
Fix a script missed in refactor
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
mesos_state = get_mesos_master().state
autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
for resource in autoscaling_resources.values():
if resource['type'] == 'aws_spot_fleet_request':
resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
resource['sfr']['ActiveInstances'] = instances
slaves = get_sfr_slaves(resource, mesos_state)
if len(instances) == 0:
continue
else:
percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
if percent_registered < float(threshold_percentage):
print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
"Please check for puppet or AMI baking problems!".format(percent_registered,
resource['id'])
return False
print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
"superregion".format(threshold_percentage)
return True
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
default="75")
threshold = parser.parse_args().threshold
if check_registration(threshold):
sys.exit(0)
sys.exit(2)
if __name__ == "__main__":
main()
|
Python
| 0.00003
|
@@ -193,172 +193,13 @@
et_s
-fr%0Afrom paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves%0Afrom paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
+caler
%0Afro
@@ -553,92 +553,85 @@
-if resource%5B'type'%5D == 'aws_spot_fleet_request':%0A resource%5B'sfr'%5D
+print(%22Checking %25s%22 %25 resource%5B'id'%5D)%0A try:%0A scaler
= get_s
fr(r
@@ -626,17 +626,20 @@
= get_s
-f
+cale
r(resour
@@ -646,291 +646,480 @@
ce%5B'
-id'%5D, region=resource%5B'region'%5D)%0A instances = get_spot_fleet_instances(resource%5B'id'%5D, region=resource%5B'region'%5D)%0A resource%5B'sfr'%5D%5B'ActiveInstances'%5D = instances%0A slaves = get_sfr_slaves(resource, mesos_state)%0A if len(instances) == 0:%0A
+type'%5D)(resource=resource,%0A pool_settings=None,%0A config_folder=None,%0A dry_run=True)%0A except KeyError:%0A print(%22Couldn't find a metric provider for resource of type: %7B0%7D%22.format(resource%5B'type'%5D))%0A continue%0A if len(scaler.instances) == 0:%0A print(%22No instances for this resource%22)%0A
@@ -1131,28 +1131,24 @@
continue%0A
-
else
@@ -1149,24 +1149,76 @@
else:%0A
+ slaves = scaler.get_aws_slaves(mesos_state)%0A
@@ -1275,16 +1275,23 @@
oat(len(
+scaler.
instance
@@ -1301,20 +1301,16 @@
) * 100%0A
-
@@ -1382,26 +1382,22 @@
-
print
-
+(
%22CRIT: O
@@ -1453,22 +1453,17 @@
mesos. %22
-%5C%0A
+%0A
@@ -1553,20 +1553,16 @@
stered,%0A
-
@@ -1643,29 +1643,26 @@
ource%5B'id'%5D)
+)
%0A
-
@@ -1687,17 +1687,17 @@
print
-
+(
%22OK: Fou
@@ -1776,9 +1776,8 @@
is %22
-%5C
%0A
@@ -1825,16 +1825,17 @@
centage)
+)
%0A ret
|
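This refactor replaces the three spot-fleet helpers with a single get_scaler factory. Hand-decoded, the new loop body reads roughly as below; the constructor keywords are taken verbatim from the diff:

for resource in autoscaling_resources.values():
    print("Checking %s" % resource['id'])
    try:
        scaler = get_scaler(resource['type'])(resource=resource,
                                              pool_settings=None,
                                              config_folder=None,
                                              dry_run=True)
    except KeyError:
        print("Couldn't find a metric provider for resource of type: {0}".format(
            resource['type']))
        continue
    if len(scaler.instances) == 0:
        print("No instances for this resource")
        continue
    slaves = scaler.get_aws_slaves(mesos_state)
    percent_registered = float(len(slaves)) / float(len(scaler.instances)) * 100
    if percent_registered < float(threshold_percentage):
        print("CRIT: Only found {0}% of instances in {1} registered in mesos. "
              "Please check for puppet or AMI baking problems!".format(
                  percent_registered, resource['id']))
        return False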
e1cf1e0f2cdfd98d47d47a222511127cfee63610
|
fix config test
|
corehq/sql_db/tests/test_partition_config.py
|
corehq/sql_db/tests/test_partition_config.py
|
from django.test import SimpleTestCase
from django.test.utils import override_settings
from corehq.sql_db.management.commands.configure_pl_proxy_cluster import get_pl_proxy_server_config_sql, \
get_shard_config_strings
from ..config import PartitionConfig
from ..exceptions import PartitionValidationError
TEST_PARTITION_CONFIG = {
'shards': {
'default': [0, 1],
'proxy': [2, 3],
},
'groups': {
'main': ['default'],
'proxy': ['proxy'],
'form_processing': ['db1', 'db2'],
}
}
INVALID_SHARD_RANGE = {
'shards': {
'default': [0, 2],
'proxy': [1, 4],
},
'groups': {
'main': ['default'],
'proxy': ['proxy'],
'form_processing': ['db1', 'db2'],
}
}
db_dict = {'NAME': 'commcarehq', 'USER': 'commcarehq', 'HOST': 'hqdb0', 'PORT': 5432}
TEST_DATABASES = {
'default': db_dict,
'proxy': db_dict,
'db1': db_dict,
'db2': db_dict,
}
@override_settings(PARTITION_DATABASE_CONFIG=TEST_PARTITION_CONFIG)
@override_settings(DATABASES=TEST_DATABASES)
class TestPartitionConfig(SimpleTestCase):
def test_dbs_by_group(self):
config = PartitionConfig()
dbs = config.get_form_processing_dbs()
self.assertIn('db1', dbs)
self.assertIn('db2', dbs)
def test_shard_mapping(self):
config = PartitionConfig()
mapping = config.shard_mapping()
self.assertEquals(mapping, {
0: 'default',
1: 'default',
2: 'proxy',
3: 'proxy',
})
@override_settings(PARTITION_DATABASE_CONFIG=INVALID_SHARD_RANGE)
def test_invalid_shard_range(self):
with self.assertRaises(PartitionValidationError):
PartitionConfig()
@override_settings(DATABASES=TEST_DATABASES)
class PlProxyTests(SimpleTestCase):
def test_get_shard_config_strings(self):
shard_mapping = {
1: 'db1',
2: 'db1',
3: 'db2',
}
configs = get_shard_config_strings(shard_mapping)
self.assertEqual(3, len(configs))
self.assertIn("p1 'dbname=commcarehq hostname=hqdb0 port=5432'", configs)
|
Python
| 0.000002
|
@@ -2127,20 +2127,16 @@
ehq host
-name
=hqdb0 p
|
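The fix is tiny: pl_proxy connection strings use the libpq keyword host, not hostname, so the expected string in the assertion now matches what get_shard_config_strings emits:

self.assertIn("p1 'dbname=commcarehq host=hqdb0 port=5432'", configs)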
f4a941eade4f2d9b08a81003b7028d3cfc754ae2
|
Read the data instead of returning a link to the file.
|
src/otest/rp/endpoints.py
|
src/otest/rp/endpoints.py
|
import logging
import mimetypes
import os
from aatest.check import State
from aatest.check import ERROR
from aatest.events import EV_CONDITION
from aatest.events import EV_HTTP_INFO
from aatest.events import EV_REQUEST
from future.backports.urllib.parse import parse_qs
from oic.utils.http_util import BadRequest
from oic.utils.http_util import extract_from_request
from oic.utils.http_util import NotFound
from oic.utils.http_util import Response
from oic.utils.http_util import ServiceError
from oic.utils.webfinger import OIC_ISSUER
from oic.utils.webfinger import WebFinger
__author__ = 'roland'
logger = logging.getLogger(__name__)
def store_response(response, events):
events.store(EV_HTTP_INFO, response.info())
def wsgi_wrapper(environ, func, events, **kwargs):
kwargs = extract_from_request(environ, kwargs)
if kwargs['request']:
events.store(EV_REQUEST, kwargs['request'])
args = func(**kwargs)
try:
resp, state = args
store_response(resp, events)
return resp
except TypeError:
resp = args
store_response(resp, events)
return resp
except Exception as err:
logger.error("%s" % err)
raise
# noinspection PyUnresolvedReferences
def static_mime(path, environ, start_response):
logger.info("[static]sending: %s" % (path,))
# Set content-type based on filename extension
ext = ""
i = path.rfind('.')
if i != -1:
ext = path[i:].lower()
content_type = mimetypes.types_map.get(ext, None)
try:
if not content_type.startswith('image/'):
data = open(path, 'r').read()
else:
data = open(path, 'rb')
resp = Response(data, content=content_type)
return resp(environ, start_response)
except IOError:
_dir = os.getcwd()
resp = NotFound("{} not in {}".format(path, _dir))
except Exception as err:
resp = NotFound('{}'.format(err))
return resp(environ, start_response)
# noinspection PyUnresolvedReferences
def static(path):
logger.info("[static]sending: %s" % (path,))
try:
resp = Response(open(path).read())
if path.endswith(".ico"):
resp.add_header(('Content-Type', "image/x-icon"))
elif path.endswith(".html"):
resp.add_header(('Content-Type', 'text/html'))
elif path.endswith(".json"):
resp.add_header(('Content-Type', 'application/json'))
elif path.endswith(".txt"):
resp.add_header(('Content-Type', 'text/plain'))
elif path.endswith(".css"):
resp.add_header(('Content-Type', 'text/css'))
else:
resp.add_header(('Content-Type', "text/xml"))
return resp
except IOError:
return NotFound(path)
def css(environ, events):
try:
info = open(environ["PATH_INFO"]).read()
resp = Response(info)
except (OSError, IOError):
resp = NotFound(environ["PATH_INFO"])
return resp
def token(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.token_endpoint, events)
def authorization(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.authorization_endpoint,
events)
def userinfo(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.userinfo_endpoint,
events)
def clientinfo(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.client_info_endpoint,
events)
def revocation(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.revocation_endpoint,
events)
def introspection(environ, events):
_op = environ["oic.op"]
return wsgi_wrapper(environ, _op.introspection_endpoint, events)
# noinspection PyUnusedLocal
def op_info(environ, events):
_op = environ["oic.op"]
logger.info("op_info")
return wsgi_wrapper(environ, _op.providerinfo_endpoint,
events)
# noinspection PyUnusedLocal
def registration(environ, events):
_op = environ["oic.op"]
if environ["REQUEST_METHOD"] == "POST":
return wsgi_wrapper(environ, _op.registration_endpoint,
events)
elif environ["REQUEST_METHOD"] == "GET":
return wsgi_wrapper(environ, _op.read_registration,
events)
else:
return ServiceError("Method not supported")
def webfinger(environ, events):
query = parse_qs(environ["QUERY_STRING"])
_op = environ["oic.op"]
try:
if query["rel"] != [OIC_ISSUER]:
events.store(
EV_CONDITION,
State('webfinger_parameters', ERROR,
message='parameter rel wrong value: {}'.format(
query['rel'])))
return BadRequest('Parameter value error')
else:
resource = query["resource"][0]
except KeyError as err:
events.store(EV_CONDITION,
State('webfinger_parameters', ERROR,
message='parameter {} missing'.format(err)))
resp = BadRequest("Missing parameter in request")
else:
wf = WebFinger()
resp = Response(wf.response(subject=resource, base=_op.baseurl))
return resp
def add_endpoints(extra, URLS):
for endp in extra:
URLS.append(("^%s" % endp.etype, endp.func))
return URLS
|
Python
| 0
|
@@ -1679,16 +1679,23 @@
h, 'rb')
+.read()
%0A
|
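The one-line diff above appends `.read()`, so the image branch hands `Response` the file's bytes instead of an open file object. A minimal sketch of the distinction, using only the standard library and a stand-in response class (the real `oic.utils.http_util.Response` is not modeled here):

class StubResponse(object):
    """Stand-in for a response wrapper that expects the body as bytes/str."""
    def __init__(self, message):
        self.message = message

def serve_eager(path):
    # After the fix: the body is the file's contents.
    with open(path, 'rb') as handle:
        return StubResponse(handle.read())

def serve_lazy(path):
    # Before the fix: the body is a file object, i.e. a "link" to the
    # data that a bytes-oriented response wrapper cannot serialize.
    return StubResponse(open(path, 'rb'))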
cad9bf433d0fadd8fff27194be9d7a5428b58ae4
|
missing path
|
src/plone.server/setup.py
|
src/plone.server/setup.py
|
# -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
from distutils.core import Extension
import os
import sys
import platform
py_impl = getattr(platform, 'python_implementation', lambda: None)
pure_python = os.environ.get('PURE_PYTHON', False)
is_pypy = py_impl() == 'PyPy'
is_jython = 'java' in sys.platform
if pure_python or is_pypy or is_jython:
ext_modules = []
else:
ext_modules = [
Extension(
'plone.server.optimizations',
sources=[os.path.join(
'plone', 'server',
'optimizations.c')])
]
setup(
name='plone.server',
version=open('VERSION').read().strip(),
description='asyncio transactional server to build REST API / Websocket with ZODB', # noqa
long_description=(open('README.rst').read() + '\n' +
open('CHANGELOG.rst').read()),
keywords=['asyncio', 'ZODB', 'REST', 'Framework', 'transactional'],
author='Ramon & Asko & Nathan',
author_email='ramon@plone.org',
classifiers=[
'License :: OSI Approved :: BSD License',
'Framework :: ZODB',
'Framework :: Zope3',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Internet :: WWW/HTTP',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules'
],
url='https://github.com/plone/plone.server',
license='GPL version 3',
setup_requires=[
'pytest-runner',
],
zip_safe=True,
include_package_data=True,
package_dir=None if os.path.isdir('plone') else {'': os.path.join('src', 'plone.server')}, # noqa
packages=find_packages('./' if os.path.isdir('plone') else os.path.join('src', 'plone.server'), exclude=['ez_setup']), # noqa
namespace_packages=['plone'],
ext_modules=ext_modules,
install_requires=[
'aiohttp',
'jsonschema',
'python-dateutil',
'BTrees',
'persistent',
'plone.behavior',
'pycrypto',
'setuptools',
'six',
'transaction',
'ujson',
'ZODB',
'zope.authentication',
'zope.component',
'zope.configuration',
'zope.dottedname',
'zope.dublincore',
'zope.event',
'zope.i18n',
'zope.i18nmessageid',
'zope.interface',
'zope.lifecycleevent',
'zope.location',
'zope.proxy',
'zope.schema',
'zope.security',
'zope.securitypolicy',
'pyjwt',
'requests'
],
extras_require={
'test': [
'pytest',
'requests',
'zope.testing',
]
},
entry_points={
'console_scripts': [
'pserver = plone.server.commands.server:ServerCommand',
'pcli = plone.server.commands.cli:CliCommand',
'pshell = plone.server.commands.shell:ShellCommand',
'pmigrate = plone.server.commands.migrate:MigrateCommand',
'pcreate = plone.server.commands.create:CreateCommand'
]
}
)
|
Python
| 0.999092
|
@@ -537,24 +537,47 @@
+ 'src', 'plone.server',
'plone', 's
|
d406cf8f4812fce7314f10d3a4b1303d54230099
|
Remove unused import.
|
src/pyhmsa/util/signal.py
|
src/pyhmsa/util/signal.py
|
#!/usr/bin/env python
"""
================================================================================
:mod:`signal` -- Signal pattern
================================================================================
.. module:: signal
:synopsis: signal pattern
.. inheritance-diagram:: pyhmsa.util.signal
"""
# Script information for the file.
__author__ = "Philippe T. Pinard"
__email__ = "philippe.pinard@gmail.com"
__version__ = "0.1"
__copyright__ = "Copyright (c) 2014 Philippe T. Pinard"
__license__ = "GPL v3"
# Standard library modules.
from weakref import WeakSet
# Third party modules.
# Local modules.
# Globals and constants variables.
class Signal(object):
def __init__(self):
self._handlers = set()
def connect(self, handler):
self._handlers.add(handler)
def fire(self, *args):
for handler in self._handlers:
handler(*args)
|
Python
| 0
|
@@ -553,36 +553,8 @@
les.
-%0Afrom weakref import WeakSet
%0A%0A#
|
ac2fc5dbd75d7a897473c4da06875d1b10783bcc
|
return after getting DatabaseError
|
src/sentry/utils/raven.py
|
src/sentry/utils/raven.py
|
from __future__ import absolute_import, print_function
import inspect
import logging
import raven
import sentry
from django.conf import settings
from django.db.utils import DatabaseError
from raven.contrib.django.client import DjangoClient
from . import metrics
UNSAFE_FILES = (
'sentry/event_manager.py',
'sentry/tasks/process_buffer.py',
)
def can_record_current_event():
"""
Tests the current stack for unsafe locations that would likely cause
recursion if an attempt to send to Sentry was made.
"""
for _, filename, _, _, _, _ in inspect.stack():
if filename.endswith(UNSAFE_FILES):
return False
return True
class SentryInternalClient(DjangoClient):
def is_enabled(self):
if getattr(settings, 'DISABLE_RAVEN', False):
return False
return settings.SENTRY_PROJECT is not None
def capture(self, *args, **kwargs):
if not can_record_current_event():
metrics.incr('internal.uncaptured.events')
self.error_logger.error('Not capturing event due to unsafe stacktrace:\n%r', kwargs)
return
return super(SentryInternalClient, self).capture(*args, **kwargs)
def send(self, **kwargs):
# TODO(dcramer): this should respect rate limits/etc and use the normal
# pipeline
from sentry.app import tsdb
from sentry.coreapi import ClientApiHelper
from sentry.event_manager import EventManager
from sentry.models import Project
helper = ClientApiHelper(
agent='raven-python/%s (sentry %s)' % (raven.VERSION, sentry.VERSION),
project_id=settings.SENTRY_PROJECT,
version=self.protocol_version,
)
try:
project = Project.objects.get_from_cache(id=settings.SENTRY_PROJECT)
except DatabaseError:
self.error_logger.error('Unable to fetch internal project',
exc_info=True)
except Project.DoesNotExist:
self.error_logger.error('Internal project (id=%s) does not exist',
settings.SENTRY_PROJECT)
return
helper.context.bind_project(project)
metrics.incr('events.total', 1)
kwargs['project'] = project.id
try:
manager = EventManager(kwargs)
data = manager.normalize()
tsdb.incr_multi([
(tsdb.models.project_total_received, project.id),
(tsdb.models.organization_total_received, project.organization_id),
])
helper.insert_data_to_database(data)
except Exception as e:
if self.raise_send_errors:
raise
self.error_logger.error(
'Unable to record event: %s\nEvent was: %r', e,
kwargs['message'], exc_info=True)
class SentryInternalFilter(logging.Filter):
def filter(self, record):
metrics.incr('internal.uncaptured.logs')
return can_record_current_event()
|
Python
| 0.000005
|
@@ -1972,16 +1972,35 @@
o=True)%0A
+ return%0A
|
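The diff adds a bare `return` after the `DatabaseError` handler. Without it, execution falls through to `helper.context.bind_project(project)` with `project` never bound, so the logged failure is immediately followed by a `NameError`. A reduced model of that control flow (names hypothetical):

records = {'ok': 'payload'}

def fetch_and_use(key):
    try:
        record = records[key]
    except KeyError:
        print('Unable to fetch %r' % key)
        return  # without this return, the next line raises NameError
    print('using %r' % record)

fetch_and_use('ok')       # using 'payload'
fetch_and_use('missing')  # logs the failure and stops cleanly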
50d65e246f451b1ecaaa683f5b65d34fbaf6905e
|
(chore) remove blanks
|
photo_editor/settings/development.py
|
photo_editor/settings/development.py
|
# -*- coding: utf-8 -*-
from django_envie.workroom import convertfiletovars
convertfiletovars()
from .base import *
DEBUG = True
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--with-coverage',
]
|
Python
| 0.999999
|
@@ -161,16 +161,17 @@
nose',)%0A
+
%0ATEST_RU
@@ -211,16 +211,17 @@
Runner'%0A
+
%0ANOSE_AR
|
2180d9ef6d9af45c80fdc89cd64b90df2924c8a7
|
Fix comment in stepwise (follow up to #5555) [skip ci] (#5560)
|
src/_pytest/stepwise.py
|
src/_pytest/stepwise.py
|
import pytest
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption(
"--sw",
"--stepwise",
action="store_true",
dest="stepwise",
help="exit on test failure and continue from last failing test next time",
)
group.addoption(
"--stepwise-skip",
action="store_true",
dest="stepwise_skip",
help="ignore the first failing test but stop on the next failing test",
)
@pytest.hookimpl
def pytest_configure(config):
config.pluginmanager.register(StepwisePlugin(config), "stepwiseplugin")
class StepwisePlugin:
def __init__(self, config):
self.config = config
self.active = config.getvalue("stepwise")
self.session = None
self.report_status = ""
if self.active:
self.lastfailed = config.cache.get("cache/stepwise", None)
self.skip = config.getvalue("stepwise_skip")
def pytest_sessionstart(self, session):
self.session = session
def pytest_collection_modifyitems(self, session, config, items):
if not self.active:
return
if not self.lastfailed:
self.report_status = "no previously failed tests, not skipping."
return
already_passed = []
found = False
# Make a list of all tests that have been run before the last failing one.
for item in items:
if item.nodeid == self.lastfailed:
found = True
break
else:
already_passed.append(item)
# If the previously failed test was not found among the test items,
# do not skip any tests.
if not found:
self.report_status = "previously failed test not found, not skipping."
already_passed = []
else:
self.report_status = "skipping {} already passed items.".format(
len(already_passed)
)
for item in already_passed:
items.remove(item)
config.hook.pytest_deselected(items=already_passed)
def pytest_runtest_logreport(self, report):
# Skip this hook if plugin is not active or the test is xfailed.
if not self.active:
return
if report.failed:
if self.skip:
# Remove test from the failed ones (if it exists) and unset the skip option
# to make sure the following tests will not be skipped.
if report.nodeid == self.lastfailed:
self.lastfailed = None
self.skip = False
else:
# Mark test as the last failing and interrupt the test session.
self.lastfailed = report.nodeid
self.session.shouldstop = (
"Test failed, continuing from this test next run."
)
else:
# If the test was actually run and did pass.
if report.when == "call":
# Remove test from the failed ones, if exists.
if report.nodeid == self.lastfailed:
self.lastfailed = None
def pytest_report_collectionfinish(self):
if self.active and self.config.getoption("verbose") >= 0 and self.report_status:
return "stepwise: %s" % self.report_status
def pytest_sessionfinish(self, session):
if self.active:
self.config.cache.set("cache/stepwise", self.lastfailed)
else:
# Clear the list of failing tests if the plugin is not active.
self.config.cache.set("cache/stepwise", [])
|
Python
| 0
|
@@ -2159,81 +2159,8 @@
t):%0A
- # Skip this hook if plugin is not active or the test is xfailed.%0A
|
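The plugin round-trips the last failing test id through pytest's key/value cache: `config.cache.get('cache/stepwise', None)` at configure time and `config.cache.set(...)` at session end. A toy model of that persistence, with a dict standing in for the real cache (which pytest backs with `.pytest_cache` on disk):

class FakeCache(object):
    """Dict-backed stand-in for pytest's config.cache."""
    def __init__(self):
        self._data = {}

    def get(self, key, default):
        return self._data.get(key, default)

    def set(self, key, value):
        self._data[key] = value

cache = FakeCache()
cache.set('cache/stepwise', 'tests/test_x.py::test_y')  # pytest_sessionfinish
print(cache.get('cache/stepwise', None))                # next pytest_configure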
7534cadc6a83e94d92b7a92d693a13abdc70c7b4
|
Add command-line arg for content processes
|
example/comp_analysis.py
|
example/comp_analysis.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import os
import mozinfo
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from atsy.stats import ProcessStats
from atsy.multitab import (
FirefoxMultiTabTest, ManualMultiTabTest, MultiTabTest)
def test_browser(browser, stats, binary, urls,
per_tab_pause, settle_wait_time,
proxy):
test_options = {
'per_tab_pause': per_tab_pause,
'settle_wait_time': settle_wait_time
}
if browser == 'Chrome':
options = webdriver.chrome.options.Options()
options.binary_location = binary
caps = options.to_capabilities()
if proxy:
webdriver_proxy = webdriver.Proxy()
webdriver_proxy.http_proxy = proxy
webdriver_proxy.add_to_capabilities(caps)
driver = webdriver.Chrome(desired_capabilities=caps)
test = MultiTabTest(driver, stats, **test_options)
test.open_urls(urls)
driver.quit()
elif browser == 'Firefox':
for count in (2, 4, 8):
print "FIREFOX WITH %d CONTENT PROCESSES" % count
test = FirefoxMultiTabTest(binary, stats, proxy=proxy, process_count=count, **test_options)
test.open_urls(urls)
elif browser in ('Safari', 'IE'):
# Currently this is a manual test, sorry.
manual_test = os.path.abspath(os.path.join(
os.path.dirname(__file__), 'comp_analysis_manual_test.htm'))
test = ManualMultiTabTest(binary, stats, **test_options)
prefix = 'file://' if browser == 'IE' else ''
test.open_urls([prefix + manual_test])
elif browser == 'Edge':
# Currently this is even more manual than IE and Safari. Edge won't
# let us provide a path to launch.
print "Open up explorer, find 'atsy/example/comp_analysis_manual_test.htm'"
print "Right-click, 'Open with' -> 'Microsoft Edge'"
print "Run the test, press enter when it's done."
import sys
sys.stdin.read(1)
stats.print_stats()
else:
raise Exception("Unhandled browser: %s" % browser)
def test_browsers(browsers, setup, test_sites,
per_tab_pause, settle_wait_time, proxy=None):
for browser in browsers:
config = setup[mozinfo.os][browser]
stats = ProcessStats(config['path_filter'], config['parent_filter'])
binary = config['binary']
test_browser(browser, stats, binary, test_sites,
per_tab_pause, settle_wait_time, proxy)
def main():
# Default path to the config file containing the SETUP and TEST_SITES vars.
default_config = os.path.join(
os.path.dirname(__file__), 'comp_analysis_conf_simple.py')
# Default browsers to test.
default_browsers = [ 'Chrome', 'Firefox' ]
if mozinfo.os == 'win':
default_browsers.append('IE')
if mozinfo.version.startswith('10'):
default_browsers.append('Edge')
elif mozinfo.os == 'mac':
default_browsers.append('Safari')
parser = argparse.ArgumentParser()
parser.add_argument('-b', action='append', dest='browsers',
default=[],
help='Adds a browser to the list of browsers to test.')
parser.add_argument('-c', action='store', dest='conf_file',
default=default_config,
help='A python file containing the test configuration.')
parser.add_argument('-q', action='store_true', default=False, dest='quick',
help='Perform a quick test of 3 sites, minimal pauses.')
parser.add_argument('--per_tab_pause', action='store', dest='per_tab_pause',
default='10', type=float,
help='Amount of time in seconds to stay on a tab.')
parser.add_argument('--settle_wait_time', action='store', dest='settle_wait_time',
default='60', type=float,
help='Amount of time in seconds to wait before measuring memory.')
parser.add_argument('--proxy', action='store', dest='proxy', default=None,
help='HTTP proxy to use. e.g "localhost:3128". Only works with Chrome and Firefox currently.')
cmdline = parser.parse_args()
if not cmdline.browsers:
cmdline.browsers = default_browsers
if cmdline.quick:
cmdline.per_tab_pause = 1
cmdline.settle_wait_time = 0
# This loads |SETUP| and |TEST_SITES|.
out = {}
execfile(cmdline.conf_file, {}, out)
TEST_SITES = out['TEST_SITES']
SETUP = out['SETUP']
if cmdline.quick and len(TEST_SITES) > 3:
TEST_SITES = TEST_SITES[:3]
test_browsers(cmdline.browsers, SETUP, TEST_SITES,
cmdline.per_tab_pause, cmdline.settle_wait_time,
cmdline.proxy)
if __name__ == '__main__':
main()
|
Python
| 0.000003
|
@@ -612,19 +612,34 @@
proxy
+, process_count
):%0A
-
%0A tes
@@ -1305,17 +1305,21 @@
in
-(2, 4, 8)
+process_count
:%0A
@@ -2496,16 +2496,57 @@
oxy=None
+,%0A process_count=(2,4,8)
):%0A f
@@ -2837,24 +2837,60 @@
_time, proxy
+,%0A process_count
)%0A%0A%0Adef main
@@ -4593,16 +4593,234 @@
ently.')
+%0A parser.add_argument('--content-processes', action='append', dest='process_count',%0A default=%5B%5D, type=float,%0A help='The number of content processes to use for Firefox.')
%0A%0A cm
@@ -5015,16 +5015,93 @@
me = 0%0A%0A
+ if not cmdline.process_count:%0A cmdline.process_count = (2, 4, 8)%0A%0A
# Th
@@ -5447,32 +5447,32 @@
ttle_wait_time,%0A
-
@@ -5486,16 +5486,39 @@
ne.proxy
+, cmdline.process_count
)%0A%0A%0Aif _
|
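The new flag relies on argparse's `action='append'` accumulating repeated occurrences into a list, plus a manual fallback to `(2, 4, 8)` when the flag was never passed. A self-contained sketch of that pattern (the diff declares `type=float`, which looks copied from the neighbouring options; a process count is shown here as `int`):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--content-processes', action='append',
                    dest='process_count', default=[], type=int,
                    help='May be given multiple times, once per count to test.')

args = parser.parse_args(['--content-processes', '2', '--content-processes', '8'])
if not args.process_count:
    args.process_count = (2, 4, 8)  # fallback when the flag is absent
print(args.process_count)  # [2, 8]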
9a5135f9cd27cf24d27b2393fd071073b4485ac7
|
add test gat_plot_slice
|
mne/viz/tests/test_decoding.py
|
mne/viz/tests/test_decoding.py
|
# Authors: Denis Engemann <denis.engemann@gmail.com>
#
# License: Simplified BSD
import os.path as op
import warnings
from nose.tools import assert_raises
from mne.decoding import GeneralizationAcrossTime
from mne import io, Epochs, read_events, pick_types
from mne.utils import requires_sklearn, run_tests_if_main
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
warnings.simplefilter('always') # enable b/c these tests throw warnings
# Set our plotters to test mode
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
event_id_gen = dict(aud_l=2, vis_l=4)
@requires_sklearn
def _get_data():
"""Aux function for testing GAT viz"""
gat = GeneralizationAcrossTime()
raw = io.Raw(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg='mag', stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
decim = 30
# Test on time generalization within one condition
with warnings.catch_warnings(record=True):
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True, decim=decim)
# Test default running
gat = GeneralizationAcrossTime()
gat.fit(epochs)
gat.score(epochs)
return gat
def test_gat_plot_matrix():
"""Test GAT matrix plot"""
gat = _get_data()
gat.plot()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
def test_gat_plot_diagonal():
"""Test GAT diagonal plot"""
gat = _get_data()
gat.plot_diagonal()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
run_tests_if_main()
|
Python
| 0.000012
|
@@ -1816,16 +1816,360 @@
.plot)%0A%0A
+%0Adef test_gat_plot_slice():%0A %22%22%22Test GAT slice plot%22%22%22%0A gat = _get_data()%0A gat.plot_slice(gat.train_time%5B'times_'%5D%5B0%5D)%0A # test invalid time point%0A assert_raises(ValueError, gat.plot_slice, -1.)%0A # test float type%0A assert_raises(ValueError, gat.plot_slice, 1)%0A del gat.scores_%0A assert_raises(RuntimeError, gat.plot)%0A%0A%0A
run_test
|
476d7da17c7d22415cbd16b625ba8e443a750f0f
|
update change_upstream_proxy example
|
examples/change_upstream_proxy.py
|
examples/change_upstream_proxy.py
|
# This scripts demonstrates how mitmproxy can switch to a different upstream proxy
# in upstream proxy mode.
#
# Usage: mitmdump -s "change_upstream_proxy.py host"
from libmproxy.protocol.http import send_connect_request
alternative_upstream_proxy = ("localhost", 8082)
def should_redirect(flow):
return flow.request.host == "example.com"
def request(context, flow):
if flow.live and should_redirect(flow):
# If you want to change the target server, you should modify flow.request.host and flow.request.port
# flow.live.change_server should only be used by inline scripts to change the upstream proxy,
# unless you are sure that you know what you are doing.
server_changed = flow.live.change_server(alternative_upstream_proxy, persistent_change=True)
if flow.request.scheme == "https" and server_changed:
send_connect_request(flow.live.c.server_conn, flow.request.host, flow.request.port)
flow.live.c.establish_ssl(server=True)
|
Python
| 0.000001
|
@@ -51,16 +51,23 @@
ch to a
+second/
differen
@@ -128,16 +128,61 @@
mitmdump
+ -U http://default-upstream-proxy.local:8080/
-s %22cha
|
40a5e35b58fcb00022235ec00536dc8aa63091a2
|
fix min/max mess up
|
modules/core/PlayerAnalysis.py
|
modules/core/PlayerAnalysis.py
|
from collections import namedtuple
import datetime
import pymongo
import random
import numpy
class PlayerAnalysis(namedtuple('PlayerAnalysis', ['id', 'titled', 'engine', 'gamesPlayed', 'closedReports', 'gameAnalyses', 'PVAssessment'])): # id = userId, engine = (True | False | None)
def setEngine(self, engine):
return PlayerAnalysis(
id = self.id,
titled = self.titled,
engine = engine,
gamesPlayed = self.gamesPlayed,
closedReports = self.closedReports,
gameAnalyses = self.gameAnalyses,
PVAssessment = self.PVAssessment)
def tensorInputMoves(self):
return self.gameAnalyses.tensorInputMoves()
def tensorInputChunks(self):
return self.gameAnalyses.tensorInputChunks()
def tensorInputPVs(self):
pvs = self.gameAnalyses.pv0ByAmbiguityStats()
for i, pv in enumerate(pvs):
if pv is None:
pvs[i] = 0
return pvs # should be a list of ints 5 items long
def CSVMoves(self):
moves = []
[moves.append([int(self.engine)] + move) for move in self.tensorInputMoves()]
return moves
def CSVChunks(self):
chunks = []
[chunks.append([int(self.engine)] + chunk) for chunk in self.tensorInputChunks()]
return chunks
def CSVPVs(self):
return [int(self.engine)] + self.tensorInputPVs()
def activation(self):
anoa = sorted(self.gameAnalyses.assessmentNoOutlierAverages(), reverse=True)
retained = anoa[:min(1, int(0.3*len(anoa)))]
if len(retained) > 0:
return numpy.mean(retained)
return 0
def report(self):
return {
'userId': self.id,
'isLegit': self.isLegit(),
'activation': int(self.activation()),
'pv0ByAmbiguity': self.gameAnalyses.pv0ByAmbiguityStats(),
'games': self.gameAnalyses.reportDicts()
}
def isLegit(self):
gamesAnalysed = len(self.gameAnalyses.gameAnalyses)
noOutlierAverages = self.gameAnalyses.assessmentNoOutlierAverages()
susGames = sum([int(a > 62) for a in noOutlierAverages])
verySusGames = sum([int(a > 75) for a in noOutlierAverages])
legitGames = sum([int(a < 35) for a in noOutlierAverages])
if ((verySusGames >= (1/5)*gamesAnalysed
or susGames >= (2/5)*gamesAnalysed
or (self.PVAssessment > 70 and susGames >= (1/5)*gamesAnalysed))
and gamesAnalysed > 0 and not self.titled):
return False
elif legitGames == gamesAnalysed and self.PVAssessment < 40 and gamesAnalysed > 0:
return True # Player is legit
return None # Player falls into a grey area
class PlayerAnalysisBSONHandler:
@staticmethod
def reads(bson, gameAnalyses):
return PlayerAnalysis(
id = bson['_id'],
titled = bson['titled'],
engine = bson['engine'],
gamesPlayed = bson['gamesPlayed'],
closedReports = bson['closedReports'],
gameAnalyses = gameAnalyses,
PVAssessment = bson.get('PVAssessment', None))
@staticmethod
def writes(playerAnalysis):
return {
'_id': playerAnalysis.id,
'titled': playerAnalysis.titled,
'engine': playerAnalysis.engine,
'gamesPlayed': playerAnalysis.gamesPlayed,
'closedReports': playerAnalysis.closedReports,
'PVAssessment': playerAnalysis.PVAssessment,
'date': datetime.datetime.utcnow()
}
class PlayerAnalysisDB:
def __init__(self, playerAnalysisColl, gameAnalysisDB):
self.playerAnalysisColl = playerAnalysisColl
self.gameAnalysisDB = gameAnalysisDB
def byId(self, userId):
try:
return PlayerAnalysisBSONHandler.reads(
self.playerAnalysisColl.find_one({'_id': userId}),
self.gameAnalysisDB.byUserId(userId))
except:
return None
def byBSONs(self, bsons):
return [PlayerAnalysisBSONHandler.reads(bson, self.gameAnalysisDB.byUserId(bson['_id'])) for bson in bsons]
def byEngineStatus(self, status):
return self.byBSONs(self.playerAnalysisColl.find({'engine': status}))
def oldestUnsorted(self):
playerAnalysisBSON = next(self.playerAnalysisColl.find({'engine': None}).sort('date', pymongo.ASCENDING), None)
if playerAnalysisBSON is not None:
return PlayerAnalysisBSONHandler.reads(playerAnalysisBSON, self.gameAnalysisDB.byUserId(playerAnalysisBSON['_id']))
return None
def oldestUnsortedUserId(self):
oldest = self.oldestUnsorted()
if oldest is not None:
return oldest.id
return None
def allUnsorted(self): # Players who have not been marked as Engine or Legit
return self.byEngineStatus(None)
def allSorted(self):
return self.byBSONs(self.playerAnalysisColl.find({'engine': {'$in': [True, False]}}))
def balancedSorted(self):
enginePlayerAnalyses = self.engines()
legitPlayerAnalyses = self.legits()
amount = min(len(enginePlayerAnalyses), len(legitPlayerAnalyses))
randomEngines = [ enginePlayerAnalyses[i] for i in sorted(random.sample(range(len(enginePlayerAnalyses)), amount)) ]
randomLegits = [ legitPlayerAnalyses[i] for i in sorted(random.sample(range(len(legitPlayerAnalyses)), amount)) ]
return randomLegits + randomEngines
def countUnsorted(self):
return self.playerAnalysisColl.count({'engine': None})
def engines(self):
return self.byEngineStatus(True)
def legits(self):
return self.byEngineStatus(False)
def write(self, playerAnalysis):
self.playerAnalysisColl.update(
{'_id': playerAnalysis.id},
{'$set': PlayerAnalysisBSONHandler.writes(playerAnalysis)},
upsert=True)
self.gameAnalysisDB.lazyWriteGames(playerAnalysis.gameAnalyses)
def lazyWriteMany(self, playerAnalyses):
[self.write(playerAnalysis) for playerAnalysis in playerAnalyses]
|
Python
| 0.000002
|
@@ -1420,18 +1420,18 @@
anoa%5B:m
-in
+ax
(1, int(
|
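The one-character fix swaps `min` for `max` in the slice bound. With `min(1, int(0.3 * len(anoa)))` the retained window can never exceed one score, and is empty whenever the list has fewer than four items; `max(1, ...)` keeps the top 30% but never fewer than one. A quick check:

anoa = sorted([80, 72, 65, 50, 41, 33, 20], reverse=True)

print(anoa[:min(1, int(0.3 * len(anoa)))])  # [80]      -- capped at one score
print(anoa[:max(1, int(0.3 * len(anoa)))])  # [80, 72]  -- top 30%, at least one

short = [55, 40]
print(short[:min(1, int(0.3 * len(short)))])  # []    -- the old bug: empty slice
print(short[:max(1, int(0.3 * len(short)))])  # [55]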
0bc9ba5d9f15b443e5af53cb1e1a593446874bbe
|
put monkapi exits in Site's shutdown
|
monk/roles/executor_service.py
|
monk/roles/executor_service.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 19 16:20:55 2014
@author: pacif_000
"""
import os
import simplejson
import logging
from bson.objectid import ObjectId
from twisted.web import server
from twisted.internet import reactor
from deffered_resource import DefferedResource
import monk.core.api as monkapi
import monk.core.constants as cons
import monk.core.configuration as Config
from monk.utils.utils import DateTimeEncoder
config = Config.Configuration("executor.yml", "executorREST", str(os.getpid()))
monkapi.initialize(config)
logger = logging.getLogger("monk.executor")
class MonkAPI(DefferedResource):
def __del__(self):
monkapi.exits()
class Recommend(DefferedResource):
isLeaf = True
def __init__(self, turtleId=None, delayTime=0.0):
DefferedResource.__init__(self, delayTime)
self.defaultTurtleId = turtleId
self.defaultUserContext = {'userId' : cons.DEFAULT_USER}
def _filter(self, ent, fields):
if fields:
return {field:getattr(ent, field, '') for field in fields}
else:
return ent.generic()
def _recommend(self, args):
try:
if 'turtleId' in args:
turtleId = ObjectId(args['turtleId'][0])
else:
turtleId = ObjectId(self.defaultTurtleId)
if 'userContext' in args:
userContext = simplejson.loads(args.get('userContext')[0])
else:
userContext = self.defaultUserContext
userId = userContext['userId']
if 'query' in args:
query = simplejson.loads(args.get('query')[0])
else:
query = {}
if 'num' in args:
num = int(args.get('num')[0])
else:
num = 10
if 'fields' in args:
fields = simplejson.loads(args['fields'][0])
else:
fields = None
if not monkapi.has_one(turtleId, userId):
if not monkapi.has_one_in_store(turtleId, userId):
monkapi.add_one(turtleId, userId)
else:
monkapi.load_one(turtleId, userId)
entityCollectionName = monkapi.entity_collection(turtleId)
ents = monkapi.load_entities(None, query, num * 10, entityCollectionName)
# @todo: add user_context features
results = [(monkapi.predict(turtleId, userId, ent), ent) for ent in ents]
results.sort(reverse=True)
except Exception as e:
logger.error(e.message)
logger.error('can not parse request {0}'.format(args))
logger.error('cannot parse request {0}'.format(args))
results = []
return [self._filter(res[1], fields) for res in results]
def _delayedRender_GET(self, request):
logger.info('request {0}'.format(request.args))
results = self._recommend(request.args)
simplejson.dump(results, request, cls=DateTimeEncoder)
request.finish()
def _delayedRender_POST(self, request):
logger.info('request {0}'.format(request.args))
results = self._recommend(request.args)
simplejson.dump(results, request, cls=DateTimeEncoder)
request.finish()
root = MonkAPI()
root.putChild("recommend", Recommend())
root.putChild("recommendTags", Recommend("5338c7562524830c64a2d599"))
site = server.Site(root, "web.log")
reactor.listenTCP(8080, site)
reactor.run()
|
Python
| 0
|
@@ -596,31 +596,26 @@
MonkAPI(
-DefferedResourc
+server.Sit
e):%0A
@@ -622,15 +622,19 @@
def
-__del__
+stopFactory
(sel
@@ -661,16 +661,54 @@
exits()%0A
+ server.Site.stopFactory(self)%0A
@@ -3302,23 +3302,32 @@
%0Aroot =
-MonkAPI
+DefferedResource
()%0Aroot.
@@ -3439,27 +3439,23 @@
%0Asite =
-server.Site
+MonkAPI
(root, %22
|
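The diff retires the `__del__`-based cleanup, whose timing under garbage collection is unspecified, and hooks `monkapi.exits()` into `Site.stopFactory`, which Twisted calls deterministically when the factory stops listening. A minimal sketch of the pattern, assuming Twisted is installed and with a placeholder standing in for the monkapi teardown:

from twisted.web import server

def release_resources():
    # placeholder for monkapi.exits() or similar teardown
    print('releasing resources')

class ShutdownAwareSite(server.Site):
    def stopFactory(self):
        release_resources()
        server.Site.stopFactory(self)  # preserve the base class shutdown steps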
e60faa1512357ed1ecbb4d107b67e9a5f434626a
|
Add doctring for grid parse function
|
SudokuSolver.py
|
SudokuSolver.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from collections import OrderedDict
class Sudoku(object):
"""
Sudoku(values) -> new instance
A class for a Sudoku puzzle.
Keyword arguments:
values -- an array containing all 81 values of a puzzle horizontally
Instance variables:
values -- an array containing all 81 values of a puzzle horizontally
rows -- a multi-dimensional array that stores 9 rows as arrays, each
holding 9 values
columns -- a multi-dimensional array that stores 9 columns as arrays, each
holding 9 values
squares -- a multi-dimensional array that stores 9 3x3 squares as arrays,
each holding 9 values
"""
values = []
rows = [[], [], [], [], [], [], [], [], []]
columns = [[], [], [], [], [], [], [], [], []]
squares = [[], [], [], [], [], [], [], [], []]
grid = OrderedDict()
def __init__(self, values):
"""See class docstring for details."""
self.values = values
def parse_sudoku(self):
"""Calls the parse methods for converting the values into arrays."""
self.parse_rows()
self.parse_columns()
self.parse_squares()
self.parse_grid()
def parse_rows(self):
"""Parses the values into the rows array."""
for i in range(9):
for j in self.values[i * 9:i * 9 + 9]:
self.rows[i].append(j)
def parse_columns(self):
"""Parses the rows into the columns array."""
for row in self.rows:
for i in range(9):
self.columns[i].append(row[i])
def parse_squares(self):
"""Parses the rows into the squares array."""
for i in range(9):
for j in range(0, 3):
for k in range(0, 3):
self.squares[i].append(self.rows[j][k])
def parse_grid(self):
j = 0
for l in 'ABCDEFGHI':
for i in range(1, 10):
self.grid[l + str(i)] = values[j]
j += 1
# An example sudoku stored as an 81 value array
values = [0, 0, 0, 2, 6, 0, 7, 0, 1,
6, 8, 0, 0, 7, 0, 0, 9, 0,
1, 9, 0, 0, 0, 4, 5, 0, 0,
8, 2, 0, 1, 0, 0, 0, 4, 0,
0, 0, 4, 6, 0, 2, 9, 0, 0,
0, 5, 0, 0, 0, 3, 0, 2, 8,
0, 0, 9, 3, 0, 0, 0, 7, 4,
0, 4, 0, 0, 5, 0, 0, 3, 6,
7, 0, 3, 0, 1, 8, 0, 0, 0]
# Answer to example sudoku:
# 4 3 5 | 2 6 9 | 7 8 1
# 6 8 2 | 5 7 1 | 4 9 3
# 1 9 7 | 8 3 4 | 5 6 2
# --------+---------+--------
# 8 2 6 | 1 9 5 | 3 4 7
# 3 7 4 | 6 8 2 | 9 1 5
# 9 5 1 | 7 4 3 | 6 2 8
# --------+---------+--------
# 5 1 9 | 3 2 6 | 8 7 4
# 2 4 8 | 9 5 7 | 1 3 6
# 7 6 3 | 4 1 8 | 2 5 9
# Creates a new instance of the Sudoku class passing the values array
sudoku = Sudoku(values)
sudoku.parse_grid()
print(sudoku.grid)
|
Python
| 0
|
@@ -1891,32 +1891,84 @@
rse_grid(self):%0A
+ %22%22%22Parses the values into the grid dict.%22%22%22%0A
j = 0%0A
@@ -2943,43 +2943,4 @@
es)%0A
-sudoku.parse_grid()%0Aprint(sudoku.grid)%0A
|
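The new docstring covers `parse_grid`, which keys each cell by a row letter and column digit, 'A1' through 'I9'. A standalone sketch of the mapping it builds; note that the method body reads the module-level `values` rather than `self.values`, a quirk this docstring-only commit leaves untouched:

from collections import OrderedDict

values = list(range(81))  # stand-in for the 81 puzzle values

grid = OrderedDict()
j = 0
for letter in 'ABCDEFGHI':
    for digit in range(1, 10):
        grid[letter + str(digit)] = values[j]
        j += 1

print(list(grid)[:3])          # ['A1', 'A2', 'A3']
print(grid['A1'], grid['I9'])  # 0 80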
96a6993935fe82cad5b34a6a11fb32585e3823ec
|
Fix syntax.
|
astrobin_apps_platesolving/views/solution.py
|
astrobin_apps_platesolving/views/solution.py
|
# Python
import simplejson
import urllib2
# Django
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.db import IntegrityError
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django.views.generic import base
# restframework
from rest_framework import generics
from rest_framework import permissions
# This app
from astrobin_apps_platesolving.models import Solution
from astrobin_apps_platesolving.serializers import SolutionSerializer
from astrobin_apps_platesolving.solver import Solver
class SolveView(base.View):
def post(self, request, *args, **kwargs):
object_id = kwargs.pop('object_id')
content_type_id = kwargs.pop('content_type_id')
content_type = ContentType.objects.get_for_id(content_type_id)
target = get_object_or_404(content_type.model_class(), pk = object_id)
solution, created = Solution.objects.get_or_create(object_id = object_id, content_type = content_type)
if solution.submission_id is None:
solver = Solver()
try:
thumb_url = target.thumbnail('regular')
url = thumb_url.split('://')[1]
url = 'http://' + urllib2.quote(url.encode('utf-8'))
headers = { 'User-Agent' : 'Mozilla/5.0' }
req = urllib2.Request(url, None, headers)
img = NamedTemporaryFile(delete = True)
img.write(urllib2.urlopen(req).read())
img.flush()
img.seek(0)
f = File(img)
submission = solver.solve(f)
solution.status = Solver.PENDING
solution.submission_id = submission
solution.save()
except urllib2.HTTPError, urllib2.URLError, IOError:
solution.status = Solver.MISSING
solution.submission_id = None
solution.save()
context = {
'solution': solution.id,
'submission': solution.submission_id,
'status': solution.status,
}
return HttpResponse(simplejson.dumps(context), mimetype='application/json')
class SolutionUpdateView(base.View):
def post(self, request, *args, **kwargs):
solution = get_object_or_404(Solution, pk = kwargs.pop('pk'))
solver = Solver()
status = solver.status(solution.submission_id)
if status == Solver.MISSING:
solution.status = status
solution.save()
context = {'status': status}
return HttpResponse(simplejson.dumps(context), mimetype='application/json')
class SolutionFinalizeView(base.View):
def post(self, request, *args, **kwargs):
solution = get_object_or_404(Solution, pk = kwargs.pop('pk'))
solver = Solver()
status = solver.status(solution.submission_id)
if status == Solver.SUCCESS:
info = solver.info(solution.submission_id)
solution.objects_in_field = ', '.join(info['objects_in_field'])
solution.ra = "%.3f" % info['calibration']['ra']
solution.dec = "%.3f" % info['calibration']['dec']
solution.orientation = "%.3f" % info['calibration']['orientation']
solution.radius = "%.3f" % info['calibration']['radius']
# Get the images 'w' and adjust pixscale
if solution.content_object:
w = solution.content_object.w
pixscale = info['calibration']['pixscale']
if w and pixscale:
thumbnail_w = settings.THUMBNAIL_ALIASES['']['regular']['size'][0]
ratio = thumbnail_w / float(w)
corrected_scale = float(pixscale) * ratio
solution.pixscale = "%.3f" % corrected_scale
else:
solution.pixscale = None
try:
target = solution.content_type.get_object_for_this_type(pk = solution.object_id)
except solution.content_type.model_class().DoesNotExist:
context = {'status': Solver.FAILED}
return HttpResponse(simplejson.dumps(context), mimetype='application/json')
url = solver.annotated_image_url(solution.submission_id)
img = NamedTemporaryFile(delete=True)
img.write(urllib2.urlopen(url).read())
img.flush()
img.seek(0)
f = File(img)
try:
solution.image_file.save(target.image_file.name, f)
except IntegrityError:
pass
url = solver.sky_plot_zoom1_image_url(solution.submission_id)
if url:
img = NamedTemporaryFile(delete=True)
img.write(urllib2.urlopen(url).read())
img.flush()
img.seek(0)
f = File(img)
try:
solution.skyplot_zoom1.save(target.image_file.name, f)
except IntegrityError:
pass
solution.status = status
solution.save()
context = {'status': solution.status}
return HttpResponse(simplejson.dumps(context), mimetype='application/json')
###############################################################################
# API #
###############################################################################
class SolutionList(generics.ListCreateAPIView):
model = Solution
queryset = Solution.objects.order_by('pk')
serializer_class = SolutionSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
filter_fields = ('content_type', 'object_id',)
class SolutionDetail(generics.RetrieveUpdateDestroyAPIView):
model = Solution
serializer_class = SolutionSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
|
Python
| 0.000005
|
@@ -1891,16 +1891,17 @@
except
+(
urllib2.
@@ -1932,24 +1932,25 @@
ror, IOError
+)
:%0A
|
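The added parentheses are not cosmetic. In Python 2, `except A, B:` binds the caught `A` instance to the name `B`, and a third comma-separated item is a syntax error outright, which is what this commit fixes. Wrapping the types in a tuple catches any of them, and is the only spelling Python 3 accepts:

try:
    raise IOError('disk gone')
except (ValueError, IOError) as err:  # the tuple catches either type
    print('handled: %s' % err)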
acef3b83c265078b69061de0054317a31c7a91d0
|
Fix permission check on exportdb widget (#3906)
|
bluebottle/bluebottle_dashboard/dashboard.py
|
bluebottle/bluebottle_dashboard/dashboard.py
|
import importlib
from django.urls.base import reverse, reverse_lazy
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from jet.dashboard import modules
from jet.dashboard.dashboard import Dashboard, DefaultAppIndexDashboard
from jet.dashboard.modules import DashboardModule, LinkList
from bluebottle.activities.dashboard import RecentActivities
from bluebottle.assignments.dashboard import RecentAssignments
from bluebottle.clients import properties
from bluebottle.events.dashboard import RecentEvents
from bluebottle.funding.dashboard import RecentFunding, PayoutsReadForApprovalDashboardModule
from bluebottle.initiatives.dashboard import RecentInitiatives, MyReviewingInitiatives
from bluebottle.members.dashboard import RecentMembersDashboard
from bluebottle.tasks.models import Task
class ClosingTasks(DashboardModule):
title = _('Tasks nearing application deadline')
title_url = reverse('admin:tasks_task_changelist')
template = 'dashboard/closing_tasks.html'
limit = 5
def init_with_context(self, context):
tasks = Task.objects.exclude(deadline_to_apply__lt=now()).\
filter(project__status__slug='campaign').\
filter(status__in=['open', 'full']).order_by('deadline_to_apply')
self.children = tasks[:self.limit]
class CustomIndexDashboard(Dashboard):
columns = 2
class Media:
css = ('css/admin/dashboard.css', )
def init_with_context(self, context):
self.available_children.append(modules.LinkList)
# Initiatives
self.children.append(RecentInitiatives())
self.children.append(MyReviewingInitiatives())
# Activities
self.children.append(RecentActivities())
self.children.append(RecentEvents())
self.children.append(RecentFunding())
self.children.append(RecentAssignments())
# Payouts
self.children.append(PayoutsReadForApprovalDashboardModule())
# Other
self.children.append(modules.RecentActions(
_('Recent Actions'),
10,
column=0,
order=0
))
self.children.append(RecentMembersDashboard())
if context['request'].user.has_perm('sites.export'):
metrics_children = [
{
'title': _('Export metrics'),
'url': reverse_lazy('exportdb_export'),
},
]
if properties.REPORTING_BACKOFFICE_ENABLED:
metrics_children.append({
'title': _('Download report'),
'url': reverse_lazy('report-export'),
})
self.children.append(LinkList(
_('Export Metrics'),
children=metrics_children
))
class CustomAppIndexDashboard(DefaultAppIndexDashboard):
def __new__(cls, context, **kwargs):
try:
mod = importlib.import_module("bluebottle.{}.dashboard".format(kwargs['app_label']))
dash = mod.AppIndexDashboard(context, **kwargs)
return dash
except ImportError:
return DefaultAppIndexDashboard(context, **kwargs)
|
Python
| 0
|
@@ -323,16 +323,30 @@
nkList%0A%0A
+import rules%0A%0A
from blu
@@ -2224,55 +2224,70 @@
if
-context%5B'request'%5D.user.has_perm('sites.export'
+rules.test_rule('exportdb.can_export', context%5B'request'%5D.user
):%0A
|
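The check moves from Django's permission framework to django-rules' `test_rule`. A hedged sketch of how such a rule could be registered and queried, assuming django-rules is installed; the rule name comes from the diff, while the predicate and fake user are invented for illustration:

import rules

@rules.predicate
def is_staff(user):
    return getattr(user, 'is_staff', False)

rules.add_rule('exportdb.can_export', is_staff)

class FakeUser(object):
    is_staff = True

print(rules.test_rule('exportdb.can_export', FakeUser()))  # True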
5af79b94c6f1b0117e229db23811d3e1c58ff3fa
|
Add password encrpytion and try to fix mailgun again
Add password encryption and try to fix mailgun again
|
config.py
|
config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
SECRET_KEY = 'this_is_so_secret' #used for development, reset in prod
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
#Flask-Security Config
SECURITY_TRACKABLE = True
SECURITY_CHANGEABLE = True
SECURITY_REGISTERABLE = True
SECURITY_RECOVERABLE = True
SECURITY_DEFAULT_REMEMBER_ME = True
SECURITY_SEND_REGISTER_EMAIL = False
SECURITY_EMAIL_SUBJECT_PASSWORD_RESET = 'DataNews: password reset instructions'
SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = 'DataNews: your password has been reset'
SECURITY_EMAIL_SUBJECT_PASSWORD_CHANGE_NOTICE = 'DataNews: your password changed'
WHOOSH_BASE = os.path.join(basedir, 'search.db')
DEBUG_TB_PANELS = (
'flask.ext.debugtoolbar.panels.versions.VersionDebugPanel',
'flask.ext.debugtoolbar.panels.timer.TimerDebugPanel',
'flask.ext.debugtoolbar.panels.headers.HeaderDebugPanel',
'flask.ext.debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
'flask.ext.debugtoolbar.panels.template.TemplateDebugPanel',
'flask.ext.debugtoolbar.panels.logger.LoggingPanel'
)
class ProductionConfig(Config):
DEBUG = True
SECRET_KEY = os.environ.get('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
#Mail config
MAIL_SERVER = os.environ.get('MAILGUN_SMTP_SERVER')
MAIL_PORT = int(os.environ.get('MAILGUN_SMTP_PORT', 25))
MAIL_USE_SSL = True
MAIL_USERNAME = os.environ.get('MAILGUN_SMTP_LOGIN')
MAIL_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD')
class DevelopmentConfig(Config):
DEBUG = True
DEBUG_TB_INTERCEPT_REDIRECTS = False
class TestingConfig(Config):
TESTING = True
|
Python
| 0
|
@@ -1471,24 +1471,124 @@
BASE_URL')%0A%0A
+ SECURITY_PASSWORD_HASH = 'bcrypt'%0A SECURITY_PASSWORD_SALT = '$2a$16$PnnIgfMwkOjGX4SkHqSOPO'%0A%0A
#Mail co
@@ -1708,37 +1708,14 @@
T',
-2
5
+87
))%0A
- MAIL_USE_SSL = True%0A
@@ -1826,16 +1826,62 @@
SSWORD')
+%0A SECURITY_EMAIL_SENDER = 'joe@joehand.org'
%0A%0Aclass
|
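`SECURITY_PASSWORD_HASH = 'bcrypt'` tells Flask-Security to hash via passlib's bcrypt scheme, and `SECURITY_PASSWORD_SALT` is used as an application-wide HMAC key layered on top of bcrypt's own per-hash salt (committing its value to the repository, as this diff does, weakens the point of having one). A direct passlib sketch of what the setting selects, assuming passlib and its bcrypt backend are installed:

from passlib.context import CryptContext

pwd_context = CryptContext(schemes=['bcrypt'])

digest = pwd_context.hash('hunter2')          # passlib >= 1.7 (older: .encrypt)
print(digest.startswith('$2'))                # bcrypt hashes carry a $2* prefix
print(pwd_context.verify('hunter2', digest))  # True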
a10967f6c4d52cac1cf263d2415b1154ecd40745
|
Remove console level logging in test
|
test/test_summariser.py
|
test/test_summariser.py
|
"""This file contains the BinSummariserTest class."""
import os
import shutil
import tempfile
import unittest
from bin import summariser
class BinSummariserTest(unittest.TestCase):
"""A class to test the bin/summariser.py file."""
def setUp(self):
"""Create tempory config files and populate the db config."""
# Create a directory for temporary files
self._tmp_dir = tempfile.mkdtemp(prefix='summariser')
# Create an empty database config file
self.db_cfg, self.db_cfg_path = tempfile.mkstemp(prefix='db',
dir=self._tmp_dir)
# Create an empty summariser config file
self.sum_cfg, self.sum_cfg_path = tempfile.mkstemp(prefix='sum',
dir=self._tmp_dir)
# Populate the database config (with junk)
os.write(self.db_cfg, DB_CONF)
os.close(self.db_cfg)
def test_lock_file(self):
"""Test a existing Pidfile prevents a summariser from starting."""
# Create the Pidfile.
# We don't have to worry about cleaning it up as tearDown deletes
# everything in self._tmp_dir
_pid_file, pid_path = tempfile.mkstemp(prefix='pid',
dir=self._tmp_dir)
# Create a temporary log file for the summariser
_sum_log, sum_log_path = tempfile.mkstemp(prefix='sum',
dir=self._tmp_dir)
# Create a temporary summariser config that refers to the Pidfile above
sum_conf = ('[summariser]\n'
'pidfile = %s\n'
'\n'
'[logging]\n'
'logfile = %s\n'
'level = INFO\n'
'console = true\n' % (pid_path, sum_log_path))
# Write temporary config to the temporary file
os.write(self.sum_cfg, sum_conf)
os.close(self.sum_cfg)
# Run the summariser with the temporary config files
try:
summariser.runprocess(self.db_cfg_path, self.sum_cfg_path, '')
except SystemExit:
# A SystemExit is raised regardless of what happens
# in the summariser, so we must look at the log output
# Note: Because we need to be compatible with Python 2.4, we can't
# use "with" here - we need to call the open() and close()
# methods manually.
log_file = open(sum_log_path, 'r')
output = log_file.read()
log_file.close()
expected_error = 'A pidfile %s already exists.' % pid_path
if expected_error not in output:
if 'Created Pidfile' in output:
# Then we have errored in an expected way
# and a summariser was started
self.fail('A summariser started despite existing pidfile.')
else:
# Something else has happened.
self.fail('An unexpected summariser error has occurred.\n'
'See output below:\n'
'%s' % output)
def tearDown(self):
"""Remove test directory and all contents."""
try:
shutil.rmtree(self._tmp_dir)
except OSError, error:
print 'Error removing temporary directory %s' % self._tmp_dir
print error
DB_CONF = """[db]
# type of database
backend = Tutorial D
# host with database
hostname = Darwen
# port to connect to
port = 3306
# database name
name = hugh
# database user
username = hugh
# password for database
password = duck
# how many records should be put/fetched to/from database
# in single query
records = 1000
# option for summariser so that SummariseVMs is called
type = birds
"""
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000002
|
@@ -1840,11 +1840,12 @@
e =
-tru
+fals
e%5Cn'
|
fe809de47989a8f7378576cdbf88f0ad382fe3f3
|
increase words in list to 4
|
crypto.py
|
crypto.py
|
# -*- coding: utf-8 -*-
import hmac, hashlib, subprocess, random, threading
myrandom = random.SystemRandom()
import gnupg
import config
import store
WORDS_IN_RANDOM_ID = 3
HASH_FUNCTION = hashlib.sha256
GPG_KEY_TYPE = "RSA"
GPG_KEY_LENGTH = "4096"
class CryptoException(Exception): pass
def clean(s, also=''):
"""
>>> clean("Hello, world!")
Traceback (most recent call last):
...
CryptoException: invalid input
>>> clean("Helloworld")
'Helloworld'
"""
ok = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
for c in s:
if c not in ok and c not in also: raise CryptoException("invalid input")
return s
words = file(config.WORD_LIST).read().split('\n')
def genrandomid():
return ' '.join(myrandom.choice(words) for x in range(WORDS_IN_RANDOM_ID))
def displayid(n):
badrandom = random.WichmannHill()
badrandom.seed(n)
return ' '.join(badrandom.choice(words) for x in range(WORDS_IN_RANDOM_ID))
def shash(s):
"""
>>> shash('Hello, world!')
'98015b0fbf815a630cbcda94b809d207490d7cc2c5c02cb33a242acfd5b73cc1'
"""
return hmac.HMAC(config.HMAC_SECRET, s, HASH_FUNCTION).hexdigest()
GPG_BINARY = 'gpg2'
try:
p = subprocess.Popen([GPG_BINARY, '--version'], stdout=subprocess.PIPE)
except OSError:
GPG_BINARY = 'gpg'
p = subprocess.Popen([GPG_BINARY, '--version'], stdout=subprocess.PIPE)
assert p.stdout.readline().split()[-1].split('.')[0] == '2', "upgrade GPG to 2.0"
gpg = gnupg.GPG(gpgbinary=GPG_BINARY, gnupghome=config.GPG_KEY_DIR)
def genkeypair(name, secret):
"""
>>> if not gpg.list_keys(shash('randomid')):
... genkeypair(shash('randomid'), 'randomid').type
... else:
... u'P'
u'P'
"""
name, secret = clean(name), clean(secret, ' ')
return gpg.gen_key(gpg.gen_key_input(
key_type=GPG_KEY_TYPE, key_length=GPG_KEY_LENGTH,
passphrase=secret,
name_email="%s@deaddrop.example.com" % name
))
def getkey(name):
for key in gpg.list_keys():
for uid in key['uids']:
if ' <%s@' % name in uid: return key['fingerprint']
return None
def _shquote(s):
return "\\'".join("'" + p + "'" for p in s.split("'"))
_gpghacklock = threading.Lock()
def encrypt(fp, s, output=None, fn=None):
r"""
>>> encrypt(shash('randomid'), "Goodbye, cruel world!")[:75]
'-----BEGIN PGP MESSAGE-----\nVersion: GnuPG/MacGPG2 v2.0.17 (Darwin)\n\nhQIMA3'
"""
if output:
store.verify(output)
fp = fp.replace(' ', '')
if isinstance(s, unicode):
s = s.encode('utf8')
if isinstance(s, str):
out = gpg.encrypt(s, [fp], output=output, always_trust=True)
else:
if fn:
with _gpghacklock:
oldname = gpg.gpgbinary
gpg.gpgbinary += ' --set-filename ' + _shquote(fn)
out = gpg.encrypt_file(s, [fp], output=output, always_trust=True)
gpg.gpgbinary = oldname
else:
out = gpg.encrypt_file(s, [fp], output=output, always_trust=True)
if out.ok:
return out.data
else:
raise CryptoException(out.stderr)
def decrypt(name, secret, s):
"""
>>> decrypt(shash('randomid'), 'randomid',
... encrypt(shash('randomid'), 'Goodbye, cruel world!')
... )
'Goodbye, cruel world!'
"""
return gpg.decrypt(s, passphrase=secret).data
def secureunlink(fn):
store.verify(fn)
return subprocess.check_call(['srm', fn])
# crash if we don't have srm:
try:
subprocess.check_call(['srm'], stdout=subprocess.PIPE)
except subprocess.CalledProcessError:
pass
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Python
| 0.999999
|
@@ -164,17 +164,17 @@
OM_ID =
-3
+4
%0AHASH_FU
|
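Bumping the constant to 4 makes `genrandomid` draw one more word, multiplying the identifier space by the word-list size; for a hypothetical 6,800-word list that is roughly 51 bits of entropy instead of 38. A self-contained version with an inline list standing in for `config.WORD_LIST`:

import random

WORDS_IN_RANDOM_ID = 4
words = ['alpha', 'bravo', 'charlie', 'delta', 'echo', 'foxtrot']
myrandom = random.SystemRandom()

def genrandomid():
    return ' '.join(myrandom.choice(words) for _ in range(WORDS_IN_RANDOM_ID))

print(genrandomid())  # e.g. 'echo alpha delta bravo'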
9a3442e5ab54488cc0c6aaad13ff5af85f3faa8e
|
Use match instead of wildcard filter by default
|
resolwe/elastic/viewsets.py
|
resolwe/elastic/viewsets.py
|
""".. Ignore pydocstyle D400.
================
Elastic Viewsets
================
.. autoclass:: resolwe.elastic.viewsets.ElasticSearchMixin
:members:
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from elasticsearch_dsl.query import Q
from django.conf import settings
from django.contrib.auth import get_user_model
from rest_framework.exceptions import APIException
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from .pagination import LimitOffsetPostPagination
__all__ = (
'ElasticSearchMixin',
'PaginationMixin',
'ElasticSearchBaseViewSet',
)
ELASTICSEARCH_SIZE = 10000 # maximum number of results returned by ElasticSearch
class TooManyResults(APIException):
"""Exception when elastic query returns more than ``ELASTICSEARCH_SIZE`` results."""
status_code = 400
default_detail = 'Query returned too many results. Please, add more filters or use pagination.'
default_code = 'bad_request'
class ElasticSearchMixin(object):
"""Mixin to use Django REST Framework with ElasticSearch based querysets.
This mixin adds following methods:
* :func:`~ElasticSearchMixin.order_search`
* :func:`~ElasticSearchMixin.filter_search`
* :func:`~ElasticSearchMixin.filter_permissions`
"""
filtering_fields = []
ordering_fields = []
ordering = None
def get_query_param(self, key, default=None):
"""Get query parameter uniformly for GET and POST requests."""
value = self.request.query_params.get(key, None)
if value is None:
value = self.request.data.get(key, None)
if value is None:
value = default
return value
def order_search(self, search):
"""Order given search by the ordering parameter given in request.
:param search: ElasticSearch query object
"""
ordering = self.get_query_param('ordering', self.ordering)
if not ordering:
return search
ordering_field = ordering.lstrip('-')
if ordering_field not in self.ordering_fields:
raise KeyError('Ordering by `{}` is not supported.'.format(ordering_field))
return search.sort(ordering)
def filter_search(self, search):
"""Filter given search by the filter parameter given in request.
:param search: ElasticSearch query object
"""
for field in self.filtering_fields:
value = self.get_query_param(field, None)
if value:
if isinstance(value, list):
filters = [Q('match', **{field: item}) for item in value]
search = search.query('bool', should=filters)
else:
search = search.query('wildcard', **{field: value})
return search
def filter_permissions(self, search):
"""Filter given query based on permissions of the user in the request.
:param search: ElasticSearch query object
"""
user = self.request.user
if user.is_superuser:
return search
if user.is_anonymous():
user_model = get_user_model()
user = user_model.objects.get(**{user_model.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME})
filters = [Q('match', users_with_permissions=user.pk)]
filters.extend([
Q('match', groups_with_permissions=group.pk) for group in user.groups.all()
])
# `minimum_should_match` is set to 1 by default
return search.query('bool', should=filters)
class PaginationMixin(object):
"""Mixin for making paginated response in case pagination parameters are provided."""
def paginate_response(self, queryset):
"""Optionally return paginated response.
If pagination parameters are provided in the request, then paginated response
is returned, otherwise response is not paginated.
"""
page = self.paginate_queryset(queryset)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
class ElasticSearchBaseViewSet(PaginationMixin, ElasticSearchMixin, GenericViewSet):
"""Base ViewSet for ElasticSearch based views.
This ViewSet creates search based on ``document_class`` parameter, specified
in subclass and:
* filter it by query parameters
* apply ordering
* filter permissions
* apply pagination
.. IMPORTANT::
Both ``POST`` and ``GET`` requests are supported.
"""
document_class = None
pagination_class = LimitOffsetPostPagination
def custom_filter(self, search):
"""Perform custom search filtering.
This method is intended to be overriden in subclasses.
"""
return search
def list_with_post(self, request):
"""Endpoint handler."""
search = self.document_class().search() # pylint: disable=not-callable
search = self.custom_filter(search)
search = self.filter_search(search)
search = self.order_search(search)
search = self.filter_permissions(search)
if search.count() > ELASTICSEARCH_SIZE:
limit = self.get_query_param('limit')
offset = self.get_query_param('offset')
if not limit or not offset or limit > ELASTICSEARCH_SIZE:
raise TooManyResults()
search = search.extra(size=ELASTICSEARCH_SIZE)
return self.paginate_response(search)
def list(self, request):
"""Endpoint handler."""
return self.list_with_post(request)
|
Python
| 0
|
@@ -2813,16 +2813,13 @@
ry('
-wildcard
+match
', *
|
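The single-value branch now builds a `match` query, the analyzed full-text kind already used in the list branch, instead of `wildcard`, a term-level query that does no analysis and can be expensive to expand. A sketch of the two query bodies with elasticsearch_dsl, assuming that package is installed:

from elasticsearch_dsl.query import Q

field, value = 'name', 'kinetics'

print(Q('wildcard', **{field: value}).to_dict())  # {'wildcard': {'name': 'kinetics'}}
print(Q('match', **{field: value}).to_dict())     # {'match': {'name': 'kinetics'}}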
cdbf0f2c82360c866d8c26f2d8a9539fa943df6b
|
Bump version 1.0.0.
|
rpm_py_installer/version.py
|
rpm_py_installer/version.py
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '0.9.2'
|
Python
| 0
|
@@ -82,11 +82,11 @@
= '
-0.9.2
+1.0.0
'%0A
|
9b5c1892dd4731df564d627ae9dafe95bd82b6a9
|
Bump version 0.7.1.
|
rpm_py_installer/version.py
|
rpm_py_installer/version.py
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '0.7.0'
|
Python
| 0
|
@@ -82,11 +82,11 @@
= '0.7.
-0
+1
'%0A
|
b1d5f52537d9b8081ebfcf900610b2f97abcb72b
|
remove now useless annotation
|
rsqueakvm/model/database.py
|
rsqueakvm/model/database.py
|
from rsqueakvm.model.pointers import W_PointersObject
from rpython.rlib import jit
from rsqueakvm.plugins.database_plugin import dbm, SQLConnection
from rsqueakvm.error import PrimitiveFailedError
class DBType(object): pass
NIL = DBType()
TEXT = DBType()
INTEGER = DBType()
REAL = DBType()
BLOB = DBType()
ALTER_SQL = "ALTER TABLE %s ADD COLUMN inst_var_%s %s;"
CREATE_SQL = "CREATE TABLE IF NOT EXISTS %s (id INTEGER);"
INSERT_SQL = "INSERT INTO %s (id) VALUES (?);"
SELECT_SQL = "SELECT inst_var_%s FROM %s WHERE id=?;"
UPDATE_SQL = "UPDATE %s SET inst_var_%s=? WHERE id=?"
@jit.elidable
def insert_sql(class_name):
return INSERT_SQL % class_name
@jit.elidable
def select_sql(class_name, n0):
return SELECT_SQL % (n0, class_name)
@jit.elidable
def alter_sql(class_name, n0, dbtype):
if dbtype is NIL:
strtype = ""
elif dbtype is TEXT:
strtype = "text"
elif dbtype is INTEGER:
strtype = "integer"
elif dbtype is REAL:
strtype = "real"
elif dbtype is BLOB:
strtype = "blob"
else:
assert False
return ALTER_SQL % (class_name, n0, strtype)
@jit.elidable
def update_sql(class_name, n0):
return UPDATE_SQL % (class_name, n0)
@jit.elidable
def create_sql(class_name):
return CREATE_SQL % class_name
class W_DBObject_State:
_immutable_fields_ = ["db_connection?", "column_types_for_table",
"db_objects", "class_names"]
def __init__(self):
self.id_counter = 0
self.column_types_for_table = {}
# Maps from DBObject id to DBObject and only includes DBObjects which
# are referenced from an attribute of a DBObject.
self.db_objects = {}
self.class_names = {}
def get_column_type(self, class_name, n0):
dbtype = self.get_column_types(class_name)[n0]
if dbtype != NIL:
return jit.promote(dbtype)
else:
return NIL
@jit.elidable
def get_column_types(self, class_name):
return self.column_types_for_table[class_name]
def set_column_type(self, class_name, position, value):
self.get_column_types(class_name)[position] = value
# This is only ever called once per classname. We always promote the
# classname to a constant, so any time the classname changes, we have to
# break out of the trace and compile a new bridge, anyway. When that
# happens, this was already run once, so we don't need to do it again.
@jit.not_in_trace
def init_column_types_if_neccessary(self, class_name, size):
if class_name not in self.column_types_for_table:
W_DBObject.state.column_types_for_table[class_name] = [NIL] * size
# Same reason as above
@jit.not_in_trace
def create_table_if_neccessary(self, class_name, connection):
if class_name not in W_DBObject.state.class_names:
connection.execute(create_sql(class_name))
W_DBObject.state.class_names[class_name] = True
class W_DBObject(W_PointersObject):
_attrs_ = ["id"]
_immutable_fields_ = ["id"]
state = W_DBObject_State()
@staticmethod
def next_id():
theId = W_DBObject.state.id_counter
W_DBObject.state.id_counter += 1
return theId
@jit.unroll_safe
def __init__(self, space, w_class, size, weak=False):
W_PointersObject.__init__(self, space, w_class, size, weak)
self.id = W_DBObject.next_id()
class_name = self.class_name(space)
W_DBObject.state.init_column_types_if_neccessary(class_name, size)
connection = dbm.connection(space)
W_DBObject.state.create_table_if_neccessary(class_name, connection)
connection.execute(insert_sql(class_name), [self.w_id(space)])
def class_name(self, space):
return jit.promote_string(self.classname(space))
def w_id(self, space):
return space.wrap_int(self.id)
def fetch(self, space, n0):
class_name = self.class_name(space)
if not W_DBObject.state.get_column_type(class_name, n0):
# print "Can't find column. Falling back to default fetch."
return W_PointersObject.fetch(self, space, n0)
cursor = dbm.connection(space).execute(
select_sql(class_name, n0), [self.w_id(space)])
w_result = space.unwrap_array(cursor.next())
if w_result:
if W_DBObject.state.get_column_type(class_name, n0) is BLOB:
db_id = space.unwrap_int(w_result[0])
return W_DBObject.state.db_objects[db_id]
else:
return w_result[0]
else:
raise PrimitiveFailedError
def store(self, space, n0, w_value):
cls = w_value.getclass(space)
if (cls.is_same_object(space.w_String)):
aType = TEXT
elif cls.is_same_object(space.w_SmallInteger):
aType = INTEGER
elif cls.is_same_object(space.w_Float):
aType = REAL
elif cls.is_same_object(space.w_nil):
aType = NIL
else:
if isinstance(w_value, W_DBObject):
aType = BLOB
W_DBObject.state.db_objects[w_value.id] = w_value
# Save id in database.
w_value = w_value.w_id(space)
else:
# print 'Unable to unwrap %s' % w_value.getclass(space)
# print 'Falling back to standard store.'
return W_PointersObject.store(self, space, n0, w_value)
aType = jit.promote(aType)
class_name = self.class_name(space)
if (aType is not NIL and
W_DBObject.state.get_column_type(class_name, n0) is NIL):
connection = dbm.connection(space)
connection.execute(alter_sql(class_name, n0, aType))
# print "invalidate cache"
connection.statement_cache.invalidate()
W_DBObject.state.set_column_type(class_name, n0, aType)
connection = dbm.connection(space)
connection.execute(update_sql(class_name, n0),
[w_value, self.w_id(space)])
|
Python
| 0.000001
|
@@ -3260,29 +3260,8 @@
Id%0A%0A
- @jit.unroll_safe%0A
@@ -3421,17 +3421,16 @@
xt_id()%0A
-%0A
|
2d2513ce860503a7ab69e56f47998ca075efaa3b
|
Add new production hosts
|
rtei/settings/production.py
|
rtei/settings/production.py
|
import sys
from .base import *
DEBUG = False
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = os.environ.get('SECRET_KEY')
# AWS S3 settings
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
# Necessary to overcome broken pipe error if not default US location
# (https://github.com/boto/boto/issues/621).
if os.environ.get('AWS_S3_HOST', False):
AWS_S3_HOST = os.environ.get('AWS_S3_HOST')
MEDIA_URL = "https://%s/" % (AWS_S3_CUSTOM_DOMAIN)
ALLOWED_HOSTS = [
'localhost',
'rtei.herokuapp.com'
]
# Email to receive contact requests from the form on /about/contact-us/
RTEI_CONTACT_FORM_EMAIL = os.environ.get('RTEI_CONTACT_FORM_EMAIL')
EMAIL_HOST = os.environ.get('EMAIL_HOST')
EMAIL_PORT = os.environ.get('EMAIL_PORT', 587)
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD')
EMAIL_USE_TLS = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'stream': sys.stdout
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'rtei': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
}
try:
from .local import *
except ImportError:
pass
|
Python
| 0
|
@@ -895,16 +895,74 @@
app.com'
+,%0A 'rtei-production.herokuapp.com',%0A 'www.rtei.org',
%0A%5D%0A%0A# Em
|
eff874de1ae663ee65f37990cd9b75e7ad574750
|
Support ability to pass "*" as wildcard backend in haproxyconn enable/disable_server modules.
|
salt/modules/haproxyconn.py
|
salt/modules/haproxyconn.py
|
# -*- coding: utf-8 -*-
'''
Support for haproxy
.. versionadded:: 2014.7.0
'''
from __future__ import generators
from __future__ import absolute_import
# Import python libs
import stat
import os
import logging
try:
import haproxy.cmds
import haproxy.conn
HAS_HAPROXY = True
except ImportError:
HAS_HAPROXY = False
log = logging.getLogger(__name__)
__virtualname__ = 'haproxy'
def __virtual__():
'''
Only load the module if haproxyctl is installed
'''
if HAS_HAPROXY:
return __virtualname__
return False
def _get_conn(socket='/var/run/haproxy.sock'):
'''
Get connection to haproxy socket.
'''
assert os.path.exists(socket), '{0} does not exist.'.format(socket)
issock = os.stat(socket).st_mode
assert stat.S_ISSOCK(issock), '{0} is not a socket.'.format(socket)
ha_conn = haproxy.conn.HaPConn(socket)
return ha_conn
def list_servers(backend, socket='/var/run/haproxy.sock', objectify=False):
'''
List servers in haproxy backend.
backend
haproxy backend
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.list_servers mysql
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.listServers(backend=backend)
return ha_conn.sendCmd(ha_cmd, objectify=objectify)
def enable_server(name, backend, socket='/var/run/haproxy.sock'):
'''
Enable Server in haproxy
name
Server to enable
backend
haproxy backend
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.enable_server web1.example.com www
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.enableServer(server=name, backend=backend)
ha_conn.sendCmd(ha_cmd)
return list_servers(backend, socket=socket)
def disable_server(name, backend, socket='/var/run/haproxy.sock'):
'''
Disable server in haproxy.
name
Server to disable
backend
haproxy backend
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.disable_server db1.example.com mysql
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.disableServer(server=name, backend=backend)
ha_conn.sendCmd(ha_cmd)
return list_servers(backend, socket=socket)
def get_weight(name, backend, socket='/var/run/haproxy.sock'):
'''
Get server weight
name
Server name
backend
haproxy backend
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.get_weight web1.example.com www
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.getWeight(server=name, backend=backend)
return ha_conn.sendCmd(ha_cmd)
def set_weight(name, backend, weight=0, socket='/var/run/haproxy.sock'):
'''
Set server weight
name
Server name
backend
haproxy backend
weight
Server Weight
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.set_weight web1.example.com www 13
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.getWeight(server=name, backend=backend, weight=weight)
ha_conn.sendCmd(ha_cmd)
return get_weight(name, backend, socket=socket)
def show_frontends(socket='/var/run/haproxy.sock'):
'''
Show HaProxy frontends
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.show_frontends
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.showFrontends()
return ha_conn.sendCmd(ha_cmd)
def show_backends(socket='/var/run/haproxy.sock'):
'''
Show HaProxy Backends
socket
haproxy stats socket
CLI Example:
.. code-block:: bash
salt '*' haproxy.show_backends
'''
ha_conn = _get_conn(socket)
ha_cmd = haproxy.cmds.showBackends()
return ha_conn.sendCmd(ha_cmd)
|
Python
| 0
|
@@ -1503,33 +1503,69 @@
haproxy backend
+, or all backends if %22*%22 is supplied
%0A
-
%0A socket%0A
@@ -1726,32 +1726,189 @@
et_conn(socket)%0A
+%0A if backend == '*':%0A backends = show_backends(socket=socket).split('%5Cn')%0A else:%0A backends = %5Bbackend%5D%0A%0A for backend in backends:%0A
ha_cmd = hap
@@ -1952,32 +1952,36 @@
ackend=backend)%0A
+
ha_conn.send
@@ -1984,32 +1984,33 @@
sendCmd(ha_cmd)%0A
+%0A
return list_
@@ -2008,44 +2008,16 @@
urn
-list_servers(backend, socket=socket)
+backends
%0A%0A%0Ad
@@ -2185,32 +2185,68 @@
haproxy backend
+, or all backends if %22*%22 is supplied
%0A%0A socket%0A
@@ -2410,32 +2410,189 @@
et_conn(socket)%0A
+%0A if backend == '*':%0A backends = show_backends(socket=socket).split('%5Cn')%0A else:%0A backends = %5Bbackend%5D%0A%0A for backend in backends:%0A
ha_cmd = hap
@@ -2641,24 +2641,28 @@
nd=backend)%0A
+
ha_conn.
@@ -2669,32 +2669,33 @@
sendCmd(ha_cmd)%0A
+%0A
return list_
@@ -2693,44 +2693,16 @@
urn
-list_servers(backend, socket=socket)
+backends
%0A%0A%0Ad
|
6d8f5e3cd6d2c2997b53121b55bd5da3114ab517
|
Use the same function for multiple purposes
|
salt/modules/nagios_json.py
|
salt/modules/nagios_json.py
|
# -*- coding: utf-8 -*-
'''
Check Host & Service status from Nagios via JSON RPC.
.. versionadded:: Beryllium
'''
# Import python libs
from __future__ import absolute_import
import logging
import httplib
# Import 3rd-party libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext.six.moves.urllib.parse import urljoin as _urljoin
# pylint: enable=import-error,no-name-in-module
try:
import requests
from requests.exceptions import ConnectionError
REQUESTS_AVAILABLE = True
except ImportError:
REQUESTS_AVAILABLE = False
log = logging.getLogger(__name__)
def __virtual__():
'''
Only load if requests is successfully imported
'''
if REQUESTS_AVAILABLE:
return 'nagios_rpc'
log.debug('Unable to initialize "nagios_rpc": library "requests" is not installed.')
return False
def _config():
'''
Get configuration items for URL, Username and Password
'''
return {
'url': __salt__['config.get']('nagios:url', ''),
'username': __salt__['config.get']('nagios:username', ''),
'password': __salt__['config.get']('nagios:password', ''),
}
def _status_query(query, hostname, service=None, method='GET', **kwargs):
'''
Send query along to Nagios.
'''
data = {}
req_params = {
'hostname': hostname,
'query': query,
}
if service:
req_params['servicedescription'] = service
url = kwargs.get('url')
username = kwargs.get('username')
password = kwargs.get('password')
# Make sure "cgi-bin/statusjson.cgi" in the URL
url = url.split("cgi-bin")[0]
if not url.endswith('/'):
url += '/'
url = _urljoin(url, 'statusjson.cgi')
try:
if username and password:
auth = (username, password,)
else:
auth = None
result = requests.request(method=method,
url=url,
params=req_params,
data=data,
verify=True,
auth=auth)
if result.status_code == httplib.OK:
data = result.json()
elif result.status_code == httplib.UNAUTHORIZED:
log.error('Nagios authentication failed. Please check the configuration.')
elif result.status_code == httplib.NOT_FOUND:
log.error('URL {0} for Nagios was not found.'.format(url))
else:
log.debug('Results: {0}'.format(result.text))
except ConnectionError as conn_err:
log.error('Error {0}'.format(conn_err))
return data
def status(hostname, service=None):
'''
Check status of a particular host or particular service on it in Nagios.
If service parameter is omitted, then check host itself.
:param hostname: The hostname to check the status of the service in Nagios.
:param service: The service to check the status of in Nagios.
:return: Boolean True is the status is 'OK' or 'Warning', False if 'Critical'
CLI Example:
.. code-block:: bash
salt '*' nagios_rpc.service_status hostname=webserver.domain.com
salt '*' nagios_rpc.service_status hostname=webserver.domain.com service='HTTP'
'''
config = _config()
if not config['nagios_url']:
log.error('Missing nagios_url')
return False
results = _status_query(query='service',
nagios_url=config['nagios_url'],
nagios_username=config['nagios_username'],
nagios_password=config['nagios_password'],
hostname=hostname,
servicedescription=service_description)
if not config['url']:
log.error('Missing Nagios URL in the configuration')
return False
return results.get('data', {}).get(target, {}).get('status', 0) > 0
|
Python
| 0.000029
|
@@ -3326,31 +3326,24 @@
not config%5B'
-nagios_
url'%5D:%0A
@@ -3364,26 +3364,47 @@
Missing
-n
+N
agios
-_url
+ URL in the configuration
')%0A
@@ -3420,16 +3420,61 @@
False%0A%0A
+ target = service and 'service' or 'host'%0A
resu
@@ -3497,23 +3497,14 @@
ery(
-query='service'
+target
,%0A
@@ -3533,179 +3533,106 @@
-nagios_url=config%5B'nagios_url'%5D,%0A nagios_username=config%5B'nagios_username'%5D,%0A nagios_password=config%5B'nagios_password
+hostname,%0A service=service,%0A url=config%5B'url
'%5D,%0A
@@ -3663,25 +3663,35 @@
-host
+user
name=
-host
+config%5B'user
name
+'%5D
,%0A
@@ -3720,157 +3720,36 @@
-servicedescription=service_description)%0A%0A if not config%5B'url'%5D:%0A log.error('Missing Nagios URL in the configuration')%0A return False%0A
+password=config%5B'password'%5D)
%0A%0A
|
ce741c0bb862d0a9cd4c58576836a0cbbb669833
|
Add docstring
|
salt/modules/win_network.py
|
salt/modules/win_network.py
|
'''
Module for gathering and managing network information
'''
# Import python libs
import re
# Import salt libs
import salt.utils
from salt.utils.socket_util import sanitize_host
# Import 3rd party libraries
HAS_WMI = False
try:
import wmi
HAS_WMI = True
except ImportError:
pass
def __virtual__():
'''
Only works on Windows systems
'''
if salt.utils.is_windows():
return 'network'
return False
def ping(host):
'''
Performs a ping to a host
CLI Example::
salt '*' network.ping archlinux.org
'''
cmd = 'ping -n 4 {0}'.format(sanitize_host(host))
return __salt__['cmd.run'](cmd)
def netstat():
'''
Return information on open ports and states
CLI Example::
salt '*' network.netstat
'''
ret = []
cmd = 'netstat -na'
lines = __salt__['cmd.run'](cmd).splitlines()
for line in lines:
comps = line.split()
if line.startswith(' TCP'):
ret.append({
'local-address': comps[1],
'proto': comps[0],
'remote-address': comps[2],
'state': comps[3]})
if line.startswith(' UDP'):
ret.append({
'local-address': comps[1],
'proto': comps[0],
'remote-address': comps[2],
'state': None})
return ret
def traceroute(host):
'''
Performs a traceroute to a 3rd party host
CLI Example::
salt '*' network.traceroute archlinux.org
'''
ret = []
cmd = 'tracert {0}'.format(sanitize_host(host))
lines = __salt__['cmd.run'](cmd).splitlines()
for line in lines:
if not ' ' in line:
continue
if line.startswith('Trac'):
continue
if line.startswith('over'):
continue
comps = line.split()
complength = len(comps)
# This method still needs to better catch rows of other lengths
# For example if some of the ms returns are '*'
if complength == 9:
result = {
'count': comps[0],
'hostname': comps[7],
'ip': comps[8],
'ms1': comps[1],
'ms2': comps[3],
'ms3': comps[5]}
ret.append(result)
elif complength == 8:
result = {
'count': comps[0],
'hostname': None,
'ip': comps[7],
'ms1': comps[1],
'ms2': comps[3],
'ms3': comps[5]}
ret.append(result)
else:
result = {
'count': comps[0],
'hostname': None,
'ip': None,
'ms1': None,
'ms2': None,
'ms3': None}
ret.append(result)
return ret
def nslookup(host):
'''
Query DNS for information about a domain or ip address
CLI Example::
salt '*' network.nslookup archlinux.org
'''
ret = []
cmd = 'nslookup {0}'.format(sanitize_host(host))
lines = __salt__['cmd.run'](cmd).splitlines()
for line in lines:
if line.startswith('Non-authoritative'):
continue
if ":" in line:
comps = line.split(":")
ret.append({comps[0].strip(): comps[1].strip()})
return ret
def dig(host):
'''
Performs a DNS lookup with dig
Note: dig must be installed on the Windows minion
CLI Example::
salt '*' network.dig archlinux.org
'''
cmd = 'dig {0}'.format(sanitize_host(host))
return __salt__['cmd.run'](cmd)
def _cidr_to_ipv4_netmask(cidr_bits):
'''
Returns an IPv4 netmask
'''
netmask = ''
for idx in range(4):
if idx:
netmask += '.'
if cidr_bits >= 8:
netmask += '255'
cidr_bits -= 8
else:
netmask += '{0:d}'.format(256 - (2 ** (8 - cidr_bits)))
cidr_bits = 0
return netmask
def _interfaces_ipconfig(out):
'''
Returns a dictionary of interfaces with various information about each
(up/down state, ip address, netmask, and hwaddr)
'''
ifaces = dict()
iface = None
adapter_iface_regex = re.compile(r'adapter (\S.+):$')
for line in out.splitlines():
if not line:
continue
# TODO what does Windows call Infiniband and 10/40gige adapters
if line.startswith('Ethernet'):
iface = ifaces[adapter_iface_regex.search(line).group(1)]
iface['up'] = True
addr = None
continue
if iface:
key, val = line.split(',', 1)
key = key.strip(' .')
val = val.strip()
if addr and key in ('Subnet Mask'):
addr['netmask'] = val
elif key in ('IP Address', 'IPv4 Address'):
if 'inet' not in iface:
iface['inet'] = list()
addr = {'address': val.rstrip('(Preferred)'),
'netmask': None,
'broadcast': None} # TODO find the broadcast
iface['inet'].append(addr)
elif 'IPv6 Address' in key:
if 'inet6' not in iface:
iface['inet'] = list()
# XXX What is the prefixlen!?
addr = {'address': val.rstrip('(Preferred)'),
'prefixlen': None}
iface['inet6'].append(addr)
elif key in ('Physical Address'):
iface['hwaddr'] = val
elif key in ('Media State'):
# XXX seen used for tunnel adaptors
# might be useful
iface['up'] = (val != 'Media disconnected')
def interfaces():
with salt.utils.winapi.Com():
c = wmi.WMI()
ifaces = {}
for iface in c.Win32_NetworkAdapterConfiguration(IPEnabled=1):
ifaces[iface.Description] = dict()
if iface.MACAddress:
ifaces[iface.Description]['hwaddr'] = iface.MACAddress
if iface.IPEnabled:
ifaces[iface.Description]['up'] = True
ifaces[iface.Description]['inet'] = []
for ip in iface.IPAddress:
item = {}
item['broadcast'] = iface.DefaultIPGateway[0]
item['netmask'] = iface.IPSubnet[0]
item['label'] = iface.Description
item['address'] = ip
ifaces[iface.Description]['inet'].append(item)
else:
ifaces[iface.Description]['up'] = False
return ifaces
|
Python
| 0.000005
|
@@ -5795,24 +5795,85 @@
terfaces():%0A
+ '''%0A Return details about network interfaces%0A '''%0A%0A
with sal
|
1df00cb6adf8b9cac677f5f6a272331ab5388c90
|
Update main.py
|
vkfeed/pages/main.py
|
vkfeed/pages/main.py
|
# -*- coding: utf-8 -*-
'''Generates the main page.'''
from __future__ import unicode_literals
import re
import urllib
import webapp2
import RSSvk.utils
class MainPage(webapp2.RequestHandler):
'''Generates the main page.'''
def get(self):
'''Processes a GET request.'''
self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
self.response.out.write(RSSvk.utils.render_template('main.html'))
def post(self):
'''Processes a POST request.'''
profile_url = self.request.get('profile_url', '')
match = re.match(r'''^
\s*
(?:https?://(?:www\.)?(?:vk\.com|vkontakte\.ru)/)?
(?P<profile_id>[a-zA-Z0-9._-]+)/?
\s*
$''', profile_url, re.IGNORECASE | re.VERBOSE)
if match:
params = {}
if self.request.get('foreign_posts') == '1':
params['foreign_posts'] = '1'
if self.request.get('big_photos') == '1':
params['big_photos'] = '1'
if self.request.get('show_photo') != '1':
params['show_photo'] = '0'
params = '?' + urllib.urlencode(params) if params else ''
self.redirect('/feed/' + match.group('profile_id') + '/wall' + params)
else:
self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
self.response.out.write(RSSvk.utils.render_template('main.html', {
'post_error': '''
Неверно указан URL профиля.
Адрес должен быть вида http://vk.com/имя_профиля.
Имя профиля должно удовлетворять требованиям, предъявляемым администрацией ВКонтакте.
'''
}))
|
Python
| 0.000001
|
@@ -139,21 +139,22 @@
%0Aimport
-RSSvk
+vkfeed
.utils%0A%0A
@@ -391,37 +391,38 @@
ponse.out.write(
-RSSvk
+vkfeed
.utils.render_te
@@ -1424,13 +1424,14 @@
ite(
-RSSvk
+vkfeed
.uti
|
8c89a8252c807581661d729282c94744040dceb3
|
Add OutStream.closed property
|
IPython/kernel/zmq/iostream.py
|
IPython/kernel/zmq/iostream.py
|
"""Wrappers for forwarding stdout/stderr over zmq"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import threading
import time
import uuid
from io import StringIO, UnsupportedOperation
import zmq
from zmq.eventloop.ioloop import IOLoop
from .session import extract_header
from IPython.utils import py3compat
from IPython.utils.py3compat import unicode_type
from IPython.utils.warn import warn
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
MASTER = 0
CHILD = 1
#-----------------------------------------------------------------------------
# Stream classes
#-----------------------------------------------------------------------------
class OutStream(object):
"""A file like object that publishes the stream to a 0MQ PUB socket."""
# The time interval between automatic flushes, in seconds.
_subprocess_flush_limit = 256
flush_interval = 0.05
topic=None
def __init__(self, session, pub_socket, name, pipe=True):
self.encoding = 'UTF-8'
self.session = session
self.pub_socket = pub_socket
self.name = name
self.topic = b'stream.' + py3compat.cast_bytes(name)
self.parent_header = {}
self._new_buffer()
self._buffer_lock = threading.Lock()
self._master_pid = os.getpid()
self._master_thread = threading.current_thread().ident
self._pipe_pid = os.getpid()
self._pipe_flag = pipe
if pipe:
self._setup_pipe_in()
def _setup_pipe_in(self):
"""setup listening pipe for subprocesses"""
ctx = self.pub_socket.context
# use UUID to authenticate pipe messages
self._pipe_uuid = uuid.uuid4().bytes
self._pipe_in = ctx.socket(zmq.PULL)
self._pipe_in.linger = 0
try:
self._pipe_port = self._pipe_in.bind_to_random_port("tcp://127.0.0.1")
except zmq.ZMQError as e:
warn("Couldn't bind IOStream to 127.0.0.1: %s" % e +
"\nsubprocess output will be unavailable."
)
self._pipe_flag = False
self._pipe_in.close()
del self._pipe_in
return
self._pipe_poller = zmq.Poller()
self._pipe_poller.register(self._pipe_in, zmq.POLLIN)
if IOLoop.initialized():
# subprocess flush should trigger flush
# if kernel is idle
IOLoop.instance().add_handler(self._pipe_in,
lambda s, event: self.flush(),
IOLoop.READ,
)
def _setup_pipe_out(self):
# must be new context after fork
ctx = zmq.Context()
self._pipe_pid = os.getpid()
self._pipe_out = ctx.socket(zmq.PUSH)
self._pipe_out_lock = threading.Lock()
self._pipe_out.connect("tcp://127.0.0.1:%i" % self._pipe_port)
def _is_master_process(self):
return os.getpid() == self._master_pid
def _is_master_thread(self):
return threading.current_thread().ident == self._master_thread
def _have_pipe_out(self):
return os.getpid() == self._pipe_pid
def _check_mp_mode(self):
"""check for forks, and switch to zmq pipeline if necessary"""
if not self._pipe_flag or self._is_master_process():
return MASTER
else:
if not self._have_pipe_out():
self._flush_buffer()
# setup a new out pipe
self._setup_pipe_out()
return CHILD
def set_parent(self, parent):
self.parent_header = extract_header(parent)
def close(self):
self.pub_socket = None
def _flush_from_subprocesses(self):
"""flush possible pub data from subprocesses into my buffer"""
if not self._pipe_flag or not self._is_master_process():
return
for i in range(self._subprocess_flush_limit):
if self._pipe_poller.poll(0):
msg = self._pipe_in.recv_multipart()
if msg[0] != self._pipe_uuid:
continue
else:
self._buffer.write(msg[1].decode(self.encoding, 'replace'))
# this always means a flush,
# so reset our timer
self._start = 0
else:
break
def _schedule_flush(self):
"""schedule a flush in the main thread
only works with a tornado/pyzmq eventloop running
"""
if IOLoop.initialized():
IOLoop.instance().add_callback(self.flush)
else:
# no async loop, at least force the timer
self._start = 0
def flush(self):
"""trigger actual zmq send"""
if self.pub_socket is None:
raise ValueError(u'I/O operation on closed file')
mp_mode = self._check_mp_mode()
if mp_mode != CHILD:
# we are master
if not self._is_master_thread():
# sub-threads must not trigger flush directly,
# but at least they can schedule an async flush, or force the timer.
self._schedule_flush()
return
self._flush_from_subprocesses()
data = self._flush_buffer()
if data:
content = {u'name':self.name, u'text':data}
msg = self.session.send(self.pub_socket, u'stream', content=content,
parent=self.parent_header, ident=self.topic)
if hasattr(self.pub_socket, 'flush'):
# socket itself has flush (presumably ZMQStream)
self.pub_socket.flush()
else:
with self._pipe_out_lock:
string = self._flush_buffer()
tracker = self._pipe_out.send_multipart([
self._pipe_uuid,
string.encode(self.encoding, 'replace'),
], copy=False, track=True)
try:
tracker.wait(1)
except:
pass
def isatty(self):
return False
def __next__(self):
raise IOError('Read not supported on a write only stream.')
if not py3compat.PY3:
next = __next__
def read(self, size=-1):
raise IOError('Read not supported on a write only stream.')
def readline(self, size=-1):
raise IOError('Read not supported on a write only stream.')
def fileno(self):
raise UnsupportedOperation("IOStream has no fileno.")
def write(self, string):
if self.pub_socket is None:
raise ValueError('I/O operation on closed file')
else:
# Make sure that we're handling unicode
if not isinstance(string, unicode_type):
string = string.decode(self.encoding, 'replace')
is_child = (self._check_mp_mode() == CHILD)
self._buffer.write(string)
if is_child:
# newlines imply flush in subprocesses
# mp.Pool cannot be trusted to flush promptly (or ever),
# and this helps.
if '\n' in string:
self.flush()
# do we want to check subprocess flushes on write?
# self._flush_from_subprocesses()
current_time = time.time()
if self._start < 0:
self._start = current_time
elif current_time - self._start > self.flush_interval:
self.flush()
def writelines(self, sequence):
if self.pub_socket is None:
raise ValueError('I/O operation on closed file')
else:
for string in sequence:
self.write(string)
def _flush_buffer(self):
"""clear the current buffer and return the current buffer data"""
data = u''
if self._buffer is not None:
data = self._buffer.getvalue()
self._buffer.close()
self._new_buffer()
return data
def _new_buffer(self):
self._buffer = StringIO()
self._start = -1
|
Python
| 0.000002
|
@@ -3811,24 +3811,100 @@
ket = None%0A%0A
+ @property%0A def closed(self):%0A return self.pub_socket is None%0A%0A
def _flu
|
be8f36aeacfa448fe9ea6dfa3cdf6f7404c9a88a
|
use future division
|
chainercv/links/model/segnet/segnet_basic.py
|
chainercv/links/model/segnet/segnet_basic.py
|
import numpy as np
import chainer
import chainer.functions as F
import chainer.links as L
from chainercv.transforms import resize
class SegNetBasic(chainer.Chain):
"""SegNet Basic for semantic segmentation.
This is a SegNet [#]_ model for semantic segmenation. This is based on
SegNetBasic model that is found here_.
.. [#] Vijay Badrinarayanan, Alex Kendall and Roberto Cipolla "SegNet: A \
Deep Convolutional Encoder-Decoder Architecture for Image Segmentation." \
PAMI, 2017
.. _here: http://github.com/alexgkendall/SegNet-Tutorial
Args:
n_class (int): The number of channels for the final convolutional
layer. SegNetBasic basically takes the number of target classes as
this argment.
"""
def __init__(self, n_class):
w = chainer.initializers.HeNormal()
super(SegNetBasic, self).__init__(
conv1=L.Convolution2D(None, 64, 7, 1, 3, nobias=True, initialW=w),
conv1_bn=L.BatchNormalization(64, initial_beta=0.001),
conv2=L.Convolution2D(64, 64, 7, 1, 3, nobias=True, initialW=w),
conv2_bn=L.BatchNormalization(64, initial_beta=0.001),
conv3=L.Convolution2D(64, 64, 7, 1, 3, nobias=True, initialW=w),
conv3_bn=L.BatchNormalization(64, initial_beta=0.001),
conv4=L.Convolution2D(64, 64, 7, 1, 3, nobias=True, initialW=w),
conv4_bn=L.BatchNormalization(64, initial_beta=0.001),
conv_decode4=L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=w),
conv_decode4_bn=L.BatchNormalization(64, initial_beta=0.001),
conv_decode3=L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=w),
conv_decode3_bn=L.BatchNormalization(64, initial_beta=0.001),
conv_decode2=L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=w),
conv_decode2_bn=L.BatchNormalization(64, initial_beta=0.001),
conv_decode1=L.Convolution2D(
64, 64, 7, 1, 3, nobias=True, initialW=w),
conv_decode1_bn=L.BatchNormalization(64, initial_beta=0.001),
conv_classifier=L.Convolution2D(
64, n_class, 1, 1, 0, initialW=w)
)
self.n_class = n_class
self.train = True
def _upsampling_2d(self, x, pool):
if x.shape != pool.indexes.shape:
min_h = min(x.shape[2], pool.indexes.shape[2])
min_w = min(x.shape[3], pool.indexes.shape[3])
x = x[:, :, :min_h, :min_w]
pool.indexes = pool.indexes[:, :, :min_h, :min_w]
outsize = (x.shape[2] * 2, x.shape[3] * 2)
return F.upsampling_2d(
x, pool.indexes, ksize=(pool.kh, pool.kw),
stride=(pool.sy, pool.sx), pad=(pool.ph, pool.pw), outsize=outsize)
def __call__(self, x):
"""Compute an image-wise score from a batch of images
Args:
x (chainer.Variable): A variable with 4D image array.
Returns:
chainer.Variable:
An image-wise score. Its channel size is :obj:`self.n_class`.
"""
p1 = F.MaxPooling2D(2, 2, use_cudnn=False)
p2 = F.MaxPooling2D(2, 2, use_cudnn=False)
p3 = F.MaxPooling2D(2, 2, use_cudnn=False)
p4 = F.MaxPooling2D(2, 2, use_cudnn=False)
h = F.local_response_normalization(x, 5, 1, 1e-4 / 5., 0.75)
h = p1(F.relu(self.conv1_bn(self.conv1(h), test=not self.train)))
h = p2(F.relu(self.conv2_bn(self.conv2(h), test=not self.train)))
h = p3(F.relu(self.conv3_bn(self.conv3(h), test=not self.train)))
h = p4(F.relu(self.conv4_bn(self.conv4(h), test=not self.train)))
h = self._upsampling_2d(h, p4)
h = self.conv_decode4_bn(self.conv_decode4(h), test=not self.train)
h = self._upsampling_2d(h, p3)
h = self.conv_decode3_bn(self.conv_decode3(h), test=not self.train)
h = self._upsampling_2d(h, p2)
h = self.conv_decode2_bn(self.conv_decode2(h), test=not self.train)
h = self._upsampling_2d(h, p1)
h = self.conv_decode1_bn(self.conv_decode1(h), test=not self.train)
score = self.conv_classifier(h)
return score
def predict(self, imgs):
"""Conduct semantic segmentations from images.
Args:
imgs (iterable of numpy.ndarray): Arrays holding images.
All images are in CHW and RGB format
and the range of their values are :math:`[0, 255]`.
Returns:
list of numpy.ndarray:
List of integer labels predicted from each image in the input \
list.
"""
labels = []
for img in imgs:
C, H, W = img.shape
x = chainer.Variable(
self.xp.asarray(img[np.newaxis]), volatile=chainer.flag.ON)
score = self.__call__(x)[0].data
if score.shape != (C, H, W):
dtype = score.dtype
score = resize(score, (W, H)).astype(dtype)
label = self.xp.argmax(score, axis=0)
labels.append(label)
return labels
|
Python
| 0
|
@@ -1,12 +1,45 @@
+from __future__ import division%0A%0A
import numpy
|
4b5e8dc1808d1a6107545d486b4097482d07635c
|
Add dataum_to_img method for pascal dataset
|
fcn/pascal.py
|
fcn/pascal.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cPickle as pickle
import os.path as osp
import numpy as np
from scipy.misc import imread
from sklearn.datasets.base import Bunch
import plyvel
import fcn
class SegmentationClassDataset(Bunch):
target_names = np.array([
'background',
'aeroplane',
'bicycle',
'bird',
'boat',
'bottle',
'bus',
'car',
'cat',
'chair',
'cow',
'diningtable',
'dog',
'horse',
'motorbike',
'person',
'potted plant',
'sheep',
'sofa',
'train',
'tv/monitor',
])
def __init__(self, db_path=None):
super(self.__class__, self).__init__()
if db_path is None:
db_path = osp.join(fcn.data_dir, 'SegmentationClassDataset_db')
self.db = plyvel.DB(db_path, create_if_missing=True)
self.pascal_dir = osp.join(fcn.data_dir, 'pascal/VOC2012')
self._load_segmentation_files()
def _load_segmentation_files(self):
for type in ['train', 'trainval', 'val']:
id_list_file = osp.join(
self.pascal_dir, 'ImageSets/Segmentation/{0}.txt'.format(type))
ids = [id_.strip() for id_ in open(id_list_file)]
setattr(self, type, np.array(ids))
def _load_datum(self, id):
# check cache
datum = self.db.get(str(id))
if datum is not None:
return pickle.loads(datum)
# there is no cache
img_file = osp.join(self.pascal_dir, 'JPEGImages', id + '.jpg')
img = imread(img_file, mode='RGB')
label_rgb_file = osp.join(
self.pascal_dir, 'SegmentationClass', id + '.png')
label_rgb = imread(label_rgb_file, mode='RGB')
label = self._label_rgb_to_32sc1(label_rgb)
datum = (img, label)
# save cache
self.db.put(str(id), pickle.dumps(datum))
return datum
def _label_rgb_to_32sc1(self, label_rgb):
assert label_rgb.dtype == np.uint8
label = np.zeros(label_rgb.shape[:2], dtype=np.int32)
label.fill(-1)
cmap = fcn.util.labelcolormap(len(self.target_names))
cmap = (cmap * 255).astype(np.uint8)
for l, rgb in enumerate(cmap):
mask = np.all(label_rgb == rgb, axis=-1)
label[mask] = l
return label
def next_batch(self, batch_size, type, indices=None):
"""Generate next batch whose size is the specified batch_size."""
ids = getattr(self, type)
if indices is None:
indices = np.random.randint(0, len(ids), batch_size)
batch = []
for id in ids[indices]:
datum = self._load_datum(id)
batch.append(datum)
return batch
@staticmethod
def img_to_datum(img):
datum = img.astype(np.float32)
datum = datum[:, :, ::-1] # RGB -> BGR
datum -= np.array((104.00698793, 116.66876762, 122.67891434))
datum = datum.transpose((2, 0, 1))
return datum
|
Python
| 0
|
@@ -724,16 +724,84 @@
,%0A %5D)
+%0A mean_bgr = np.array((104.00698793, 116.66876762, 122.67891434))
%0A%0A de
@@ -2930,26 +2930,8 @@
ch%0A%0A
- @staticmethod%0A
@@ -2947,16 +2947,22 @@
o_datum(
+self,
img):%0A
@@ -3067,82 +3067,139 @@
-=
-np.array((104.00698793, 116.66876762, 122.67891434))%0A datum = datum
+self.mean_bgr%0A datum = datum.transpose((2, 0, 1))%0A return datum%0A%0A def datum_to_img(self, blob):%0A bgr = blob
.tra
@@ -3202,32 +3202,140 @@
.transpose((
+1,
2, 0
-, 1)
+))%0A bgr += self.mean_bgr%0A rgb = bgr%5B:, :, ::-1%5D # BGR -%3E RGB%0A rgb = rgb.astype(np.uint8
)%0A re
@@ -3323,30 +3323,28 @@
p.uint8)%0A return
-datum
+rgb
%0A
|
7474f18234d46e827400a79a8117590215bca21d
|
Fix error in forcefield file
|
examples/neq-switching/run_equilibrium_setup.py
|
examples/neq-switching/run_equilibrium_setup.py
|
import numpy as np
import os
import tqdm
from openeye import oechem, oeiupac
from openmmtools import integrators, states, mcmc, constants
from openmoltools import forcefield_generators
from perses.rjmc.topology_proposal import TopologyProposal, SystemGenerator
from perses.rjmc.geometry import FFAllAngleGeometryEngine
from perses.annihilation.ncmc_switching import NCMCEngine
from perses.tests.utils import extractPositionsFromOEMOL
from simtk import openmm, unit
from io import StringIO
from simtk.openmm import app
import copy
from perses.dispersed.feptasks import compute_reduced_potential
import mdtraj as md
temperature = 300.0*unit.kelvin
beta = 1.0 / (temperature*constants.kB)
def generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename):
ifs = oechem.oemolistream()
ifs.open(ligand_filename)
# get the list of molecules
mol_list = [oechem.OEMol(mol) for mol in ifs.GetOEMols()]
mol_dict = {oechem.OEMolToSmiles(mol) : mol for mol in mol_list}
ligand_topology_dict = {smiles : forcefield_generators.generateTopologyFromOEMol(mol) for smiles, mol in mol_dict}
protein_pdbfile = open(protein_pdb_filename, 'r')
pdb_file = app.PDBFile(protein_pdbfile)
protein_pdbfile.close()
receptor_positions = pdb_file.positions
receptor_topology = pdb_file.topology
receptor_md_topology = md.Topology.from_openmm(receptor_topology)
n_receptor_atoms = receptor_md_topology.n_atoms
complex_topologies = {}
complex_positions = {}
for smiles, ligand_topology in ligand_topology_dict.items():
ligand_md_topology = md.Topology.from_openmm(ligand_topology)
n_complex_atoms = ligand_md_topology.n_atoms + n_receptor_atoms
copy_receptor_md_topology = copy.deepcopy(receptor_md_topology)
complex_positions = unit.Quantity(np.array([n_complex_atoms, 3]), unit=unit.nanometers)
complex_topology = copy_receptor_md_topology.join(ligand_md_topology)
complex_topologies[smiles] = complex_topology
ligand_positions = extractPositionsFromOEMOL(mol_dict[smiles])
complex_positions[:n_receptor_atoms, :] = receptor_positions
complex_positions[n_receptor_atoms:, :] = ligand_positions
complex_positions[smiles] = complex_positions
return complex_topologies, complex_positions
def solvate_system(topology, positions, system_generator, padding=9.0 * unit.angstrom, num_added=None, water_model='tip3p'):
modeller = app.Modeller(topology, positions)
modeller.addSolvent(system_generator._forcefield, model=water_model, padding=padding, numAdded=num_added)
solvated_topology = modeller.topology
solvated_positions = modeller.positions
solvated_system = system_generator.build_system(solvated_topology)
return solvated_positions, solvated_topology, solvated_system
def create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix):
barostat = openmm.MonteCarloBarostat(1.0*unit.atmosphere, temperature, 50)
system_generator = SystemGenerator(['ff99sbildn.xml', 'gaff.xml', 'tip3p.xml'], barostat=barostat, forcefield_kwargs={'nonbondedMethod': app.PME,
'constraints': app.HBonds,
'hydrogenMass': 4 * unit.amus})
complex_topologies, complex_positions = generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename)
list_of_smiles = list(complex_topologies.keys())
initial_smiles = list_of_smiles[0]
initial_topology = complex_topologies[initial_smiles]
initial_positions = complex_positions[initial_smiles]
solvated_initial_positions, solvated_topology, solvated_system = solvate_system(initial_topology, initial_positions, system_generator)
md_topology = md.Topology.from_openmm(solvated_topology)
num_added = md_topology.n_residues - initial_topology.n_residues
if not os.path.exists(output_directory):
os.mkdir(output_directory)
np.save("{}_{}_initial.npy".format(project_prefix, 0), (solvated_initial_positions, md_topology, solvated_system))
for i in tqdm.trange(1, len(list_of_smiles)):
smiles = list_of_smiles[i]
topology = complex_topologies[smiles]
positions = complex_positions[smiles]
solvated_positions, solvated_topology, solvated_system = solvate_system(topology, positions, system_generator, padding=None, num_added=num_added)
np.save("{}_{}_initial.npy".format(project_prefix, i),
(solvated_positions, md.Topology.from_openmm(solvated_topology), solvated_system))
if __name__=="__main__":
import sys
import yaml
yaml_filename = sys.argv[1]
with open(yaml_filename, "r") as yaml_file:
options = yaml.load(yaml_file)
setup_options = options['setup']
ligand_filename = setup_options['ligand_filename']
protein_pdb_filename = setup_options['protein_pdb_filename']
project_prefix = setup_options['project_prefix']
output_directory = setup_options['output_directory']
create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix)
|
Python
| 0.000004
|
@@ -3083,18 +3083,30 @@
r(%5B'
-ff99sbildn
+amber14/protein.ff14SB
.xml
@@ -3121,16 +3121,24 @@
.xml', '
+amber14/
tip3p.xm
|
15129d5cc4c3a4981a41bebbfc6ace855004cd20
|
Add organizational structure.
|
students/psbriant/session08/circle.py
|
students/psbriant/session08/circle.py
|
class Circle:
def __init__(self, radius):
"""
"""
self.radius = radius
self.diameter = radius * 2
@classmethod
def from_diameter(cls, diameter):
self = cls(diameter / 2)
return self
def __str__(self):
return "A circle object with radius: {}".format(self.radius)
def __repr__(self):
return "Circle({})".format(self.radius)
@property
def diameter(self):
return self.radius * 2
@diameter.setter
def diameter(self, value):
self.radius = value / 2
|
Python
| 0.000085
|
@@ -1,12 +1,157 @@
+%22%22%22%0AName: Paul Briant%0ADate: 11/29/16%0AClass: Introduction to Python%0ASession: 08%0AAssignment: Circle Lab%0A%0ADescription:%0AClasses for Circle Lab%0A%22%22%22%0A%0A%0A
class Circle
|
13886b465c5ba81968064acfe6f60fd9b04aed8d
|
remove integrate hack
|
streams/inference/core.py
|
streams/inference/core.py
|
# coding: utf-8
""" Core functionality for doing stream inference """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os, sys
import logging
# Third-party
import emcee
import numpy as np
import astropy.units as u
from ..dynamics import Particle
from ..coordinates import _gc_to_hel, _hel_to_gc
from ..integrate import ParticleIntegrator
from .parameter import *
from .prior import *
__all__ = ["StreamModel"]
logger = logging.getLogger(__name__)
class StreamModel(object):
def __init__(self, potential, simulation,
satellite, particles, parameters=[]):
""" ...
Parameters
----------
...
"""
self.potential = potential
self.simulation = simulation
self.satellite = satellite
self.particles = particles
self.parameters = parameters
# TODO HACK because of:
self.simulation._table = None
def __call__(self, p, *args):
self.vector = np.array(p)
return self.ln_posterior(*args)
@property
def vector(self):
return np.concatenate(map(np.ravel,
[p.get() for p in self.parameters]))
@property
def ndim(self):
return len(self.sample())
@vector.setter
def vector(self, values):
ind = 0
for p in self.parameters:
if len(p):
p.set(values[ind:ind+len(p)])
ind += len(p)
else:
p.set(values[ind])
ind += 1
def sample(self, size=None):
if size is None:
return np.hstack([np.ravel(p.sample()) for p in self.parameters])
for ii in range(size):
x = np.hstack([np.ravel(p.sample()) for p in self.parameters])
try:
d[ii] = x
except NameError:
d = np.zeros((size,) + x.shape)
return d
def ln_prior(self):
ppar = np.concatenate([np.atleast_1d(p.ln_prior())\
for p in self.parameters])
if not np.all(np.isfinite(ppar)):
return -np.inf
return 0.
def ln_likelihood(self, *args):
""" This is a simplified version of the likelihood laid out by Hogg in
Bread and Butter (https://github.com/davidwhogg/BreadAndButter/).
The stars are assumed to come from a Gaussian progenitor,
described by just two scales -- the tidal radius and
velocity dispersion.
"""
t1 = self.simulation.t1
t2 = self.simulation.t2
dt = -1. # TODO HACK
return np.random.uniform() # HACK
# The true positions/velocities of the particles are parameters
Nparticles = self.particles.nparticles
particles_gc = self.particles.to_frame("galactocentric")
satellite_gc = self.satellite.to_frame("galactocentric")
acc = np.zeros((Nparticles+1,3))
pi = ParticleIntegrator((particles_gc,satellite_gc),
self.potential,
args=(Nparticles+1, acc))
particle_orbit,satellite_orbit = pi.run(t1=t1, t2=t2, dt=dt)
# These are the unbinding time indices for each particle
t_idx = np.array([np.argmin(np.fabs(satellite_orbit.t.value - tub)) \
for tub in self.particles.tub])
Ntimesteps = len(particle_orbit.t)
sat_var = np.zeros((Ntimesteps,6))
sat_var[:,:3] = self.potential._tidal_radius(self.satellite.m,
satellite_orbit._X[...,:3])*1.26
sat_var[:,3:] += self.satellite.v_disp
cov = sat_var**2
Sigma = np.array([cov[jj] for ii,jj in enumerate(t_idx)])
p_x = np.array([particle_orbit._X[jj,ii] \
for ii,jj in enumerate(t_idx)])
s_x = np.array([satellite_orbit._X[jj,0] \
for ii,jj in enumerate(t_idx)])
log_p_x_given_phi = -0.5*np.sum(-2.*np.log(Sigma) +
(p_x-s_x)**2/Sigma, axis=1) * abs(dt)
return np.sum(log_p_x_given_phi)
# if self.obs_data is not None:
# log_p_D_given_x = -0.5*np.sum(-2.*np.log(self.obs_error)\
# + (hel-self.obs_data)**2/self.obs_error**2)
# else:
# log_p_D_given_x = 0.
# if self.obs_data_sat is not None:
# log_p_D_given_x_sat = -0.5*np.sum(-2.*np.log(self.obs_error_sat)\
# + (hel_sat-self.obs_data_sat)**2/self.obs_error_sat**2)
# else:
# log_p_D_given_x_sat = 0.
def ln_posterior(self, *args):
lp = self.ln_prior()
if not np.isfinite(lp):
return -np.inf
ll = self.ln_likelihood(*args)
if not np.isfinite(ll):
return -np.inf
return lp + ll
|
Python
| 0
|
@@ -2685,51 +2685,8 @@
CK%0A%0A
- return np.random.uniform() # HACK%0A%0A
|
c79723b179b0bfeda9b324139d8478bf4f24c1e5
|
add unicode char to test print
|
Lib/glyphNameFormatter/test.py
|
Lib/glyphNameFormatter/test.py
|
def printRange(rangeName):
from glyphNameFormatter import GlyphName
from glyphNameFormatter.unicodeRangeNames import getRangeByName
from glyphNameFormatter.data import unicode2name_AGD
for u in range(*getRangeByName(rangeName)):
g = GlyphName(uniNumber=u)
name = g.getName()
if name is None:
continue
AGDName = unicode2name_AGD.get(g.uniNumber, "")
if AGDName is None or AGDName == name:
AGDName = ""
print name.ljust(50), AGDName.ljust(20), "%04X" % g.uniNumber, "\t", g.uniName
|
Python
| 0.000002
|
@@ -522,17 +522,17 @@
e.ljust(
-2
+3
0), %22%2504
@@ -554,16 +554,51 @@
r, %22%5Ct%22,
+ g.uniLetter.encode(%22utf-8%22), %22%5Ct%22,
g.uniNa
|
7be409211181bed5d38bde3be0b6c3d892c9cb29
|
Fix patch key error and remove print statements
|
frappe/patches/v11_0/remove_skip_for_doctype.py
|
frappe/patches/v11_0/remove_skip_for_doctype.py
|
import frappe
from frappe.desk.form.linked_with import get_linked_doctypes
from frappe.patches.v11_0.replicate_old_user_permissions import get_doctypes_to_skip
# `skip_for_doctype` was a un-normalized way of storing for which
# doctypes the user permission was applicable.
# in this patch, we normalize this into `applicable_for` where
# a new record will be created for each doctype where the user permission
# is applicable
#
# if the user permission is applicable for all doctypes, then only
# one record is created
def execute():
frappe.reload_doctype('User Permission')
# to check if we need to migrate from skip_for_doctype
has_skip_for_doctype = frappe.db.has_column("User Permission", "skip_for_doctype")
skip_for_doctype_map = {}
new_user_permissions_list = []
user_permissions_to_delete = []
for user_permission in frappe.get_all('User Permission', fields=['*']):
skip_for_doctype = []
# while migrating from v11 -> v11
if has_skip_for_doctype:
if not user_permission.skip_for_doctype:
continue
skip_for_doctype = user_permission.skip_for_doctype.split('\n')
else: # while migrating from v10 -> v11
if skip_for_doctype_map[(user_permission.allow, user_permission.user)] == None:
skip_for_doctype = get_doctypes_to_skip(user_permission.allow, user_permission.user)
# cache skip for doctype for same user and doctype
skip_for_doctype_map[(user_permission.allow, user_permission.user)] = skip_for_doctype
else:
skip_for_doctype = skip_for_doctype_map[(user_permission.allow, user_permission.user)]
if skip_for_doctype:
# only specific doctypes are selected
# split this into multiple records and delete
linked_doctypes = get_linked_doctypes(user_permission.allow, True).keys()
applicable_for_doctypes = list(set(linked_doctypes) - set(skip_for_doctype))
user_permissions_to_delete.append(user_permission.name)
user_permission.name = None
user_permission.skip_for_doctype = None
for doctype in applicable_for_doctypes:
if doctype:
# Maintain sequence (name, user, allow, for_value, applicable_for, apply_to_all_doctypes)
new_user_permissions_list.append((
frappe.generate_hash("", 10),
user_permission.user,
user_permission.allow,
user_permission.for_value,
doctype,
0
))
else:
# No skip_for_doctype found! Just update apply_to_all_doctypes.
frappe.db.set_value('User Permission', user_permission.name, 'apply_to_all_doctypes', 1)
if new_user_permissions_list:
print(len(new_user_permissions_list))
frappe.db.sql('''
INSERT INTO `tabUser Permission`
(`name`, `user`, `allow`, `for_value`, `applicable_for`, `apply_to_all_doctypes`)
VALUES {}
'''.format(
', '.join(['%s'] * len(new_user_permissions_list))
), tuple(new_user_permissions_list))
if user_permissions_to_delete:
print(len(user_permissions_to_delete))
frappe.db.sql('DELETE FROM `tabUser Permission` WHERE `name` in ({})'
.format(','.join(['%s'] * len(user_permissions_to_delete))),
tuple(user_permissions_to_delete)
)
|
Python
| 0.000001
|
@@ -1152,33 +1152,37 @@
_for_doctype_map
-%5B
+.get(
(user_permission
@@ -1210,17 +1210,17 @@
on.user)
-%5D
+)
== None
@@ -2515,48 +2515,8 @@
st:%0A
-%09%09print(len(new_user_permissions_list))%0A
%09%09fr
@@ -2809,49 +2809,8 @@
te:%0A
-%09%09print(len(user_permissions_to_delete))%0A
%09%09fr
|
c09274936df73668afd14ccac6d7f7c322d5e8b8
|
Add naive logging in Main.py
|
Main.py
|
Main.py
|
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on, concat_pdf_pages, is_landscape, write_pdf_file
parser = \
argparse.ArgumentParser(
description='Split all the pages of multiple PDF files in a directory by document number'
)
parser.add_argument(
'directory',
metavar='PATH',
type=str,
help='path to a directory'
)
def main():
# Get to directory with PDF files to work on
args = parser.parse_args()
directory = args.directory
# Open the PDF files
all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)]
opened_files = [open(path, 'rb') for path in all_pdf_files]
# Take all the pages in all the PDF files into a generator
all_pages = concat_pdf_pages(opened_files)
def make_pagenum_even(pdf_writer):
"""Helper function that append a blank page if the number of page is an odd number, in order to make the
page number even."""
if pdf_writer.getNumPages() % 2 == 1:
pdf_writer.addBlankPage()
# For all pages that belongs to the same document ID
for idx, pages_to_write in enumerate(split_on(all_pages, predicate=is_landscape), start=1):
# Create a PDF writer instance
pdf_writer = PdfFileWriter()
# Put those pages into a writer
for page in pages_to_write:
pdf_writer.addPage(page)
# Make sure the output PDF will have an even number of pages
# which makes printing the PDF file easier
make_pagenum_even(pdf_writer)
output_filename = '{0:05}.pdf'.format(idx)
# And write those pages to a single PDF file
write_pdf_file(output_filename, pdf_writer)
# Make sure to close all the files that were opened
for file in opened_files:
file.close()
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -43,16 +43,31 @@
rgparse%0A
+import logging%0A
import o
@@ -456,16 +456,138 @@
ory'%0A)%0A%0A
+# Get default logger%0Alog = logging.getLogger(__name__)%0Alog.addHandler(logging.StreamHandler())%0Alog.setLevel(logging.INFO)%0A
%0Adef mai
@@ -701,16 +701,70 @@
irectory
+%0A log.info('Working on PDF files in %25s', directory)
%0A%0A #
@@ -892,16 +892,100 @@
ctory)%5D%0A
+ log.info('Found the following PDF files%5Cn %25s', '%5Cn '.join(all_pdf_files))%0A
open
@@ -1379,16 +1379,65 @@
2 == 1:%0A
+ log.debug(' Adding a blank page')%0A
@@ -1749,35 +1749,161 @@
-for page in pages_to_write:
+log.info('Adding %25d pages to PDFWriter', len(pages_to_write))%0A for page in pages_to_write:%0A log.debug(' Adding page %25s', repr(page))
%0A
@@ -2200,16 +2200,77 @@
DF file%0A
+ log.info('Writing PDF pages to %25s', output_filename)%0A
@@ -2370,16 +2370,58 @@
opened%0A
+ log.debug('Closing all opened files')%0A
for
|