commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
34b61df7608f93aa6f4378944b487a947afaabf8 | Remove a bunch of fixed/wfm bugs from detect_leaks.py. Scary... | nth10sd/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz | dom/automation/detect_leaks.py | dom/automation/detect_leaks.py | #!/usr/bin/env python
def ath(array):
hash = {}
for s in array:
hash[s] = True
return hash
knownHash = ath([
# bug 391976
"nsMathMLContainerFrame",
"nsMathMLmtableOuterFrame",
"nsMathMLmtdInnerFrame",
"nsMathMLmactionFrame",
# Bug 398462
"nsBaseAppShell",
"nsRunnable",
# Bug 403199
"nsSimpleNestedURI"
])
# Things that are known to leak AND entrain smaller objects.
# If one of these leaks, leaks of small objects will not be reported.
knownLargeHash = ath([
# Bug 397206
"BackstagePass",
# Bug 102229 or bug 419562
"nsDNSService",
# Bug 424418
"nsRDFResource",
# Bug 417630 and friends
"nsJVMManager"
])
# Large items that
# - should be reported even if things in knownLargeHash leak
# - should quell the reporting of smaller objects
# currently empty :(
otherLargeHash = ath([
"nsGlobalWindow",
"nsDocument",
"nsDocShell"
])
def amiss(logPrefix):
currentFile = file(logPrefix + "-out", "r")
for line in currentFile:
# line = line.strip("\x07").rstrip("\n")
if (line.startswith("nsTraceRefcntImpl::DumpStatistics")):
break
else:
currentFile.close()
return False
smallLeaks = ""
largeKnownLeaks = ""
largeOtherLeaks = ""
for line in currentFile:
line = line.strip("\x07").rstrip("\n").lstrip(" ")
if (line == "nsStringStats"):
break
a = line.split(" ")[1]
if a in knownLargeHash:
largeKnownLeaks += "*** Large K object " + a + "\n"
if a in otherLargeHash:
largeOtherLeaks += "*** Large O object " + a + "\n"
if not a in knownHash:
smallLeaks += a + "\n"
if largeOtherLeaks != "":
print "Leaked:"
print largeOtherLeaks
print largeKnownLeaks
currentFile.close()
return True
elif largeKnownLeaks != "":
# print "(Known large leaks, and no other large leaks, so all leaks were ignored)"
currentFile.close()
return False
elif smallLeaks != "":
print "Leaked:"
print smallLeaks
currentFile.close()
return True
else:
# print "No leaks :)"
currentFile.close()
return False
# print "detect_leaks is ready"
| #!/usr/bin/env python
def ath(array):
hash = {}
for s in array:
hash[s] = True
return hash
knownHash = ath([
# bug 391976
"nsMathMLContainerFrame",
"nsMathMLmtableOuterFrame",
"nsMathMLmtdInnerFrame",
"nsMathMLmactionFrame",
# Bug 398462
"nsBaseAppShell",
"nsRunnable",
# Bug 403199
"nsSimpleNestedURI",
# Bug 415112
"AtomImpl",
"nsInstantiationNode",
# Bug 427922
"nsStringBuffer"
])
# Things that are known to leak AND entrain smaller objects.
# If one of these leaks, leaks of small objects will not be reported.
knownLargeHash = ath([
# Bug 425821
"nsDocument",
# Bug 397206
"BackstagePass",
# Bug 102229 or bug 419562
"nsDNSService",
# Bug 424418
"nsRDFResource",
# Bug 413582 and friends
"nsGenericElement",
# Bug 429085 (which also entrains a bunch of the above)
"nsGlobalWindow",
# Bug 429085 or bug 425821?
"nsDocShell",
# Bug 417630 and friends
"nsJVMManager"
])
# Large items that
# - should be reported even if things in knownLargeHash leak
# - should quell the reporting of smaller objects
# currently empty :(
otherLargeHash = ath([
])
def amiss(logPrefix):
currentFile = file(logPrefix + "-out", "r")
for line in currentFile:
# line = line.strip("\x07").rstrip("\n")
if (line.startswith("nsTraceRefcntImpl::DumpStatistics")):
break
else:
currentFile.close()
return False
smallLeaks = ""
largeKnownLeaks = ""
largeOtherLeaks = ""
for line in currentFile:
line = line.strip("\x07").rstrip("\n").lstrip(" ")
if (line == "nsStringStats"):
break
a = line.split(" ")[1]
if a in knownLargeHash:
largeKnownLeaks += "*** Large K object " + a + "\n"
if a in otherLargeHash:
largeOtherLeaks += "*** Large O object " + a + "\n"
if not a in knownHash:
smallLeaks += a + "\n"
if largeOtherLeaks != "":
print "Leaked:"
print largeOtherLeaks
print largeKnownLeaks
currentFile.close()
return True
elif largeKnownLeaks != "":
# print "(Known large leaks, and no other large leaks, so all leaks were ignored)"
currentFile.close()
return False
elif smallLeaks != "":
print "Leaked:"
print smallLeaks
currentFile.close()
return True
else:
# print "No leaks :)"
currentFile.close()
return False
# print "detect_leaks is ready"
| mpl-2.0 | Python |
b8cc7058e13d71f4e6714f5aa013815016525f5d | print info as strings, not numbers. will catch pending vms too. | stroucki/tashi,stroucki/tashi,stroucki/tashi | src/tashi/accounting/accountingservice.py | src/tashi/accounting/accountingservice.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import threading
import time
from tashi import createClient
class AccountingService(object):
"""RPC service for the Accounting service"""
def __init__(self, config):
self.log = logging.getLogger(__name__)
self.log.setLevel(logging.INFO)
self.config = config
self.pollSleep = None
# XXXstroucki new python has fallback values
try:
self.pollSleep = self.config.getint("AccountingService", "pollSleep")
except:
pass
if self.pollSleep is None:
self.pollSleep = 600
self.cm = createClient(config)
threading.Thread(target=self.__start).start()
# remote
def record(self, strings):
for string in strings:
self.log.info("Remote: %s" % (string))
def __start(self):
while True:
try:
instances = self.cm.getInstances()
for instance in instances:
# XXXstroucki this currently duplicates what the CM was doing.
self.log.info('Accounting: id %s host %s vmId %s user %s cores %s memory %s' % (instance.id, instance.hostId, instance.vmId, instance.userId, instance.cores, instance.memory))
except:
self.log.warning("Accounting iteration failed")
# wait to do the next iteration
time.sleep(self.pollSleep)
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import threading
import time
from tashi import createClient
class AccountingService(object):
"""RPC service for the Accounting service"""
def __init__(self, config):
self.log = logging.getLogger(__name__)
self.log.setLevel(logging.INFO)
self.config = config
self.pollSleep = None
# XXXstroucki new python has fallback values
try:
self.pollSleep = self.config.getint("AccountingService", "pollSleep")
except:
pass
if self.pollSleep is None:
self.pollSleep = 600
self.cm = createClient(config)
threading.Thread(target=self.__start).start()
# remote
def record(self, strings):
for string in strings:
self.log.info("Remote: %s" % (string))
def __start(self):
while True:
try:
instances = self.cm.getInstances()
for instance in instances:
# XXXstroucki this currently duplicates what the CM was doing.
self.log.info('Accounting: id %d host %d vmId %d user %d cores %d memory %d' % (instance.id, instance.hostId, instance.vmId, instance.userId, instance.cores, instance.memory))
except:
self.log.warning("Accounting iteration failed")
# wait to do the next iteration
time.sleep(self.pollSleep)
| apache-2.0 | Python |
a8283f5d2c1d970b7b676d491ad8c9472abfe667 | Fix tests since we changed imports. | schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse | boardinghouse/tests/test_template_tag.py | boardinghouse/tests/test_template_tag.py | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | bsd-3-clause | Python |
eb21ec1021b1447c5bb3be6104b0576a7426de7b | 更新 apps ELOs, 新增所有 functions 的註解 | yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo,yrchen/CommonRepo | commonrepo/elos/apps.py | commonrepo/elos/apps.py | # -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: yrchen@ATCity.org
# Maintained By: yrchen@ATCity.org
#
"""
App configurations for ELOs package in Common Repository project.
"""
from __future__ import absolute_import, unicode_literals
from django.apps import AppConfig
from actstream import registry
__author__ = 'yrchen@ATCity.org (Xaver Y.R. Chen)'
class ELOsAppConfig(AppConfig):
"""
Configs of ELOs packages.
"""
name = 'commonrepo.elos'
def ready(self):
# Register the ELOs package to Activity Stream system
registry.register(self.get_model('ELO'))
# Load settings of signals
import commonrepo.elos.signals
| # -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: yrchen@ATCity.org
# Maintained By: yrchen@ATCity.org
#
"""
App configurations for ELOs package in Common Repository project.
"""
from __future__ import absolute_import, unicode_literals
from django.apps import AppConfig
from actstream import registry
__author__ = 'yrchen@ATCity.org (Xaver Y.R. Chen)'
class ELOsAppConfig(AppConfig):
name = 'commonrepo.elos'
def ready(self):
registry.register(self.get_model('ELO'))
import commonrepo.elos.signals
| apache-2.0 | Python |
21b1f9bd8e546151e1e183b41237337efcc0bbc3 | Add country code | rogers0/namebench,google/namebench,google/namebench,protron/namebench,google/namebench | libnamebench/geoip.py | libnamebench/geoip.py | # Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class used for determining GeoIP location."""
import re
import tempfile
import urllib
# external dependencies (from third_party)
try:
import third_party
except ImportError:
pass
import httplib2
import simplejson
import util
def GetFromGoogleLocAPI():
"""Use the Google Loc JSON API from Google Gears.
NOTE: This is in violation of the Gears Terms of Service. See:
http://code.google.com/p/gears/wiki/GeolocationAPI
This method does however return the most accurate results.
"""
h = httplib2.Http(tempfile.gettempdir(), timeout=10)
url = 'http://www.google.com/loc/json'
post_data = { 'request_address': 'true', 'version': '1.1.0', 'source': 'namebench ' }
resp, content = h.request(url, 'POST', simplejson.dumps(post_data))
try:
data = simplejson.loads(content)['location']
return {
'region_name': data['address'].get('region'),
'country_name': data['address'].get('country'),
'country_code': data['address'].get('country_code'),
'city': data['address'].get('city'),
'latitude': data['latitude'],
'longitude': data['longitude'],
'source': 'gloc'
}
except:
print "* Failed to use GoogleLocAPI: %s" % util.GetLastExceptionString()
return {}
def GetFromMaxmindJSAPI():
h = httplib2.Http(tempfile.gettempdir(), timeout=10)
resp, content = h.request("http://j.maxmind.com/app/geoip.js", 'GET')
keep = ['region_name', 'country_name', 'city', 'latitude', 'longitude', 'country_code']
results = dict([x for x in re.findall("geoip_(.*?)\(.*?\'(.*?)\'", content) if x[0] in keep])
results.update({'source': 'mmind'})
if results:
return results
else:
return {}
def GetGeoData():
try:
jsapi_data = GetFromGoogleLocAPI()
if jsapi_data:
return jsapi_data
else:
return GetFromMaxmindJSAPI()
except:
print "Failed to get Geodata: %s" % util.GetLastExceptionString()
return {}
| # Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class used for determining GeoIP location."""
import re
import tempfile
import urllib
# external dependencies (from third_party)
try:
import third_party
except ImportError:
pass
import httplib2
import simplejson
import util
def GetFromGoogleLocAPI():
"""Use the Google Loc JSON API from Google Gears.
NOTE: This is in violation of the Gears Terms of Service. See:
http://code.google.com/p/gears/wiki/GeolocationAPI
This method does however return the most accurate results.
"""
h = httplib2.Http(tempfile.gettempdir(), timeout=10)
url = 'http://www.google.com/loc/json'
post_data = { 'request_address': 'true', 'version': '1.1.0', 'source': 'namebench ' }
resp, content = h.request(url, 'POST', simplejson.dumps(post_data))
try:
data = simplejson.loads(content)['location']
return {
'region_name': data['address'].get('region'),
'country_name': data['address'].get('country'),
'city': data['address'].get('city'),
'latitude': data['latitude'],
'longitude': data['longitude'],
'source': 'gloc'
}
except:
print "* Failed to use GoogleLocAPI: %s" % util.GetLastExceptionString()
return {}
def GetFromMaxmindJSAPI():
h = httplib2.Http(tempfile.gettempdir(), timeout=10)
resp, content = h.request("http://j.maxmind.com/app/geoip.js", 'GET')
keep = ['region_name', 'country_name', 'city', 'latitude', 'longitude']
results = dict([x for x in re.findall("geoip_(.*?)\(.*?\'(.*?)\'", content) if x[0] in keep])
results.update({'source': 'mmind'})
if results:
return results
else:
return {}
def GetGeoData():
try:
jsapi_data = GetFromGoogleLocAPI()
if jsapi_data:
return jsapi_data
else:
return GetFromMaxmindJSAPI()
except:
print "Failed to get Geodata: %s" % util.GetLastExceptionString()
return {}
| apache-2.0 | Python |
b65b5567e5cfddfdc10bc3a9a96647678916c3e6 | add copy logic | iku000888/Excel_Translation_Helper | library-examples/read-replace-export-excel.py | library-examples/read-replace-export-excel.py | """
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
import shutil
from openpyxl import load_workbook, Workbook
shutil.copyfile('sample-input-fortest.xlsx','sample-input-fortest-out.xlsx')
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
print "can iterate sheets, rows and columns intuitively"
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
seq_no+=1
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
| """
Proto type that does the following:
input:Excel file in language A
output 1:Copy of input file, with original strings replaced with serial numbers
output 2:Single xlsx file that contains serial numbers and original texts from input file.
"""
from openpyxl import load_workbook, Workbook
#point to the file to be read. Intuitive.
wb2 = load_workbook('sample-input-fortest.xlsx')
#convince your self that sheet names are retireved.
sheet_names = wb2.get_sheet_names()
print sheet_names
#work book is simply a list of sheets
sheet = wb2[sheet_names[0]]
print sheet
print "can iterate sheets, rows and columns intuitively"
string_list = list()
string_list.append(("sequence_number","original language"))
seq_no = 1
for sheet in wb2:
for row in sheet.rows:
for cell in row:
if None!=cell.value:
string_list.append((seq_no,cell.value))
seq_no+=1
wb_out = Workbook(write_only=True)
ws = wb_out.create_sheet()
for string in string_list:
ws.append(string)
wb_out.save('new_big_file.xlsx')
| apache-2.0 | Python |
a16a2c490582ea9e78b81ccfc2355c5a4377ceec | Update S3Outposts per 2021-07-29 changes | cloudtools/troposphere,cloudtools/troposphere | troposphere/s3outposts.py | troposphere/s3outposts.py | # Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 39.8.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class VpcConfiguration(AWSProperty):
props = {
"VpcId": (str, False),
}
class AccessPoint(AWSObject):
resource_type = "AWS::S3Outposts::AccessPoint"
props = {
"Bucket": (str, True),
"Name": (str, True),
"Policy": (dict, False),
"VpcConfiguration": (VpcConfiguration, True),
}
class AbortIncompleteMultipartUpload(AWSProperty):
props = {
"DaysAfterInitiation": (integer, True),
}
class Rule(AWSProperty):
props = {
"AbortIncompleteMultipartUpload": (AbortIncompleteMultipartUpload, False),
"ExpirationDate": (str, False),
"ExpirationInDays": (integer, False),
"Filter": (dict, False),
"Id": (str, False),
"Status": (str, False),
}
class LifecycleConfiguration(AWSProperty):
props = {
"Rules": ([Rule], True),
}
class Bucket(AWSObject):
resource_type = "AWS::S3Outposts::Bucket"
props = {
"BucketName": (str, True),
"LifecycleConfiguration": (LifecycleConfiguration, False),
"OutpostId": (str, True),
"Tags": (Tags, False),
}
class BucketPolicy(AWSObject):
resource_type = "AWS::S3Outposts::BucketPolicy"
props = {
"Bucket": (str, True),
"PolicyDocument": (dict, True),
}
class Endpoint(AWSObject):
resource_type = "AWS::S3Outposts::Endpoint"
props = {
"AccessType": (str, False),
"CustomerOwnedIpv4Pool": (str, False),
"OutpostId": (str, True),
"SecurityGroupId": (str, True),
"SubnetId": (str, True),
}
| # Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class VpcConfiguration(AWSProperty):
props = {
"VpcId": (str, False),
}
class AccessPoint(AWSObject):
resource_type = "AWS::S3Outposts::AccessPoint"
props = {
"Bucket": (str, True),
"Name": (str, True),
"Policy": (dict, False),
"VpcConfiguration": (VpcConfiguration, True),
}
class AbortIncompleteMultipartUpload(AWSProperty):
props = {
"DaysAfterInitiation": (integer, True),
}
class Rule(AWSProperty):
props = {
"AbortIncompleteMultipartUpload": (AbortIncompleteMultipartUpload, False),
"ExpirationDate": (str, False),
"ExpirationInDays": (integer, False),
"Filter": (dict, False),
"Id": (str, False),
"Status": (str, False),
}
class LifecycleConfiguration(AWSProperty):
props = {
"Rules": ([Rule], True),
}
class Bucket(AWSObject):
resource_type = "AWS::S3Outposts::Bucket"
props = {
"BucketName": (str, True),
"LifecycleConfiguration": (LifecycleConfiguration, False),
"OutpostId": (str, True),
"Tags": (Tags, False),
}
class BucketPolicy(AWSObject):
resource_type = "AWS::S3Outposts::BucketPolicy"
props = {
"Bucket": (str, True),
"PolicyDocument": (dict, True),
}
class Endpoint(AWSObject):
resource_type = "AWS::S3Outposts::Endpoint"
props = {
"OutpostId": (str, True),
"SecurityGroupId": (str, True),
"SubnetId": (str, True),
}
| bsd-2-clause | Python |
ed60c16993a04ac74b8f78ab94544c67fbfd1439 | Update closest_points.py | jilljenn/tryalgo | tryalgo/closest_points.py | tryalgo/closest_points.py | #!/usr/bin/env python3
# Closest pair of points
# trouver la paire de points la plus proche
# jill-jenn vie et christoph durr - 2014-2015
from random import randint
# snip{
from math import hypot # hypot(dx, dy) = sqrt(dx * dx + dy * dy)
from random import shuffle
__all__ = ["closest_points"]
def dist(p, q):
return hypot(p[0] - q[0], p[1] - q[1])
def cell(point, pas):
x, y = point
return (int(x // pas), int(y // pas))
def ameliore(S, d):
G = {} # grille
for p in S:
a, b = cell(p, d / 2)
for a1 in range(a - 2, a + 3):
for b1 in range(b - 2, b + 3):
if (a1, b1) in G:
q = G[a1, b1]
pq = dist(p, q)
if pq < d:
return pq, p, q
G[a, b] = p
return None
def closest_points(S):
"""Closest pair of points
:param S: list of points
:requires: size of S at least 2
:modifies: changes the order in S
:returns: pair of points p,q from S with minimum Euclidean distance
:complexity: expected linear time
"""
shuffle(S)
assert len(S) >= 2
p = S[0]
q = S[1]
d = dist(p, q)
while d > 0:
r = ameliore(S, d)
if r:
d, p, q = r
else:
break
return (p, q)
# snip}
if __name__ == "__main__":
def tikz_points(S):
for p in S:
print("\\filldraw[black] (%f, %f) circle (1pt);" % p)
def tikz_polygone(S):
for i in range(len(S)):
print('\\draw (%f, %f) -- (%f, %f);' % (S[i - 1] + S[i]))
S = [(randint(0, 400) / 100, randint(0, 400) / 100) for _ in range(32)]
tikz_points(S)
tikz_polygone(closest_points(S))
| #!/usr/bin/env python3
# Closest pair of points
# trouver la paire de points la plus proche
# jill-jenn vie et christoph durr - 2014-2015
from random import randint
# snip{
from math import hypot # hypot(dx, dy) = sqrt(dx * dx + dy * dy)
from random import shuffle
def dist(p, q):
return hypot(p[0] - q[0], p[1] - q[1])
def cell(point, pas):
x, y = point
return (int(x // pas), int(y // pas))
def ameliore(S, d):
G = {} # grille
for p in S:
a, b = cell(p, d / 2)
for a1 in range(a - 2, a + 3):
for b1 in range(b - 2, b + 3):
if (a1, b1) in G:
q = G[a1, b1]
pq = dist(p, q)
if pq < d:
return pq, p, q
G[a, b] = p
return None
def closest_points(S):
"""Closest pair of points
:param S: list of points
:requires: size of S at least 2
:modifies: changes the order in S
:returns: pair of points p,q from S with minimum Euclidean distance
:complexity: expected linear time
"""
shuffle(S)
assert len(S) >= 2
p = S[0]
q = S[1]
d = dist(p, q)
while d > 0:
r = ameliore(S, d)
if r:
d, p, q = r
else:
break
return (p, q)
# snip}
__all__ = ["closest_points"]
if __name__ == "__main__":
def tikz_points(S):
for p in S:
print("\\filldraw[black] (%f, %f) circle (1pt);" % p)
def tikz_polygone(S):
for i in range(len(S)):
print('\\draw (%f, %f) -- (%f, %f);' % (S[i - 1] + S[i]))
S = [(randint(0, 400) / 100, randint(0, 400) / 100) for _ in range(32)]
tikz_points(S)
tikz_polygone(closest_points(S))
| mit | Python |
88f96a3828ab9a2225a8cf86306a333cd53b8717 | Make rehashing work properly | DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd | txircd/modules/manhole.py | txircd/modules/manhole.py | from twisted.conch.manhole_tap import makeService
from twisted.internet import reactor
class Spawner(object):
def __init__(self, ircd):
self.manhole = makeService({
'namespace': {'ircd': ircd},
'passwd': 'manhole.passwd',
'telnetPort': None,
'sshPort': '65432'
})
def spawn(self):
# Wait 100ms in event of a rehash so that the old module
# has time to shut down. Could cause race conditions!
reactor.callLater(0.1, self.manhole.startService)
return {}
def cleanup(self):
self.manhole.stopService()
| from twisted.conch.manhole_tap import makeService
class Spawner(object):
def __init__(self, ircd):
self.manhole = makeService({
'namespace': {'ircd': ircd},
'passwd': 'manhole.passwd',
'telnetPort': None,
'sshPort': '65432'
})
def spawn(self):
self.manhole.startService()
return {}
def cleanup(self):
self.manhole.stopService()
| bsd-3-clause | Python |
ece5aa474a824eb2dfee8199e176baf1cbc33ad5 | Move calibration into class and out of main. | hmflash/tikitank,hmflash/tikitank,flaub/tikitank,flaub/tikitank,flaub/tikitank,flaub/tikitank,hmflash/tikitank,hmflash/tikitank,hmflash/tikitank,flaub/tikitank | treads.py | treads.py | #!/usr/bin/env python
import beaglebone_pru_adc as adc
import subprocess, os.path
import sys, time
class SpeedSensor(adc.Capture):
def __init__(self, firmware, *k, **kw):
super(SpeedSensor, self).__init__(*k, **kw)
fw = os.path.splitext(firmware)[0]
cmd = [ 'pasm', '-V3', '-b', firmware, fw ]
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
print stdout
raise IOError('Failed to compile "%s"' % firmware)
self._firmware = fw = os.path.abspath(fw + '.bin')
self.encoder0_pin = kw.get('pin', 0)
def calibrate(self):
print 'Running encoder for 10 seconds...'
self.encoder0_threshold = 4096
self.encoder0_delay = 0
adc._pru_adc.Capture.start(self, self._firmware)
time.sleep(10)
timer = s.timer
_, min0, max0, _, _ = s.encoder0_values
self.close()
print 'Capture runs at %d readings per second' % (timer // 10)
print 'Time value of one timer unit is %d nanosec' % (1000000000 // timer)
print 'Range for the encoder:', min0, '-', max0
print 'Recommended threshold value for encoder is:', int(0.9*(max0-min0))
def start(self):
s.encoder0_threshold = 750
s.encoder0_delay = 100
adc._pru_adc.Capture.start(self, self._firmware)
def close(self):
self.stop()
self.wait()
super(SpeedSensor, self).close()
if __name__ == '__main__':
s = SpeedSensor('tank/firmware.p')
if '--test' in sys.argv:
s.calibrate()
sys.exit()
try:
s.start()
if '-v' in sys.argv:
while True:
print s.timer, s.encoder0_values
num = 0
while num < 10:
delta = s.encoder0_ticks - num
if delta:
print 'Tick: %s' % delta
num += delta
except KeyboardInterrupt:
s.close()
| #!/usr/bin/env python
import beaglebone_pru_adc as adc
import subprocess, os.path
import sys, time
class SpeedSensor(adc.Capture):
def __init__(self, *k, **kw):
super(SpeedSensor, self).__init__(*k, **kw)
def start(self, firmware_src):
fw_bin = os.path.splitext(firmware_src)[0]
cmd = [ 'pasm', '-V3', '-b', firmware_src, fw_bin ]
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode != 0:
print stdout
raise IOError('Failed to compile "%s"' % firmware_src)
fw = os.path.abspath(fw_bin + '.bin')
adc._pru_adc.Capture.start(self, fw)
def close(self):
self.stop()
self.wait()
super(SpeedSensor, self).close()
if __name__ == '__main__':
s = SpeedSensor()
if len(sys.argv) > 1:
s.encoder0_threshold = 4096
s.start('tank/firmware.p')
time.sleep(10)
timer = s.timer
_, min0, max0, _, _ = s.encoder0_values
s.close()
print 'Capture runs at %d readings per second' % (timer // 10)
print 'Time value of one timer unit is %d nanosec' % (1000000000 // timer)
print 'Range for the encoder:', min0, '-', max0
print 'Recommended threshold value for encoder is:', int(0.9*(max0-min0))
sys.exit()
else:
s.encoder0_threshold = 750
s.encoder0_delay = 100
s.start('tank/firmware.p')
num = 0
last = s.encoder0_ticks
try:
while num < 10:
#print s.timer, s.encoder0_values
#continue
val = s.encoder0_ticks
if last != val:
delta = val - last
num += delta
print 'Tick: %s' % delta
last = val
except KeyboardInterrupt:
s.close()
| bsd-2-clause | Python |
23cec52bc136773e3e54251ca30d08c0733bcf1f | Add --message option to stop | TailorDev/Watson,TailorDev/Watson,wehlutyk/Watson,yloiseau/Watson | watson.py | watson.py | import os
import json
import datetime
import click
WATSON_FILE = os.path.join(os.path.expanduser('~'), '.watson')
def get_watson():
try:
with open(WATSON_FILE) as f:
return json.load(f)
except FileNotFoundError:
return {}
except ValueError:
return {}
def save_watson(content):
with open(WATSON_FILE, 'w+') as f:
return json.dump(content, f, indent=2)
@click.group()
def cli():
pass
@cli.command()
@click.argument('project')
def start(project):
watson = get_watson()
start_time = datetime.datetime.now()
if watson.get('current') is not None:
project = watson['current'].get('project', "?")
click.echo(
("Project {} is already started"
.format(project, start_time)),
err=True
)
return
click.echo("Starting {} at {:%H:%M:%S}".format(project, start_time))
watson['current'] = {
'project': project,
'start': start_time.isoformat()
}
save_watson(watson)
@cli.command()
@click.option('-m', '--message', default=None,
help="Add a message to this frame")
def stop(message):
watson = get_watson()
stop_time = datetime.datetime.now()
current = watson.get('current')
if not current or not current.get('project'):
click.echo("No project started", err=True)
return
click.echo("Stopping {}.".format(current['project']))
if not watson.get('projects'):
watson['projects'] = {}
project = watson['projects'].get(current['project'])
if not project:
project = {'frames': []}
watson['projects'][current['project']] = project
frame = {
'start': current['start'],
'stop': stop_time.isoformat()
}
if message:
frame['message'] = message
project['frames'].append(frame)
del watson['current']
save_watson(watson)
if __name__ == '__main__':
cli()
| import os
import json
import datetime
import click
WATSON_FILE = os.path.join(os.path.expanduser('~'), '.watson')
def get_watson():
try:
with open(WATSON_FILE) as f:
return json.load(f)
except FileNotFoundError:
return {}
except ValueError:
return {}
def save_watson(content):
with open(WATSON_FILE, 'w+') as f:
return json.dump(content, f, indent=2)
@click.group()
def cli():
pass
@cli.command()
@click.argument('project')
def start(project):
watson = get_watson()
start_time = datetime.datetime.now()
if watson.get('current') is not None:
project = watson['current'].get('project', "?")
click.echo(
("Project {} is already started"
.format(project, start_time)),
err=True
)
return
click.echo("Starting {} at {:%H:%M:%S}".format(project, start_time))
watson['current'] = {
'project': project,
'start': start_time.isoformat()
}
save_watson(watson)
@cli.command()
def stop():
watson = get_watson()
stop_time = datetime.datetime.now()
current = watson.get('current')
if not current or not current.get('project'):
click.echo("No project started", err=True)
return
click.echo("Stopping {}.".format(current['project']))
if not watson.get('projects'):
watson['projects'] = {}
project = watson['projects'].get(current['project'])
if not project:
project = {'frames': []}
watson['projects'][current['project']] = project
project['frames'].append({
'start': current['start'],
'stop': stop_time.isoformat()
})
del watson['current']
save_watson(watson)
if __name__ == '__main__':
cli()
| mit | Python |
7ce419de1f39050940b8399401a77b2096b74ca2 | Increment version number to 0.11.0 | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.11.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.10.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| mit | Python |
7d0dbc4947ff7395a772d4849f34a100f31877ef | add arg in comment | ClaudiaSaxer/PlasoScaffolder | src/plasoscaffolder/bll/mappings/init_mapping.py | src/plasoscaffolder/bll/mappings/init_mapping.py | # -*- coding: utf-8 -*-
""" Module representing function for the different files """
from plasoscaffolder.bll.mappings.mapping_helper import render_template
def get_formatter_init_create(plugin_name: str) -> str:
"""
renders formatter init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "formatter_init_create_template.py"
return _render_init(file_name, plugin_name)
def get_formatter_init_edit(plugin_name: str) -> str:
"""
renders formatter init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "formatter_init_edit_template.py"
return _render_init(file_name, plugin_name)
def get_parser_init_create(plugin_name: str) -> str:
"""
renders formatter init if you want to edit an existing init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "parser_init_create_template.py"
return _render_init(file_name, plugin_name)
def get_parser_init_edit(plugin_name: str) -> str:
"""
renders parser init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "parser_init_edit_template.py"
return _render_init(file_name, plugin_name)
def _render_init(file_name: str, plugin_name: str) -> str:
"""
renders parser init if you want to edit an existing init file
Args:
file_name: name of the file in the templates folder
plugin_name: the name of the plugin
Returns:string of the rendered template
"""
context = {'plugin_name': plugin_name}
rendered = render_template(file_name, context)
return rendered
| # -*- coding: utf-8 -*-
""" Module representing function for the different files """
from plasoscaffolder.bll.mappings.mapping_helper import render_template
def get_formatter_init_create(plugin_name: str) -> str:
"""
renders formatter init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "formatter_init_create_template.py"
return _render_init(file_name, plugin_name)
def get_formatter_init_edit(plugin_name: str) -> str:
"""
renders formatter init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "formatter_init_edit_template.py"
return _render_init(file_name, plugin_name)
def get_parser_init_create(plugin_name: str) -> str:
"""
renders formatter init if you want to edit an existing init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "parser_init_create_template.py"
return _render_init(file_name, plugin_name)
def get_parser_init_edit(plugin_name: str) -> str:
"""
renders parser init if you want to create new init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
file_name = "parser_init_edit_template.py"
return _render_init(file_name, plugin_name)
def _render_init(file_name: str, plugin_name: str) -> str:
"""
renders parser init if you want to edit an existing init file
:param plugin_name: the plugin name
:return: string of the rendered template
"""
context = {'plugin_name': plugin_name}
rendered = render_template(file_name, context)
return rendered
| apache-2.0 | Python |
8142a48a10799f83d6d8a0d29c50afccd5e9fd68 | Add get_app_and_db function | piotr-rusin/url-shortener,piotr-rusin/url-shortener | url_shortener/__init__.py | url_shortener/__init__.py | # -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
import logging
from logging.handlers import TimedRotatingFileHandler
from flask import Flask
from flask_injector import FlaskInjector
from .views import url_shortener
from .forms import FormModule
from .domain_and_persistence import DomainAndPersistenceModule, SQLAlchemy
from .validation import ValidationModule
app = Flask(__name__)
app.config.from_object('url_shortener.default_config')
app.register_blueprint(url_shortener)
def _set_up_logging(app):
"""Set up logging for given Flask application object
:param app: an application for which the function will
set up logging
"""
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
def _get_injector(app):
"""Set up and return an instance of FlaskInjector
:param app: an application for which the function will set up
the injector
:return: an instance of FlaskInjector to be used by the application
"""
return FlaskInjector(
app=app,
modules=[
DomainAndPersistenceModule(app),
ValidationModule(app),
FormModule()
],
use_annotations=True
)
def get_app_and_db(configuration, from_envvar=False):
"""Get application instance and database object used by it
:param configuration: a string value referring to a file from which
configuration options will be loaded. This value may be either
the name of the file, or name of an environment variable set to
the name of configuration file
:param from_envvar: if True: configuration parameter will
be treated as name of an evnironment variable pointing to
the configuration file, if False: it will be treated as name of
the configuration file itself.
:returns: a tuple with application object as its first and
database object as its second element
"""
app = Flask(__name__)
app.config.from_object('url_shortener.default_config')
if from_envvar:
app.config.from_envvar(configuration)
else:
app.config.from_pyfile(configuration)
_set_up_logging(app)
app.register_blueprint(url_shortener)
injector = _get_injector(app)
return app, injector.injector.get(SQLAlchemy)
| # -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
import logging
from logging.handlers import TimedRotatingFileHandler
from flask import Flask
from flask_injector import FlaskInjector
from .views import url_shortener
from .forms import FormModule
from .domain_and_persistence import DomainAndPersistenceModule
from .validation import ValidationModule
app = Flask(__name__)
app.config.from_object('url_shortener.default_config')
app.register_blueprint(url_shortener)
def _set_up_logging(app):
"""Set up logging for given Flask application object
:param app: an application for which the function will
set up logging
"""
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
def _get_injector(app):
"""Set up and return an instance of FlaskInjector
:param app: an application for which the function will set up
the injector
:return: an instance of FlaskInjector to be used by the application
"""
return FlaskInjector(
app=app,
modules=[
DomainAndPersistenceModule(app),
ValidationModule(app),
FormModule()
],
use_annotations=True
)
| mit | Python |
a8a8656818ef8db333a6c35dab0bb4903fb32802 | fix crash on serial ip addresses | CanonicalLtd/subiquity,CanonicalLtd/subiquity | subiquity/controllers/welcome.py | subiquity/controllers/welcome.py | # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
from subiquity.controller import SubiquityTuiController
from subiquity.ui.views.welcome import get_languages, WelcomeView
from subiquity.ui.views.help import get_global_addresses
log = logging.getLogger('subiquity.controllers.welcome')
class WelcomeController(SubiquityTuiController):
autoinstall_key = model_name = "locale"
autoinstall_schema = {'type': 'string'}
autoinstall_default = 'en_US.UTF-8'
def interactive(self):
return self.app.interactive()
def load_autoinstall_data(self, data):
os.environ["LANG"] = data
def start(self):
lang = os.environ.get("LANG")
if lang is not None and lang.endswith(".UTF-8"):
lang = lang.rsplit('.', 1)[0]
for code, name in get_languages():
if code == lang:
self.model.switch_language(code)
break
else:
self.model.selected_language = lang
def make_ui(self):
language = self.model.selected_language
serial = self.app.opts.run_on_serial
if serial:
ips = get_global_addresses(self.app)
else:
ips = None
return WelcomeView(self, language, serial, ips)
def run_answers(self):
if 'lang' in self.answers:
self.done(self.answers['lang'])
def done(self, code):
log.debug("WelcomeController.done %s next_screen", code)
self.signal.emit_signal('l10n:language-selected', code)
self.model.switch_language(code)
self.configured()
self.app.next_screen()
def cancel(self):
# Can't go back from here!
pass
def serialize(self):
return self.model.selected_language
def deserialize(self, data):
self.model.switch_language(data)
def make_autoinstall(self):
return self.model.selected_language
| # Copyright 2015 Canonical, Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
from subiquity.controller import SubiquityTuiController
from subiquity.ui.views.welcome import get_languages, WelcomeView
from subiquity.ui.views.help import get_global_addresses
log = logging.getLogger('subiquity.controllers.welcome')
class WelcomeController(SubiquityTuiController):
autoinstall_key = model_name = "locale"
autoinstall_schema = {'type': 'string'}
autoinstall_default = 'en_US.UTF-8'
def interactive(self):
return self.app.interactive()
def load_autoinstall_data(self, data):
os.environ["LANG"] = data
def start(self):
lang = os.environ.get("LANG")
if lang is not None and lang.endswith(".UTF-8"):
lang = lang.rsplit('.', 1)[0]
for code, name in get_languages():
if code == lang:
self.model.switch_language(code)
break
else:
self.model.selected_language = lang
def make_ui(self):
language = self.model.selected_language
serial = self.app.opts.run_on_serial
if serial:
ips = get_global_addresses()
else:
ips = None
return WelcomeView(self, language, serial, ips)
def run_answers(self):
if 'lang' in self.answers:
self.done(self.answers['lang'])
def done(self, code):
log.debug("WelcomeController.done %s next_screen", code)
self.signal.emit_signal('l10n:language-selected', code)
self.model.switch_language(code)
self.configured()
self.app.next_screen()
def cancel(self):
# Can't go back from here!
pass
def serialize(self):
return self.model.selected_language
def deserialize(self, data):
self.model.switch_language(data)
def make_autoinstall(self):
return self.model.selected_language
| agpl-3.0 | Python |
4874f3cb6d6f28e01b0d26998a96caf28fa3995c | make src/scripts/tls_scanner/tls_scanner.py python3-compatible | randombit/botan,randombit/botan,randombit/botan,randombit/botan,randombit/botan | src/scripts/tls_scanner/tls_scanner.py | src/scripts/tls_scanner/tls_scanner.py | #!/usr/bin/env python3
# (C) 2017 Jack Lloyd
# Botan is released under the Simplified BSD License (see license.txt)
import sys
import time
import subprocess
import re
def format_report(client_output):
version_re = re.compile('TLS (v1\.[0-3]) using ([A-Z0-9_]+)')
version_match = version_re.search(client_output)
#print(client_output)
if version_match:
return "Established %s %s" % (version_match.group(1), version_match.group(2))
else:
return client_output
def scanner(args = None):
if args is None:
args = sys.argv
if len(args) != 2:
print("Error: Usage tls_scanner.py host_file")
return 2
scanners = {}
for url in [s.strip() for s in open(args[1]).readlines()]:
scanners[url] = subprocess.Popen(['../../../botan', 'tls_client', '--policy=policy.txt', url],
stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
for url in scanners.keys():
scanners[url].stdin.close()
report = {}
timeout = 10
for url in scanners.keys():
print("waiting for", url)
for i in range(timeout):
scanners[url].poll()
if scanners[url].returncode != None:
break
#print("Waiting %d more seconds for %s" % (timeout-i, url))
time.sleep(1)
if scanners[url].returncode != None:
output = scanners[url].stdout.read() + scanners[url].stderr.read()
report[url] = format_report(output.decode("utf-8"))
for url in report.keys():
print(url, ":", report[url])
return 0
if __name__ == '__main__':
sys.exit(scanner())
| #!/usr/bin/env python3
# (C) 2017 Jack Lloyd
# Botan is released under the Simplified BSD License (see license.txt)
import sys
import time
import subprocess
import re
def format_report(client_output):
version_re = re.compile('TLS (v1\.[0-2]) using ([A-Z0-9_]+)')
version_match = version_re.search(client_output)
#print client_output
if version_match:
return "Established %s %s" % (version_match.group(1), version_match.group(2))
else:
return client_output
def scanner(args = None):
if args is None:
args = sys.argv
if len(args) != 2:
print "Error: Usage tls_scanner.py host_file"
return 2
scanners = {}
for url in [s.strip() for s in open(args[1]).readlines()]:
scanners[url] = subprocess.Popen(['../../../botan', 'tls_client', '--policy=policy.txt', url],
stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
for url in scanners.keys():
scanners[url].stdin.close()
report = {}
timeout = 10
for url in scanners.keys():
print "waiting for", url
for i in range(timeout):
scanners[url].poll()
if scanners[url].returncode != None:
break
#print "Waiting %d more seconds for %s" % (timeout-i, url)
time.sleep(1)
if scanners[url].returncode != None:
output = scanners[url].stdout.read() + scanners[url].stderr.read()
report[url] = format_report(output)
for url in report.keys():
print url, ":", report[url]
return 0
if __name__ == '__main__':
sys.exit(scanner())
| bsd-2-clause | Python |
a8dfb7b0b99dba554d7bc08aadc63cbc6a626dd7 | Fix tempest test path | openstack/aodh,openstack/aodh | aodh/tests/tempest/plugin.py | aodh/tests/tempest/plugin.py | #
# Copyright 2015 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from tempest import config
from tempest.test_discover import plugins
import aodh
from aodh.tests.tempest import config as tempest_config
class AodhTempestPlugin(plugins.TempestPlugin):
def load_tests(self):
base_path = os.path.split(os.path.dirname(
os.path.abspath(aodh.__file__)))[0]
test_dir = "aodh/tests/tempest"
full_test_dir = os.path.join(base_path, test_dir)
return full_test_dir, base_path
def register_opts(self, conf):
config.register_opt_group(conf,
tempest_config.service_available_group,
tempest_config.ServiceAvailableGroup)
config.register_opt_group(conf,
tempest_config.alarming_group,
tempest_config.AlarmingGroup)
def get_opt_lists(self):
return [(tempest_config.alarming_group.name,
tempest_config.AlarmingGroup)]
| #
# Copyright 2015 NEC Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from tempest import config
from tempest.test_discover import plugins
import aodh
from aodh.tests.tempest import config as tempest_config
class AodhTempestPlugin(plugins.TempestPlugin):
def load_tests(self):
base_path = os.path.split(os.path.dirname(
os.path.abspath(aodh.__file__)))[0]
test_dir = "tempest"
full_test_dir = os.path.join(base_path, test_dir)
return full_test_dir, base_path
def register_opts(self, conf):
config.register_opt_group(conf,
tempest_config.service_available_group,
tempest_config.ServiceAvailableGroup)
config.register_opt_group(conf,
tempest_config.alarming_group,
tempest_config.AlarmingGroup)
def get_opt_lists(self):
return [(tempest_config.alarming_group.name,
tempest_config.AlarmingGroup)]
| apache-2.0 | Python |
0f49230309ac115ff78eddd36bcd153d7f3b75ea | Remove reference to "job" from ThreadPool | uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics | data_aggregator/threads.py | data_aggregator/threads.py | import queue
import threading
from multiprocessing import Queue
class ThreadPool():
def __init__(self, processes=20):
self.processes = processes
self.threads = [Thread() for _ in range(0, processes)]
self.mp_queue = Queue()
def yield_dead_threads(self):
for thread in self.threads:
if not thread.is_alive():
yield thread
def map(self, func, values):
completed_count = 0
values_iter = iter(values)
while completed_count < len(values):
try:
self.mp_queue.get_nowait()
completed_count += 1
except queue.Empty:
pass
for thread in self.yield_dead_threads():
try:
# run thread with the next value
value = next(values_iter)
thread.run(func, value, self.mp_queue)
except StopIteration:
break
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
pass
class Thread():
def __init__(self):
self.thread = None
def run(self, target, *args, **kwargs):
self.thread = threading.Thread(target=target,
args=args,
kwargs=kwargs)
self.thread.start()
def is_alive(self):
if self.thread:
return self.thread.is_alive()
else:
return False
| import queue
import threading
from multiprocessing import Queue
class ThreadPool():
def __init__(self, processes=20):
self.processes = processes
self.threads = [Thread() for _ in range(0, processes)]
self.mp_queue = Queue()
def yield_dead_threads(self):
for thread in self.threads:
if not thread.is_alive():
yield thread
def map(self, func, values):
completed_count = 0
values_iter = iter(values)
while completed_count < len(values):
try:
self.mp_queue.get_nowait()
completed_count += 1
except queue.Empty:
pass
for thread in self.yield_dead_threads():
try:
# run next job
job = next(values_iter)
thread.run(func, job, self.mp_queue)
except StopIteration:
break
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
pass
class Thread():
def __init__(self):
self.thread = None
def run(self, target, *args, **kwargs):
self.thread = threading.Thread(target=target,
args=args,
kwargs=kwargs)
self.thread.start()
def is_alive(self):
if self.thread:
return self.thread.is_alive()
else:
return False
| apache-2.0 | Python |
7b736aa6ef1a8d39c1ad1509daaadaae682f17a5 | Fix MessageContainer having the wrong constructor id | LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,andr-04/Telethon,LonamiWebs/Telethon,expectocode/Telethon | telethon/tl/message_container.py | telethon/tl/message_container.py | from . import TLObject
from ..extensions import BinaryWriter
class MessageContainer(TLObject):
constructor_id = 0x73f1f8dc
# TODO Currently it's a bit of a hack, since the container actually holds
# messages (message id, sequence number, request body), not requests.
# Probably create a proper "Message" class
def __init__(self, session, requests):
super().__init__()
self.content_related = False
self.session = session
self.requests = requests
def on_send(self, writer):
writer.write_int(MessageContainer.constructor_id, signed=False)
writer.write_int(len(self.requests))
for x in self.requests:
x.request_msg_id = self.session.get_new_msg_id()
writer.write_long(x.request_msg_id)
writer.write_int(
self.session.generate_sequence(x.content_related)
)
packet = x.to_bytes()
writer.write_int(len(packet))
writer.write(packet)
def to_bytes(self):
# TODO Change this to delete the on_send from this class
with BinaryWriter() as writer:
self.on_send(writer)
return writer.get_bytes()
@staticmethod
def iter_read(reader):
reader.read_int(signed=False) # code
size = reader.read_int()
for _ in range(size):
inner_msg_id = reader.read_long()
inner_sequence = reader.read_int()
inner_length = reader.read_int()
yield inner_msg_id, inner_sequence, inner_length
| from . import TLObject
from ..extensions import BinaryWriter
class MessageContainer(TLObject):
constructor_id = 0x8953ad37
# TODO Currently it's a bit of a hack, since the container actually holds
# messages (message id, sequence number, request body), not requests.
# Probably create a proper "Message" class
def __init__(self, session, requests):
super().__init__()
self.content_related = False
self.session = session
self.requests = requests
def on_send(self, writer):
writer.write_int(0x73f1f8dc, signed=False)
writer.write_int(len(self.requests))
for x in self.requests:
x.request_msg_id = self.session.get_new_msg_id()
writer.write_long(x.request_msg_id)
writer.write_int(
self.session.generate_sequence(x.content_related)
)
packet = x.to_bytes()
writer.write_int(len(packet))
writer.write(packet)
def to_bytes(self):
# TODO Change this to delete the on_send from this class
with BinaryWriter() as writer:
self.on_send(writer)
return writer.get_bytes()
@staticmethod
def iter_read(reader):
reader.read_int(signed=False) # code
size = reader.read_int()
for _ in range(size):
inner_msg_id = reader.read_long()
inner_sequence = reader.read_int()
inner_length = reader.read_int()
yield inner_msg_id, inner_sequence, inner_length
| mit | Python |
68738a46d0cea8a46502f487af7d78533b0d771d | define __unicode__ for models | ZebraHat/AgoraD-MarketplaceUI,ZebraHat/AgoraD-MarketplaceUI | marketplace/models.py | marketplace/models.py | #-------------------------------------------------------------------------------
# Name: models
# Purpose: defines models for the system
# Usage: from models import *
#
# Author: Lumate, LLC
#
# Created: 10/25/2013
# Copyright: (c) Lumate, LLC
# Licence: Public/Private
#-------------------------------------------------------------------------------
# Django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django import forms
class Company(models.Model):
# contains many Sellers
name = models.CharField("company name", max_length=64)
#logo = models.ImageField("company logo", blank=True)
link = models.URLField("website link", blank=True)
def __unicode__(self):
return self.name
class User(models.Model):
name = models.CharField("real name", max_length=64, primary_key=True)
email = models.CharField("email address", max_length=128) # used as username to log in
password = forms.PasswordInput()
#class Meta:
# abstract = True
def __unicode__(self):
return self.name
class Buyer(User):
user_id = models.ForeignKey(User, related_name='+')
class Seller(User):
user_id = models.ForeignKey(User, related_name='+')
company = models.ForeignKey(Company)
class Sellable(models.Model):
CATEGORIES = ( # TBD; should sellers be able to add their own?
('ANALYTICS', 'Analytics'),
('INTERESTS', 'Interests'),
('LOCATIONS', 'Locations')
)
# Metadata
seller = models.ForeignKey(Seller)
data_location = models.CharField("data location", max_length=64)
creation_date = models.DateField("creation date")
# List information
title = models.CharField("listing title", max_length=256)
description = models.CharField("listing description", max_length=1024) # probably make textfield instead
for_sale_date = models.DateField("date to start selling") # allow sellers to put data for sale at a future date
category = models.CharField("data category", max_length=1, choices=CATEGORIES)
def data_size(self): # computes data size if json blob/text dump, does something else if url to database
pass
def save(self, *args, **kwargs):
# Whenever a Sellable is modified, queue it up to save *after* any pending transfers of it are finished
# todo
super(Sellable, self).save(*args, **kwargs)
def __unicode__(self):
return self.title
class Transfer(models.Model):
seller = models.ForeignKey(Seller)
buyer = models.ForeignKey(User, related_name='+') # allow buyers and sellers to buy, but only sellers to sell
data = models.ForeignKey(Sellable)
transaction_queued = models.DateField("transfer initiated")
transaction_completed = models.DateField("transfer complete")
def __unicode__(self):
return "transfer from " + self.seller + " to " + self.buyer | #-------------------------------------------------------------------------------
# Name: models
# Purpose: defines models for the system
# Usage: from models import *
#
# Author: Lumate, LLC
#
# Created: 10/25/2013
# Copyright: (c) Lumate, LLC
# Licence: Public/Private
#-------------------------------------------------------------------------------
# Django imports
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django import forms
class Company(models.Model):
# contains many Sellers
name = models.CharField("company name", max_length=64)
#logo = models.ImageField("company logo", blank=True)
link = models.URLField("website link", blank=True)
class User(models.Model):
name = models.CharField("real name", max_length=64, primary_key=True)
email = models.CharField("email address", max_length=128) # used as username to log in
password = forms.PasswordInput()
#class Meta:
# abstract = True
class Buyer(User):
user_id = models.ForeignKey(User, related_name='+')
class Seller(User):
user_id = models.ForeignKey(User, related_name='+')
company = models.ForeignKey(Company)
class Sellable(models.Model):
CATEGORIES = ( # TBD; should sellers be able to add their own?
('ANALYTICS', 'Analytics'),
('INTERESTS', 'Interests'),
('LOCATIONS', 'Locations')
)
# Metadata
seller = models.ForeignKey(Seller)
data_location = models.CharField("data location", max_length=64)
creation_date = models.DateField("creation date")
# List information
title = models.CharField("listing title", max_length=256)
description = models.CharField("listing description", max_length=1024) # probably make textfield instead
for_sale_date = models.DateField("date to start selling") # allow sellers to put data for sale at a future date
category = models.CharField("data category", max_length=1, choices=CATEGORIES)
def data_size(self): # computes data size if json blob/text dump, does something else if url to database
pass
def save(self, *args, **kwargs):
# Whenever a Sellable is modified, queue it up to save *after* any pending transfers of it are finished
# todo
super(Sellable, self).save(*args, **kwargs)
class Transfer(models.Model):
seller = models.ForeignKey(Seller)
buyer = models.ForeignKey(User, related_name='+') # allow buyers and sellers to buy, but only sellers to sell
data = models.ForeignKey(Sellable)
transaction_queued = models.DateField("transfer initiated")
transaction_completed = models.DateField("transfer complete") | lgpl-2.1 | Python |
cab0687f30ccac731497a5c723ba4fb6baa60435 | Reorder import list. | styner9/korail2,styner9/korail2,littmus/korail2,littmus/korail2 | korail2/__init__.py | korail2/__init__.py | # -*- coding: utf-8 -*-
"""
korail2
~~~~~~~
Korail (www.letskorail.com) wrapper for Python.
:copyright: (c) 2014 by Taehoon Kim.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .korail2 import Korail, Passenger, AdultPassenger, ChildPassenger, SeniorPassenger, TrainType, ReserveOption
from .korail2 import KorailError, NeedToLoginError, SoldOutError, NoResultsError
__version__ = '0.0.6'
__all__ = ['Korail', 'Passenger', 'AdultPassenger', 'ChildPassenger', 'SeniorPassenger', 'TrainType', 'ReserveOption',
'KorailError', 'NeedToLoginError', 'SoldOutError', 'NoResultsError']
| # -*- coding: utf-8 -*-
"""
korail2
~~~~~~~
Korail (www.letskorail.com) wrapper for Python.
:copyright: (c) 2014 by Taehoon Kim.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .korail2 import Korail, AdultPassenger, Passenger, ChildPassenger, SeniorPassenger, TrainType, ReserveOption
from .korail2 import KorailError, NeedToLoginError, SoldOutError, NoResultsError
__version__ = '0.0.6'
__all__ = ['Korail', 'AdultPassenger', 'Passenger', 'ChildPassenger', 'SeniorPassenger', 'TrainType', 'ReserveOption',
'KorailError', 'NeedToLoginError', 'SoldOutError', 'NoResultsError']
| bsd-3-clause | Python |
f9ca473abf7aea3cc146badf2d45ae715f635aac | Use correct parameter for HOST and PORT | atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui | kqueen_ui/server.py | kqueen_ui/server.py | from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('HOST'),
port=int(app.config.get('PORT'))
)
| from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('KQUEEN_UI_HOST'),
port=int(app.config.get('KQUEEN_UI_PORT'))
)
| mit | Python |
66522b64f61da31e35cafbfbb16fd0c176c1dbf9 | Add test for registering multiple methods | proxama/zorp | zorp/tests/registry.py | zorp/tests/registry.py | """
Registry tests
"""
import unittest
from registry import Registry, schema_from_function
class TestRegistry(unittest.TestCase):
"""
Test the registry
"""
def setUp(self):
"""
Create a new registry
"""
self.registry = Registry()
self.name = "my func"
self.func = lambda x, y=1: (x + y) * 2
self.schema = schema_from_function(self.func)
self.name2 = "my other func"
self.func2 = lambda z: z * 2
self.schema2 = schema_from_function(self.func2)
def test_init(self):
"""
Check the registry is empty on initialisation
"""
self.assertEqual({}, self.registry.methods)
self.assertEqual({}, self.registry.schemas)
def test_put(self):
"""
Confirm that a function is successfully registered
"""
self.registry.put(self.name, self.func)
self.assertIn(self.name, self.registry.methods)
self.assertIn(self.name, self.registry.schemas)
self.assertEqual(self.func, self.registry.methods[self.name])
self.assertDictEqual(self.schema, self.registry.schemas[self.name])
def test_get(self):
"""
Confirm that getting a function works correctly
"""
self.registry.put(self.name, self.func)
(schema, func) = self.registry.get(self.name)
self.assertEqual(self.func, func)
self.assertDictEqual(self.schema, schema)
def test_multiple_funcs(self):
"""
Test the registry can successfully store multiple functions
"""
self.registry.put(self.name, self.func)
self.registry.put(self.name2, self.func2)
(schema, func) = self.registry.get(self.name)
(schema2, func2) = self.registry.get(self.name2)
self.assertEqual(self.func, func)
self.assertDictEqual(self.schema, schema)
self.assertEqual(self.func2, func2)
self.assertDictEqual(self.schema2, schema2)
| """
Registry tests
"""
import unittest
from registry import Registry, schema_from_function
class TestRegistry(unittest.TestCase):
"""
Test the registry
"""
def setUp(self):
"""
Create a new registry
"""
self.registry = Registry()
self.name = "my func"
self.func = lambda x, y=1: (x + y) * 2
self.schema = schema_from_function(self.func)
def test_init(self):
"""
Check the registry is empty on initialisation
"""
self.assertEqual({}, self.registry.methods)
self.assertEqual({}, self.registry.schemas)
def test_put(self):
"""
Confirm that a function is successfully registered
"""
self.registry.put(self.name, self.func)
self.assertIn(self.name, self.registry.methods)
self.assertIn(self.name, self.registry.schemas)
self.assertEqual(self.func, self.registry.methods[self.name])
self.assertDictEqual(self.schema, self.registry.schemas[self.name])
def test_get(self):
"""
Confirm that getting a function works correctly
"""
self.registry.put(self.name, self.func)
(schema, func) = self.registry.get(self.name)
self.assertEqual(self.func, func)
self.assertDictEqual(self.schema, schema)
| mit | Python |
329586768bc1f502f1e4cd7c41a9e956aa73aeaf | Make PEP8 happy | VirusTotal/misp-modules,MISP/misp-modules,Rafiot/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,Rafiot/misp-modules,MISP/misp-modules,amuehlem/misp-modules,VirusTotal/misp-modules,amuehlem/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules | misp_modules/modules/import_mod/stiximport.py | misp_modules/modules/import_mod/stiximport.py | import json
import base64
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({"values": [attrib.value], "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
| import json
import base64
from pymisp.tools import stix
misperrors = {'error': 'Error'}
userConfig = {}
inputSource = ['file']
moduleinfo = {'version': '0.2', 'author': 'Hannah Ward',
'description': 'Import some stix stuff',
'module-type': ['import']}
moduleconfig = []
def handler(q=False):
# Just in case we have no data
if q is False:
return False
# The return value
r = {'results': []}
# Load up that JSON
q = json.loads(q)
# It's b64 encoded, so decode that stuff
package = base64.b64decode(q.get("data")).decode('utf-8')
# If something really weird happened
if not package:
return json.dumps({"success": 0})
pkg = stix.load_stix(package)
for attrib in pkg.attributes:
r["results"].append({ "values" : [attrib.value] , "types": [attrib.type], "categories": [attrib.category]})
return r
def introspection():
modulesetup = {}
try:
userConfig
modulesetup['userConfig'] = userConfig
except NameError:
pass
try:
inputSource
modulesetup['inputSource'] = inputSource
except NameError:
pass
return modulesetup
def version():
moduleinfo['config'] = moduleconfig
return moduleinfo
| agpl-3.0 | Python |
de962f504db139500573457264a3dd1e257e8cc0 | Allow decorator to be called with optional args | fatboystring/Wagtail-MVC,fatboystring/Wagtail-MVC | wagtail_mvc/decorators.py | wagtail_mvc/decorators.py | # -*- coding: utf-8 -*-
"""
wagtail_mvc decorators
"""
from __future__ import unicode_literals
def wagtail_mvc_url(*decorator_args, **decorator_kwargs):
"""
Decorates an existing method responsible for generating a url
prepends the parent url to the generated url to account for
:param func: The method to decorate
:return: Full url
"""
def decorator(func):
def outer(self, *args, **kwargs):
parent_attr = decorator_kwargs.get('parent_attr')
if parent_attr:
parent = getattr(self, parent_attr, None)
else:
parent = self.get_parent()
parts = parent.url.split('/')
parts += func(self, *args, **kwargs).split('/')
return '/{0}/'.format('/'.join([part for part in parts if part]))
return outer
if len(decorator_args) == 1 and callable(decorator_args[0]):
# We assume the decorator function has not been called
# or passed any arguments and return the result of calling
# the decorator function
return decorator(decorator_args[0])
return decorator
| # -*- coding: utf-8 -*-
"""
wagtail_mvc decorators
"""
from __future__ import unicode_literals
def wagtail_mvc_url(func):
"""
Decorates an existing method responsible for generating a url
prepends the parent url to the generated url to account for
:param func: The method to decorate
:return: Full url
"""
def outer(self, *args, **kwargs):
parts = self.get_parent().url.split('/')
parts += func(self, *args, **kwargs).split('/')
return '/{0}/'.format('/'.join([part for part in parts if part]))
return outer
| mit | Python |
a67b97d1ae3917fb228c0576fb30c8098f5443c6 | make random return PageInfos instead of PageIdentifiers | mahmoud/wapiti | wapiti/operations/rand.py | wapiti/operations/rand.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from base import QueryOperation, QueryLimit
from params import StaticParam
from models import PageInfo
from utils import OperationExample
class GetRandom(QueryOperation):
"""
Fetch random pages using MediaWiki's Special:Random.
"""
field_prefix = 'grn'
fields = [StaticParam('generator', 'random'),
StaticParam('prop', 'info'),
StaticParam('inprop', 'subjectid|talkid|protection')]
input_field = None
output_type = [PageInfo]
per_query_limit = QueryLimit(10, 20)
examples = [OperationExample('basic random')]
def extract_results(self, query_resp):
ret = []
for k, pid_dict in query_resp['pages'].iteritems():
page_info = PageInfo.from_query(pid_dict,
source=self.source)
ret.append(page_info)
return ret
def get_cont_str(self, *a, **kw):
return ''
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from base import QueryOperation, QueryLimit
from params import StaticParam
from models import PageIdentifier
from utils import OperationExample
class GetRandom(QueryOperation):
"""
Fetch random pages using MediaWiki's Special:Random.
"""
field_prefix = 'grn'
fields = [StaticParam('generator', 'random'),
StaticParam('prop', 'info'),
StaticParam('inprop', 'subjectid|talkid|protection')]
input_field = None
output_type = [PageIdentifier]
per_query_limit = QueryLimit(10, 20)
examples = [OperationExample('basic random')]
def extract_results(self, query_resp):
ret = []
for k, pid_dict in query_resp['pages'].iteritems():
try:
page_ident = PageIdentifier.from_query(pid_dict,
source=self.source)
except ValueError:
continue
ret.append(page_ident)
return ret
def get_cont_str(self, *a, **kw):
return ''
| bsd-3-clause | Python |
b549bc3e46d309efa31a65160f7091c4869f34d2 | Enable debug | ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo | web/web/settings/arnes.py | web/web/settings/arnes.py | from common import *
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = True
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = ['www.projekt-tomo.si']
WSGI_APPLICATION = 'web.wsgi.dev.application'
INSTALLED_APPS += (
'shibboleth',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'tomo',
'USER': 'tomo',
'PASSWORD': 'tomo',
'HOST': 'db',
'PORT': '',
}
}
STATIC_ROOT = '/home/tomo/projekt-tomo/web/static'
STATIC_URL = '/static/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'shibboleth.middleware.ShibbolethRemoteUserMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'simple_history.middleware.HistoryRequestMiddleware'
)
SHIBBOLETH_ATTRIBUTE_MAP = {
"shib-user": (True, "username"),
"shib-given-name": (True, "first_name"),
"shib-sn": (True, "last_name"),
"shib-mail": (False, "email"),
}
LOGIN_REDIRECT_URL = '/'
SUBMISSION_URL = 'https://www.projekt-tomo.si'
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'social.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend',
'shibboleth.backends.ShibbolethRemoteUserBackend',
)
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY']
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ['SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET']
SOCIAL_AUTH_FACEBOOK_KEY = os.environ['SOCIAL_AUTH_FACEBOOK_KEY']
SOCIAL_AUTH_FACEBOOK_SECRET = os.environ['SOCIAL_AUTH_FACEBOOK_SECRET']
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_USER_MODEL = 'users.User'
| from common import *
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = ['www.projekt-tomo.si']
WSGI_APPLICATION = 'web.wsgi.dev.application'
INSTALLED_APPS += (
'shibboleth',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'tomo',
'USER': 'tomo',
'PASSWORD': 'tomo',
'HOST': 'db',
'PORT': '',
}
}
STATIC_ROOT = '/home/tomo/projekt-tomo/web/static'
STATIC_URL = '/static/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'shibboleth.middleware.ShibbolethRemoteUserMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'simple_history.middleware.HistoryRequestMiddleware'
)
SHIBBOLETH_ATTRIBUTE_MAP = {
"shib-user": (True, "username"),
"shib-given-name": (True, "first_name"),
"shib-sn": (True, "last_name"),
"shib-mail": (False, "email"),
}
LOGIN_REDIRECT_URL = '/'
SUBMISSION_URL = 'https://www.projekt-tomo.si'
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'social.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend',
'shibboleth.backends.ShibbolethRemoteUserBackend',
)
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = os.environ['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY']
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = os.environ['SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET']
SOCIAL_AUTH_FACEBOOK_KEY = os.environ['SOCIAL_AUTH_FACEBOOK_KEY']
SOCIAL_AUTH_FACEBOOK_SECRET = os.environ['SOCIAL_AUTH_FACEBOOK_SECRET']
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
SOCIAL_AUTH_USER_MODEL = 'users.User'
| agpl-3.0 | Python |
232bc2bb83190482c1125ca5879ffb6f11d67b40 | Fix logging for testing environment | dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server | puzzlehunt_server/settings/travis_settings.py | puzzlehunt_server/settings/travis_settings.py | from .base_settings import *
import os
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'puzzlehunt_db',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '',
'OPTIONS': {'charset': 'utf8mb4'},
}
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
} | from .base_settings import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'puzzlehunt_db',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '',
'OPTIONS': {'charset': 'utf8mb4'},
}
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
| mit | Python |
cc911a308e70637b491943d8d4ff3531f30b0880 | Set beta_features to True. | rowhit/h2o-2,vbelakov/h2o,100star/h2o,eg-zhang/h2o-2,elkingtonmcb/h2o-2,elkingtonmcb/h2o-2,calvingit21/h2o-2,111t8e/h2o-2,h2oai/h2o,111t8e/h2o-2,100star/h2o,100star/h2o,h2oai/h2o-2,eg-zhang/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,111t8e/h2o-2,111t8e/h2o-2,eg-zhang/h2o-2,h2oai/h2o-2,h2oai/h2o,h2oai/h2o-2,111t8e/h2o-2,vbelakov/h2o,h2oai/h2o,h2oai/h2o,calvingit21/h2o-2,elkingtonmcb/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,100star/h2o,eg-zhang/h2o-2,vbelakov/h2o,rowhit/h2o-2,rowhit/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,rowhit/h2o-2,rowhit/h2o-2,eg-zhang/h2o-2,calvingit21/h2o-2,h2oai/h2o,eg-zhang/h2o-2,100star/h2o,vbelakov/h2o,vbelakov/h2o,h2oai/h2o-2,vbelakov/h2o,100star/h2o,rowhit/h2o-2,h2oai/h2o-2,100star/h2o,calvingit21/h2o-2,eg-zhang/h2o-2,eg-zhang/h2o-2,100star/h2o,111t8e/h2o-2,elkingtonmcb/h2o-2,vbelakov/h2o,h2oai/h2o-2,100star/h2o,h2oai/h2o,eg-zhang/h2o-2,calvingit21/h2o-2,rowhit/h2o-2,rowhit/h2o-2,rowhit/h2o-2,calvingit21/h2o-2,calvingit21/h2o-2,vbelakov/h2o,calvingit21/h2o-2,h2oai/h2o,111t8e/h2o-2,eg-zhang/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,vbelakov/h2o,vbelakov/h2o,h2oai/h2o,h2oai/h2o,rowhit/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o,elkingtonmcb/h2o-2 | py/testdir_single_jvm/rf_VA_simple_example.py | py/testdir_single_jvm/rf_VA_simple_example.py | import sys
import json
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_import as h2i
#
# This is intended to be the simplest possible RF example.
# Look at sandbox/commands.log for REST API requests to H2O.
#
print "--------------------------------------------------------------------------------"
print "BUILDING CLOUD"
print "--------------------------------------------------------------------------------"
h2o.parse_our_args()
h2o.build_cloud(node_count=2, java_heap_GB=2)
# False == Use VA form of algorithms (when available) (e.g. RF1).
# True == Use FVec form of algorithm (e.g. DRF2).
h2o.beta_features = True
print "--------------------------------------------------------------------------------"
print "PARSING DATASET"
print "--------------------------------------------------------------------------------"
#
# What this really ends up doing is a REST API PostFile.json request.
#
csvPathname = 'iris/iris2.csv'
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
print "--------------------------------------------------------------------------------"
print "RUNNING RF"
print "--------------------------------------------------------------------------------"
#
# For valid kwargs, look at h2o.py random_forest() params_dict variable.
# beta_features==False means Value Array (e.g. RF1).
# beta_features==True means Fluid Vec (e.g. DRF2).
#
timeoutSecs = 20
if (h2o.beta_features):
kwargs = {"ntrees": 6}
else:
kwargs = {"ntree": 6}
rf_json_response = h2o_cmd.runRF(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
print json.dumps(rf_json_response, indent=4)
print "--------------------------------------------------------------------------------"
print "SHUTTING DOWN"
print "--------------------------------------------------------------------------------"
h2o.check_sandbox_for_errors()
h2o.tear_down_cloud()
| import sys
import json
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_import as h2i
#
# This is intended to be the simplest possible RF example.
# Look at sandbox/commands.log for REST API requests to H2O.
#
print "--------------------------------------------------------------------------------"
print "BUILDING CLOUD"
print "--------------------------------------------------------------------------------"
h2o.parse_our_args()
h2o.build_cloud(node_count=2, java_heap_GB=2)
# False == Use VA form of algorithms (when available) (e.g. RF1).
# True == Use FVec form of algorithm (e.g. DRF2).
h2o.beta_features = False
print "--------------------------------------------------------------------------------"
print "PARSING DATASET"
print "--------------------------------------------------------------------------------"
#
# What this really ends up doing is a REST API PostFile.json request.
#
csvPathname = 'iris/iris2.csv'
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
print "--------------------------------------------------------------------------------"
print "RUNNING RF"
print "--------------------------------------------------------------------------------"
#
# For valid kwargs, look at h2o.py random_forest() params_dict variable.
# beta_features==False means Value Array (e.g. RF1).
# beta_features==True means Fluid Vec (e.g. DRF2).
#
timeoutSecs = 20
if (h2o.beta_features):
kwargs = {"ntrees": 6}
else:
kwargs = {"ntree": 6}
rf_json_response = h2o_cmd.runRF(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
print json.dumps(rf_json_response, indent=4)
print "--------------------------------------------------------------------------------"
print "SHUTTING DOWN"
print "--------------------------------------------------------------------------------"
h2o.check_sandbox_for_errors()
h2o.tear_down_cloud()
| apache-2.0 | Python |
dd66c117b938e2a908a915f6da3b1483bf7a9054 | Improve help plugin | woohgit/will,skoczen/will,chillipeper/will,skoczen/will,chillipeper/will,chillipeper/will,woohgit/will,woohgit/will,wontonst/will,wontonst/will,mike-love/will,mike-love/will,wontonst/will,mike-love/will,skoczen/will | will/plugins/help/help.py | will/plugins/help/help.py | from will.plugin import WillPlugin
from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template
class HelpPlugin(WillPlugin):
@respond_to("^help(?: (?P<plugin>.*))?$")
def help(self, message, plugin=None):
"""help: the normal help you're reading."""
# help_data = self.load("help_files")
selected_modules = help_modules = self.load("help_modules")
self.say("Sure thing, %s." % message.sender.nick, message=message)
help_text = "Here's what I know how to do:"
if plugin and help_modules.has_key(plugin):
help_text = "Here's what I know how to do about %s:" % plugin
selected_modules = dict()
selected_modules[plugin] = help_modules[plugin]
for k in sorted(selected_modules, key=lambda x: x[0]):
help_data = selected_modules[k]
if help_data and len(help_data) > 0:
help_text += "<br/><br/><b>%s</b>:" % k
for line in help_data:
if line:
if ":" in line:
line = " <b>%s</b>%s" % (line[:line.find(":")], line[line.find(":"):])
help_text += "<br/> %s" % line
self.say(help_text, message=message, html=True)
| from will.plugin import WillPlugin
from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template
class HelpPlugin(WillPlugin):
@respond_to("^help$")
def help(self, message):
"""help: the normal help you're reading."""
# help_data = self.load("help_files")
help_modules = self.load("help_modules")
self.say("Sure thing, %s." % message.sender.nick, message=message)
help_text = "Here's what I know how to do:"
for k in sorted(help_modules, key=lambda x: x[0]):
help_data = help_modules[k]
if help_data and len(help_data) > 0:
help_text += "<br/><br/><b>%s</b>:" % k
for line in help_data:
if line:
if ":" in line:
line = " <b>%s</b>%s" % (line[:line.find(":")], line[line.find(":"):])
help_text += "<br/> %s" % line
self.say(help_text, message=message, html=True)
| mit | Python |
6ce5a8d1a2ea881592a264b7597536cddf07a00d | Fix a bug in check_secret | ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo | web/problems/models.py | web/problems/models.py | import json
from django.core.exceptions import ValidationError
from django.db import models
def shorten(s, max_length=50):
if len(s) < max_length:
return s
else:
return u'{0}...'.format(s[:50])
def is_json_string_list(s):
try:
val = json.loads(s)
except ValueError:
raise ValidationError('Not a JSON value.')
if type(val) is not list:
raise ValidationError('Not a JSON list.')
for x in val:
if type(x) is not unicode:
raise ValidationError('Not a JSON list of strings.')
class Problem(models.Model):
title = models.CharField(max_length=70)
description = models.TextField(blank=True)
def __unicode__(self):
return self.title
class Part(models.Model):
problem = models.ForeignKey(Problem, related_name='parts')
description = models.TextField(blank=True)
solution = models.TextField(blank=True)
validation = models.TextField(blank=True)
secret = models.TextField(default="[]", validators=[is_json_string_list])
class Meta:
order_with_respect_to = 'problem'
def __unicode__(self):
description = shorten(self.description)
return u'{0}/#{1:06d} ({2})'.format(self.problem, self.id, description)
def check_secret(self, secret):
'''
Checks whether a submitted secret corresponds to the official one.
The function accepts a secret (list of strings) and returns the pair:
True, None -- if secret matches the official one
False, None -- if secret has an incorrect length
False, i -- if secret first differs from the official one at index i
'''
official_secret = json.loads(self.secret)
if len(official_secret) != len(secret):
return False, None
for s1, (i, s2) in zip(official_secret, enumerate(secret)):
if s1 != s2:
return False, i
return True, None
| import json
from django.core.exceptions import ValidationError
from django.db import models
def shorten(s, max_length=50):
if len(s) < max_length:
return s
else:
return u'{0}...'.format(s[:50])
def is_json_string_list(s):
try:
val = json.loads(s)
except ValueError:
raise ValidationError('Not a JSON value.')
if type(val) is not list:
raise ValidationError('Not a JSON list.')
for x in val:
if type(x) is not unicode:
raise ValidationError('Not a JSON list of strings.')
class Problem(models.Model):
title = models.CharField(max_length=70)
description = models.TextField(blank=True)
def __unicode__(self):
return self.title
class Part(models.Model):
problem = models.ForeignKey(Problem, related_name='parts')
description = models.TextField(blank=True)
solution = models.TextField(blank=True)
validation = models.TextField(blank=True)
secret = models.TextField(default="[]", validators=[is_json_string_list])
class Meta:
order_with_respect_to = 'problem'
def __unicode__(self):
description = shorten(self.description)
return u'{0}/#{1:06d} ({2})'.format(self.problem, self.id, description)
def check_secret(self, secret):
'''
Checks whether a submitted secret corresponds to the official one.
The function accepts a secret (list of strings) and returns the pair:
True, None -- if secret matches the official one
False, None -- if secret has an incorrect length
False, i -- if secret first differs from the official one at index i
'''
official_secret = json.loads(self.secret)
if len(official_secret) != len(secret):
return False, None
for s1, (s2, i) in zip(official_secret, enumerate(secret)):
if s1 != s2:
return False, i
return True, None
| agpl-3.0 | Python |
ec79afeb0373e0155840bef4dcb84020bcc46542 | Fix bug in read_write_artifact_cache. | tdyas/pants,jessrosenfield/pants,cevaris/pants,wisechengyi/pants,TansyArron/pants,fkorotkov/pants,landism/pants,dturner-tw/pants,pantsbuild/pants,peiyuwang/pants,fkorotkov/pants,pombredanne/pants,manasapte/pants,15Dkatz/pants,manasapte/pants,landism/pants,benjyw/pants,square/pants,scode/pants,jessrosenfield/pants,di0spyr0s/pants,areitz/pants,ity/pants,mateor/pants,ericzundel/pants,mateor/pants,twitter/pants,dgomez10/xanon,qma/pants,foursquare/pants,di0spyr0s/pants,cevaris/pants,landism/pants,dgomez10/xanon,peiyuwang/pants,ity/pants,tejal29/pants,ericzundel/pants,fkorotkov/pants,megaserg/pants,megaserg/pants,landism/pants,dgomez10/xanon,wisechengyi/pants,kslundberg/pants,ity/pants,dturner-tw/pants,15Dkatz/pants,jtrobec/pants,mateor/pants,scode/pants,lahosken/pants,foursquare/pants,ity/pants,scode/pants,laurentgo/pants,dbentley/pants,pgroudas/pants,tdyas/pants,tejal29/pants,TansyArron/pants,pantsbuild/pants,cevaris/pants,twitter/pants,TansyArron/pants,foursquare/pants,laurentgo/pants,landism/pants,jessrosenfield/pants,baroquebobcat/pants,dgomez10/xanon,UnrememberMe/pants,TansyArron/pants,areitz/pants,manasapte/pants,sameerparekh/pants,areitz/pants,kwlzn/pants,ity/pants,dturner-tw/pants,mateor/pants,kslundberg/pants,jsirois/pants,pombredanne/pants,qma/pants,foursquare/pants,gmalmquist/pants,slyphon/pants,wisechengyi/pants,15Dkatz/pants,twitter/pants,digwanderlust/pants,15Dkatz/pants,twitter/pants,TansyArron/pants,megaserg/pants,TansyArron/pants,lahosken/pants,ity/pants,lahosken/pants,megaserg/pants,di0spyr0s/pants,Gabriel439/pants,wisechengyi/pants,slyphon/pants,ericzundel/pants,ericzundel/pants,di0spyr0s/pants,dturner-tw/pants,pgroudas/pants,jsirois/pants,qma/pants,digwanderlust/pants,pantsbuild/pants,peiyuwang/pants,ericzundel/pants,dbentley/pants,areitz/pants,UnrememberMe/pants,square/pants,sameerparekh/pants,twitter/pants,pombredanne/pants,tejal29/pants,digwanderlust/pants,megaserg/p
ants,benjyw/pants,pgroudas/pants,dturner-tw/pants,pantsbuild/pants,peiyuwang/pants,cevaris/pants,baroquebobcat/pants,slyphon/pants,ity/pants,pantsbuild/pants,kslundberg/pants,UnrememberMe/pants,manasapte/pants,UnrememberMe/pants,laurentgo/pants,gmalmquist/pants,benjyw/pants,slyphon/pants,dbentley/pants,sid-kap/pants,jessrosenfield/pants,sid-kap/pants,TansyArron/pants,qma/pants,tdyas/pants,tdyas/pants,benjyw/pants,foursquare/pants,baroquebobcat/pants,UnrememberMe/pants,Gabriel439/pants,ericzundel/pants,fkorotkov/pants,pombredanne/pants,megaserg/pants,kslundberg/pants,dbentley/pants,Gabriel439/pants,areitz/pants,jtrobec/pants,ericzundel/pants,wisechengyi/pants,scode/pants,peiyuwang/pants,areitz/pants,lahosken/pants,jtrobec/pants,slyphon/pants,laurentgo/pants,wisechengyi/pants,sid-kap/pants,lahosken/pants,15Dkatz/pants,dgomez10/xanon,mateor/pants,baroquebobcat/pants,baroquebobcat/pants,sameerparekh/pants,landism/pants,foursquare/pants,UnrememberMe/pants,fkorotkov/pants,tdyas/pants,pgroudas/pants,laurentgo/pants,dbentley/pants,dturner-tw/pants,lahosken/pants,jtrobec/pants,wisechengyi/pants,benjyw/pants,sid-kap/pants,cevaris/pants,mateor/pants,qma/pants,di0spyr0s/pants,baroquebobcat/pants,mateor/pants,UnrememberMe/pants,sid-kap/pants,dgomez10/xanon,tdyas/pants,digwanderlust/pants,lahosken/pants,cevaris/pants,jessrosenfield/pants,landism/pants,kwlzn/pants,kslundberg/pants,UnrememberMe/pants,dgomez10/xanon,peiyuwang/pants,fkorotkov/pants,lahosken/pants,15Dkatz/pants,jtrobec/pants,di0spyr0s/pants,di0spyr0s/pants,megaserg/pants,qma/pants,dturner-tw/pants,kwlzn/pants,pombredanne/pants,gmalmquist/pants,mateor/pants,manasapte/pants,baroquebobcat/pants,gmalmquist/pants,dbentley/pants,wisechengyi/pants,foursquare/pants,dgomez10/xanon,sid-kap/pants,square/pants,qma/pants,baroquebobcat/pants,pgroudas/pants,manasapte/pants,Gabriel439/pants,sameerparekh/pants,Gabriel439/pants,UnrememberMe/pants,baroquebobcat/pants,foursquare/pants,sameerparekh/pants,peiyuwang/pants,jsirois/pants,digw
anderlust/pants,peiyuwang/pants,15Dkatz/pants,pantsbuild/pants,kslundberg/pants,sid-kap/pants,pgroudas/pants,tejal29/pants,slyphon/pants,scode/pants,sameerparekh/pants,manasapte/pants,Gabriel439/pants,dbentley/pants,pantsbuild/pants,benjyw/pants,twitter/pants,tejal29/pants,gmalmquist/pants,kwlzn/pants,cevaris/pants,15Dkatz/pants,fkorotkov/pants,gmalmquist/pants,pombredanne/pants,digwanderlust/pants,dgomez10/xanon,kslundberg/pants,twitter/pants,square/pants,Gabriel439/pants,tdyas/pants,twitter/pants,kwlzn/pants,ericzundel/pants,jessrosenfield/pants,foursquare/pants,twitter/pants,jtrobec/pants,scode/pants,fkorotkov/pants,slyphon/pants,jtrobec/pants,areitz/pants,tdyas/pants,gmalmquist/pants,jessrosenfield/pants,kwlzn/pants,laurentgo/pants,dgomez10/xanon,kwlzn/pants,landism/pants,benjyw/pants,wisechengyi/pants,sameerparekh/pants,dgomez10/xanon,tdyas/pants,pombredanne/pants,scode/pants | src/python/twitter/pants/cache/read_write_artifact_cache.py | src/python/twitter/pants/cache/read_write_artifact_cache.py | from twitter.pants.cache.artifact_cache import ArtifactCache
class ReadWriteArtifactCache(ArtifactCache):
  """An artifact cache that routes reads to one delegate cache and writes to another.

  The name is slightly misleading: both underlying caches are themselves full
  read-write caches; this wrapper merely directs each operation.
  """
  def __init__(self, read_artifact_cache, write_artifact_cache):
    """Either cache may be None, in which case that direction is a no-op.

    Raises ValueError if both caches are given but disagree on artifact_root.
    """
    present = [c for c in (read_artifact_cache, write_artifact_cache) if c is not None]
    roots = [c.artifact_root for c in present]
    logs = [c.log for c in present]
    if not roots:
      # Parent will never be accessed in this case, so None placeholders are safe.
      artifact_root = None
      log = None
    else:
      artifact_root = roots[0]
      log = logs[0]
      if len(roots) > 1 and roots[1] != artifact_root:
        raise ValueError('Read and write artifact caches must have the same artifact root.')
    ArtifactCache.__init__(self, log, artifact_root)
    self._read_artifact_cache = read_artifact_cache
    self._write_artifact_cache = write_artifact_cache
  def insert(self, cache_key, paths):
    """Forward the insert to the write cache, if any."""
    write_cache = self._write_artifact_cache
    if write_cache:
      write_cache.insert(cache_key, paths)
  def has(self, cache_key):
    """Ask the read cache; report False when there is no read cache."""
    read_cache = self._read_artifact_cache
    return read_cache.has(cache_key) if read_cache else False
  def use_cached_files(self, cache_key):
    """Delegate to the read cache; None when there is no read cache."""
    read_cache = self._read_artifact_cache
    return read_cache.use_cached_files(cache_key) if read_cache else None
  def delete(self, cache_key):
    """Forward the delete to the write cache, if any."""
    write_cache = self._write_artifact_cache
    if write_cache:
      write_cache.delete(cache_key)
  def prune(self, age_hours):
    """Forward the prune to the write cache, if any."""
    write_cache = self._write_artifact_cache
    if write_cache:
      write_cache.prune(age_hours)
| from twitter.pants.cache.artifact_cache import ArtifactCache
class ReadWriteArtifactCache(ArtifactCache):
  """An artifact cache that delegates to one cache for reading and another for writing.

  The name is slightly misleading: all caches are read-write. But I couldn't think
  of a better one.
  """
  def __init__(self, read_artifact_cache, write_artifact_cache):
    """Either cache can be None, in which case we don't read from/write to it.

    Raises ValueError if both caches are given but disagree on artifact_root.
    """
    # Bug fix: previously both caches were dereferenced unconditionally, so
    # passing None for either raised AttributeError, contradicting the
    # documented contract above.
    artifact_roots = []
    logs = []
    for cache in (read_artifact_cache, write_artifact_cache):
      if cache is not None:
        artifact_roots.append(cache.artifact_root)
        logs.append(cache.log)
    if not artifact_roots:
      # The parent's root/log are never used when there are no delegates.
      artifact_root = None
      log = None
    else:
      artifact_root = artifact_roots[0]
      log = logs[0]
      if len(artifact_roots) > 1 and artifact_roots[1] != artifact_root:
        raise ValueError('Read and write artifact caches must have the same artifact root.')
    ArtifactCache.__init__(self, log, artifact_root)
    self._read_artifact_cache = read_artifact_cache
    self._write_artifact_cache = write_artifact_cache
  def insert(self, cache_key, paths):
    """Store paths under cache_key in the write cache, if any."""
    if self._write_artifact_cache:
      self._write_artifact_cache.insert(cache_key, paths)
  def has(self, cache_key):
    """Check the read cache for cache_key; False if there is no read cache."""
    if self._read_artifact_cache:
      return self._read_artifact_cache.has(cache_key)
    else:
      return False
  def use_cached_files(self, cache_key):
    """Delegate to the read cache; None if there is no read cache."""
    if self._read_artifact_cache:
      return self._read_artifact_cache.use_cached_files(cache_key)
    else:
      return None
  def delete(self, cache_key):
    """Delete cache_key from the write cache, if any."""
    if self._write_artifact_cache:
      self._write_artifact_cache.delete(cache_key)
  def prune(self, age_hours):
    """Prune old entries from the write cache, if any."""
    if self._write_artifact_cache:
      self._write_artifact_cache.prune(age_hours)
| apache-2.0 | Python |
18d8f09795de8e7315f4f5fc63f0e7caf23bfaa4 | Refactor cleanse_csv_data | HackBrexit/MinistersUnderTheInfluence,HackBrexit/MinistersUnderTheInfluence,HackBrexit/MinistersUnderTheInfluence,HackBrexit/MinistersUnderTheInfluence,HackBrexit/MinistersUnderTheInfluence,HackBrexit/MinistersUnderTheInfluence | src/python/datapreprocessor/datapreprocessor/csvcleanser.py | src/python/datapreprocessor/datapreprocessor/csvcleanser.py | UNWANTED_SPECIAL_CHARS = "".join([
'\xb1', '\xb2'
])
def is_row_empty(row):
    """Return True when every cell in the row is blank (falsy)."""
    return all(not cell for cell in row)
def remove_excess_columns_from_row(row):
    """Return only the first four columns; anything beyond is discarded."""
    COLUMN_LIMIT = 4
    return row[:COLUMN_LIMIT]
def remove_extra_whitespace_from_row(row):
    """Strip leading/trailing whitespace from every cell of the row."""
    stripped = []
    for cell in row:
        stripped.append(cell.strip())
    return stripped
def remove_special_chars_from_row(row):
    """Delete the unwanted special characters from every cell.

    Uses the Python 2 two-argument str.translate (table=None, deletechars).
    """
    cleaned = []
    for cell in row:
        cleaned.append(cell.translate(None, UNWANTED_SPECIAL_CHARS))
    return cleaned
def is_row_boilerplate(row):
    """Return True for header, 'Note' and disclaimer rows that carry no data."""
    first = row[0]
    if first == 'Minister' and row[1] == 'Date':
        return True
    if first == 'Note':
        return True
    return 'Does not normally include' in first
def cleanse_row(row):
    """Normalise one CSV row; return None when nothing useful remains."""
    # Same pipeline order as before: truncate, de-special-char, de-whitespace.
    trimmed = remove_extra_whitespace_from_row(
        remove_special_chars_from_row(
            remove_excess_columns_from_row(row)))
    if is_row_empty(trimmed) or is_row_boilerplate(trimmed):
        return None
    return trimmed
def cleanse_csv_data(file_contents):
    """Cleanse every row, dropping the rows that cleanse_row rejected."""
    cleansed_rows = [cleanse_row(row) for row in file_contents]
    return filter(None, cleansed_rows)
| UNWANTED_SPECIAL_CHARS = "".join([
'\xb1', '\xb2'
])
def is_row_empty(row):
    """Return True when no cell in the row holds a truthy value."""
    for cell in row:
        if cell:
            return False
    return True
def remove_empty_lines(rows):
    """Drop rows whose cells are all empty.

    Delegates to the generic remove_line filter for consistency with
    remove_boilerplate, instead of duplicating the filtering loop inline.
    """
    return remove_line(is_row_empty, rows)
def remove_excess_columns_from_row(row):
    """Truncate the row to its first four columns."""
    first_four = row[:4]
    return first_four
def remove_empty_columns(rows):
    """Apply the column cut-off to every row."""
    truncated = []
    for row in rows:
        truncated.append(remove_excess_columns_from_row(row))
    return truncated
def remove_extra_whitespace_from_row(row):
    """Trim surrounding whitespace from each cell of the row."""
    return [value.strip() for value in row]
def remove_whitespace(rows):
    """Strip extra whitespace from every cell of every row."""
    cleaned_rows = []
    for row in rows:
        cleaned_rows.append(remove_extra_whitespace_from_row(row))
    return cleaned_rows
def remove_special_chars_from_row(row):
    """Delete the unwanted special characters from each cell.

    Uses the Python 2 two-argument str.translate (table=None, deletechars).
    """
    return [value.translate(None, UNWANTED_SPECIAL_CHARS) for value in row]
def remove_special_chars(rows):
    """Remove unwanted special characters from every row."""
    result = []
    for row in rows:
        result.append(remove_special_chars_from_row(row))
    return result
def remove_line(predicate, lines):
    """Return lines without the entries for which predicate is true."""
    kept = []
    for line in lines:
        if not predicate(line):
            kept.append(line)
    return kept
def is_row_boilerplate(row):
    """Return True for header, 'Note' and disclaimer rows that carry no data."""
    is_header = row[0] == 'Minister' and row[1] == 'Date'
    is_note = row[0] == 'Note'
    is_disclaimer = 'Does not normally include' in row[0]
    return is_header or is_note or is_disclaimer
def remove_boilerplate(rows):
    """Filter out boilerplate rows (headers, notes, disclaimers)."""
    boilerplate_free = remove_line(is_row_boilerplate, rows)
    return boilerplate_free
def cleanse_csv_data(file_contents):
    """Run the full cleansing pipeline over the parsed CSV rows.

    Steps run in the same order as before: empty lines, excess columns,
    whitespace, special characters, boilerplate.
    """
    pipeline = (
        remove_empty_lines,
        remove_empty_columns,
        remove_whitespace,
        remove_special_chars,
        remove_boilerplate,
    )
    rows = file_contents
    for step in pipeline:
        rows = step(rows)
    return rows
| mit | Python |
15206298edfd988922654293e1c4cc91cd687044 | Revert [8799]. That wasn't ready for prime-time yet -- thanks, git-svn! | hybrideagle/django,jeezybrick/django,jgoclawski/django,craynot/django,taaviteska/django,barbuza/django,gohin/django,YangSongzhou/django,gchp/django,sadaf2605/django,tragiclifestories/django,henryfjordan/django,akaihola/django,andela-ooladayo/django,akaariai/django,helenst/django,alexallah/django,MarcJoan/django,jenalgit/django,MarcJoan/django,hcsturix74/django,TimBuckley/effective_django,salamer/django,felixjimenez/django,django/django,ar45/django,archen/django,frePPLe/django,rynomster/django,georgemarshall/django,jylaxp/django,gengue/django,Nepherhotep/django,davgibbs/django,stewartpark/django,MoritzS/django,taaviteska/django,ajaali/django,sopier/django,avanov/django,xrmx/django,Korkki/django,rsvip/Django,shacker/django,gchp/django,webgeodatavore/django,shaistaansari/django,mcardillo55/django,sgzsh269/django,vincepandolfo/django,Endika/django,druuu/django,andela-ifageyinbo/django,follow99/django,tanmaythakur/django,saydulk/django,HonzaKral/django,sgzsh269/django,leereilly/django-1,andresgz/django,ziima/django,chyeh727/django,marissazhou/django,shaib/django,rmboggs/django,mcrowson/django,syaiful6/django,mattseymour/django,NullSoldier/django,jenalgit/django,carljm/django,zanderle/django,feroda/django,seocam/django,wweiradio/django,henryfjordan/django,ironbox360/django,elky/django,extremewaysback/django,scorphus/django,davgibbs/django,mcella/django,MikeAmy/django,JorgeCoock/django,simone/django-gb,GaussDing/django,RossBrunton/django,jvkops/django,jasonwzhy/django,beni55/django,wsmith323/django,yigitguler/django,solarissmoke/django,ojengwa/django-1,MarkusH/django,gdi2290/django,divio/django,EmadMokhtar/Django,elkingtonmcb/django,phalt/django,lisael/pg-django,Korkki/django,double-y/django,helenst/django,yakky/django,delhivery/django,EliotBerriot/django,HousekeepLtd/django,willharris/django,marqueedev/django,GhostThrone/django,matiasb/django,coldmind/djang
o,mitya57/django,fafaman/django,MounirMesselmeni/django,ghickman/django,kennethlove/django,peterlauri/django,rtindru/django,hnakamur/django,follow99/django,bspink/django,Nepherhotep/django,savoirfairelinux/django,github-account-because-they-want-it/django,quxiaolong1504/django,fpy171/django,charettes/django,avneesh91/django,avanov/django,Anonymous-X6/django,payeldillip/django,reinout/django,jdelight/django,eyohansa/django,akaariai/django,gannetson/django,jyotsna1820/django,MikeAmy/django,gdub/django,mttr/django,jpic/django,h4r5h1t/django-hauthy,riklaunim/django-custom-multisite,ABaldwinHunter/django-clone,nhippenmeyer/django,akaihola/django,ryanahall/django,mrfuxi/django,hynekcer/django,mshafiq9/django,jpic/django,bobcyw/django,marcelocure/django,Yong-Lee/django,anant-dev/django,rapilabs/django,elkingtonmcb/django,vitaly4uk/django,yask123/django,nielsvanoch/django,JorgeCoock/django,frankvdp/django,x111ong/django,BrotherPhil/django,jgoclawski/django,ckirby/django,HonzaKral/django,dbaxa/django,bikong2/django,mttr/django,huang4fstudio/django,gdi2290/django,dpetzold/django,bspink/django,zhoulingjun/django,rwillmer/django,himleyb85/django,yewang15215/django,treyhunner/django,oinopion/django,duqiao/django,robhudson/django,dfdx2/django,ar45/django,AndrewGrossman/django,gchp/django,x111ong/django,tayfun/django,edmorley/django,claudep/django,ericfc/django,jdelight/django,ziima/django,beckastar/django,jhg/django,jmcarp/django,EliotBerriot/django,andela-ooladayo/django,techdragon/django,lwiecek/django,abomyi/django,rmboggs/django,frishberg/django,pquentin/django,DasIch/django,follow99/django,Proggie02/TestRepo,jylaxp/django,Adnn/django,rwillmer/django,cainmatt/django,RevelSystems/django,jgoclawski/django,programadorjc/django,quamilek/django,charettes/django,intgr/django,DasIch/django,ironbox360/django,chrishas35/django-travis-ci,jvkops/django,mmardini/django,imtapps/django-imt-fork,Matt-Deacalion/django,redhat-openstack/django,gchp/django,dgladkov/django,ataylor32/django,caoti
anwei/django,auvipy/django,sadaf2605/django,camilonova/django,kevintaw/django,delinhabit/django,moreati/django,risicle/django,dsanders11/django,mattrobenolt/django,ptoraskar/django,varunnaganathan/django,hackerbot/DjangoDev,labcodes/django,varunnaganathan/django,takeshineshiro/django,googleinterns/django,druuu/django,avneesh91/django,mattseymour/django,shownomercy/django,takis/django,hellhovnd/django,HousekeepLtd/django,ojengwa/django-1,archen/django,ajaali/django,blindroot/django,mitar/django,gohin/django,daniponi/django,blindroot/django,bikong2/django,baylee/django,b-me/django,yask123/django,aspidites/django,kisna72/django,elena/django,craynot/django,sarthakmeh03/django,nemesisdesign/django,epandurski/django,quxiaolong1504/django,hkchenhongyi/django,jrrembert/django,ericfc/django,oberlin/django,dsanders11/django,theo-l/django,quamilek/django,pelme/django,guettli/django,iambibhas/django,hcsturix74/django,frdb194/django,z0by/django,bak1an/django,tragiclifestories/django,sbellem/django,extremewaysback/django,scorphus/django,Anonymous-X6/django,zanderle/django,x111ong/django,sephii/django,felixjimenez/django,darkryder/django,anant-dev/django,ytjiang/django,hobarrera/django,makinacorpus/django,eyohansa/django,django-nonrel/django,jhg/django,gannetson/django,rynomster/django,xadahiya/django,mcella/django,willharris/django,ASCrookes/django,beck/django,h4r5h1t/django-hauthy,gdi2290/django,raphaelmerx/django,freakboy3742/django,zerc/django,curtisstpierre/django,chrishas35/django-travis-ci,SoftwareMaven/django,Matt-Deacalion/django,sjlehtin/django,jasonbot/django,SujaySKumar/django,andela-ifageyinbo/django,alrifqi/django,oberlin/django,YangSongzhou/django,WSDC-NITWarangal/django,lunafeng/django,mammique/django,kaedroho/django,davidharrigan/django,lsqtongxin/django,chrisfranzen/django,aisipos/django,YYWen0o0/python-frame-django,shaib/django,akaihola/django,hkchenhongyi/django,nealtodd/django,rlugojr/django,extremewaysback/django,georgemarshall/django,delinhabit/django,Sonicb
ids/django,liavkoren/djangoDev,manhhomienbienthuy/django,hasadna/django,WillGuan105/django,hellhovnd/django,ptoraskar/django,pjdelport/django,JavML/django,ecederstrand/django,hynekcer/django,ojengwa/django-1,arun6582/django,katrid/django,eyohansa/django,piquadrat/django,tysonclugg/django,shacker/django,joequery/django,tayfun/django,blindroot/django,daniponi/django,wweiradio/django,himleyb85/django,crazy-canux/django,synasius/django,adamchainz/django,shaistaansari/django,EmadMokhtar/Django,rsvip/Django,knifenomad/django,dbaxa/django,zhaodelong/django,blaze33/django,elkingtonmcb/django,pasqualguerrero/django,divio/django,xadahiya/django,sam-tsai/django,hunter007/django,mcella/django,aspidites/django,indevgr/django,marqueedev/django,Korkki/django,frishberg/django,joakim-hove/django,simonw/django,hasadna/django,andreif/django,sarvex/django,tcwicklund/django,ticosax/django,nemesisdesign/django,dhruvagarwal/django,kcpawan/django,blighj/django,yakky/django,marckuz/django,craynot/django,adamchainz/django,zhaodelong/django,yograterol/django,vincepandolfo/django,dex4er/django,double-y/django,sdcooke/django,tayfun/django,megaumi/django,vitaly4uk/django,adamchainz/django,jasonbot/django,KokareIITP/django,ajoaoff/django,hynekcer/django,abomyi/django,szopu/django,mojeto/django,denis-pitul/django,mjtamlyn/django,gunchleoc/django,hottwaj/django,blueyed/django,BlindHunter/django,apocquet/django,liavkoren/djangoDev,jasonbot/django,mcrowson/django,AltSchool/django,pjdelport/django,mitya57/django,Leila20/django,nju520/django,hassanabidpk/django,mjtamlyn/django,guettli/django,ziima/django,googleinterns/django,hunter007/django,willhardy/django,mojeto/django,vitan/django,marctc/django,Leila20/django,benjaminjkraft/django,maxsocl/django,dursk/django,crazy-canux/django,zulip/django,davidharrigan/django,rapilabs/django,AltSchool/django,lmorchard/django,AndrewGrossman/django,mmardini/django,imtapps/django-imt-fork,hackerbot/DjangoDev,elijah513/django,MounirMesselmeni/django,areski/django,aure
ady/django,megaumi/django,mattseymour/django,ericfc/django,Yong-Lee/django,yceruto/django,marckuz/django,vincepandolfo/django,bikong2/django,ar45/django,uranusjr/django,GaussDing/django,RaoUmer/django,shaistaansari/django,takeshineshiro/django,eugena/django,RaoUmer/django,pquentin/django,tysonclugg/django,GitAngel/django,darjeeling/django,camilonova/django,timgraham/django,vitan/django,areski/django,poiati/django,DONIKAN/django,mattrobenolt/django,elky/django,BlindHunter/django,marissazhou/django,jsoref/django,tanmaythakur/django,harisibrahimkv/django,ptoraskar/django,RossBrunton/django,timgraham/django,yigitguler/django,bitcity/django,beck/django,pipermerriam/django,blindroot/django,dudepare/django,vitaly4uk/django,kamyu104/django,jallohm/django,neiudemo1/django,moreati/django,jeezybrick/django,kutenai/django,chrisfranzen/django,liu602348184/django,andresgz/django,akintoey/django,ziima/django,AltSchool/django,Beeblio/django,lmorchard/django,benspaulding/django,hackerbot/DjangoDev,jejimenez/django,elkingtonmcb/django,henryfjordan/django,filias/django,HonzaKral/django,iambibhas/django,dbaxa/django,aerophile/django,zedr/django,duqiao/django,felixjimenez/django,techdragon/django,charettes/django,adelton/django,RevelSystems/django,aerophile/django,RossBrunton/django,ryangallen/django,tomchristie/django,ryangallen/django,denisenkom/django,spisneha25/django,daniponi/django,kisna72/django,huang4fstudio/django,iambibhas/django,wsmith323/django,ccn-2m/django,MoritzS/django,stevenewey/django,phalt/django,ajaali/django,b-me/django,rajsadho/django,jenalgit/django,weiawe/django,kholidfu/django,TimYi/django,pasqualguerrero/django,ericholscher/django,edmorley/django,irwinlove/django,salamer/django,sarvex/django,xadahiya/django,blueyed/django,Matt-Deacalion/django,DONIKAN/django,simone/django-gb,vsajip/django,hellhovnd/django,ABaldwinHunter/django-clone-classic,tysonclugg/django,liavkoren/djangoDev,postrational/django,tcwicklund/django,epandurski/django,donkirkby/django,jsoref/djan
go,frePPLe/django,evansd/django,katrid/django,redhat-openstack/django,takeshineshiro/django,jpic/django,andreif/django,hasadna/django,edevil/django,jgoclawski/django,dpetzold/django,ecederstrand/django,sopier/django,zhaodelong/django,sopier/django,karyon/django,sarvex/django,aleida/django,gcd0318/django,PolicyStat/django,tragiclifestories/django,hobarrera/django,stewartpark/django,darkryder/django,archen/django,sarthakmeh03/django,SoftwareMaven/django,auready/django,evansd/django,blighj/django,KokareIITP/django,denys-duchier/django,benspaulding/django,ifduyue/django,timgraham/django,dgladkov/django,ASCrookes/django,koordinates/django,koniiiik/django,coldmind/django,kennethlove/django,edmorley/django,dfdx2/django,jarshwah/django,gcd0318/django,SujaySKumar/django,blueyed/django,ghickman/django,krisys/django,carljm/django,aidanlister/django,YangSongzhou/django,sjlehtin/django,yamila-moreno/django,xwolf12/django,curtisstpierre/django,wweiradio/django,sarthakmeh03/django,syaiful6/django,theo-l/django,mitchelljkotler/django,jmcarp/django,auvipy/django,leeon/annotated-django,xrmx/django,helenst/django,anant-dev/django,apollo13/django,takis/django,chrisfranzen/django,drjeep/django,mojeto/django,olasitarska/django,ArnossArnossi/django,mitchelljkotler/django,sbellem/django,Leila20/django,mrbox/django,ebar0n/django,weiawe/django,sdcooke/django,pipermerriam/django,adrianholovaty/django,taaviteska/django,reinout/django,caotianwei/django,fafaman/django,Anonymous-X6/django,mitar/django,jhoos/django,pasqualguerrero/django,hassanabidpk/django,alexmorozov/django,oinopion/django,makinacorpus/django,akshatharaj/django,vmarkovtsev/django,elijah513/django,uranusjr/django,lunafeng/django,aleida/django,kholidfu/django,andela-ooladayo/django,bliti/django-nonrel-1.5,double-y/django,zhaodelong/django,myang321/django,devops2014/djangosite,dwightgunning/django,vsajip/django,MatthewWilkes/django,wweiradio/django,ojengwa/django-1,weiawe/django,rajsadho/django,yamila-moreno/django,seocam/django,jh
oos/django,JavML/django,krishna-pandey-git/django,petecummings/django,kutenai/django,dfunckt/django,jn7163/django,andreif/django,hottwaj/django,andela-ooladayo/django,harisibrahimkv/django,nhippenmeyer/django,auready/django,PolicyStat/django,fpy171/django,marcelocure/django,darjeeling/django,ABaldwinHunter/django-clone-classic,epandurski/django,JorgeCoock/django,rmboggs/django,kamyu104/django,willhardy/django,gcd0318/django,filias/django,syaiful6/django,marissazhou/django,devops2014/djangosite,rtindru/django,koniiiik/django,dpetzold/django,kcpawan/django,techdragon/django,krishna-pandey-git/django,wsmith323/django,Beauhurst/django,litchfield/django,mammique/django,elena/django,simone/django-gb,kisna72/django,petecummings/django,jylaxp/django,mathspace/django,edmorley/django,gengue/django,ajoaoff/django,DrMeers/django,gohin/django,willharris/django,Mixser/django,hunter007/django,krisys/django,bspink/django,donkirkby/django,Proggie02/TestRepo,dex4er/django,rtindru/django,andela-ifageyinbo/django,mrfuxi/django,xrmx/django,denis-pitul/django,codepantry/django,Adnn/django,irwinlove/django,aidanlister/django,synasius/django,alexallah/django,mitchelljkotler/django,cobalys/django,memtoko/django,nealtodd/django,lisael/pg-django,rajsadho/django,tcwicklund/django,saydulk/django,rmboggs/django,dex4er/django,BrotherPhil/django,ArnossArnossi/django,dfdx2/django,jsoref/django,chyeh727/django,gdub/django,monetate/django,PetrDlouhy/django,simonw/django,jnovinger/django,MikeAmy/django,yamila-moreno/django,kangfend/django,druuu/django,benjaminjkraft/django,digimarc/django,NullSoldier/django,koniiiik/django,JavML/django,pauloxnet/django,stevenewey/django,nhippenmeyer/django,djbaldey/django,elky/django,liuliwork/django,lisael/pg-django,tysonclugg/django,alexmorozov/django,jnovinger/django,synasius/django,bliti/django-nonrel-1.5,aleida/django,rogerhu/django,loic/django,TimBuckley/effective_django,delinhabit/django,nju520/django,stewartpark/django,theo-l/django,hassanabidpk/django,varunna
ganathan/django,ryangallen/django,Mixser/django,GitAngel/django,yewang15215/django,sgzsh269/django,olasitarska/django,blaze33/django,baylee/django,lzw120/django,divio/django,maxsocl/django,WSDC-NITWarangal/django,intgr/django,seocam/django,beckastar/django,theo-l/django,jrrembert/django,rizumu/django,wsmith323/django,spisneha25/django,sjlehtin/django,hnakamur/django,piquadrat/django,sbellem/django,areski/django,jn7163/django,andresgz/django,jyotsna1820/django,jaywreddy/django,gohin/django,atul-bhouraskar/django,szopu/django,mitchelljkotler/django,hobarrera/django,himleyb85/django,crazy-canux/django,errx/django,deployed/django,django/django,savoirfairelinux/django,ataylor32/django,django-nonrel/django,yakky/django,dursk/django,bikong2/django,cobalys/django,asser/django,pelme/django,runekaagaard/django-contrib-locking,areski/django,auvipy/django,wetneb/django,apollo13/django,EmadMokhtar/Django,petecummings/django,taaviteska/django,nealtodd/django,KokareIITP/django,zhoulingjun/django,jrrembert/django,ytjiang/django,edevil/django,h4r5h1t/django-hauthy,jvkops/django,vitan/django,mjtamlyn/django,Sonicbids/django,NullSoldier/django,ckirby/django,Y3K/django,shtouff/django,dydek/django,kutenai/django,dudepare/django,rhertzog/django,aroche/django,gannetson/django,dfunckt/django,kangfend/django,dhruvagarwal/django,WSDC-NITWarangal/django,ajaali/django,waytai/django,jallohm/django,schinckel/django,bak1an/django,1013553207/django,frdb194/django,chyeh727/django,koordinates/django,twz915/django,Matt-Deacalion/django,karyon/django,peterlauri/django,stevenewey/django,dydek/django,mdj2/django,github-account-because-they-want-it/django,wkschwartz/django,kosz85/django,MatthewWilkes/django,pauloxnet/django,sgzsh269/django,Y3K/django,jylaxp/django,Balachan27/django,andyzsf/django,RossBrunton/django,tuhangdi/django,dwightgunning/django,ebar0n/django,claudep/django,mrbox/django,jrrembert/django,jscn/django,jgeskens/django,DasIch/django,EliotBerriot/django,kosz85/django,feroda/django,kaedro
ho/django,frishberg/django,riteshshrv/django,freakboy3742/django,davidharrigan/django,gunchleoc/django,druuu/django,WillGuan105/django,oinopion/django,vsajip/django,tbeadle/django,georgemarshall/django,darkryder/django,djbaldey/django,mttr/django,reinout/django,Y3K/django,mdj2/django,sam-tsai/django,RevelSystems/django,kevintaw/django,whs/django,jgeskens/django,koniiiik/django,hcsturix74/django,davgibbs/django,ebar0n/django,shownomercy/django,sjlehtin/django,RaoUmer/django,kamyu104/django,takis/django,felixxm/django,apollo13/django,MounirMesselmeni/django,hunter007/django,alrifqi/django,yograterol/django,andrewsmedina/django,treyhunner/django,knifenomad/django,phalt/django,felixxm/django,twz915/django,github-account-because-they-want-it/django,rockneurotiko/django,z0by/django,leekchan/django_test,djbaldey/django,riteshshrv/django,willharris/django,EliotBerriot/django,koordinates/django,wetneb/django,tomchristie/django,ccn-2m/django,roselleebarle04/django,xwolf12/django,xwolf12/django,benspaulding/django,Vixionar/django,blaze33/django,tbeadle/django,kswiat/django,jgeskens/django,cainmatt/django,lzw120/django,ryangallen/django,hybrideagle/django,beni55/django,hynekcer/django,gannetson/django,SoftwareMaven/django,solarissmoke/django,mlavin/django,yceruto/django,daniponi/django,mshafiq9/django,dracos/django,ryanahall/django,rwillmer/django,MarcJoan/django,dudepare/django,Nepherhotep/django,dgladkov/django,Anonymous-X6/django,jaywreddy/django,alx-eu/django,GhostThrone/django,sopier/django,webgeodatavore/django,ccn-2m/django,dbaxa/django,ivandevp/django,zerc/django,matiasb/django,sergei-maertens/django,andyzsf/django,mcella/django,etos/django,beckastar/django,petecummings/django,lunafeng/django,TimYi/django,codepantry/django,quamilek/django,yograterol/django,nju520/django,rsvip/Django,denis-pitul/django,harisibrahimkv/django,hnakamur/django,treyhunner/django,adelton/django,benjaminjkraft/django,ABaldwinHunter/django-clone,Argon-Zhou/django,kswiat/django,ASCrookes/django,s
am-tsai/django,mrbox/django,leekchan/django_test,rlugojr/django,elky/django,benjaminjkraft/django,sam-tsai/django,sephii/django,shtouff/django,haxoza/django,Nepherhotep/django,rhertzog/django,seanwestfall/django,mitya57/django,zedr/django,donkirkby/django,bliti/django-nonrel-1.5,vitan/django,akintoey/django,tragiclifestories/django,shownomercy/django,monetate/django,marctc/django,roselleebarle04/django,BlindHunter/django,GaussDing/django,ABaldwinHunter/django-clone-classic,Adnn/django,Y3K/django,frdb194/django,donkirkby/django,yewang15215/django,karyon/django,shaistaansari/django,gcd0318/django,ckirby/django,django-nonrel/django,DrMeers/django,alimony/django,TridevGuha/django,huang4fstudio/django,maxsocl/django,apocquet/django,jasonwzhy/django,django/django,liu602348184/django,yigitguler/django,django-nonrel/django-nonrel,GitAngel/django,bitcity/django,django-nonrel/django,moreati/django,ajoaoff/django,programadorjc/django,ar45/django,ericholscher/django,risicle/django,kosz85/django,SebasSBM/django,rizumu/django,dpetzold/django,aisipos/django,yceruto/django,ironbox360/django,liu602348184/django,delhivery/django,nielsvanoch/django,labcodes/django,alexmorozov/django,pipermerriam/django,1013553207/django,andresgz/django,mttr/django,etos/django,aerophile/django,GhostThrone/django,lsqtongxin/django,seanwestfall/django,wkschwartz/django,rsalmaso/django,nemesisdesign/django,ojake/django,dydek/django,beckastar/django,olasitarska/django,ABaldwinHunter/django-clone,YYWen0o0/python-frame-django,claudep/django,yograterol/django,PetrDlouhy/django,manhhomienbienthuy/django,drjeep/django,akshatharaj/django,sergei-maertens/django,rockneurotiko/django,GhostThrone/django,Endika/django,rapilabs/django,jasonbot/django,roselleebarle04/django,weiawe/django,bitcity/django,bak1an/django,dursk/django,moreati/django,fenginx/django,frankvdp/django,rapilabs/django,lsqtongxin/django,eugena/django,marctc/django,Balachan27/django,epandurski/django,frishberg/django,doismellburning/django,zerc/djan
go,gengue/django,rhertzog/django,krisys/django,Argon-Zhou/django,andrewsmedina/django,claudep/django,riteshshrv/django,ABaldwinHunter/django-clone-classic,bak1an/django,SebasSBM/django,kswiat/django,dsanders11/django,ataylor32/django,leereilly/django-1,rockneurotiko/django,raphaelmerx/django,duqiao/django,alimony/django,gitaarik/django,Proggie02/TestRepo,peterlauri/django,mlavin/django,mcardillo55/django,hybrideagle/django,ytjiang/django,baylee/django,makinacorpus/django,schinckel/django,irwinlove/django,waytai/django,Mixser/django,oinopion/django,liu602348184/django,denys-duchier/django,crazy-canux/django,BMJHayward/django,akaariai/django,rlugojr/django,erikr/django,poiati/django,blighj/django,lsqtongxin/django,SujaySKumar/django,aidanlister/django,bobcyw/django,dwightgunning/django,kennethlove/django,coldmind/django,gitaarik/django,runekaagaard/django-contrib-locking,payeldillip/django,knifenomad/django,pasqualguerrero/django,cainmatt/django,alexallah/django,etos/django,Argon-Zhou/django,gunchleoc/django,vmarkovtsev/django,supriyantomaftuh/django,ifduyue/django,jhg/django,mathspace/django,intgr/django,varunnaganathan/django,salamer/django,vincepandolfo/django,joequery/django,h4r5h1t/django-hauthy,mjtamlyn/django,SujaySKumar/django,unaizalakain/django,rogerhu/django,alimony/django,lwiecek/django,syphar/django,ghickman/django,double-y/django,mathspace/django,mbox/django,andela-ifageyinbo/django,denisenkom/django,HousekeepLtd/django,ojake/django,jmcarp/django,mshafiq9/django,akshatharaj/django,scorphus/django,Leila20/django,adamchainz/django,mshafiq9/django,Balachan27/django,fenginx/django,ivandevp/django,robhudson/django,joequery/django,dwightgunning/django,frdb194/django,xrmx/django,zedr/django,sbellem/django,loic/django,piquadrat/django,Vixionar/django,alx-eu/django,whs/django,lzw120/django,shaib/django,andreif/django,frankvdp/django,willhardy/django,auvipy/django,MatthewWilkes/django,beck/django,jejimenez/django,tomchristie/django,manhhomienbienthuy/django,bobcyw
/django,aisipos/django,syphar/django,MatthewWilkes/django,jvkops/django,jsoref/django,ebar0n/django,HousekeepLtd/django,mewtaylor/django,knifenomad/django,dydek/django,akaariai/django,marissazhou/django,dudepare/django,risicle/django,t0in4/django,mbox/django,ivandevp/django,stewartpark/django,hybrideagle/django,nhippenmeyer/django,oscaro/django,risicle/django,labcodes/django,dracos/django,MoritzS/django,delinhabit/django,PetrDlouhy/django,PolicyStat/django,riteshshrv/django,drjeep/django,jmcarp/django,AndrewGrossman/django,sergei-maertens/django,ccn-2m/django,schinckel/django,jscn/django,jasonwzhy/django,WillGuan105/django,xadahiya/django,zsiciarz/django,jallohm/django,redhat-openstack/django,jnovinger/django,peterlauri/django,tcwicklund/django,DasIch/django,jarshwah/django,tanmaythakur/django,wkschwartz/django,Beeblio/django,rsalmaso/django,uranusjr/django,dracos/django,jasonwzhy/django,adelton/django,alilotfi/django,kcpawan/django,mrfuxi/django,maxsocl/django,jhg/django,programadorjc/django,caotianwei/django,quxiaolong1504/django,JorgeCoock/django,wetneb/django,alx-eu/django,camilonova/django,barbuza/django,gunchleoc/django,waytai/django,zulip/django,z0by/django,twz915/django,freakboy3742/django,haxoza/django,digimarc/django,darkryder/django,ajoaoff/django,ifduyue/django,pauloxnet/django,haxoza/django,karyon/django,gitaarik/django,seanwestfall/django,oberlin/django,rockneurotiko/django,AlexHill/django,bobcyw/django,RevelSystems/django,riklaunim/django-custom-multisite,caotianwei/django,apocquet/django,vmarkovtsev/django,syphar/django,hkchenhongyi/django,spisneha25/django,spisneha25/django,sephii/django,Yong-Lee/django,akshatharaj/django,dfunckt/django,barbuza/django,TridevGuha/django,errx/django,alimony/django,jscn/django,vmarkovtsev/django,saydulk/django,indevgr/django,monetate/django,MoritzS/django,HonzaKral/django,zhoulingjun/django,imtapps/django-imt-fork,kutenai/django,PetrDlouhy/django,krishna-pandey-git/django,codepantry/django,payeldillip/django,myang321/d
jango,MarcJoan/django,eyohansa/django,fafaman/django,rtindru/django,atul-bhouraskar/django,marckuz/django,TridevGuha/django,etos/django,whs/django,ulope/django,tbeadle/django,elena/django,leekchan/django_test,mlavin/django,frePPLe/django,jenalgit/django,WSDC-NITWarangal/django,bspink/django,mmardini/django,zsiciarz/django,GitAngel/django,RaoUmer/django,AlexHill/django,postrational/django,SoftwareMaven/django,ArnossArnossi/django,ulope/django,extremewaysback/django,szopu/django,techdragon/django,programadorjc/django,filias/django,robhudson/django,supriyantomaftuh/django,aisipos/django,IRI-Research/django,tayfun/django,mattseymour/django,IRI-Research/django,raphaelmerx/django,BMJHayward/django,sarthakmeh03/django,mlavin/django,lwiecek/django,MarkusH/django,andrewsmedina/django,zanderle/django,MarkusH/django,jhoos/django,feroda/django,denisenkom/django,kevintaw/django,codepantry/django,YYWen0o0/python-frame-django,MounirMesselmeni/django,leeon/annotated-django,mewtaylor/django,t0in4/django,aroche/django,bitcity/django,t0in4/django,arun6582/django,piquadrat/django,erikr/django,hobarrera/django,monetate/django,fpy171/django,MikeAmy/django,zulip/django,hkchenhongyi/django,marqueedev/django,seanwestfall/django,SebasSBM/django,elijah513/django,alilotfi/django,mmardini/django,payeldillip/django,blighj/django,poiati/django,pquentin/django,rrrene/django,github-account-because-they-want-it/django,liuliwork/django,oscaro/django,megaumi/django,camilonova/django,rogerhu/django,joequery/django,yask123/django,felixxm/django,blueyed/django,Endika/django,krishna-pandey-git/django,alexmorozov/django,tomchristie/django,ticosax/django,adrianholovaty/django,Argon-Zhou/django,ryanahall/django,kangfend/django,treyhunner/django,marcelocure/django,zhoulingjun/django,chrisfranzen/django,jeezybrick/django,mcrowson/django,Beauhurst/django,marqueedev/django,rlugojr/django,saydulk/django,dgladkov/django,duqiao/django,ArnossArnossi/django,errx/django,ytjiang/django,ulope/django,YangSongzhou/django,
ecederstrand/django,frankvdp/django,leereilly/django-1,supriyantomaftuh/django,kcpawan/django,feroda/django,rhertzog/django,jyotsna1820/django,reinout/django,cobalys/django,willhardy/django,henryfjordan/django,django-nonrel/django-nonrel,andyzsf/django,1013553207/django,googleinterns/django,eugena/django,litchfield/django,abomyi/django,himleyb85/django,pauloxnet/django,beni55/django,django-nonrel/django-nonrel,runekaagaard/django-contrib-locking,denys-duchier/django,unaizalakain/django,roselleebarle04/django,filias/django,mrfuxi/django,atul-bhouraskar/django,sdcooke/django,rizumu/django,mdj2/django,ckirby/django,gdub/django,jejimenez/django,aidanlister/django,simonw/django,davgibbs/django,TimBuckley/effective_django,oscaro/django,uranusjr/django,sarvex/django,ghedsouza/django,xwolf12/django,neiudemo1/django,adambrenecki/django,memtoko/django,kevintaw/django,deployed/django,oberlin/django,chyeh727/django,tuhangdi/django,webgeodatavore/django,pipermerriam/django,ericfc/django,pelme/django,Adnn/django,alilotfi/django,jdelight/django,nealtodd/django,ghickman/django,adelton/django,adambrenecki/django,rizumu/django,ghedsouza/django,beck/django,yask123/django,rrrene/django,oscaro/django,katrid/django,jaywreddy/django,savoirfairelinux/django,marctc/django,phalt/django,gitaarik/django,joakim-hove/django,whs/django,anant-dev/django,shownomercy/django,lunafeng/django,labcodes/django,asser/django,hottwaj/django,intgr/django,waytai/django,neiudemo1/django,simonw/django,fenginx/django,darjeeling/django,fafaman/django,googleinterns/django,beni55/django,baylee/django,follow99/django,koordinates/django,guettli/django,jyotsna1820/django,rynomster/django,haxoza/django,akintoey/django,alrifqi/django,BlindHunter/django,1013553207/django,indevgr/django,salamer/django,fenginx/django,DrMeers/django,dhruvagarwal/django,rrrene/django,dfdx2/django,fpy171/django,elijah513/django,Vixionar/django,vitaly4uk/django,JavML/django,alilotfi/django,jnovinger/django,apocquet/django,felixjimenez/django,h
nakamur/django,mathspace/django,ticosax/django,scorphus/django,schinckel/django,manhhomienbienthuy/django,dursk/django,mattrobenolt/django,ironbox360/django,loic/django,coldmind/django,avneesh91/django,x111ong/django,yamila-moreno/django,BrotherPhil/django,mattrobenolt/django,jn7163/django,georgemarshall/django,tuhangdi/django,NullSoldier/django,adrianholovaty/django,syphar/django,delhivery/django,litchfield/django,liuliwork/django,djbaldey/django,takis/django,denis-pitul/django,aspidites/django,dracos/django,Proggie02/TestRepo,deployed/django,darjeeling/django,twz915/django,Sonicbids/django,GaussDing/django,WillGuan105/django,Beeblio/django,mcrowson/django,ptoraskar/django,devops2014/djangosite,b-me/django,DONIKAN/django,wetneb/django,zsiciarz/django,ecederstrand/django,mewtaylor/django,erikr/django,hellhovnd/django,elena/django,hcsturix74/django,rwillmer/django,ojake/django,auready/django,kosz85/django,MarkusH/django,mbox/django,yakky/django,asser/django,adambrenecki/django,neiudemo1/django,AltSchool/django,Yong-Lee/django,ghedsouza/django,ghedsouza/django,davidharrigan/django,frePPLe/django,rsalmaso/django,matiasb/django,jarshwah/django,sergei-maertens/django,zulip/django,lmorchard/django,ifduyue/django,joakim-hove/django,tbeadle/django,syaiful6/django,jdelight/django,BrotherPhil/django,digimarc/django,marcelocure/django,IRI-Research/django,irwinlove/django,b-me/django,redhat-openstack/django,shaib/django,litchfield/django,raphaelmerx/django,doismellburning/django,hottwaj/django,seocam/django,synasius/django,evansd/django,dsanders11/django,leeon/annotated-django,shacker/django,tanmaythakur/django,arun6582/django,AlexHill/django,hackerbot/DjangoDev,rajsadho/django,aroche/django,liuliwork/django,shtouff/django,doismellburning/django,marckuz/django,alrifqi/django,yewang15215/django,alx-eu/django,delhivery/django,jaywreddy/django,curtisstpierre/django,rynomster/django,jscn/django,jeezybrick/django,nielsvanoch/django,Beauhurst/django,dfunckt/django,alexallah/django,Ko
kareIITP/django,jpic/django,ASCrookes/django,arun6582/django,aerophile/django,timgraham/django,mammique/django,kholidfu/django,mrbox/django,digimarc/django,rsvip/Django,zsiciarz/django,jn7163/django,joakim-hove/django,riklaunim/django-custom-multisite,carljm/django,jhoos/django,avneesh91/django,kaedroho/django,TimYi/django,apollo13/django,nemesisdesign/django,Beauhurst/django,lwiecek/django,stevenewey/django,quxiaolong1504/django,erikr/django,unaizalakain/django,akintoey/django,ataylor32/django,Endika/django,Balachan27/django,rsalmaso/django,eugena/django,ABaldwinHunter/django-clone,indevgr/django,Mixser/django,atul-bhouraskar/django,django/django,drjeep/django,guettli/django,charettes/django,jejimenez/django,ojake/django,postrational/django,unaizalakain/django,takeshineshiro/django,mojeto/django,rrrene/django,TridevGuha/django,katrid/django,hassanabidpk/django,BMJHayward/django,Vixionar/django,nju520/django,robhudson/django,gdub/django,dhruvagarwal/django,z0by/django,DONIKAN/django,craynot/django,jarshwah/django,ericholscher/django,divio/django,huang4fstudio/django,sadaf2605/django,SebasSBM/django,kisna72/django,denys-duchier/django,shacker/django,ticosax/django,supriyantomaftuh/django,mcardillo55/django,shtouff/django,chrishas35/django-travis-ci,wkschwartz/django,loic/django,t0in4/django,avanov/django,ivandevp/django,kangfend/django,cainmatt/django,solarissmoke/django,edevil/django,abomyi/django,megaumi/django,mcardillo55/django,krisys/django,curtisstpierre/django,memtoko/django,zerc/django,myang321/django,gengue/django,felixxm/django,sdcooke/django,aroche/django,kamyu104/django,AndrewGrossman/django,poiati/django,sadaf2605/django,myang321/django,tuhangdi/django,avanov/django,webgeodatavore/django,solarissmoke/django,savoirfairelinux/django,ryanahall/django,asser/django,mitya57/django,carljm/django,Beeblio/django,TimYi/django,lmorchard/django,pjdelport/django,quamilek/django,matiasb/django,barbuza/django,mewtaylor/django,mitar/django,aspidites/django,BMJHayward/dj
ango,Korkki/django,kholidfu/django,evansd/django,zanderle/django,harisibrahimkv/django,jallohm/django | tests/regressiontests/datatypes/models.py | tests/regressiontests/datatypes/models.py | """
This is a basic model to test saving and loading boolean and date-related
types, which in the past were problematic for some database backends.
"""
from django.db import models
from django.conf import settings
class Donut(models.Model):
    # Minimal model exercising boolean and date/time field round-trips.
    name = models.CharField(max_length=100)
    is_frosted = models.BooleanField(default=False)
    has_sprinkles = models.NullBooleanField()  # three-valued: True/False/None
    baked_date = models.DateField(null=True)
    baked_time = models.TimeField(null=True)
    consumed_at = models.DateTimeField(null=True)
    class Meta:
        # Default queryset ordering; the doctests below rely on it.
        ordering = ('consumed_at',)
    def __str__(self):
        return self.name
# Doctest suite (module-level __test__ convention): boolean round-trips,
# date/time persistence, and __year lookups at year boundaries (ticket #3689).
__test__ = {'API_TESTS': """
# No donuts are in the system yet.
>>> Donut.objects.all()
[]
>>> d = Donut(name='Apple Fritter')
# Ensure we're getting True and False, not 0 and 1
>>> d.is_frosted
False
>>> d.has_sprinkles
>>> d.has_sprinkles = True
>>> d.has_sprinkles == True
True
>>> d.save()
>>> d2 = Donut.objects.all()[0]
>>> d2
<Donut: Apple Fritter>
>>> d2.is_frosted == False
True
>>> d2.has_sprinkles == True
True
>>> import datetime
>>> d2.baked_date = datetime.date(year=1938, month=6, day=4)
>>> d2.baked_time = datetime.time(hour=5, minute=30)
>>> d2.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
>>> d2.save()
>>> d3 = Donut.objects.all()[0]
>>> d3.baked_date
datetime.date(1938, 6, 4)
>>> d3.baked_time
datetime.time(5, 30)
>>> d3.consumed_at
datetime.datetime(2007, 4, 20, 16, 19, 59)
# Year boundary tests (ticket #3689)
>>> d = Donut(name='Date Test 2007', baked_date=datetime.datetime(year=2007, month=12, day=31), consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
>>> d.save()
>>> d1 = Donut(name='Date Test 2006', baked_date=datetime.datetime(year=2006, month=1, day=1), consumed_at=datetime.datetime(year=2006, month=1, day=1))
>>> d1.save()
>>> Donut.objects.filter(baked_date__year=2007)
[<Donut: Date Test 2007>]
>>> Donut.objects.filter(baked_date__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2007).order_by('name')
[<Donut: Apple Fritter>, <Donut: Date Test 2007>]
>>> Donut.objects.filter(consumed_at__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2005)
[]
>>> Donut.objects.filter(consumed_at__year=2008)
[]
"""}
| """
This is a basic model to test saving and loading boolean and date-related
types, which in the past were problematic for some database backends.
"""
from django.db import models
from django.conf import settings
class Donut(models.Model):
    # Basic model for round-tripping boolean and date-related values.
    name = models.CharField(max_length=100)
    is_frosted = models.BooleanField(default=False)
    has_sprinkles = models.NullBooleanField()  # nullable boolean
    baked_date = models.DateField(null=True)
    baked_time = models.TimeField(null=True)
    consumed_at = models.DateTimeField(null=True)
    class Meta:
        # Stable ordering assumed by the doctests below.
        ordering = ('consumed_at',)
    def __str__(self):
        return self.name
# Doctest suite (module-level __test__ convention): booleans, date/time
# persistence, __year boundary lookups (#3689), and TZ-aware datetimes (#8354).
__test__ = {'API_TESTS': """
# No donuts are in the system yet.
>>> Donut.objects.all()
[]
>>> d = Donut(name='Apple Fritter')
# Ensure we're getting True and False, not 0 and 1
>>> d.is_frosted
False
>>> d.has_sprinkles
>>> d.has_sprinkles = True
>>> d.has_sprinkles == True
True
>>> d.save()
>>> d2 = Donut.objects.all()[0]
>>> d2
<Donut: Apple Fritter>
>>> d2.is_frosted == False
True
>>> d2.has_sprinkles == True
True
>>> import datetime
>>> d2.baked_date = datetime.date(year=1938, month=6, day=4)
>>> d2.baked_time = datetime.time(hour=5, minute=30)
>>> d2.consumed_at = datetime.datetime(year=2007, month=4, day=20, hour=16, minute=19, second=59)
>>> d2.save()
>>> d3 = Donut.objects.all()[0]
>>> d3.baked_date
datetime.date(1938, 6, 4)
>>> d3.baked_time
datetime.time(5, 30)
>>> d3.consumed_at
datetime.datetime(2007, 4, 20, 16, 19, 59)
# Year boundary tests (ticket #3689)
>>> d = Donut(name='Date Test 2007', baked_date=datetime.datetime(year=2007, month=12, day=31), consumed_at=datetime.datetime(year=2007, month=12, day=31, hour=23, minute=59, second=59))
>>> d.save()
>>> d1 = Donut(name='Date Test 2006', baked_date=datetime.datetime(year=2006, month=1, day=1), consumed_at=datetime.datetime(year=2006, month=1, day=1))
>>> d1.save()
>>> Donut.objects.filter(baked_date__year=2007)
[<Donut: Date Test 2007>]
>>> Donut.objects.filter(baked_date__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2007).order_by('name')
[<Donut: Apple Fritter>, <Donut: Date Test 2007>]
>>> Donut.objects.filter(consumed_at__year=2006)
[<Donut: Date Test 2006>]
>>> Donut.objects.filter(consumed_at__year=2005)
[]
>>> Donut.objects.filter(consumed_at__year=2008)
[]
# TZ-aware datetimes (#8354).
>>> from django.utils import tzinfo
>>> dt = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=tzinfo.FixedOffset(0))
>>> d = Donut(name='Bear claw', consumed_at=dt)
>>> d.save()
>>> Donut.objects.filter(consumed_at=dt)
[<Donut: Bear claw>]
"""}
| bsd-3-clause | Python |
4fe2a35492715027f29c0aa548015d83495fa3ca | Fix unit tests | nerdzeu/NERDZCrush,nerdzeu/NERDZCrush,nerdzeu/NERDZCrush,roderickm/MediaCrush,roderickm/MediaCrush,MediaCrush/MediaCrush,roderickm/MediaCrush,MediaCrush/MediaCrush | mediacrush/network.py | mediacrush/network.py | import json
from flask import request, current_app, redirect
from flaskext.bcrypt import generate_password_hash
def get_ip():
    # Resolve the client address: when the request arrived via a local
    # reverse proxy (127.0.0.1/.2), trust its X-Real-IP header instead.
    ip = request.remote_addr
    if (ip == '127.0.0.1' or ip == '127.0.0.2') and "X-Real-IP" in request.headers:
        ip = request.headers.get("X-Real-IP")
    return ip
def makeMask(n):
    """Return an integer whose lowest n bits are all set."""
    # 2 << (n - 1) equals 1 << n for n >= 1; subtracting one sets the low n bits.
    full = 2 << (n - 1)
    return full - 1
def dottedQuadToNum(ip):
    """Pack a dotted-quad IPv4 string into its little-endian integer form."""
    octets = ip.split(".")
    # The first octet lands in the low byte, matching the masks built here.
    value = int(octets[0])
    value |= int(octets[1]) << 8
    value |= int(octets[2]) << 16
    value |= int(octets[3]) << 24
    return value
def networkMask(ip, bits):
    """Pack the dotted-quad *ip* and keep only its low *bits* bits."""
    address = dottedQuadToNum(ip)
    return address & makeMask(bits)
def addressInNetwork(ip, net):
    """Report whether every bit set in *net* is also set in *ip* (both ints)."""
    # NOTE(review): this is a bit-containment test, not a classic
    # (ip & netmask) == network comparison -- confirm callers expect this.
    return (ip & net) == net
def secure_ip():
    # Return a bcrypt hash of the caller's IP so the raw address need not be
    # kept; all Tor traffic is folded into one shared 'anonymous_user' value.
    ip = get_ip()
    if is_tor():
        ip = 'anonymous_user'
    return generate_password_hash(ip)
def is_tor():
    # NOTE(review): a single hard-coded Tor exit address -- presumably this
    # deployment routes all Tor traffic through one exit node; confirm.
    return get_ip() == '5.254.104.62'
| import json
from flask import request, current_app, redirect
from flaskext.bcrypt import generate_password_hash
def get_ip():
    """Return the client IP, honouring X-Real-IP when proxied via localhost.

    Bug fix: without parentheses, ``and`` bound tighter than ``or``, so a
    127.0.0.1 request with no X-Real-IP header fell into the branch and
    overwrote ``ip`` with ``None`` (``headers.get`` of a missing key).
    """
    ip = request.remote_addr
    if (ip == '127.0.0.1' or ip == '127.0.0.2') and "X-Real-IP" in request.headers:
        ip = request.headers.get("X-Real-IP")
    return ip
def makeMask(n):
    """Build the integer with the low *n* bits set (an n-bit mask)."""
    # (2 << n - 1) parses as 2 << (n - 1), i.e. 1 << n; minus one fills the low bits.
    return (2 << (n - 1)) - 1
def dottedQuadToNum(ip):
    """Convert a dotted-quad IPv4 string to a little-endian integer."""
    parts = ip.split(".")
    total = 0
    # Octet i contributes at bit position 8*i (first octet in the low byte).
    for position in range(4):
        total |= int(parts[position]) << (8 * position)
    return total
def networkMask(ip, bits):
    """Truncate the packed form of *ip* to its low *bits* bits."""
    address = dottedQuadToNum(ip)
    mask = makeMask(bits)
    return address & mask
def addressInNetwork(ip, net):
    """True when the bits of *net* form a subset of the bits of *ip*."""
    masked = ip & net
    # NOTE(review): bit-containment check, not a netmask/network comparison.
    return masked == net
def secure_ip():
    # Return a bcrypt hash of the caller's IP.
    ip = get_ip()
    # NOTE(review): '127.0.0.3' looks like a sentinel address for anonymized
    # traffic (skipped while debugging) -- confirm against the proxy setup.
    if ip == '127.0.0.3' and not current_app.debug:
        return 'anonymous_user'
    return generate_password_hash(ip)
def is_tor():
    # NOTE(review): compares against one hard-coded Tor exit IP -- presumably
    # all Tor traffic is routed through this node; confirm.
    return get_ip() == '5.254.104.62'
| mit | Python |
656aa635667e4a308652635c3ffe24aa65b725d2 | Fix paths | HIIT/deliberation-classifier,HIIT/deliberation-classifier | app/alpha/dqi.py | app/alpha/dqi.py | import collections
import itertools
import numpy
import pickle
## machine learning
from sklearn import svm, grid_search
## handling natural language
from liwc import liwc
_liwc = liwc.LIWC()  # shared LIWC lexicon instance, loaded once at import time
import nltk
import nltk.data
lemma = nltk.stem.wordnet.WordNetLemmatizer()
# Sentence tokenizer (requires the NLTK 'punkt' data package).
# NOTE(review): appears unused in the visible module -- confirm before removal.
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
def preprocess( text ):
    # Turn raw text into a fixed-order numeric feature vector: per-token LIWC
    # category frequencies plus two surface-size features. (Python 2 code:
    # map() returns a list here.)
    tokens = nltk.word_tokenize( text )
    tokens = map( lambda x: lemma.lemmatize(x).lower(), tokens )
    ## liwc transformation
    liwcs = map( lambda x: _liwc.search( x.lower() ) , tokens )
    liwcs = list( itertools.chain( *liwcs ) )
    liwcs = collections.Counter( liwcs )
    f = {}
    # Seed every LIWC category with zero so the vector has a stable length.
    for term in _liwc.terms():
        f['feature_liwc_' + term ] = 0
    # Relative frequency of each category actually observed.
    for k,v in liwcs.items():
        f['feature_liwc_' + k ] = float( v ) / len( tokens )
    f['feature_length'] = len( text )
    # NOTE(review): despite the name, this is the token count, not sentences.
    f['feature_sentences'] = len( tokens )
    ret = []
    # Sorted key order keeps feature positions deterministic across calls.
    for k in sorted( f.keys() ):
        ret.append( f[k] )
    return ret
def learn( data, labels ):
    """Grid-search an SVM over C/gamma/kernel, fit it, and persist the model.

    data: feature vectors (rows); labels: integer target classes.
    Writes the fitted GridSearchCV object to 'model.svm'.
    """
    estimator = svm.SVC()
    grid = [
        {'C': numpy.arange( 0.5 , 10, .5 ), 'gamma': numpy.arange( .0001, .1, .0005) , 'kernel': ['rbf', 'sigmoid'] },
    ]
    model = grid_search.GridSearchCV( estimator , grid )
    data = numpy.array( data )
    labels = numpy.array( labels )
    model.fit( data, labels )
    # Bug fix: the file handle passed to pickle.dump was opened inline and
    # never closed; a context manager guarantees flush + close.
    with open('model.svm', 'w') as model_file:
        pickle.dump( model, model_file )
    # Training-set accuracy (optimistic estimate). Parenthesized print keeps
    # the same Python 2 output while also being valid Python 3 syntax.
    print( model.score( data, labels ) )
def predict( textline ):
    """Return the persisted model's class prediction for one line of text."""
    # Bug fix: the file handle given to pickle.load was never closed; load
    # the model inside a context manager so the descriptor is released.
    with open('alpha/model.svm') as model_file:
        model = pickle.load( model_file )
    data = numpy.array( preprocess( textline ) )
    return model.predict( [ data ] )
if __name__ == "__main__":
    ## teach with real data
    def _int(s):
        # True when s parses as an integer (used to drop unlabeled rows).
        try:
            int(s)
            return True
        except ValueError:
            return False
    import json
    # NOTE(review): hard-coded absolute path to the labeled training data.
    d = json.load( open( '/Users/mnelimar/projects/2015-dqi/lord/data.json' ) )
    d = filter( lambda x: x['text'] != '', d )
    d = filter( lambda x: _int( x['jl'] ), d )
    # 'jl' appears to be the human-assigned label column -- confirm schema.
    labels = map( lambda x: int( x['jl'] ), d )
    data = map( lambda x: preprocess( x['text'] ), d )
    print len( data )
    print len( labels )
    print 'Start tuning'
    learn( data, labels )
    print 'Done tuning, model saved'
| import collections
import itertools
import numpy
import pickle
## machine learning
from sklearn import svm, grid_search
## handling natural language
from liwc import liwc
_liwc = liwc.LIWC()  # shared LIWC lexicon, built once at import time
import nltk
import nltk.data
lemma = nltk.stem.wordnet.WordNetLemmatizer()
# Sentence tokenizer loaded from the NLTK 'punkt' data package.
# NOTE(review): not referenced elsewhere in this module -- confirm need.
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
def preprocess( text ):
    # Build a fixed-order numeric feature vector from raw text: LIWC category
    # frequencies plus text length and token count. (Python 2: map() -> list.)
    tokens = nltk.word_tokenize( text )
    tokens = map( lambda x: lemma.lemmatize(x).lower(), tokens )
    ## liwc transformation
    liwcs = map( lambda x: _liwc.search( x.lower() ) , tokens )
    liwcs = list( itertools.chain( *liwcs ) )
    liwcs = collections.Counter( liwcs )
    f = {}
    # Default every LIWC category to zero for a stable vector length.
    for term in _liwc.terms():
        f['feature_liwc_' + term ] = 0
    # Overwrite with the relative frequency of observed categories.
    for k,v in liwcs.items():
        f['feature_liwc_' + k ] = float( v ) / len( tokens )
    f['feature_length'] = len( text )
    # NOTE(review): token count, not sentence count, despite the key name.
    f['feature_sentences'] = len( tokens )
    ret = []
    # Deterministic feature order via sorted keys.
    for k in sorted( f.keys() ):
        ret.append( f[k] )
    return ret
def learn( data, labels ):
    """Grid-search and fit an SVM classifier, then pickle it to 'model.svm'.

    data: feature vectors (rows); labels: integer target classes.
    """
    estimator = svm.SVC()
    grid = [
        {'C': numpy.arange( 0.5 , 10, .5 ), 'gamma': numpy.arange( .0001, .1, .0005) , 'kernel': ['rbf', 'sigmoid'] },
    ]
    model = grid_search.GridSearchCV( estimator , grid )
    data = numpy.array( data )
    labels = numpy.array( labels )
    model.fit( data, labels )
    # Bug fix: the inline open() leaked its file handle; use a context
    # manager so the pickle is flushed and the descriptor closed.
    with open('model.svm', 'w') as model_file:
        pickle.dump( model, model_file )
    # Training-set accuracy; parenthesized print is valid on Py2 and Py3.
    print( model.score( data, labels ) )
def predict( textline ):
    """Classify a single line of text with the model saved by learn()."""
    # Bug fix: pickle.load was handed a file object that was never closed;
    # a context manager releases it deterministically.
    with open('model.svm') as model_file:
        model = pickle.load( model_file )
    data = numpy.array( preprocess( textline ) )
    return model.predict( [ data ] )
if __name__ == "__main__":
    ## teach with real data
    def _int(s):
        # Helper: does s parse as an integer? Used to filter labeled rows.
        try:
            int(s)
            return True
        except ValueError:
            return False
    import json
    # NOTE(review): absolute, machine-specific path to the training data.
    d = json.load( open( '/Users/mnelimar/projects/2015-dqi/lord/data.json' ) )
    d = filter( lambda x: x['text'] != '', d )
    d = filter( lambda x: _int( x['jl'] ), d )
    # 'jl' presumably holds the human label -- confirm with the dataset.
    labels = map( lambda x: int( x['jl'] ), d )
    data = map( lambda x: preprocess( x['text'] ), d )
    print len( data )
    print len( labels )
    print 'Start tuning'
    learn( data, labels )
    print 'Done tuning, model saved'
| mit | Python |
746859067357761e3b6e1d0e21acd26dd7787f94 | raise import error is better for debug | doraemonext/wechat-python-sdk,wechat-python-sdk/wechat-python-sdk | wechat_sdk/__init__.py | wechat_sdk/__init__.py | # -*- coding: utf-8 -*-
from wechat_sdk.core.conf import WechatConf
from wechat_sdk.basic import WechatBasic
from wechat_sdk.ext import WechatExt
# Public API of the package.
__all__ = ['WechatConf', 'WechatBasic', 'WechatExt']
| # -*- coding: utf-8 -*-
__all__ = ['WechatConf', 'WechatBasic', 'WechatExt']
# Best-effort import of the public classes: ImportError is swallowed,
# presumably so the package can be imported before its dependencies are
# installed (e.g. for packaging metadata) -- confirm.
# NOTE(review): this also hides genuine dependency problems at runtime;
# consider re-raising or at least logging the failure.
try:
    from wechat_sdk.core.conf import WechatConf
    from wechat_sdk.basic import WechatBasic
    from wechat_sdk.ext import WechatExt
except ImportError:
    pass
| bsd-2-clause | Python |
6bfd02fd83da14d045fc01d16783942b14c51825 | fix issue #11 | Zopieux/django-blog-zinnia,jfdsmit/django-blog-zinnia,ghachey/django-blog-zinnia,1844144/django-blog-zinnia,aorzh/django-blog-zinnia,marctc/django-blog-zinnia,dapeng0802/django-blog-zinnia,Maplecroft/django-blog-zinnia,extertioner/django-blog-zinnia,extertioner/django-blog-zinnia,extertioner/django-blog-zinnia,petecummings/django-blog-zinnia,bywbilly/django-blog-zinnia,1844144/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia,Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,ZuluPro/django-blog-zinnia,jfdsmit/django-blog-zinnia,Fantomas42/django-blog-zinnia,bywbilly/django-blog-zinnia,aorzh/django-blog-zinnia,petecummings/django-blog-zinnia,Zopieux/django-blog-zinnia,marctc/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,petecummings/django-blog-zinnia,dapeng0802/django-blog-zinnia,jfdsmit/django-blog-zinnia,ZuluPro/django-blog-zinnia,Zopieux/django-blog-zinnia,jfdsmit/django-blog-zinnia,1844144/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Maplecroft/django-blog-zinnia,Fantomas42/django-blog-zinnia,aorzh/django-blog-zinnia | zinnia/ping.py | zinnia/ping.py | """Pings for Zinnia"""
import xmlrpclib
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
# Resolved once at import time; the URL globals start empty and are filled
# in lazily by DirectoryPinger.__init__ below.
current_site = Site.objects.get_current()
site = 'http://%s' % current_site.domain
blog_url = ''
blog_feed = ''
class DirectoryPinger(object):
"""Pinger for Directories"""
def __init__(self, server_name):
global blog_url, blog_feed
self.server_name = server_name
self.server = xmlrpclib.ServerProxy(self.server_name)
if not blog_url or not blog_feed:
blog_url = '%s%s' % (site, reverse('zinnia_entry_archive_index'))
blog_feed = '%s%s' % (site, reverse('zinnia_entry_latest_feed'))
def ping(self, entry):
entry_url = '%s%s' % (site, entry.get_absolute_url())
categories = '|'.join([c.title for c in entry.categories.all()])
try:
reply = self.server.weblogUpdates.extendedPing(current_site.name,
blog_url, entry_url,
blog_feed, categories)
except Exception, ex:
try:
reply = self.server.weblogUpdates.ping(current_site.name,
blog_url, entry_url,
categories)
except xmlrpclib.ProtocolError, ex:
reply = {'message': '% invalid ping' % self.server_name,
'flerror': True}
return reply
| """Pings for Zinnia"""
import xmlrpclib
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
# Module-level state: the current site is fixed at import; the two URL
# globals are populated lazily by DirectoryPinger.__init__.
current_site = Site.objects.get_current()
site = 'http://%s' % current_site.domain
blog_url = ''
blog_feed = ''
class DirectoryPinger(object):
"""Pinger for Directories"""
def __init__(self, server_name):
global blog_url, blog_feed
self.server_name = server_name
self.server = xmlrpclib.ServerProxy(self.server_name)
if not blog_url or not blog_feed:
blog_url = '%s%s' % (site, reverse('zinnia_entry_archive_index'))
blog_feed = '%s%s' % (site, reverse('zinnia_feeds', args=['latest', ]))
def ping(self, entry):
entry_url = '%s%s' % (site, entry.get_absolute_url())
categories = '|'.join([c.title for c in entry.categories.all()])
try:
reply = self.server.weblogUpdates.extendedPing(current_site.name,
blog_url, entry_url,
blog_feed, categories)
except Exception, ex:
try:
reply = self.server.weblogUpdates.ping(current_site.name,
blog_url, entry_url,
categories)
except xmlrpclib.ProtocolError, ex:
reply = {'message': '% invalid ping' % self.server_name,
'flerror': True}
return reply
| bsd-3-clause | Python |
9c1165bebd92fcaa9e5e3d961027e4a86f8e75cd | Update Where_is_the_code.py | AiAiHealthcare/ProjectAiAi | DLCode/Where_is_the_code.py | DLCode/Where_is_the_code.py | """
AiAi.care uses confidential Patient X-Ray data protected by HIPAA.
We will publish the code on a GitHub public repo after a code audit to make sure
no HIPAA / PII information is published accidentally.
In the meantime we are working off of a private repository. Sorry 😔
"""
| """
AiAi.care uses confidential Patient X-Ray data protected by HIPAA.
We will publish the code on a GitHub public repo after a code audit to make sure
no HIPAA / PII information is published accidentally.
In the meantime we are working off of a private repository. Sorry 😔
""""
| agpl-3.0 | Python |
09a677e11c5abd0441181d13306927c3f3fce29b | Add is_failure to add_error | slash-testing/backslash-python,vmalloc/backslash-python | backslash/error_container.py | backslash/error_container.py | from sentinels import NOTHING
class ErrorContainer(object):
    """Mixin that reports errors/failures against a test or a session."""

    def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING, is_failure=NOTHING):
        """Send a single error record to the API and return its reply."""
        payload = {self._get_id_key(): self.id}
        payload['message'] = message
        payload['exception_type'] = exception_type
        payload['traceback'] = traceback
        payload['is_failure'] = is_failure
        payload['timestamp'] = timestamp
        return self.client.api.call_function('add_error', payload)

    def add_failure(self, message, **kwargs):
        """Shorthand for add_error(..., is_failure=True)."""
        return self.add_error(message, is_failure=True, **kwargs)

    def _get_id_key(self):
        """Id field name: Test objects post 'test_id', everything else 'session_id'."""
        return 'test_id' if type(self).__name__ == 'Test' else 'session_id'
| from sentinels import NOTHING
class ErrorContainer(object):
    """Mixin that reports errors against a test or a session."""

    def add_error(self, message, exception_type=NOTHING, traceback=NOTHING, timestamp=NOTHING, is_failure=NOTHING):
        """Send one error record to the API and return its reply.

        Generalized (backward-compatible): the trailing optional
        *is_failure* flag lets callers mark the error as a failure;
        the NOTHING default keeps existing call sites unchanged.
        """
        return self.client.api.call_function('add_error', {self._get_id_key(): self.id,
                                                           'message': message,
                                                           'exception_type': exception_type,
                                                           'traceback': traceback,
                                                           'is_failure': is_failure,
                                                           'timestamp': timestamp
                                                           })

    def _get_id_key(self):
        # Tests post against 'test_id'; any other container uses 'session_id'.
        if type(self).__name__ == 'Test':
            return 'test_id'
        return 'session_id'
| bsd-3-clause | Python |
3e8df6836f0508fb4c6cd1c4a9f2f39192a01cea | Add Cloud Code tags for API Explorer pilot (#282) | googleapis/python-translate,googleapis/python-translate | samples/snippets/translate_v3_translate_text.py | samples/snippets/translate_v3_translate_text.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START translate_v3_translate_text]
# [START translate_v3_translate_text_0]
# Imports the Google Cloud Translation library
from google.cloud import translate
# [END translate_v3_translate_text_0]
# [START translate_v3_translate_text_1]
# Initialize Translation client
def translate_text(text="YOUR_TEXT_TO_TRANSLATE", project_id="YOUR_PROJECT_ID"):
    """Translate *text* from English (en-US) to French and print each result.

    Args:
        text: Plain-text string to translate.
        project_id: Google Cloud project id billed for the API call.
    """
    client = translate.TranslationServiceClient()
    location = "global"
    # The request parent scopes the call to a project and location.
    parent = f"projects/{project_id}/locations/{location}"
    # [END translate_v3_translate_text_1]
    # [START translate_v3_translate_text_2]
    # Translate text from English to French
    # Detail on supported types can be found here:
    # https://cloud.google.com/translate/docs/supported-formats
    response = client.translate_text(
        request={
            "parent": parent,
            "contents": [text],
            "mime_type": "text/plain",  # mime types: text/plain, text/html
            "source_language_code": "en-US",
            "target_language_code": "fr",
        }
    )
    # Display the translation for each input text provided
    for translation in response.translations:
        print("Translated text: {}".format(translation.translated_text))
    # [END translate_v3_translate_text_2]
# [END translate_v3_translate_text]
| # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START translate_v3_translate_text]
from google.cloud import translate
def translate_text(text="YOUR_TEXT_TO_TRANSLATE", project_id="YOUR_PROJECT_ID"):
    """Translate *text* (English -> French) and print every translated string."""
    client = translate.TranslationServiceClient()
    # Requests are scoped to the project's global location.
    parent = f"projects/{project_id}/locations/global"
    # Supported formats: https://cloud.google.com/translate/docs/supported-formats
    request_body = {
        "parent": parent,
        "contents": [text],
        "mime_type": "text/plain",  # mime types: text/plain, text/html
        "source_language_code": "en-US",
        "target_language_code": "fr",
    }
    response = client.translate_text(request=request_body)
    # Echo each translation returned by the service.
    for item in response.translations:
        print("Translated text: {}".format(item.translated_text))
# [END translate_v3_translate_text]
| apache-2.0 | Python |
c1d2834c304c8c1194dbc15c89f1c24b56bb24a9 | Fix styling | catapult-project/catapult-csm,sahiljain/catapult,SummerLW/Perf-Insight-Report,vmpstr/trace-viewer,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult,danbeam/catapult,catapult-project/catapult-csm,zeptonaut/catapult,modulexcite/catapult,sahiljain/catapult,zeptonaut/catapult,SummerLW/Perf-Insight-Report,danbeam/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,benschmaus/catapult,vmpstr/trace-viewer,SummerLW/Perf-Insight-Report,dstockwell/catapult,catapult-project/catapult-csm,0x90sled/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult,danbeam/catapult,danbeam/catapult,scottmcmaster/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,zeptonaut/catapult,scottmcmaster/catapult,dstockwell/catapult,dstockwell/catapult,SummerLW/Perf-Insight-Report,modulexcite/catapult,catapult-project/catapult,vmpstr/trace-viewer,modulexcite/catapult,dstockwell/catapult,benschmaus/catapult,catapult-project/catapult-csm,benschmaus/catapult,0x90sled/catapult,benschmaus/catapult,0x90sled/catapult,sahiljain/catapult,scottmcmaster/catapult,benschmaus/catapult,sahiljain/catapult | trace_viewer/build/trace2html_unittest.py | trace_viewer/build/trace2html_unittest.py | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import tempfile
import os
from trace_viewer.build import trace2html
class Trace2HTMLTests(unittest.TestCase):
  def test_writeHTMLForTracesToFile(self):
    # Smoke test: build the paths of one small and one large sample trace and
    # render them into a single temporary HTML file.
    # NOTE(review): delete=False leaves the temp file behind -- confirm that
    # is intentional (e.g. for post-failure inspection).
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmpfile:
      simple_trace_path = os.path.join(os.path.dirname(__file__),
                                       '..', '..', 'test_data', 'simple_trace.json')
      big_trace_path = os.path.join(os.path.dirname(__file__),
                                    '..', '..', 'test_data', 'big_trace.json')
res = trace2html.WriteHTMLForTracesToFile(
[big_trace_path, simple_trace_path], tmpfile) | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import tempfile
import os
from trace_viewer.build import trace2html
class Trace2HTMLTests(unittest.TestCase):
  def test_writeHTMLForTracesToFile(self):
    # Smoke test: render a small and a large sample trace to one temp file.
    # NOTE(review): delete=False means the temporary file is never removed --
    # confirm whether cleanup is intended.
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmpfile:
      simple_trace_path = os.path.join(os.path.dirname(__file__),
                                       '..', '..', 'test_data', 'simple_trace.json')
      big_trace_path = os.path.join(os.path.dirname(__file__),
                                    '..', '..', 'test_data', 'big_trace.json')
res = trace2html.WriteHTMLForTracesToFile([big_trace_path, simple_trace_path], tmpfile) | bsd-3-clause | Python |
e38e18b8ab5ee90a1e5e0e77070ec07260687332 | Add more tests for Malayalam | dmort27/epitran,dmort27/epitran | epitran/test/test_malayalam.py | epitran/test/test_malayalam.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import unicodedata
import epitran
class TestMalayalamGeneral(unittest.TestCase):
    """General Malayalam-to-IPA transliteration checks."""
    def setUp(self):
        self.epi = epitran.Epitran(u'mal-Mlym')
    def _assert_trans(self, src, tar):
        # Transliterate and NFD-normalize before comparing to the target IPA.
        trans = self.epi.transliterate(src)
        trans = unicodedata.normalize('NFD', trans)
        # NOTE(review): this normalizes `trans` into `src`, and `src` is then
        # unused -- likely meant normalize(src); confirm and fix upstream.
        src = unicodedata.normalize('NFD', trans)
        # print('{}\t{}\t{}'.format(trans, tar, zip(trans, tar)))
        self.assertEqual(trans, tar)
    def test_malayalam(self):
        self._assert_trans('മലയാളം', 'malajaːɭam')
    def test_kala(self):
        self._assert_trans('കല', 'kala')
    def test_eniykk(self):
        self._assert_trans('എനിയ്ക്ക്', 'enijkkə')
class TestMalayalamFaDisambiguation(unittest.TestCase):
    """Disambiguation of the pha/fa reading of the same grapheme."""
    def setUp(self):
        self.epi = epitran.Epitran('mal-Mlym')
    def _assert_trans(self, src, tar):
        trans = self.epi.transliterate(src)
        trans = unicodedata.normalize('NFD', trans)
        # NOTE(review): normalizes `trans` into `src` (then unused) -- likely
        # intended normalize(src); confirm.
        src = unicodedata.normalize('NFD', trans)
        # print('{}\t{}\t{}'.format(trans, tar, zip(trans, tar)))
        self.assertEqual(trans, tar)
    def test_phalam(self):
        self._assert_trans('ഫലം', 'pʰalam')
    def test_fan(self):
        self._assert_trans('ഫാൻ', 'faːn')
class TestMalayalamDentalAlveolarNasalDisambiguation(unittest.TestCase):
    """Dental vs. alveolar nasal disambiguation cases."""
    def setUp(self):
        self.epi = epitran.Epitran('mal-Mlym')
    def _assert_trans(self, src, tar):
        trans = self.epi.transliterate(src)
        trans = unicodedata.normalize('NFD', trans)
        # NOTE(review): `src` is reassigned from `trans` and never used --
        # probably meant normalize(src); confirm.
        src = unicodedata.normalize('NFD', trans)
        self.assertEqual(trans, tar)
    def test_nannayi(self):
        self._assert_trans('നന്നായി', 'n̪an̪n̪aːji')
    def test_nanavu(self):
        self._assert_trans('നനവ്', 'n̪anaʋə')
    def test_sneham(self):
        self._assert_trans('സ്നേഹം', 'sneːɦam')
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import unicodedata
import epitran
class TestMalayalamGeneral(unittest.TestCase):
    """General Malayalam-to-IPA transliteration checks."""
    def setUp(self):
        self.epi = epitran.Epitran(u'mal-Mlym')
    def _assert_trans(self, src, tar):
        trans = self.epi.transliterate(src)
        trans = unicodedata.normalize('NFD', trans)
        # NOTE(review): normalizes `trans` into `src`, which is then unused --
        # likely meant normalize(src); confirm.
        src = unicodedata.normalize('NFD', trans)
        # print('{}\t{}\t{}'.format(trans, tar, zip(trans, tar)))
        self.assertEqual(trans, tar)
    def test_malayalam(self):
        self._assert_trans('മലയാളം', 'malajaːɭam')
    def test_kala(self):
        self._assert_trans('കല', 'kala')
    def test_eniykk(self):
        self._assert_trans('എനിയ്ക്ക്', 'enijkkə')
class TestMalayalamFaDisambiguation(unittest.TestCase):
    """pha/fa disambiguation checks."""
    def setUp(self):
        self.epi = epitran.Epitran('mal-Mlym')
    def _assert_trans(self, src, tar):
        trans = self.epi.transliterate(src)
        trans = unicodedata.normalize('NFD', trans)
        # NOTE(review): `src` is overwritten with normalize(trans) and unused.
        src = unicodedata.normalize('NFD', trans)
        # print('{}\t{}\t{}'.format(trans, tar, zip(trans, tar)))
        self.assertEqual(trans, tar)
    # NOTE(review): a second `def test_phalam` later in this file re-binds the
    # name and silently shadows this test (likely meant `test_fan`); rename it.
    def test_phalam(self):
        self._assert_trans('ഫലം', 'pʰalam')
def test_phalam(self):
self._assert_trans('ഫാൻ', 'faːn') | mit | Python |
fe676a041b793f55d33bfd27eb2b4fdfe7d93bb6 | Change import path for pricing | tysonholub/twilio-python,twilio/twilio-python | twilio/rest/resources/pricing/__init__.py | twilio/rest/resources/pricing/__init__.py | from twilio.rest.pricing.voice import (
Voice,
VoiceCountry,
VoiceCountries,
VoiceNumber,
VoiceNumbers,
)
from twilio.rest.pricing.phone_number import (
PhoneNumberCountries,
PhoneNumberCountry,
PhoneNumber,
)
| from .voice import (
Voice,
VoiceCountry,
VoiceCountries,
VoiceNumber,
VoiceNumbers,
)
from .phone_numbers import (
PhoneNumberCountries,
PhoneNumberCountry,
PhoneNumbers,
)
| mit | Python |
ef3797e8393fcaa03356e73bebb92a7f9d283cd7 | Bump version | thombashi/DataProperty | dataproperty/__version__.py | dataproperty/__version__.py | __author__ = "Tsuyoshi Hombashi"
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
# Package version string (bumped per release).
__version__ = "0.54.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| __author__ = "Tsuyoshi Hombashi"
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
# Package version string.
__version__ = "0.53.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
a248dc23b19b3120fe94f3fcd22943d9230a9833 | Use `label_from_instance` in QuerySetSequenceSelectMixin | yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light | src/dal_queryset_sequence/widgets.py | src/dal_queryset_sequence/widgets.py | """
Widget mixin that only renders selected options with QuerySetSequence.
For details about why this is required, see :mod:`dal.widgets`.
"""
from dal.widgets import WidgetMixin
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_text
class QuerySetSequenceSelectMixin(WidgetMixin):
"""Support QuerySetSequence in WidgetMixin."""
def label_from_instance(self, obj):
"""Convert an object into string. Override it to customize display."""
return force_text(obj)
def filter_choices_to_render(self, selected_choices):
"""Overwrite self.choices to exclude unselected values."""
if len(selected_choices) == 1 and not selected_choices[0]:
selected_choices = []
ctype_models = {}
for choice in selected_choices:
ctype_pk, model_pk = choice.split('-')
ctype_pk = int(ctype_pk)
ctype_models.setdefault(ctype_pk, [])
ctype_models[ctype_pk].append(model_pk)
self.choices = []
ctype = ContentType.objects.get_for_id
for ctype_pk, ids in ctype_models.items():
results = ctype(ctype_pk).model_class().objects.filter(pk__in=ids)
self.choices += [
('%s-%s' % (ctype_pk, r.pk), self.label_from_instance(r))
for r in results
]
class QuerySetSequenceSelect(QuerySetSequenceSelectMixin,
forms.Select):
"""Select widget for QuerySetSequence choices."""
class QuerySetSequenceSelectMultiple(QuerySetSequenceSelectMixin,
forms.SelectMultiple):
"""SelectMultiple widget for QuerySetSequence choices."""
| """
Widget mixin that only renders selected options with QuerySetSequence.
For details about why this is required, see :mod:`dal.widgets`.
"""
from dal.widgets import WidgetMixin
from django import forms
from django.contrib.contenttypes.models import ContentType
from django.utils import six
class QuerySetSequenceSelectMixin(WidgetMixin):
"""Support QuerySetSequence in WidgetMixin."""
def filter_choices_to_render(self, selected_choices):
"""Overwrite self.choices to exclude unselected values."""
if len(selected_choices) == 1 and not selected_choices[0]:
selected_choices = []
ctype_models = {}
for choice in selected_choices:
ctype_pk, model_pk = choice.split('-')
ctype_pk = int(ctype_pk)
ctype_models.setdefault(ctype_pk, [])
ctype_models[ctype_pk].append(model_pk)
self.choices = []
ctype = ContentType.objects.get_for_id
for ctype_pk, ids in ctype_models.items():
results = ctype(ctype_pk).model_class().objects.filter(pk__in=ids)
self.choices += [
('%s-%s' % (ctype_pk, r.pk), six.text_type(r))
for r in results
]
class QuerySetSequenceSelect(QuerySetSequenceSelectMixin,
forms.Select):
"""Select widget for QuerySetSequence choices."""
class QuerySetSequenceSelectMultiple(QuerySetSequenceSelectMixin,
forms.SelectMultiple):
"""SelectMultiple widget for QuerySetSequence choices."""
| mit | Python |
3621ad16160797b843d56af82314cfeb339e6ce1 | Make tasks view able to filter by jobuuid. | artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history | src/dashboard/src/dashboard/views.py | src/dashboard/src/dashboard/views.py | from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseServerError, HttpResponseRedirect
from dashboard.contrib.mcp.client import MCPClient
from dashboard.dashboard.models import Task, Job
from lxml import etree
def client(request):
return render_to_response('client.html')
def approve_job(request):
result = ''
if 'uuid' in request.REQUEST:
client = MCPClient(settings.MCP_SERVER[0], settings.MCP_SERVER[1])
uuid = request.REQUEST.get('uuid', '')
result = client.approve_job(uuid)
return HttpResponse(result, mimetype = 'text/plain')
def jobs_awaiting_approval(request):
client = MCPClient(settings.MCP_SERVER[0], settings.MCP_SERVER[1])
jobs = etree.XML(client.get_jobs_awaiting_approval())
response = ''
if 0 < len(jobs):
for job in jobs:
response += etree.tostring(job)
response = '<Jobs>' + response + '</Jobs>'
return HttpResponse(response, mimetype = 'text/xml')
def index(request):
return HttpResponseRedirect(reverse(jobs))
def jobs(request, page = 1):
objects = Job.objects.all().order_by('-createdtime')
paginator = Paginator(objects, 10)
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return render_to_response('jobs.html', locals())
def jobsplus(request, page = 1):
objects = Job.objects.all().order_by('-createdtime')
return render_to_response('jobsplus.html', locals())
def tasks(request, page = 1):
if 'jobuuid' in request.REQUEST:
job = Job.objects.get(jobuuid = request.REQUEST.get('jobuuid'))
objects = job.task_set.all()
else:
objects = Task.objects.all().order_by('-createdtime')
paginator = Paginator(objects, 10)
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return render_to_response('tasks.html', locals())
| from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseServerError, HttpResponseRedirect
from dashboard.contrib.mcp.client import MCPClient
from dashboard.dashboard.models import Task, Job
from lxml import etree
def client(request):
return render_to_response('client.html')
def approve_job(request):
result = ''
if 'uuid' in request.REQUEST:
client = MCPClient(settings.MCP_SERVER[0], settings.MCP_SERVER[1])
uuid = request.REQUEST.get('uuid', '')
result = client.approve_job(uuid)
return HttpResponse(result, mimetype = 'text/plain')
def jobs_awaiting_approval(request):
client = MCPClient(settings.MCP_SERVER[0], settings.MCP_SERVER[1])
jobs = etree.XML(client.get_jobs_awaiting_approval())
response = ''
if 0 < len(jobs):
for job in jobs:
response += etree.tostring(job)
response = '<Jobs>' + response + '</Jobs>'
return HttpResponse(response, mimetype = 'text/xml')
def index(request):
return HttpResponseRedirect(reverse(jobs))
def jobs(request, page = 1):
objects = Job.objects.all().order_by('-createdtime')
paginator = Paginator(objects, 10)
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return render_to_response('jobs.html', locals())
def jobsplus(request, page = 1):
objects = Job.objects.all().order_by('-createdtime')
return render_to_response('jobsplus.html', locals())
def tasks(request, page = 1):
objects = Task.objects.all().order_by('-createdtime')
if 'jobuuid' in request.GET:
objects.filter(jobuuid=request.GET.get('jobuuid'))
sys.exit()
paginator = Paginator(objects, 10)
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return render_to_response('tasks.html', locals())
| agpl-3.0 | Python |
e5061b4d5f1a8fc0c75262931e614f0414eb9502 | Disable failing test | tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status | tests/modules/test_brightness.py | tests/modules/test_brightness.py | # pylint: disable=C0103,C0111
import mock
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
import tests.mocks as mocks
from bumblebee.config import Config
from bumblebee.input import WHEEL_UP, WHEEL_DOWN
from bumblebee.modules.brightness import Module
class TestBrightnessModule(unittest.TestCase):
def setUp(self):
mocks.setup_test(self, Module)
def tearDown(self):
mocks.teardown_test(self)
# def test_format(self):
# for widget in self.module.widgets():
# self.assertEquals(len(widget.full_text()), len("100%"))
def test_wheel_up(self):
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_UP, inp=self.input, module=self.module)
self.popen.assert_call("xbacklight +2%")
def test_wheel_down(self):
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_DOWN, inp=self.input, module=self.module)
self.popen.assert_call("xbacklight -2%")
def test_custom_step(self):
self.config.set("brightness.step", "10")
module = Module(engine=self.engine, config={"config": self.config})
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_DOWN, inp=self.input, module=module)
self.popen.assert_call("xbacklight -10%")
# @mock.patch('bumblebee.modules.brightness.open', create=True)
# def test_update(self, mock_open):
# mock_open.side_effect = [
# mock.mock_open(read_data="20").return_value,
# mock.mock_open(read_data="100").return_value
# ]
# self.module.update_all()
# self.assertEquals(self.module.brightness(self.anyWidget), "020%")
# self.assertEquals(len(self.module.brightness(self.anyWidget)), len("100%"))
@mock.patch('bumblebee.modules.brightness.open', create=True)
def test_error(self,mock_open):
mock_open.side_effect = FileNotFoundError
self.module.update_all()
self.assertEquals(self.module.brightness(self.anyWidget), "n/a")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| # pylint: disable=C0103,C0111
import mock
import unittest
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
import tests.mocks as mocks
from bumblebee.config import Config
from bumblebee.input import WHEEL_UP, WHEEL_DOWN
from bumblebee.modules.brightness import Module
class TestBrightnessModule(unittest.TestCase):
def setUp(self):
mocks.setup_test(self, Module)
def tearDown(self):
mocks.teardown_test(self)
# def test_format(self):
# for widget in self.module.widgets():
# self.assertEquals(len(widget.full_text()), len("100%"))
def test_wheel_up(self):
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_UP, inp=self.input, module=self.module)
self.popen.assert_call("xbacklight +2%")
def test_wheel_down(self):
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_DOWN, inp=self.input, module=self.module)
self.popen.assert_call("xbacklight -2%")
def test_custom_step(self):
self.config.set("brightness.step", "10")
module = Module(engine=self.engine, config={"config": self.config})
mocks.mouseEvent(stdin=self.stdin, button=WHEEL_DOWN, inp=self.input, module=module)
self.popen.assert_call("xbacklight -10%")
@mock.patch('bumblebee.modules.brightness.open', create=True)
def test_update(self, mock_open):
mock_open.side_effect = [
mock.mock_open(read_data="20").return_value,
mock.mock_open(read_data="100").return_value
]
self.module.update_all()
self.assertEquals(self.module.brightness(self.anyWidget), "020%")
self.assertEquals(len(self.module.brightness(self.anyWidget)), len("100%"))
@mock.patch('bumblebee.modules.brightness.open', create=True)
def test_error(self,mock_open):
mock_open.side_effect = FileNotFoundError
self.module.update_all()
self.assertEquals(self.module.brightness(self.anyWidget), "n/a")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| mit | Python |
71b89ea569e7a6ac84ba5e45cbef2cfcd9679c79 | Make sure send_unknown_user_type_stats is never run in quick succession | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/pillows/tasks.py | corehq/pillows/tasks.py | from datetime import timedelta
from celery.schedules import crontab
from celery.task import periodic_task
from corehq.apps.es import FormES
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from corehq.form_processor.utils.xform import resave_form
from corehq.pillows.utils import get_user_type_deep_cache_for_unknown_users
from corehq.util.datadog.gauges import datadog_gauge
from corehq.util.decorators import serial_task
from corehq.util.quickcache import quickcache
@periodic_task(run_every=timedelta(minutes=10))
@quickcache([], timeout=9 * 60) # Protect from many runs after recovering from a backlog
def send_unknown_user_type_stats():
datadog_gauge('commcare.fix_user_types.unknown_user_count',
len(_get_unknown_user_type_user_ids()))
datadog_gauge('commcare.fix_user_types.unknown_user_form_count',
FormES().user_type('unknown').count())
@periodic_task(run_every=crontab(minute=0, hour=0))
def fix_user_types():
unknown_user_ids = _get_unknown_user_type_user_ids()
for user_id in unknown_user_ids:
user_type = get_user_type_deep_cache_for_unknown_users(user_id)
if user_type != unknown_user_ids:
resave_es_forms_with_unknown_user_type.delay(user_id)
@serial_task('{user_id}', queue='background_queue')
def resave_es_forms_with_unknown_user_type(user_id):
domain_form_id_list = (
FormES().user_type('unknown').user_id(user_id)
.values_list('domain', '_id', scroll=True)
)
for domain, form_id in domain_form_id_list:
form = FormAccessors(domain).get_form(form_id)
resave_form(domain, form)
def _get_unknown_user_type_user_ids():
return FormES().user_type('unknown').user_aggregation().run().aggregations.user.keys
| from datetime import timedelta
from celery.schedules import crontab
from celery.task import periodic_task
from corehq.apps.es import FormES
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
from corehq.form_processor.utils.xform import resave_form
from corehq.pillows.utils import get_user_type_deep_cache_for_unknown_users
from corehq.util.datadog.gauges import datadog_gauge
from corehq.util.decorators import serial_task
@periodic_task(run_every=timedelta(minutes=10))
def send_unknown_user_type_stats():
datadog_gauge('commcare.fix_user_types.unknown_user_count',
len(_get_unknown_user_type_user_ids()))
datadog_gauge('commcare.fix_user_types.unknown_user_form_count',
FormES().user_type('unknown').count())
@periodic_task(run_every=crontab(minute=0, hour=0))
def fix_user_types():
unknown_user_ids = _get_unknown_user_type_user_ids()
for user_id in unknown_user_ids:
user_type = get_user_type_deep_cache_for_unknown_users(user_id)
if user_type != unknown_user_ids:
resave_es_forms_with_unknown_user_type.delay(user_id)
@serial_task('{user_id}', queue='background_queue')
def resave_es_forms_with_unknown_user_type(user_id):
domain_form_id_list = (
FormES().user_type('unknown').user_id(user_id)
.values_list('domain', '_id', scroll=True)
)
for domain, form_id in domain_form_id_list:
form = FormAccessors(domain).get_form(form_id)
resave_form(domain, form)
def _get_unknown_user_type_user_ids():
return FormES().user_type('unknown').user_aggregation().run().aggregations.user.keys
| bsd-3-clause | Python |
a6d6ff81e8fc8f1c2cd80e8f33ea34e892bb8345 | remove json import | StepicOrg/Stepic-API | examples/oauth_auth_example.py | examples/oauth_auth_example.py | # Run with Python 3
import requests
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = resp.json()['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Example:
api_url = 'https://stepic.org/api/courses/67'
course = requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).json()
print(course)
| # Run with Python 3
import json
import requests
# 1. Get your keys at https://stepic.org/oauth2/applications/ (client type = confidential,
# authorization grant type = client credentials)
client_id = "..."
client_secret = "..."
# 2. Get a token
auth = requests.auth.HTTPBasicAuth(client_id, client_secret)
resp = requests.post('https://stepic.org/oauth2/token/',
data={'grant_type': 'client_credentials'},
auth=auth
)
token = json.loads(resp.text)['access_token']
# 3. Call API (https://stepic.org/api/docs/) using this token.
# Example:
api_url = 'https://stepic.org/api/courses/67'
course = json.loads(requests.get(api_url, headers={'Authorization': 'Bearer '+ token}).text)
print(course)
| mit | Python |
1dbc0ba0ab6b33d855e7ad455d115a43ead9dfa2 | Fix test_servlet_newrequest | bis12/pushmanager,bchess/pushmanager,Yelp/pushmanager,bis12/pushmanager,imbstack/pushmanager,Yelp/pushmanager,Yelp/pushmanager,bis12/pushmanager,imbstack/pushmanager,asottile/pushmanager,asottile/pushmanager,YelpArchive/pushmanager,imbstack/pushmanager,bchess/pushmanager,Yelp/pushmanager,asottile/pushmanager,bchess/pushmanager,YelpArchive/pushmanager,YelpArchive/pushmanager,YelpArchive/pushmanager | tests/test_servlet_newrequest.py | tests/test_servlet_newrequest.py | from contextlib import nested
import mock
import urllib
from core import db
from core.util import get_servlet_urlspec
from servlets.newrequest import NewRequestServlet
import testing as T
class NewRequestServletTest(T.TestCase, T.ServletTestMixin, T.FakeDataMixin):
def get_handlers(self):
return [get_servlet_urlspec(NewRequestServlet)]
def test_newrequest(self):
results = []
def on_db_return(success, db_results):
assert success
results.extend(db_results.fetchall())
with nested(
mock.patch.dict(db.Settings, T.MockedSettings),
mock.patch.object(NewRequestServlet, "redirect"),
mock.patch.object(
NewRequestServlet,
"get_current_user",
return_value="testuser"
)
):
results = []
db.execute_cb(db.push_requests.select(), on_db_return)
num_results_before = len(results)
request = {
'request-title': 'Test Push Request Title',
'request-tags': 'super-safe,logs',
'request-review': 1,
'request-repo': 'testuser',
'request-branch': 'super_safe_fix',
'request-comments': 'No comment',
'request-description': 'I approve this fix!',
}
response = self.fetch(
"/newrequest",
method="POST",
body=urllib.urlencode(request)
)
T.assert_equal(response.error, None)
results = []
db.execute_cb(db.push_requests.select(), on_db_return)
num_results_after = len(results)
T.assert_equal(num_results_after, num_results_before + 1)
last_req = self.get_requests()[-1]
T.assert_equal(len(results), last_req['id'])
T.assert_equal('testuser', last_req['user'])
T.assert_equal(request['request-repo'], last_req['repo'])
T.assert_equal(request['request-branch'], last_req['branch'])
T.assert_equal(request['request-tags'], last_req['tags'])
T.assert_equal(request['request-comments'], last_req['comments'])
T.assert_equal(request['request-description'], last_req['description'])
| from contextlib import nested
import mock
import urllib
from core import db
from core.util import get_servlet_urlspec
from servlets.newrequest import NewRequestServlet
import testing as T
class NewRequestServletTest(T.TestCase, T.ServletTestMixin):
def get_handlers(self):
return [get_servlet_urlspec(NewRequestServlet)]
def test_newrequest(self):
results = []
def on_db_return(success, db_results):
assert success
results.extend(db_results.fetchall())
with nested(
mock.patch.dict(db.Settings, T.MockedSettings),
mock.patch.object(NewRequestServlet, "redirect"),
mock.patch.object(
NewRequestServlet,
"get_current_user",
return_value="testuser"
)
):
results = []
db.execute_cb(db.push_requests.select(), on_db_return)
num_results_before = len(results)
request = {
'title': 'Test Push Request Title',
'user': 'testuser',
'tags': 'super-safe,logs',
'reviewid': 1,
'repo': 'testuser',
'branch': 'super_safe_fix',
'comments': 'No comment',
'description': 'I approve this fix!',
}
response = self.fetch(
"/newrequest",
method="POST",
body=urllib.urlencode(request)
)
T.assert_equal(response.error, None)
results = []
db.execute_cb(db.push_requests.select(), on_db_return)
num_results_after = len(results)
T.assert_equal(num_results_after, num_results_before + 1)
| apache-2.0 | Python |
1342ff4a6d95bea5b0396a3765a406a0504fc72c | Add test for get_receive_message() (#53) | silver-castle/mach9 | tests/test_websocket_protocol.py | tests/test_websocket_protocol.py | from mach9.http import HttpProtocol
from mach9.websocket import WebSocketProtocol
from tests.utils import Transport
def test_accept_content():
http_protocol = HttpProtocol(loop=None, request_handler=None)
headers = [[b'foo', b'bar']]
websocket_protocol = WebSocketProtocol(http_protocol, headers)
content = websocket_protocol.get_accept_content()
output = b'HTTP/1.1 101 Switching Protocols\r\nfoo: bar\r\n\r\n'
assert content == output
def test_get_connect_message():
http_protocol = HttpProtocol(loop=None, request_handler=None)
headers = [[b'foo', b'bar']]
websocket_protocol = WebSocketProtocol(http_protocol, headers)
transport = Transport()
message = websocket_protocol.get_connect_message(
transport,
'1.1',
b'GET',
b'ws://127.0.0.1:1234/foo/bar?key1=1&key2=2',
[[b'k1', b'v1']])
assert websocket_protocol.order == 0
assert message['channel'] == 'websocket.connect'
assert message['reply_channel'] is None
assert message['http_version'] == '1.1'
assert message['method'] == 'GET'
assert message['scheme'] == 'ws'
assert message['query_string'] == b'key1=1&key2=2'
assert message['root_path'] == ''
assert message['path'] == '/foo/bar'
assert message['order'] == 0
assert message['headers'] == [[b'k1', b'v1']]
assert message['client'] == ('127.0.0.1', 1234)
assert message['server'] == ('127.0.0.1', 5678)
def test_get_receive_message():
http_protocol = HttpProtocol(loop=None, request_handler=None)
headers = [[b'foo', b'bar']]
websocket_protocol = WebSocketProtocol(http_protocol, headers)
transport = Transport()
websocket_protocol.get_connect_message(
transport,
'1.1',
b'GET',
b'ws://127.0.0.1:1234/foo/bar?key1=1&key2=2',
[[b'k1', b'v1']])
message1 = websocket_protocol.get_receive_message('text')
assert message1['channel'] == 'websocket.receive'
assert message1['reply_channel'] is None
assert message1['path'] == '/foo/bar'
assert message1['order'] == 1
assert message1['text'] == 'text'
assert message1['bytes'] is None
message2 = websocket_protocol.get_receive_message(b'bytes')
assert message2['order'] == 2
assert message2['text'] is None
assert message2['bytes'] == b'bytes'
| from mach9.http import HttpProtocol
from mach9.websocket import WebSocketProtocol
from tests.utils import Transport
def test_accept_content():
http_protocol = HttpProtocol(loop=None, request_handler=None)
headers = [[b'foo', b'bar']]
websocket_protocol = WebSocketProtocol(http_protocol, headers)
content = websocket_protocol.get_accept_content()
output = b'HTTP/1.1 101 Switching Protocols\r\nfoo: bar\r\n\r\n'
assert content == output
def test_get_connect_message():
http_protocol = HttpProtocol(loop=None, request_handler=None)
headers = [[b'foo', b'bar']]
websocket_protocol = WebSocketProtocol(http_protocol, headers)
transport = Transport()
message = websocket_protocol.get_connect_message(
transport,
'1.1',
b'GET',
b'ws://127.0.0.1:1234/foo/bar?key1=1&key2=2',
[[b'k1', b'v1']])
assert message['channel'] == 'websocket.connect'
assert message['reply_channel'] is None
assert message['http_version'] == '1.1'
assert message['method'] == 'GET'
assert message['scheme'] == 'ws'
assert message['query_string'] == b'key1=1&key2=2'
assert message['root_path'] == ''
assert message['order'] == 0
assert message['headers'] == [[b'k1', b'v1']]
assert message['client'] == ('127.0.0.1', 1234)
assert message['server'] == ('127.0.0.1', 5678)
| mit | Python |
232ffb4ab0c2bc41b1dc31ffc04ebc0477897088 | Fix ssh_demo example | Fizzadar/pyinfra,Fizzadar/pyinfra | examples/ssh_demo/ssh_demo1.py | examples/ssh_demo/ssh_demo1.py | from pyinfra import config, host, inventory
from pyinfra.facts.hardware import Ipv4Addresses
from pyinfra.operations import files, server
config.SUDO = True
# update the /etc/hosts file
def update_hosts_file(name, ip):
name = name.replace("@vagrant/", "")
files.line(
name="Add hosts to /etc/hosts",
path="/etc/hosts",
line=r" {}.example.com ".format(name),
replace="{} {}.example.com {}".format(ip, name, name),
)
# ensure all hosts are added to each /etc/hosts file
inv = inventory.get_group("@vagrant")
for item in inv:
update_hosts_file(item.name, item.get_fact(Ipv4Addresses)["eth0"])
if host.name == "@vagrant/two":
server.hostname(
name="Set the hostname for two",
hostname="two.example.com",
)
if host.name == "@vagrant/one":
server.hostname(
name="Set the hostname for one",
hostname="one.example.com",
)
server.shell(
name="Generate vagrant ssh key",
commands=(
"sudo -u vagrant ssh-keygen -t rsa -C vagrant@example.com "
'-b 4096 -N "" -q -f /home/vagrant/.ssh/id_rsa'
),
)
files.get(
name="Download id_rsa.pub from one",
src="/home/vagrant/.ssh/id_rsa.pub",
dest="/tmp/one_vagrant_id_rsa.pub",
)
| from pyinfra import config, host, inventory
from pyinfra.operations import files, server
config.SUDO = True
# update the /etc/hosts file
def update_hosts_file(name, ip):
name = name.replace("@vagrant/", "")
files.line(
name="Add hosts to /etc/hosts",
path="/etc/hosts",
line=r" {}.example.com ".format(name),
replace="{} {}.example.com {}".format(ip, name, name),
)
# ensure all hosts are added to each /etc/hosts file
inv = inventory.get_group("@vagrant")
for item in inv:
update_hosts_file(item.name, item.fact.ipv4_addresses["eth0"])
if host.name == "@vagrant/two":
server.hostname(
name="Set the hostname for two",
hostname="two.example.com",
)
if host.name == "@vagrant/one":
server.hostname(
name="Set the hostname for one",
hostname="one.example.com",
)
server.shell(
name="Generate vagrant ssh key",
commands=(
"sudo -u vagrant ssh-keygen -t rsa -C vagrant@example.com "
'-b 4096 -N "" -q -f /home/vagrant/.ssh/id_rsa'
),
)
files.get(
name="Download id_rsa.pub from one",
src="/home/vagrant/.ssh/id_rsa.pub",
dest="/tmp/one_vagrant_id_rsa.pub",
)
| mit | Python |
9704f43d2245f53ed5315a62db3eabb40aa3dad8 | use name method for querying | uw-it-aca/canvas-sis-provisioner,uw-it-aca/canvas-sis-provisioner,uw-it-aca/canvas-sis-provisioner,uw-it-aca/canvas-sis-provisioner | sis_provisioner/management/commands/__init__.py | sis_provisioner/management/commands/__init__.py | from django.core.management.base import BaseCommand, CommandError
from django.utils.timezone import utc
from sis_provisioner.models import Job
import datetime
import sys
class SISProvisionerCommand(BaseCommand):
def __init__(self, *args, **kwargs):
super(SISProvisionerCommand, self).__init__(*args, **kwargs)
if not self.is_active_job():
sys.exit(0)
def is_active_job(self):
name = self.name_from_argv()
try:
job = Job.objects.get(name=name)
except Job.DoesNotExist:
job = Job(name=name,
title=self.title_from_name(name),
is_active=False)
job.changed_date = datetime.datetime.utcnow().replace(tzinfo=utc)
job.save()
return True if job.is_active else False
def update_job(self):
job = Job.objects.get(name=self.name_from_argv())
job.last_run_date = datetime.datetime.utcnow().replace(tzinfo=utc)
job.save()
def name_from_argv(self):
name = sys.argv[1]
args = sys.argv[2:]
if len(args):
name += ':' + ':'.join(args).replace('--', '')
return name
def title_from_name(self, name):
parts = name.split(':')
title = ' '.join(w.capitalize() for w in parts[0].split('_'))
args = parts[1:]
if len(args):
title += ' (' + ', '.join(args) + ')'
return title
| from django.core.management.base import BaseCommand, CommandError
from django.utils.timezone import utc
from sis_provisioner.models import Job
import datetime
import sys
class SISProvisionerCommand(BaseCommand):
def __init__(self, *args, **kwargs):
super(SISProvisionerCommand, self).__init__(*args, **kwargs)
if not self.is_active_job():
sys.exit(0)
def is_active_job(self):
name = self.name_from_argv()
try:
job = Job.objects.get(name=name)
except Job.DoesNotExist:
job = Job(name=name,
title=self.title_from_name(name),
is_active=False)
job.changed_date = datetime.datetime.utcnow().replace(tzinfo=utc)
job.save()
return True if job.is_active else False
def update_job(self):
job = Job.objects.get(name=sys.argv[1])
job.last_run_date = datetime.datetime.utcnow().replace(tzinfo=utc)
job.save()
def name_from_argv(self):
name = sys.argv[1]
args = sys.argv[2:]
if len(args):
name += ':' + ':'.join(args).replace('--', '')
return name
def title_from_name(self, name):
parts = name.split(':')
title = ' '.join(w.capitalize() for w in parts[0].split('_'))
args = parts[1:]
if len(args):
title += ' (' + ', '.join(args) + ')'
return title
| apache-2.0 | Python |
ffc86b7fd5ef152d5a00bbdfcab7b69cb26be180 | Update all drivers except XENIFACE | xenserver/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,OwenSmith/win-installer,xenserver/win-installer,xenserver/win-installer,OwenSmith/win-installer,xenserver/win-installer,xenserver/win-installer | manifestspecific.py | manifestspecific.py | # Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
secureserver = r'\\10.80.13.10\distfiles\distfiles\WindowsBuilds'
localserver = r'\\filer.do.citrite.net\build\windowsbuilds\WindowsBuilds'
build_tar_source_files = {
"xenguestagent" : r'xenguestagentsecret.git\206\xenguestagent.tar',
"xenbus" : r'xenbus-patchq.git\70\xenbus.tar',
"xenvif" : r'xenvif-patchq.git\68\xenvif.tar',
"xennet" : r'xennet-patchq.git\46\xennet.tar',
"xeniface" : r'xeniface-patchq.git\43\xeniface.tar',
"xenvbd" : r'xenvbd-patchq.git\137\xenvbd.tar',
"xenvss" : r'xenvss.git\15\xenvss.tar',
}
all_drivers_signed = False
| # Copyright (c) Citrix Systems Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
secureserver = r'\\10.80.13.10\distfiles\distfiles\WindowsBuilds'
localserver = r'\\filer.do.citrite.net\build\windowsbuilds\WindowsBuilds'
build_tar_source_files = {
"xenguestagent" : r'xenguestagentsecret.git\206\xenguestagent.tar',
"xenbus" : r'xenbus-patchq.git\69\xenbus.tar',
"xenvif" : r'xenvif-patchq.git\67\xenvif.tar',
"xennet" : r'xennet-patchq.git\45\xennet.tar',
"xeniface" : r'xeniface-patchq.git\43\xeniface.tar',
"xenvbd" : r'xenvbd-patchq.git\135\xenvbd.tar',
"xenvss" : r'xenvss.git\15\xenvss.tar',
}
all_drivers_signed = False
| bsd-2-clause | Python |
f1e700461340884aa6af99abbbc9f352be66df4c | Handle more recent modifications than retraction | acshi/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,alexschiller/osf.io,felliott/osf.io,erinspace/osf.io,caneruguz/osf.io,adlius/osf.io,felliott/osf.io,alexschiller/osf.io,chrisseto/osf.io,rdhyee/osf.io,leb2dg/osf.io,chennan47/osf.io,mluo613/osf.io,cwisecarver/osf.io,sloria/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,chennan47/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,cslzchen/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,TomBaxter/osf.io,hmoco/osf.io,saradbowman/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,mluo613/osf.io,monikagrabowska/osf.io,mattclark/osf.io,sloria/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,adlius/osf.io,Nesiehr/osf.io,aaxelb/osf.io,erinspace/osf.io,rdhyee/osf.io,laurenrevere/osf.io,cslzchen/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,alexschiller/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,acshi/osf.io,felliott/osf.io,mattclark/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,binoculars/osf.io,chrisseto/osf.io,erinspace/osf.io,acshi/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,felliott/osf.io,baylee-d/osf.io,chrisseto/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,icereval/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,chennan47/osf.io,baylee-d/osf.io,hmoco/osf.io,mluo613/osf.io,caneruguz/osf.io,alexschiller/osf.io,aaxelb/osf.io,Nesiehr/osf.io,binoculars/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,icereval/osf.io,caneruguz/osf.io,adlius/osf.io,mattclark/osf.io,cwisecarver/osf.io,mfraezz/osf.io,pattisdr/osf.io,acshi/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,icereval/osf.io,Nesiehr/osf.io,rdhyee/osf.io,saradbowman/osf.io,l
aurenrevere/osf.io,hmoco/osf.io,cslzchen/osf.io,crcresearch/osf.io,pattisdr/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,leb2dg/osf.io,binoculars/osf.io,mluo613/osf.io | scripts/migration/migrate_retraction_dates.py | scripts/migration/migrate_retraction_dates.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to migrate retracted registrations so that their date modified is date of retraction."""
import sys
import logging
from modularodm import Q
from website.models import Node, NodeLog
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def do_migration(logs):
    """Set each retracted registration's date_modified to its retraction date.

    Registrations that were modified *after* the retraction are left
    untouched (a warning is logged), and retraction logs without a parent
    registration are reported and skipped.
    """
    for retraction_log in logs:
        reg_id = retraction_log.params.get('registration')
        if not reg_id:
            logger.warning('No parent registration found for retraction log ' + retraction_log._id)
            continue
        registration = Node.load(reg_id)
        if not (registration.date_modified < retraction_log.date):
            # More recent edits exist; do not rewind the timestamp.
            logger.warning('Date modified is more recent than retraction ' + retraction_log._id)
            continue
        registration.date_modified = retraction_log.date
        registration.save()
        logger.info('{} date updated to {}'.format(registration, retraction_log.date))
def get_targets():
    """Return every 'retraction_approved' log whose registration may need
    its date_modified backfilled."""
    retraction_logs = NodeLog.find(Q('action', 'eq', 'retraction_approved'))
    logger.info('Retractions found: {}'.format(len(retraction_logs)))
    return retraction_logs
# Entry point: run the whole migration inside a single transaction so a
# --dry run can be rolled back atomically.
if __name__ == '__main__':
    # --dry: report what would change, then abort the transaction at the end.
    dry = '--dry' in sys.argv
    if not dry:
        # Only attach the file logger for real runs.
        script_utils.add_file_logger(logger, __file__)
    with TokuTransaction():
        init_app(set_backends=True, routes=False) # Sets the storage backends on all models
        targets = get_targets()
        for target in targets:
            # Log every candidate before mutating anything.
            logger.info('{} {}: {}'.format(target.date, target.params.get('registration'), target.action))
        do_migration(targets)
        if dry:
            # Raising inside the context manager rolls back all saves above.
            raise RuntimeError('Dry run, rolling back transaction.')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to migrate retracted registrations so that their date modified is date of retraction."""
import sys
import logging
from modularodm import Q
from website.models import Node, NodeLog
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def do_migration(logs):
    """Backfill date_modified on retracted registrations to the retraction date.

    Only moves the timestamp forward: if a registration was modified *after*
    its retraction was approved, unconditionally overwriting date_modified
    (as this code previously did) would rewind it, so such registrations are
    skipped with a warning instead.
    """
    for log in logs:
        registration_id = log.params.get('registration')
        if registration_id:
            registration = Node.load(registration_id)
            if registration.date_modified < log.date:
                registration.date_modified = log.date
                registration.save()
                logger.info('{} date updated to {}'.format(registration, log.date))
            else:
                # More recent edits exist; do not rewind date_modified.
                logger.warning('Date modified is more recent than retraction ' + log._id)
        else:
            logger.warning('No parent registration found for retraction log ' + log._id)
def get_targets():
# ... return the list of logs whose registrations we want to migrate ...
targets = NodeLog.find(Q('action', 'eq', 'retraction_approved'))
logger.info('Retractions found: {}'.format(len(targets)))
return targets
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
targets = get_targets()
for target in targets:
logger.info('{} {}: {}'.format(target.date, target.params.get('registration'), target.action))
do_migration(targets)
if dry:
raise RuntimeError('Dry run, rolling back transaction.')
| apache-2.0 | Python |
c303813602585f2653ffcaac2df0866e5a91f1a7 | declare 'coda' as external_dependencies | Noviat/l10n-belgium,acsone/l10n-belgium,Niboo/l10n-belgium,Niboo/l10n-belgium,Noviat/l10n-belgium,QANSEE/l10n-belgium,akretion/l10n-belgium,yvaucher/l10n-belgium,QANSEE/l10n-belgium,acsone/l10n-belgium,akretion/l10n-belgium | account_statement_coda_import/__openerp__.py | account_statement_coda_import/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Laurent Mignon
# Copyright 2014 'ACSONE SA/NV'
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': "Bank statement CODA import",
'version': '1.0',
'author': 'ACSONE SA/NV',
'maintainer': 'ACSONE SA/NV',
'category': 'Finance',
'complexity': 'normal',
'depends': [
'account_statement_base_import',
'account_statement_bankaccount_completion'
],
'description': """
Bank statement CODA import
==========================
This module brings generic methods and fields on bank statement to deal with
the importation of coded statement of account from electronic files. **CODA**
This is an alternative to the official l10n_be_coda that leverages the advanced
bank statement completion framework of the bank-statement-reconcile
OCA project (https://github.com/OCA/bank-statement-reconcile)
This module allows you to import your bank transactions with a standard **CODA**
file (you'll find samples in the 'data' folder). It respects the chosen profile
(model provided by the account_statement_ext module) to generate the entries.
.. important::
The module requires the python library
`pycoda <https://pypi.python.org/pypi/pycoda>`_
""",
'website': 'http://www.acsone.eu',
'external_dependencies': {
'python': ['coda'],
},
'init_xml': [],
'update_xml': [
],
'demo_xml': [],
'test': [],
'installable': True,
'images': [],
'auto_install': False,
'license': 'AGPL-3',
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Laurent Mignon
# Copyright 2014 'ACSONE SA/NV'
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': "Bank statement CODA import",
'version': '1.0',
'author': 'ACSONE SA/NV',
'maintainer': 'ACSONE SA/NV',
'category': 'Finance',
'complexity': 'normal',
'depends': [
'account_statement_base_import',
'account_statement_bankaccount_completion'
],
'description': """
Bank statement CODA import
==========================
This module brings generic methods and fields on bank statement to deal with
the importation of coded statement of account from electronic files. **CODA**
This is an alternative to the official l10n_be_coda that leverages the advanced
bank statement completion framework of the bank-statement-reconcile
OCA project (https://github.com/OCA/bank-statement-reconcile)
This module allows you to import your bank transactions with a standard **CODA**
file (you'll find samples in the 'data' folder). It respects the chosen profile
(model provided by the account_statement_ext module) to generate the entries.
.. important::
The module requires the python library
`pycoda <https://pypi.python.org/pypi/pycoda>`_
""",
'website': 'http://www.acsone.eu',
'init_xml': [],
'update_xml': [
],
'demo_xml': [],
'test': [],
'installable': True,
'images': [],
'auto_install': False,
'license': 'AGPL-3',
}
| agpl-3.0 | Python |
3b1c6549c6f2430a61727fa8fb148632e654ddbc | fix b2g urls to force trailing slash | petabyte/bedrock,Sancus/bedrock,chirilo/bedrock,Sancus/bedrock,TheJJ100100/bedrock,andreadelrio/bedrock,mkmelin/bedrock,davehunt/bedrock,mozilla/mwc,malena/bedrock,mozilla/bedrock,glogiotatidis/bedrock,ericawright/bedrock,davidwboswell/documentation_autoresponse,elin-moco/bedrock,dudepare/bedrock,alexgibson/bedrock,kyoshino/bedrock,jacshfr/mozilla-bedrock,marcoscaceres/bedrock,mmmavis/bedrock,Jobava/bedrock,sgarrity/bedrock,pascalchevrel/bedrock,gauthierm/bedrock,schalkneethling/bedrock,mmmavis/lightbeam-bedrock-website,alexgibson/bedrock,SujaySKumar/bedrock,jpetto/bedrock,davehunt/bedrock,gauthierm/bedrock,glogiotatidis/bedrock,mahinthjoe/bedrock,malena/bedrock,MichaelKohler/bedrock,amjadm61/bedrock,gerv/bedrock,davehunt/bedrock,amjadm61/bedrock,craigcook/bedrock,elin-moco/bedrock,hoosteeno/bedrock,bensternthal/bedrock,pmclanahan/bedrock,TheoChevalier/bedrock,l-hedgehog/bedrock,schalkneethling/bedrock,analytics-pros/mozilla-bedrock,andreadelrio/bedrock,kyoshino/bedrock,schalkneethling/bedrock,ericawright/bedrock,mozilla/bedrock,elin-moco/bedrock,jgmize/bedrock,mermi/bedrock,SujaySKumar/bedrock,malena/bedrock,rishiloyola/bedrock,schalkneethling/bedrock,mermi/bedrock,bensternthal/bedrock,craigcook/bedrock,kyoshino/bedrock,bensternthal/bedrock,analytics-pros/mozilla-bedrock,mmmavis/bedrock,yglazko/bedrock,Jobava/bedrock,alexgibson/bedrock,davehunt/bedrock,jpetto/bedrock,mahinthjoe/bedrock,sgarrity/bedrock,jpetto/bedrock,rishiloyola/bedrock,andreadelrio/bedrock,l-hedgehog/bedrock,TheJJ100100/bedrock,pmclanahan/bedrock,CSCI-462-01-2017/bedrock,mahinthjoe/bedrock,l-hedgehog/bedrock,glogiotatidis/bedrock,Jobava/bedrock,jpetto/bedrock,elin-moco/bedrock,pascalchevrel/bedrock,sylvestre/bedrock,jgmize/bedrock,SujaySKumar/bedrock,mmmavis/lightbeam-bedrock-website,sylvestre/bedrock,analytics-pros/mozilla-bedrock,gerv/bedrock,pascalchevrel/bedrock,ckprice/bedrock,TheJJ100100/bedrock,craigcook/bedro
ck,jacshfr/mozilla-bedrock,TheoChevalier/bedrock,alexgibson/bedrock,dudepare/bedrock,gerv/bedrock,mkmelin/bedrock,Sancus/bedrock,flodolo/bedrock,davidwboswell/documentation_autoresponse,davidwboswell/documentation_autoresponse,craigcook/bedrock,bensternthal/bedrock,CSCI-462-01-2017/bedrock,TheoChevalier/bedrock,yglazko/bedrock,jacshfr/mozilla-bedrock,SujaySKumar/bedrock,ckprice/bedrock,davidwboswell/documentation_autoresponse,hoosteeno/bedrock,CSCI-462-01-2017/bedrock,MichaelKohler/bedrock,chirilo/bedrock,ericawright/bedrock,marcoscaceres/bedrock,analytics-pros/mozilla-bedrock,MichaelKohler/bedrock,jacshfr/mozilla-bedrock,MichaelKohler/bedrock,rishiloyola/bedrock,sgarrity/bedrock,ericawright/bedrock,flodolo/bedrock,pascalchevrel/bedrock,flodolo/bedrock,dudepare/bedrock,Jobava/bedrock,jgmize/bedrock,pmclanahan/bedrock,mermi/bedrock,mermi/bedrock,yglazko/bedrock,sylvestre/bedrock,mkmelin/bedrock,mahinthjoe/bedrock,mozilla/bedrock,marcoscaceres/bedrock,hoosteeno/bedrock,sylvestre/bedrock,l-hedgehog/bedrock,yglazko/bedrock,gauthierm/bedrock,TheJJ100100/bedrock,TheoChevalier/bedrock,mmmavis/bedrock,Sancus/bedrock,gerv/bedrock,amjadm61/bedrock,amjadm61/bedrock,CSCI-462-01-2017/bedrock,dudepare/bedrock,marcoscaceres/bedrock,petabyte/bedrock,malena/bedrock,mmmavis/lightbeam-bedrock-website,ckprice/bedrock,jgmize/bedrock,gauthierm/bedrock,chirilo/bedrock,petabyte/bedrock,kyoshino/bedrock,flodolo/bedrock,rishiloyola/bedrock,mozilla/mwc,pmclanahan/bedrock,mkmelin/bedrock,chirilo/bedrock,ckprice/bedrock,glogiotatidis/bedrock,mozilla/mwc,amjadm61/bedrock,hoosteeno/bedrock,andreadelrio/bedrock,sgarrity/bedrock,jacshfr/mozilla-bedrock,mmmavis/bedrock,mozilla/bedrock,petabyte/bedrock,mozilla/mwc | apps/b2g/urls.py | apps/b2g/urls.py | from django.conf.urls.defaults import *
from views import b2g, about, faq
urlpatterns = patterns('',
(r'^b2g/faq/$', faq),
(r'^b2g/about/$', about),
(r'^b2g/$', b2g),
)
| from django.conf.urls.defaults import *
from views import b2g, about, faq
# Anchor each pattern with '$' so the URL must end exactly here: without the
# anchor, the unanchored '^b2g/' entry swallowed every sub-path (e.g.
# '/b2g/anything/else'), and the trailing slash was never enforced.
urlpatterns = patterns('',
    (r'^b2g/faq/$', faq),
    (r'^b2g/about/$', about),
    (r'^b2g/$', b2g),
)
| mpl-2.0 | Python |
76fb477ddbbd944c1d228dd6b5a6147ca3e02362 | Remove log translations | openstack/python-mistralclient,openstack/python-mistralclient,StackStorm/python-mistralclient,StackStorm/python-mistralclient | mistralclient/i18n.py | mistralclient/i18n.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
# Factory bound to this project's gettext domain; translations are resolved
# lazily at call time by oslo.i18n.
_translators = oslo_i18n.TranslatorFactory(domain='mistralclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""oslo.i18n integration module.
See http://docs.openstack.org/developer/oslo.i18n/usage.html
"""
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain='mistralclient')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
| apache-2.0 | Python |
518959b8eb5a7b6ce380535e9b791c2c1668b592 | Fix flake8 error: unused import | pjbriggs/tools-iuc,davebx/tools-iuc,pjbriggs/tools-iuc,davebx/tools-iuc,Delphine-L/tools-iuc,pavanvidem/tools-iuc,blankenberg/tools-iuc,mvdbeek/tools-iuc,jj-umn/tools-iuc,nekrut/tools-iuc,abretaud/tools-iuc,pavanvidem/tools-iuc,loraine-gueguen/tools-iuc,davebx/tools-iuc,nekrut/tools-iuc,nsoranzo/tools-iuc,loraine-gueguen/tools-iuc,pavanvidem/tools-iuc,gregvonkuster/tools-iuc,nekrut/tools-iuc,mblue9/tools-iuc,natefoo/tools-iuc,natefoo/tools-iuc,mblue9/tools-iuc,abretaud/tools-iuc,davebx/tools-iuc,pavanvidem/tools-iuc,nsoranzo/tools-iuc,Delphine-L/tools-iuc,loraine-gueguen/tools-iuc,martenson/tools-iuc,jj-umn/tools-iuc,loraine-gueguen/tools-iuc,mvdbeek/tools-iuc,blankenberg/tools-iuc,jj-umn/tools-iuc,natefoo/tools-iuc,lparsons/tools-iuc,abretaud/tools-iuc,loraine-gueguen/tools-iuc,nekrut/tools-iuc,pjbriggs/tools-iuc,mblue9/tools-iuc,natefoo/tools-iuc,gregvonkuster/tools-iuc,ieguinoa/tools-iuc,natefoo/tools-iuc,mblue9/tools-iuc,ieguinoa/tools-iuc,Delphine-L/tools-iuc,mvdbeek/tools-iuc,galaxyproject/tools-iuc,mvdbeek/tools-iuc,ieguinoa/tools-iuc,pjbriggs/tools-iuc,natefoo/tools-iuc,davebx/tools-iuc,Delphine-L/tools-iuc,Delphine-L/tools-iuc,mblue9/tools-iuc,galaxyproject/tools-iuc,gregvonkuster/tools-iuc,nekrut/tools-iuc,nsoranzo/tools-iuc,loraine-gueguen/tools-iuc,mblue9/tools-iuc,blankenberg/tools-iuc,galaxyproject/tools-iuc,mvdbeek/tools-iuc,jj-umn/tools-iuc,pavanvidem/tools-iuc,pavanvidem/tools-iuc,abretaud/tools-iuc,lparsons/tools-iuc,Delphine-L/tools-iuc,natefoo/tools-iuc,blankenberg/tools-iuc,nekrut/tools-iuc,davebx/tools-iuc,mvdbeek/tools-iuc,pjbriggs/tools-iuc,lparsons/tools-iuc,pjbriggs/tools-iuc,lparsons/tools-iuc,abretaud/tools-iuc,ieguinoa/tools-iuc,blankenberg/tools-iuc,pavanvidem/tools-iuc,ieguinoa/tools-iuc,galaxyproject/tools-iuc,jj-umn/tools-iuc,gregvonkuster/tools-iuc,mvdbeek/tools-iuc,lparsons/tools-iuc,loraine-gueguen/tools-iuc,blankenberg/tools-iuc,Delphine-L/tools-iuc,jj-u
mn/tools-iuc,galaxyproject/tools-iuc,nsoranzo/tools-iuc,ieguinoa/tools-iuc,galaxyproject/tools-iuc,ieguinoa/tools-iuc,blankenberg/tools-iuc,gregvonkuster/tools-iuc,lparsons/tools-iuc,nekrut/tools-iuc,gregvonkuster/tools-iuc,nsoranzo/tools-iuc,abretaud/tools-iuc,jj-umn/tools-iuc,nsoranzo/tools-iuc,davebx/tools-iuc,gregvonkuster/tools-iuc,abretaud/tools-iuc,galaxyproject/tools-iuc,nsoranzo/tools-iuc | tools/spaln/list_spaln_tables.py | tools/spaln/list_spaln_tables.py | #!/usr/bin/env python3
import argparse
import shlex
from subprocess import run
import sys
from typing import TextIO
def find_common_ancestor_distance(
    taxon: str, other_taxon: str, taxonomy_db_path: str, only_canonical: bool
):
    """Run ``taxonomy_util common_ancestor_distance`` for the two taxa.

    Returns the CompletedProcess (stdout/stderr decoded as UTF-8); callers
    inspect ``proc.stdout`` / ``proc.stderr`` themselves.
    """
    # Build argv directly instead of formatting a quoted command string and
    # re-splitting it with shlex: a taxon name containing a single quote (or
    # other shell metacharacters) would otherwise corrupt the argument list.
    cmd = ["taxonomy_util", "-d", taxonomy_db_path, "common_ancestor_distance"]
    if only_canonical:
        cmd.append("--only_canonical")
    cmd.extend([other_taxon, taxon])
    proc = run(cmd, encoding="utf8", capture_output=True)
    return proc
def find_distances(gnm2tab_file: TextIO, taxon: str, taxonomy_db_path: str):
    """For each species row in *gnm2tab_file*, print its taxonomic distance
    to *taxon* as a tab-separated line: non-canonical distance, canonical
    ancestor info, species code, spaln settings, other taxon name.

    Exits with an error message if *taxon* is unknown to the taxonomy DB.
    """
    # Validate the query taxon up front so a typo fails fast.
    cmd = ["taxonomy_util", "-d", taxonomy_db_path, "get_id", taxon]
    proc = run(cmd, capture_output=True, encoding="utf8")
    if "not found in" in proc.stderr:
        exit("Error: " + proc.stderr.strip())
    for line in gnm2tab_file:
        # gnm2tab rows are tab-separated; only the first three columns
        # (species code, settings, taxon name) are used here.
        fields = line.split("\t")
        (species_code, settings, other_taxon) = map(lambda el: el.strip(), fields[:3])
        # First pass restricted to canonical ranks.
        proc = find_common_ancestor_distance(taxon, other_taxon, taxonomy_db_path, True)
        ancestor_info = proc.stdout.rstrip()
        if proc.stderr != "":
            print("Warning:", other_taxon, proc.stderr.rstrip(), file=sys.stderr)
        else:
            # Second pass without the canonical restriction yields the
            # distance printed as the first output column.
            proc = find_common_ancestor_distance(
                taxon, other_taxon, taxonomy_db_path, False
            )
            non_canonical_distance = proc.stdout.split("\t")[0]
            print(
                non_canonical_distance,
                ancestor_info,
                species_code,
                settings,
                other_taxon,
                sep="\t",
            )
# CLI entry point: compute the taxonomic distance from each spaln gnm2tab
# entry to the given taxon.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Find distance to common ancestor")
    parser.add_argument(
        "--taxonomy_db", required=True, help="NCBI Taxonomy database (SQLite format)"
    )
    parser.add_argument(
        "--gnm2tab_file",
        required=True,
        # FileType() opens the file for reading; find_distances expects a
        # text handle, not a path.
        type=argparse.FileType(),
        # NOTE(review): "spal" looks like a typo for "spaln" in this help text.
        help="gnm2tab file from spal",
    )
    parser.add_argument("taxon")
    args = parser.parse_args()
    find_distances(args.gnm2tab_file, args.taxon, args.taxonomy_db)
| #!/usr/bin/env python3
import argparse
from io import StringIO
import shlex
from subprocess import run
import sys
from typing import TextIO
def find_common_ancestor_distance(taxon: str, other_taxon: str, taxonomy_db_path: str, only_canonical: bool):
canonical = '--only_canonical' if only_canonical else ''
cmd_str = f"taxonomy_util -d {taxonomy_db_path} common_ancestor_distance {canonical} '{other_taxon}' '{taxon}'"
cmd = shlex.split(cmd_str)
proc = run(cmd, encoding='utf8', capture_output=True)
return proc
def find_distances(gnm2tab_file: TextIO, taxon: str, taxonomy_db_path: str):
cmd = ['taxonomy_util', '-d', taxonomy_db_path, 'get_id', taxon]
proc = run(cmd, capture_output=True, encoding='utf8')
if 'not found in' in proc.stderr:
exit("Error: " + proc.stderr.strip())
for line in gnm2tab_file:
fields = line.split('\t')
(species_code, settings, other_taxon) = map(lambda el: el.strip(), fields[:3])
proc = find_common_ancestor_distance(taxon, other_taxon, taxonomy_db_path, True)
ancestor_info = proc.stdout.rstrip()
if proc.stderr != "":
print("Warning:", other_taxon, proc.stderr.rstrip(), file=sys.stderr)
else:
proc = find_common_ancestor_distance(taxon, other_taxon, taxonomy_db_path, False)
non_canonical_distance = proc.stdout.split('\t')[0]
print(non_canonical_distance, ancestor_info, species_code, settings, other_taxon , sep='\t')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Find distance to common ancestor")
parser.add_argument('--taxonomy_db', required=True, help='NCBI Taxonomy database (SQLite format)')
parser.add_argument('--gnm2tab_file', required=True, type=argparse.FileType(), help='gnm2tab file from spal')
parser.add_argument('taxon')
args = parser.parse_args()
find_distances(args.gnm2tab_file, args.taxon, args.taxonomy_db)
| mit | Python |
b03ad153dce2fd026b632907aff8fc82fd3df407 | Update __init__.py | ddsc/ddsc-worker | ddsc_worker/__init__.py | ddsc_worker/__init__.py | # (c) Fugro Geoservices. MIT licensed, see LICENSE.rst.
# package
| # package
| mit | Python |
8e7c29f7425c5844c9fb1c3ea7f6a9984c18ed9c | fix typo | BjoernT/rpc-openstack,major/rpc-openstack,byronmccollum/rpc-openstack,claco/rpc-openstack,galstrom21/rpc-openstack,byronmccollum/rpc-openstack,darrenchan/rpc-openstack,sigmavirus24/rpc-openstack,cloudnull/rpc-openstack,nrb/rpc-openstack,rcbops/rpc-openstack,rcbops/rpc-openstack,npawelek/rpc-maas,busterswt/rpc-openstack,briancurtin/rpc-maas,stevelle/rpc-openstack,claco/rpc-openstack,xeregin/rpc-openstack,briancurtin/rpc-maas,xeregin/rpc-openstack,stevelle/rpc-openstack,galstrom21/rpc-openstack,mattt416/rpc-openstack,cloudnull/rpc-maas,byronmccollum/rpc-openstack,cfarquhar/rpc-maas,miguelgrinberg/rpc-openstack,mattt416/rpc-openstack,npawelek/rpc-maas,darrenchan/rpc-openstack,prometheanfire/rpc-openstack,cloudnull/rpc-maas,briancurtin/rpc-maas,cfarquhar/rpc-maas,major/rpc-openstack,jpmontez/rpc-openstack,xeregin/rpc-openstack,andymcc/rpc-openstack,mancdaz/rpc-openstack,jpmontez/rpc-openstack,miguelgrinberg/rpc-openstack,cfarquhar/rpc-openstack,robb-romans/rpc-openstack,sigmavirus24/rpc-openstack,sigmavirus24/rpc-openstack,BjoernT/rpc-openstack,jacobwagner/rpc-openstack,darrenchan/rpc-openstack,andymcc/rpc-openstack,cfarquhar/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,robb-romans/rpc-openstack,cloudnull/rpc-openstack,mattt416/rpc-openstack,shannonmitchell/rpc-openstack,git-harry/rpc-openstack,claco/rpc-openstack,jacobwagner/rpc-openstack,busterswt/rpc-openstack,prometheanfire/rpc-openstack,mancdaz/rpc-openstack,shannonmitchell/rpc-openstack,andymcc/rpc-openstack,cfarquhar/rpc-maas,npawelek/rpc-maas,darrenchan/rpc-openstack,busterswt/rpc-openstack,nrb/rpc-openstack,hughsaunders/rpc-openstack,nrb/rpc-openstack,git-harry/rpc-openstack,xeregin/rpc-openstack,jpmontez/rpc-openstack,cloudnull/rpc-maas,stevelle/rpc-openstack,hughsaunders/rpc-openstack | memcached_status.py | memcached_status.py | #!/usr/bin/env python
import re
import argparse
import memcache
from ipaddr import IPv4Address
from maas_common import status_ok, status_err, metric, metric_bool
VERSION_RE = re.compile('STAT version (\d+\.\d+\.\d+)(?![-+0-9\\.])')
VERSION = '1.4.14 (Ubuntu)'
MEMCACHE_METRICS = ['total_items',
'get_hits',
'get_misses',
'total_connections']
def item_stats(host, port):
    """Return the stats dict for the memcached server at host:port.

    Exits via status_err() when the server is unreachable or returns no
    stats, so callers can assume a populated dict.
    """
    mc = memcache.Client(['%s:%s' % (host, port)])
    server_stats = mc.get_stats()
    if not server_stats:
        # get_stats() returns an empty list when the server cannot be
        # reached; indexing [0] there raised IndexError instead of the
        # intended clean error status.
        status_err('could not retrieve status from memcached')
    stats = server_stats[0][1]
    if not stats:
        status_err('could not retrieve status from memcached')
    return stats
def main(args):
    """Emit MaaS status and metrics for the memcached instance on the CLI."""
    bind_ip = str(args.ip)
    port = args.port
    stats = item_stats(bind_ip, port)
    current_version = stats['version']
    if current_version != VERSION:
        status_err('This plugin has only been tested with version %s '
                   'of memcached, and you are using version %s'
                   % (VERSION, current_version))
    if stats is not None:
        # BUG FIX: the bare name 'status_ok' was a no-op expression; the
        # status line is only emitted when the function is actually called.
        status_ok()
        metric_bool('memcache_api_local_status', True)
        for m in MEMCACHE_METRICS:
            metric('memcache_%s' % m, 'uint64', stats[m])
    else:
        # NOTE(review): item_stats() already exits on failure, so this
        # branch appears unreachable — kept for safety.
        status_err('memcached is unreachable')
if __name__ == '__main__':
    # The description previously read 'Check glance API' — copied from
    # another plugin; this one checks memcached.
    parser = argparse.ArgumentParser(description='Check memcached status')
    parser.add_argument('ip', type=IPv4Address, help='memcached IP address.')
    parser.add_argument('--port', type=int,
                        default=11211, help='memcached port.')
    args = parser.parse_args()
    main(args)
| #!/usr/bin/env python
import re
import argparse
import memcache
from ipaddr import IPv4Address
from maas_common import status_ok, status_err, metric, metric_bool
VERSION_RE = re.compile('STAT version (\d+\.\d+\.\d+)(?![-+0-9\\.])')
VERSION = '1.4.14 (Ubuntu)'
MEMCACHE_METRICS = ['total_items',
'get_hits',
'get_misses',
'total_connections']
def item_stats(host, port):
"""Check the stats for items and connection status."""
mc = memcache.Client(['%s:%s' % (host, port)])
stats = mc.get_stats()[0][1]
if not stats:
status_err('could not retrieve status from memcached')
return stats
def main(args):
    """Emit MaaS status and metrics for the memcached instance on the CLI."""
    bind_ip = str(args.ip)
    port = args.port
    stats = item_stats(bind_ip, port)
    # BUG FIX: bind the version before the comparison — the error message
    # below referenced 'current_version' without it ever being assigned,
    # raising NameError on any version mismatch.
    current_version = stats['version']
    if current_version != VERSION:
        status_err('This plugin has only been tested with version %s '
                   'of memcached, and you are using version %s'
                   % (VERSION, current_version))
    if stats is not None:
        # BUG FIX: the bare name 'status_ok' was a no-op expression; call it.
        status_ok()
        metric_bool('memcache_api_local_status', True)
        for m in MEMCACHE_METRICS:
            metric('memcache_%s' % m, 'uint64', stats[m])
    else:
        # NOTE(review): item_stats() already exits on failure, so this
        # branch appears unreachable — kept for safety.
        status_err('memcached is unreachable')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Check glance API')
parser.add_argument('ip', type=IPv4Address, help='memcached IP address.')
parser.add_argument('--port', type=int,
default=11211, help='memcached port.')
args = parser.parse_args()
main(args)
| apache-2.0 | Python |
5c23b60dc32a69e96a491fd38960109a5be787ad | speed up test | jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk | vehicles/management/tests/test_bod_avl.py | vehicles/management/tests/test_bod_avl.py | import os
from datetime import timedelta
from mock import patch
from freezegun import freeze_time
from vcr import use_cassette
from django.conf import settings
from django.test import TestCase
from busstops.models import Region, DataSource, Operator
from ...models import VehicleLocation, VehicleJourney, Vehicle
from ..commands import import_bod_avl
class BusOpenDataVehicleLocationsTest(TestCase):
    """Tests for the Bus Open Data AVL import command."""
    @classmethod
    def setUpTestData(cls):
        # Fixture: operators referenced by the recorded AVL feed, plus the
        # DataSource the command reads its URL from.
        region = Region.objects.create(id='EA')
        Operator.objects.bulk_create([
            Operator(id='ARHE', region=region),
            Operator(id='ASES', region=region),
            Operator(id='CBBH', region=region),
            Operator(id='GPLM', region=region),
            Operator(id='KCTB', region=region),
            Operator(id='WHIP', region=region),
            Operator(id='UNOE', region=region),
        ])
        cls.source = DataSource.objects.create(
            name='Bus Open Data',
            url='https://data.bus-data.dft.gov.uk/avl/download/bulk_archive'
        )
    @freeze_time('2020-05-01')
    def test_get_items(self):
        """get_items() should yield every vehicle in the recorded archive."""
        command = import_bod_avl.Command()
        command.source = self.source
        # Replay the HTTP download from a vcr cassette so no network is used.
        with use_cassette(os.path.join(settings.DATA_DIR, 'vcr', 'bod_avl.yaml')):
            with patch('builtins.print') as mocked_print:
                items = list(command.get_items())
            # The command prints the age of the data; frozen time makes it 0.
            mocked_print.assert_called_with(timedelta(0))
        self.assertEqual(841, len(items))
    def test_handle(self):
        """handle_item() should create a journey, location and vehicle from
        one SIRI-VM item."""
        command = import_bod_avl.Command()
        command.source = self.source
        command.handle_item({
            'RecordedAtTime': '2020-06-17T08:34:00+00:00',
            'ItemIdentifier': '13505681-c482-451d-a089-ee805e196e7e',
            'ValidUntilTime': '2020-07-24T14:19:46.982911',
            'MonitoredVehicleJourney': {
                'LineRef': 'U',
                'DirectionRef': 'INBOUND',
                'PublishedLineName': 'U',
                'OperatorRef': 'WHIP',
                'OriginRef': '0500CCITY536',
                'OriginName': 'Dame Mary Archer Wa',
                'DestinationRef': '0500CCITY544',
                'DestinationName': 'Eddington Sainsbury',
                'OriginAimedDepartureTime': '2020-06-17T08:23:00+00:00',
                'VehicleLocation': {
                    'Longitude': '0.141533',
                    'Latitude': '52.1727219',
                    'VehicleJourneyRef': 'UNKNOWN',
                },
                'VehicleRef': 'WHIP-106'
            }
        }, None)
        self.assertEqual(1, VehicleJourney.objects.count())
        self.assertEqual(1, VehicleLocation.objects.count())
        self.assertEqual(1, Vehicle.objects.count())
| import os
from vcr import use_cassette
from django.conf import settings
from django.test import TestCase
from busstops.models import Region, DataSource, Operator
from ...models import VehicleLocation
from ..commands import import_bod_avl
class BusOpenDataVehicleLocationsTest(TestCase):
@classmethod
def setUpTestData(cls):
region = Region.objects.create(id='EA')
Operator.objects.bulk_create([
Operator(id='ARHE', region=region),
Operator(id='ASES', region=region),
Operator(id='CBBH', region=region),
Operator(id='GPLM', region=region),
Operator(id='KCTB', region=region),
Operator(id='WHIP', region=region),
Operator(id='UNOE', region=region),
])
cls.source = DataSource.objects.create(
name='Bus Open Data',
url='https://data.bus-data.dft.gov.uk/avl/download/bulk_archive'
)
def test_handle(self):
command = import_bod_avl.Command()
command.source = self.source
with use_cassette(os.path.join(settings.DATA_DIR, 'vcr', 'bod_avl.yaml')):
command.update()
self.assertEqual(835, VehicleLocation.objects.count())
| mpl-2.0 | Python |
cefee133a960124542de906cfb408dd9e6cf0aca | Make indentation consistently spaces in __init.py__ | mtearle/npyscreenreactor | npyscreenreactor/__init__.py | npyscreenreactor/__init__.py | #!/usr/bin/env python
# npyscreenreactory.py
# Inspired by pausingreactor.py and xmmsreactor.py
# npyscreen modifications
# Copyright (c) 2015 Mark Tearle <mark@tearle.com>
# See LICENSE for details.
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
This module provides npyscreen event loop support for Twisted.
In order to use this support, simply do the following::
| import npyscreenreactor
| npyscreenreactor.install()
Then, when your root npyscreenApp has been created::
| from twisted.internet import reactor
| reactor.registerNpyscreenApp(yourApp)
| reactor.run()
Then use twisted.internet APIs as usual.
Stop the event loop using reactor.stop()
Maintainer: Mark Tearle
"""
from twisted.python import log, runtime
from twisted.internet import selectreactor
import npyscreen
class NpyscreenReactor(selectreactor.SelectReactor):
    """
    npyscreen reactor: Twisted's select reactor driven from inside the
    npyscreen event loop. npyscreen owns the loop; each Twisted iteration
    is triggered by a custom "_NPYSCREEN_REACTOR" event that re-queues
    itself, keeping the two loops interleaved.
    """
    def doIteration(self, timeout):
        # Executing what normal reactor would do...
        self.runUntilCurrent()
        selectreactor.SelectReactor.doIteration(self, timeout)
        # push event back on the npyscreen queue so the next npyscreen
        # cycle runs another Twisted iteration
        self.npyscreenapp.queue_event(npyscreen.Event("_NPYSCREEN_REACTOR"))
    def registerNpyscreenApp(self, npyscreenapp):
        """
        Register npyscreen.StandardApp instance with the reactor.
        Must be called before run().
        """
        self.npyscreenapp = npyscreenapp
        # push an event on the npyscreen queue
        # NOTE(review): 'add_event_hander' presumably matches npyscreen's
        # (misspelled) API name — verify before "fixing" the spelling.
        self.npyscreenapp.add_event_hander("_NPYSCREEN_REACTOR", self._twisted_events)
    def _twisted_events(self, event):
        # npyscreen event callback: run one Twisted iteration (non-blocking).
        self.doIteration(0)
    def _stopNpyscreen(self):
        """
        Stop the Npsycreen event loop if it hasn't already been stopped.
        Called during Twisted event loop shutdown.
        """
        # hasattr guard: registerNpyscreenApp() may never have been called.
        if hasattr(self, "npyscreenapp"):
            self.npyscreenapp.setNextForm(None)
    def run(self,installSignalHandlers=True):
        """
        Start the reactor. Blocks inside the npyscreen main loop until the
        app exits (e.g. via _stopNpyscreen on reactor shutdown).
        """
        # Executing what normal reactor would do...
        self.startRunning(installSignalHandlers=installSignalHandlers)
        # do initial iteration and put event on queue to do twisted things
        self.doIteration(0)
        # add cleanup events:
        self.addSystemEventTrigger("after", "shutdown", self._stopNpyscreen)
        # hand control to npyscreen; Twisted iterations now happen via
        # the "_NPYSCREEN_REACTOR" event chain started above
        self.npyscreenapp.run()
def install():
    """Install a NpyscreenReactor as the global Twisted reactor and return it.

    Must be called before anything imports twisted.internet.reactor.
    """
    from twisted.internet.main import installReactor
    npy_reactor = NpyscreenReactor()
    installReactor(npy_reactor)
    return npy_reactor
__all__ = ['install']
| #!/usr/bin/env python
# npyscreenreactory.py
# Inspired by pausingreactor.py and xmmsreactor.py
# npyscreen modifications
# Copyright (c) 2015 Mark Tearle <mark@tearle.com>
# See LICENSE for details.
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
"""
This module provides npyscreen event loop support for Twisted.
In order to use this support, simply do the following::
| import npyscreenreactor
| npyscreenreactor.install()
Then, when your root npyscreenApp has been created::
| from twisted.internet import reactor
| reactor.registerNpyscreenApp(yourApp)
| reactor.run()
Then use twisted.internet APIs as usual.
Stop the event loop using reactor.stop()
Maintainer: Mark Tearle
"""
from twisted.python import log, runtime
from twisted.internet import selectreactor
import npyscreen
class NpyscreenReactor(selectreactor.SelectReactor):
"""
npyscreen reactor.
npyscreen drives the event loop
"""
def doIteration(self, timeout):
# Executing what normal reactor would do...
self.runUntilCurrent()
selectreactor.SelectReactor.doIteration(self, timeout)
# push event back on the npyscreen queue
self.npyscreenapp.queue_event(npyscreen.Event("_NPYSCREEN_REACTOR"))
def registerNpyscreenApp(self, npyscreenapp):
"""
Register npyscreen.StandardApp instance with the reactor.
"""
self.npyscreenapp = npyscreenapp
# push an event on the npyscreen queue
self.npyscreenapp.add_event_hander("_NPYSCREEN_REACTOR", self._twisted_events)
def _twisted_events(self, event):
self.doIteration(0)
def _stopNpyscreen(self):
"""
Stop the Npsycreen event loop if it hasn't already been stopped.
Called during Twisted event loop shutdown.
"""
if hasattr(self, "npyscreenapp"):
self.npyscreenapp.setNextForm(None)
def run(self,installSignalHandlers=True):
"""
Start the reactor.
"""
# Executing what normal reactor would do...
self.startRunning(installSignalHandlers=installSignalHandlers)
# do initial iteration and put event on queue to do twisted things
self.doIteration(0)
# add cleanup events:
self.addSystemEventTrigger("after", "shutdown", self._stopNpyscreen)
#
self.npyscreenapp.run()
def install():
"""
Configure the twisted mainloop to be run inside the npyscreen mainloop.
"""
reactor = NpyscreenReactor()
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
__all__ = ['install']
| mit | Python |
2a9b440d4125b87b19b26b2d5118f8c7e5b54c1f | Add WriteHTMLForTraceDataToFile method that allow create trace viewer in html directly from trace data | catapult-project/catapult-csm,benschmaus/catapult,modulexcite/catapult,sahiljain/catapult,danbeam/catapult,dstockwell/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,dstockwell/catapult,catapult-project/catapult,zeptonaut/catapult,danbeam/catapult,scottmcmaster/catapult,benschmaus/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,vmpstr/trace-viewer,0x90sled/catapult,vmpstr/trace-viewer,scottmcmaster/catapult,modulexcite/catapult,dstockwell/catapult,dstockwell/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,danbeam/catapult,0x90sled/catapult,benschmaus/catapult,modulexcite/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult,zeptonaut/catapult,danbeam/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult,scottmcmaster/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,zeptonaut/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,0x90sled/catapult,sahiljain/catapult,sahiljain/catapult,vmpstr/trace-viewer,benschmaus/catapult | trace_viewer/build/trace2html.py | trace_viewer/build/trace2html.py | # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import gzip
import json
import optparse
import shutil
import os
import StringIO
import sys
import tempfile
from trace_viewer import trace_viewer_project
from tvcm import generate
def Main(args):
parser = optparse.OptionParser(
usage="%prog <options> trace_file1 [trace_file2 ...]",
epilog="""Takes the provided trace file and produces a standalone html
file that contains both the trace and the trace viewer.""")
parser.add_option(
"--output", dest="output",
help='Where to put the generated result. If not ' +
'given, the trace filename is used, with an html suffix.')
parser.add_option(
"--quiet", action='store_true',
help='Dont print the output file name')
options, args = parser.parse_args(args)
if len(args) == 0:
parser.error('At least one trace file required')
if options.output:
output_filename = options.output
elif len(args) > 1:
parser.error('Must specify --output if >1 trace file')
else:
namepart = os.path.splitext(args[0])[0]
output_filename = namepart + '.html'
with open(output_filename, 'w') as f:
WriteHTMLForTracesToFile(args, f)
if not options.quiet:
print output_filename
return 0
class ViewerDataScript(generate.ExtraScript):
def __init__(self, trace_data_string):
super(ViewerDataScript, self).__init__()
self._trace_data_string = trace_data_string
def WriteToFile(self, output_file):
output_file.write('<script id="viewer-data" type="application/json">\n')
compressed_trace = StringIO.StringIO()
with gzip.GzipFile(fileobj=compressed_trace, mode='w') as f:
f.write(self._trace_data_string)
b64_content = base64.b64encode(compressed_trace.getvalue())
output_file.write(b64_content)
output_file.write('\n</script>\n')
def WriteHTMLForTraceDataToFile(trace_data, title, output_file):
project = trace_viewer_project.TraceViewerProject()
load_sequence = project.CalcLoadSequenceForModuleNames(['build.trace2html'])
trace_data_string = json.dumps(trace_data)
generate.GenerateStandaloneHTMLToFile(
output_file, load_sequence, title,
extra_scripts=[ViewerDataScript(trace_data_string)])
def WriteHTMLForTracesToFile(trace_filenames, output_file):
project = trace_viewer_project.TraceViewerProject()
load_sequence = project.CalcLoadSequenceForModuleNames(['build.trace2html'])
scripts = []
for filename in trace_filenames:
with open(filename, 'r') as f:
scripts.append(ViewerDataScript(f.read()))
title = "Trace from %s" % ','.join(trace_filenames)
generate.GenerateStandaloneHTMLToFile(
output_file, load_sequence, title, extra_scripts=scripts)
| # Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import gzip
import optparse
import shutil
import os
import StringIO
import sys
import tempfile
from trace_viewer import trace_viewer_project
from tvcm import generate
def Main(args):
parser = optparse.OptionParser(
usage="%prog <options> trace_file1 [trace_file2 ...]",
epilog="""Takes the provided trace file and produces a standalone html
file that contains both the trace and the trace viewer.""")
parser.add_option(
"--output", dest="output",
help='Where to put the generated result. If not ' +
'given, the trace filename is used, with an html suffix.')
parser.add_option(
"--quiet", action='store_true',
help='Dont print the output file name')
options, args = parser.parse_args(args)
if len(args) == 0:
parser.error('At least one trace file required')
if options.output:
output_filename = options.output
elif len(args) > 1:
parser.error('Must specify --output if >1 trace file')
else:
namepart = os.path.splitext(args[0])[0]
output_filename = namepart + '.html'
with open(output_filename, 'w') as f:
WriteHTMLForTracesToFile(args, f)
if not options.quiet:
print output_filename
return 0
class ViewerDataScript(generate.ExtraScript):
def __init__(self, filename):
super(ViewerDataScript, self).__init__()
self._filename = filename
def WriteToFile(self, output_file):
output_file.write('<script id="viewer-data" type="application/json">\n')
compressed_trace = StringIO.StringIO()
with open(self._filename, 'r') as trace_file:
with gzip.GzipFile(fileobj=compressed_trace, mode='w') as f:
f.write(trace_file.read())
b64_content = base64.b64encode(compressed_trace.getvalue())
output_file.write(b64_content)
output_file.write('\n</script>\n')
def WriteHTMLForTracesToFile(trace_filenames, output_file):
project = trace_viewer_project.TraceViewerProject()
load_sequence = project.CalcLoadSequenceForModuleNames(
['build.trace2html'])
scripts = [ViewerDataScript(x) for x in trace_filenames]
title = "Trace from %s" % ','.join(trace_filenames)
generate.GenerateStandaloneHTMLToFile(
output_file, load_sequence, title, extra_scripts=scripts)
| bsd-3-clause | Python |
69cb9f4a1eae09f76875a9483e13a7f221339d85 | rename metric | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/couch_sql_migration/tasks.py | corehq/apps/couch_sql_migration/tasks.py | from __future__ import absolute_import
from __future__ import unicode_literals
from celery.schedules import crontab
from celery.task import periodic_task
from corehq.apps.es import DomainES, filters, aggregations
from corehq.util.datadog.gauges import datadog_gauge
@periodic_task('background_queue', run_every=crontab(minute=0, hour=10),
acks_late=True, ignore_result=True)
def couch_sql_migration_stats():
result = (
DomainES()
.filter(filters.term('use_sql_backend', False))
.remove_default_filters()
.aggregations([
aggregations.SumAggregation('cases', 'cp_n_cases'),
aggregations.SumAggregation('forms', 'cp_n_forms'),
])
.size(0).run()
)
datadog_gauge('commcare.couch_sql_migration.domains_remaining', int(result.total))
datadog_gauge('commcare.couch_sql_migration.forms_remaining', int(result.aggregations.forms.value))
datadog_gauge('commcare.couch_sql_migration.cases_remaining', int(result.aggregations.cases.value))
| from __future__ import absolute_import
from __future__ import unicode_literals
from celery.schedules import crontab
from celery.task import periodic_task
from corehq.apps.es import DomainES, filters, aggregations
from corehq.util.datadog.gauges import datadog_gauge
@periodic_task('background_queue', run_every=crontab(minute=0, hour=10),
acks_late=True, ignore_result=True)
def couch_sql_migration_stats():
result = (
DomainES()
.filter(filters.term('use_sql_backend', False))
.remove_default_filters()
.aggregations([
aggregations.SumAggregation('cases', 'cp_n_cases'),
aggregations.SumAggregation('forms', 'cp_n_forms'),
])
.size(0).run()
)
datadog_gauge('commcare.couch_sql_migration.total_remaining', int(result.total))
datadog_gauge('commcare.couch_sql_migration.forms_remaining', int(result.aggregations.forms.value))
datadog_gauge('commcare.couch_sql_migration.cases_remaining', int(result.aggregations.cases.value))
| bsd-3-clause | Python |
d98f97e00e12eb7ba9a2cd1c2b9ae11ed47b91af | Add domain or and ilike | juancr83/DockerOpenacademy-proyect | openacademy/models/models.py | openacademy/models/models.py | # -*- coding: utf-8 -*-
from openerp import models, fields, api
class Course(models.Model):
_name = 'course'
name = fields.Char(string='Title', required=True)
description = fields.Text(string='Descripcion')
responsible = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
sessions = fields.One2many('session','course')
class Session(models.Model):
_name = 'session'
name = fields.Char()
instructor = fields.Many2one('res.partner',string="Instructor",
domain=['|',
("instructor","=",True),
("category_id.name","ilike","Teacher")
]
)
course = fields.Many2one('course')
start_date = fields.Date()
duration = fields.Float(help="Duration in Days")
seats = fields.Integer()
attendees = fields.Many2many('res.partner', string="Attendees")
percentage_seats_taken = fields.Float(compute="_compute_perc_seats_taken", store=True)
@api.depends('attendees','seats')
def _compute_perc_seats_taken(self):
for record in self:
if record.seats:
record.percentage_seats_taken = float(len(record.attendees)) / record.seats * 100.00
else:
record.percentage_seats_taken = 0.00
# class openacademy(models.Model):
# _name = 'openacademy.openacademy'
# name = fields.Char()
# value = fields.Integer()
# value2 = fields.Float(compute="_value_pc", store=True)
# description = fields.Text()
#
# @api.depends('value')
# def _value_pc(self):
# self.value2 = float(self.value) / 100
| # -*- coding: utf-8 -*-
from openerp import models, fields, api
class Course(models.Model):
_name = 'course'
name = fields.Char(string='Title', required=True)
description = fields.Text(string='Descripcion')
responsible = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
sessions = fields.One2many('session','course')
class Session(models.Model):
_name = 'session'
name = fields.Char()
instructor = fields.Many2one('res.partner')
course = fields.Many2one('course')
start_date = fields.Date()
duration = fields.Float(help="Duration in Days")
seats = fields.Integer()
attendees = fields.Many2many('res.partner', string="Attendees")
percentage_seats_taken = fields.Float(compute="_compute_perc_seats_taken", store=True)
@api.depends('attendees','seats')
def _compute_perc_seats_taken(self):
for record in self:
if record.seats:
record.percentage_seats_taken = float(len(record.attendees)) / record.seats * 100.00
else:
record.percentage_seats_taken = 0.00
# class openacademy(models.Model):
# _name = 'openacademy.openacademy'
# name = fields.Char()
# value = fields.Integer()
# value2 = fields.Float(compute="_value_pc", store=True)
# description = fields.Text()
#
# @api.depends('value')
# def _value_pc(self):
# self.value2 = float(self.value) / 100
| apache-2.0 | Python |
3e353d54ffebbcae3afbd02c89cbc33ec4a6eec2 | update tests to match change in decorators | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/sso/tests/test_decorators.py | corehq/apps/sso/tests/test_decorators.py | from unittest import mock
from django.test import TestCase, RequestFactory
from corehq.apps.sso.decorators import (
identity_provider_required,
use_saml2_auth,
)
from corehq.apps.sso.tests import generator
class TestDecorators(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.account = generator.get_billing_account_for_idp()
def setUp(self):
super().setUp()
self.idp = generator.create_idp(self.account, include_certs=True)
self.request = RequestFactory().get('/sso/test')
self.request_args = (self.idp.slug, )
self.request.is_secure = lambda: False
self.request.META = {
'HTTP_HOST': 'test.commcarehq.org',
'PATH_INFO': '/sso/test',
'SERVER_PORT': '80',
}
self.view = mock.MagicMock(return_value='fake response')
def test_identity_provider_required_decorator(self):
decorated_view = identity_provider_required(self.view)
decorated_view(self.request, *self.request_args)
self.view.assert_called_once_with(self.request, *self.request_args)
self.assertEqual(self.request.idp, self.idp)
def test_use_saml2_auth_decorator(self):
decorated_view = use_saml2_auth(self.view)
decorated_view(self.request, *self.request_args)
self.view.assert_called_once_with(self.request, *self.request_args)
self.assertEqual(self.request.idp, self.idp)
self.assertIsNotNone(self.request.saml2_request_data)
self.assertIsNotNone(self.request.saml2_auth)
def tearDown(self):
self.idp.delete()
super().tearDown()
@classmethod
def tearDownClass(cls):
cls.account.delete()
super().tearDownClass()
| from unittest import mock
from django.test import TestCase, RequestFactory
from corehq.apps.sso.decorators import (
identity_provider_required,
use_saml2_auth,
)
from corehq.apps.sso.tests import generator
class TestDecorators(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.account = generator.get_billing_account_for_idp()
def setUp(self):
super().setUp()
self.idp = generator.create_idp(self.account, include_certs=True)
self.request = RequestFactory().get('/sso/test')
self.request_args = (self.idp.slug, )
self.view = mock.MagicMock(return_value='fake response')
def test_identity_provider_required_decorator(self):
decorated_view = identity_provider_required(self.view)
decorated_view(self.request, *self.request_args)
self.view.assert_called_once_with(self.request, *self.request_args)
self.assertEqual(self.request.idp, self.idp)
def test_use_saml2_auth_decorator(self):
decorated_view = use_saml2_auth(self.view)
decorated_view(self.request, *self.request_args)
self.view.assert_called_once_with(self.request, *self.request_args)
self.assertEqual(self.request.idp, self.idp)
self.assertIsNotNone(self.request.saml2_auth)
def tearDown(self):
self.idp.delete()
super().tearDown()
@classmethod
def tearDownClass(cls):
cls.account.delete()
super().tearDownClass()
| bsd-3-clause | Python |
0e779581be648ca80eea6b97f9963606d85659b9 | Add exporter to VisualSfM format | BrookRoberts/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,BrookRoberts/OpenSfM,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM,BrookRoberts/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,BrookRoberts/OpenSfM,BrookRoberts/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM | opensfm/commands/__init__.py | opensfm/commands/__init__.py |
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
import export_visualsfm
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
export_visualsfm,
]
|
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
]
| bsd-2-clause | Python |
52f21a4f3a61497099fc6483bde00d60307297c7 | Fix formatting | mattdavis90/base10,mattdavis90/base10 | base10/transports/udp_transport.py | base10/transports/udp_transport.py | import sys
from socket import socket, AF_INET, SOCK_DGRAM
from base10.base import Writer
from base10.exceptions import TransportError
PY3 = sys.version_info.major == 3
class UDPWriter(Writer):
def __init__(self, host, port):
self._host = host
self._port = port
self._socket = socket(AF_INET, SOCK_DGRAM)
def write(self, string):
if PY3:
self._socket.sendto(
string.encode('utf8') + b'\n', (self._host, self._port)
)
else:
self._socket.sendto(string + '\n', (self._host, self._port))
| import sys
from socket import socket, AF_INET, SOCK_DGRAM
from base10.base import Writer
from base10.exceptions import TransportError
PY3 = sys.version_info.major == 3
class UDPWriter(Writer):
def __init__(self, host, port):
self._host = host
self._port = port
self._socket = socket(AF_INET, SOCK_DGRAM)
def write(self, string):
if PY3:
self._socket.sendto(string.encode('utf8') + b'\n', (self._host, self._port))
else:
self._socket.sendto(string + '\n', (self._host, self._port))
| mit | Python |
416575ca3cc684925be0391b43b98a9fa1d9f909 | Save the image of the selection (to be able to reinitialise later) | baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite | ObjectTracking/testTrack.py | ObjectTracking/testTrack.py |
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display, Color
# Open reference video
cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video')
# Select reference image
img=cam.getFrame(50)
modelImage = img.crop(255, 180, 70, 20)
modelImage = Image('kite_detail.jpg')
ts = []
disp=Display()
for i in range(0,50):
img = cam.getImage()
while (disp.isNotDone()):
img = cam.getImage()
bb = (255, 180, 70, 20)
ts = img.track("camshift",ts,modelImage,bb, num_frames = 1)
modelImage = Image('kite_detail.jpg')
# now here in first loop iteration since ts is empty,
# img0 and bb will be considered.
# New tracking object will be created and added in ts (TrackSet)
# After first iteration, ts is not empty and hence the previous
# image frames and bounding box will be taken from ts and img0
# and bb will be ignored.
ts.draw()
ts.drawBB()
ts.showCoordinates()
img.show()
|
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display
# Open reference video
cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video')
# Select reference image
img=cam.getFrame(50)
modelImage = img.crop(255, 180, 70, 20)
modelImage = Image('kite_detail.jpg')
ts = []
disp=Display()
for i in range(0,50):
img = cam.getImage()
while (disp.isNotDone()):
img = cam.getImage()
bb = (255, 180, 70, 20)
ts = img.track("camshift",ts,modelImage,bb, num_frames = 1)
# now here in first loop iteration since ts is empty,
# img0 and bb will be considered.
# New tracking object will be created and added in ts (TrackSet)
# After first iteration, ts is not empty and hence the previous
# image frames and bounding box will be taken from ts and img0
# and bb will be ignored.
ts.drawPath()
img.show()
| mit | Python |
1ad9130ed788fcc0d7aadef3365082ce2fa2c78c | Remove modifying sys.path in package __init__ | farzadghanei/distutilazy | distutilazy/__init__.py | distutilazy/__init__.py | """
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
| """
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
from os.path import dirname, abspath
import sys
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
base_dir = abspath(dirname(dirname(__file__)))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
| mit | Python |
a0ac251bec891a6c511ea1c0b11faa6525b81545 | Support more C++ extensions by default | jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000 | bfg9000/languages.py | bfg9000/languages.py | ext2lang = {
'.c' : 'c',
'.cpp': 'c++',
'.cc' : 'c++',
'.cp' : 'c++',
'.cxx': 'c++',
'.CPP': 'c++',
'.c++': 'c++',
'.C' : 'c++',
}
| ext2lang = {
'.cpp': 'c++',
'.c': 'c',
}
| bsd-3-clause | Python |
3c7714e22304b6bbdefccc87d4ab3f449c233f44 | change indicator type | demisto/content,VirusTotal/content,demisto/content,demisto/content,VirusTotal/content,VirusTotal/content,demisto/content,VirusTotal/content | Packs/FeedSpamhaus/Integrations/FeedSpamhaus/FeedSpamhaus.py | Packs/FeedSpamhaus/Integrations/FeedSpamhaus/FeedSpamhaus.py | from CommonServerPython import *
def main():
params = {k: v for k, v in demisto.params().items() if v is not None}
feed_url_to_config = {
# TODO: Add this sub feed once we have an indicator type of ASN
'https://www.spamhaus.org/drop/asndrop.txt': {
'indicator_type': 'ASN',
'indicator': {
'regex': r'^AS[0-9]+'
},
'fields': [
{
'asndrop_country': {
'regex': r'^.*;\W([a-zA-Z]+)\W+',
'transform': r'\1'
}
},
{
'asndrop_org': {
'regex': r'^.*\|\W+(.*)',
'transform': r'\1'
}
}
]
},
'https://www.spamhaus.org/drop/drop.txt': {
'indicator_type': FeedIndicatorType.CIDR,
'indicator': {
'regex': r'^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/[0-9]{1,2}'
}
},
'https://www.spamhaus.org/drop/edrop.txt': {
'indicator_type': FeedIndicatorType.CIDR,
'indicator': {
'regex': r'^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/[0-9]{1,2}'
}
}
}
params['feed_url_to_config'] = feed_url_to_config
# Call the main execution of the HTTP API module.
feed_main('Spamhaus Feed', params, 'spamhaus')
from HTTPFeedApiModule import * # noqa: E402
if __name__ == '__builtin__' or __name__ == 'builtins':
main()
| from CommonServerPython import *
def main():
params = {k: v for k, v in demisto.params().items() if v is not None}
feed_url_to_config = {
# TODO: Add this sub feed once we have an indicator type of ASN
'https://www.spamhaus.org/drop/asndrop.txt': {
'indicator_type': 'ASN',
'indicator': {
'regex': r'^AS[0-9]+'
},
'fields': [
{
'asndrop_country': {
'regex': r'^.*;\W([a-zA-Z]+)\W+',
'transform': r'\1'
}
},
{
'asndrop_org': {
'regex': r'^.*\|\W+(.*)',
'transform': r'\1'
}
}
]
},
'https://www.spamhaus.org/drop/drop.txt': {
'indicator_type': FeedIndicatorType.IP,
'indicator': {
'regex': r'^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/[0-9]{1,2}'
}
},
'https://www.spamhaus.org/drop/edrop.txt': {
'indicator_type': FeedIndicatorType.IP,
'indicator': {
'regex': r'^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/[0-9]{1,2}'
}
}
}
params['feed_url_to_config'] = feed_url_to_config
# Call the main execution of the HTTP API module.
feed_main('Spamhaus Feed', params, 'spamhaus')
from HTTPFeedApiModule import * # noqa: E402
if __name__ == '__builtin__' or __name__ == 'builtins':
main()
| mit | Python |
ca4e7393c4d9ddd50e14dd06da10d8389b018670 | Update ImpNotes.py | prashantas/MyDataScience | DeepNetwork/ImpNotes.py | DeepNetwork/ImpNotes.py | #### https://www.youtube.com/watch?v=yX8KuPZCAMo # Very Good Link from Edureka for tensorflow basics Really Good
# A placeholder is nothing but a promise to provide the value later
a = tf.placeholder(tf.float32)
b = tf.placeholder(tf.float32)
adder_node = a+b
sess = tf.Session()
print(sess.run(adder_node,{a:[1,3],b:[2,4]}))
###########################################################################################################
###########################################################################################################
#When we train a model, we use variable to hold an update parameter. Variables allow us to add trainable parameters to a graph
import tensorflow as tf
#Model parameters
W = tf.Variable([.3],tf.float32)
b = tf.Variable([-.3], tf.float32)
#Inputs and Outputs
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32) # Actual output which we already know
linear_model = W*x+b
#Loss ffunction
squared_delta = tf.square(linear_model - y)
loss = tf.reduce_sum(squared_delta)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
print(sess.run(loss,{x:[1,2,3,4],y:[0,-1,-2,-3]}))
### the output is 23.66
###########################################################################################################################################
### Now by changing W and b we can actually reduce the loss.
## Now if W = -1 and b =1 then our our linear_model becomes y i.e. output of our model becomes the actual output
## -1 x 1 + 1 = 0
## -1 x 2 + 1 = -1
## -1 x 3 + 1 = -2
## -1 x 4 + 1 = -3
## if we change W = tf.Variable([-1.0],tf.float32)
## b = tf.Variable([1.0], tf.float32) , we will get loss as 0.0
## If we want machine to learn W and b , we need optimizer
## Optimizer modifies each variable according to the magnitude of the derivative of loss w.r.t that variable. here we will use
## Gradient Descent optimizer i.e The optimizer will check the magnitude of the derivative of loss i.e the optimizer will check
## the change in the loss w.r t the change in the variable and if the loss is decreasing then it will keep on changing the variable
## in that particular direction
| #### https://www.youtube.com/watch?v=yX8KuPZCAMo # Very Good Link from Edureka for tensorflow basics Really Good
# A placeholder is nothing but a promise to provide the value later
a = tf.placeholder(tf.float32)
b = tf.placeholder(tf.float32)
adder_node = a+b
sess = tf.Session()
print(sess.run(adder_node,{a:[1,3],b:[2,4]}))
###########################################################################################################
###########################################################################################################
#When we train a model, we use variable to hold an update parameter. Variables allow us to add trainable parameters to a graph
import tensorflow as tf
#Model parameters
W = tf.Variable([.3],tf.float32)
b = tf.Variable([-.3], tf.float32)
#Inputs and Outputs
x = tf.placeholder(tf.float32)
y = tf.placeholder(tf.float32) # Actual output which we already know
linear_model = W*x+b
#Loss ffunction
squared_delta = tf.square(linear_model - y)
loss = tf.reduce_sum(squared_delta)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
print(sess.run(loss,{x:[1,2,3,4],y:[0,-1,-2,-3]}))
### the output is 23.66
###########################################################################################################################################
| bsd-2-clause | Python |
22281b85427fbee77fd2dcde1f0480bf6cf43ea0 | Add south_triple_field | python-force/django-bleach | django_bleach/models.py | django_bleach/models.py | from django.db import models
from django_bleach import forms
from django_bleach.utils import get_bleach_default_options
from django_bleach.forms import default_widget
class BleachField(models.TextField):
def __init__(self, allowed_tags=None, allowed_styles=None, allowed_attributes=None, strip_tags=None, strip_comments=None, *args, **kwargs):
super(BleachField, self).__init__(*args, **kwargs)
self.formfield_defaults = {}
if allowed_tags is not None:
self.formfield_defaults['allowed_tags'] = allowed_tags
if allowed_attributes is not None:
self.formfield_defaults['allowed_attributes'] = allowed_attributes
if allowed_styles is not None:
self.formfield_defaults['allowed_styles'] = allowed_styles
if strip_tags is not None:
self.formfield_defaults['strip_tags'] = strip_tags
if strip_comments is not None:
self.formfield_defaults['strip_comments'] = strip_comments
def formfield(self, **kwargs):
options = {
'form_class': forms.BleachField,
'widget': default_widget,
}
options.update(self.formfield_defaults)
options.update(kwargs)
return super(BleachField, self).formfield(**options)
def south_field_triple(self):
return ('django_bleach.forms.BleachField', [], self.formfield_defaults)
| from django.db import models
from django_bleach import forms
from django_bleach.utils import get_bleach_default_options
from django_bleach.forms import default_widget
class BleachField(models.TextField):
def __init__(self, allowed_tags=None, allowed_styles=None, allowed_attributes=None, strip_tags=None, strip_comments=None, *args, **kwargs):
super(BleachField, self).__init__(*args, **kwargs)
self.formfield_defaults = {}
if allowed_tags is not None:
self.formfield_defaults['allowed_tags'] = allowed_tags
if allowed_attributes is not None:
self.formfield_defaults['allowed_attributes'] = allowed_attributes
if allowed_styles is not None:
self.formfield_defaults['allowed_styles'] = allowed_styles
if strip_tags is not None:
self.formfield_defaults['strip_tags'] = strip_tags
if strip_comments is not None:
self.formfield_defaults['strip_comments'] = strip_comments
def formfield(self, **kwargs):
options = {
'form_class': forms.BleachField,
'widget': default_widget,
}
options.update(self.formfield_defaults)
options.update(kwargs)
return super(BleachField, self).formfield(**options)
| bsd-2-clause | Python |
9d9e32693f9bb1f707b38dcee49a18374b7a2067 | Update __init__.py | DMOJ/judge,DMOJ/judge,DMOJ/judge | dmoj/cptbox/__init__.py | dmoj/cptbox/__init__.py | from collections import defaultdict
from dmoj.cptbox.sandbox import SecurePopen, PIPE
from dmoj.cptbox.handlers import DISALLOW, ALLOW
from dmoj.cptbox.chroot import CHROOTSecurity
from dmoj.cptbox.syscalls import SYSCALL_COUNT
if sys.version_info.major == 3:
xrange = range
class NullSecurity(defaultdict):
def __init__(self):
for i in xrange(SYSCALL_COUNT):
self[i] = ALLOW
| from collections import defaultdict
from dmoj.cptbox.sandbox import SecurePopen, PIPE
from dmoj.cptbox.handlers import DISALLOW, ALLOW
from dmoj.cptbox.chroot import CHROOTSecurity
from dmoj.cptbox.syscalls import SYSCALL_COUNT
if sys.version_info.major == 2:
range = xrange
class NullSecurity(defaultdict):
def __init__(self):
for i in range(SYSCALL_COUNT):
self[i] = ALLOW
| agpl-3.0 | Python |
a41a76b7e4cdf4a8cbc533550963921839dcd998 | Fix formatting errors reported by flake8. | rectalogic/mopidy-pandora,jcass77/mopidy-pandora | mopidy_pandora/rpc.py | mopidy_pandora/rpc.py | import json
import requests
class RPCClient(object):
def __init__(self, hostname, port):
self.url = 'http://' + str(hostname) + ':' + str(port) + '/mopidy/rpc'
self.id = 0
def _do_rpc(self, method, params=None):
self.id += 1
data = {'method': method, 'jsonrpc': '2.0', 'id': self.id}
if params is not None:
data['params'] = params
return requests.request('POST', self.url, data=json.dumps(data), headers={'Content-Type': 'application/json'})
def set_repeat(self):
self._do_rpc('core.tracklist.set_repeat', {'value': True})
def get_current_track_uri(self):
response = self._do_rpc('core.playback.get_current_tl_track')
return response.json()['result']['track']['uri']
| import json
import requests
class RPCClient(object):
def __init__(self, hostname, port):
self.url = 'http://' + str(hostname) + ':' + str(port) + '/mopidy/rpc'
self.id = 0
def _do_rpc(self, method, params=None):
self.id += 1
data = { 'method': method, 'jsonrpc': '2.0', 'id': self.id }
if params is not None:
data['params'] = params
return requests.request('POST', self.url, data=json.dumps(data), headers={'Content-Type': 'application/json'})
def set_repeat(self):
self._do_rpc('core.tracklist.set_repeat', {'value': True})
def get_current_track_uri(self):
response = self._do_rpc('core.playback.get_current_tl_track')
return response.json()['result']['track']['uri']
| apache-2.0 | Python |
e17f8f4abbe6fbde638b4bb14db5cf1c7d3bd47f | Update setup.py | hMatoba/tetsujin,hMatoba/tetsujin,hMatoba/tetsujin,hMatoba/tetsujin,hMatoba/tetsujin | browsertest/setup.py | browsertest/setup.py | from setuptools import setup
import sys
sys.path.append('./tests')
setup(name='browsertest',
version='1.0',
description='test a project on browser',
test_suite = 's_test.suite',
install_requires=[
'selenium',
],
)
| from setuptools import setup
import sys
sys.path.append('./tests')
setup(name='browsertest',
version='1.0',
description='test a project by browser',
test_suite = 's_test.suite',
install_requires=[
'selenium',
],
) | mit | Python |
3a2338bce37811a1d7fdbcbba6d6b7b0c46edff2 | Fix time-sensitivity (< 0.002 second execution) in output format. | azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons | test/scons-time/func/format-gnuplot.py | test/scons-time/func/format-gnuplot.py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify the func --format=gnuplot option.
"""
import TestSCons_time
test = TestSCons_time.TestSCons_time(match = TestSCons_time.match_re,
diff = TestSCons_time.diff_re)
try:
import pstats
except ImportError:
test.skip_test('No pstats module, skipping test.\n')
content = """\
def _main():
pass
"""
test.profile_data('foo-000-0.prof', 'prof.py', '_main', content)
test.profile_data('foo-000-1.prof', 'prof.py', '_main', content)
test.profile_data('foo-000-2.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-0.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-1.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-2.prof', 'prof.py', '_main', content)
expect_notitle = r"""set key bottom left
plot '-' title "Startup" with lines lt 1, \\
'-' title "Full build" with lines lt 2, \\
'-' title "Up-to-date build" with lines lt 3
# Startup
0 \d.\d\d\d
1 \d.\d\d\d
e
# Full build
0 \d.\d\d\d
1 \d.\d\d\d
e
# Up-to-date build
0 \d.\d\d\d
1 \d.\d\d\d
e
"""
expect_title = 'set title "TITLE"\n' + expect_notitle
test.run(arguments = 'func --fmt gnuplot', stdout=expect_notitle)
test.run(arguments = 'func --fmt=gnuplot --title TITLE', stdout=expect_title)
test.run(arguments = 'func --format gnuplot --title TITLE', stdout=expect_title)
test.run(arguments = 'func --format=gnuplot', stdout=expect_notitle)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify the func --format=gnuplot option.
"""
import TestSCons_time
test = TestSCons_time.TestSCons_time(match = TestSCons_time.match_re,
diff = TestSCons_time.diff_re)
try:
import pstats
except ImportError:
test.skip_test('No pstats module, skipping test.\n')
content = """\
def _main():
pass
"""
test.profile_data('foo-000-0.prof', 'prof.py', '_main', content)
test.profile_data('foo-000-1.prof', 'prof.py', '_main', content)
test.profile_data('foo-000-2.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-0.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-1.prof', 'prof.py', '_main', content)
test.profile_data('foo-001-2.prof', 'prof.py', '_main', content)
expect_notitle = r"""set key bottom left
plot '-' title "Startup" with lines lt 1, \\
'-' title "Full build" with lines lt 2, \\
'-' title "Up-to-date build" with lines lt 3
# Startup
0 0.00[012]
1 0.00[012]
e
# Full build
0 0.00[012]
1 0.00[012]
e
# Up-to-date build
0 0.00[012]
1 0.00[012]
e
"""
expect_title = 'set title "TITLE"\n' + expect_notitle
test.run(arguments = 'func --fmt gnuplot', stdout=expect_notitle)
test.run(arguments = 'func --fmt=gnuplot --title TITLE', stdout=expect_title)
test.run(arguments = 'func --format gnuplot --title TITLE', stdout=expect_title)
test.run(arguments = 'func --format=gnuplot', stdout=expect_notitle)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | Python |
0788aaf316a2b200c5283fe9f5f902a8da701403 | Add a test for julian_to_gregorian. | jwg4/qual,jwg4/calexicon | calexicon/internal/tests/test_julian.py | calexicon/internal/tests/test_julian.py | import unittest
from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
def test_julian_to_gregorian(self):
self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
| import unittest
from calexicon.internal.julian import distant_julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
| apache-2.0 | Python |
e728dc8fdfb0955844626cf0e1b26c294bc512b3 | add detail endpoint | SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree | InvenTree/plugin/api.py | InvenTree/plugin/api.py | """
JSON API for the plugin app
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django.utils.translation import ugettext_lazy as _
from rest_framework import generics
from plugin.models import PluginConfig
import plugin.serializers as PluginSerializers
class PluginList(generics.ListAPIView):
""" API endpoint for list of PluginConfig objects
- GET: Return a list of all PluginConfig objects
"""
serializer_class = PluginSerializers.PluginConfigSerializer
queryset = PluginConfig.objects.all()
ordering_fields = [
'key',
'name',
'active',
]
ordering = [
'key',
]
search_fields = [
'key',
'name',
]
class PluginDetail(generics.RetrieveUpdateDestroyAPIView):
""" API detail endpoint for PluginConfig object
get:
Return a single PluginConfig object
post:
Update a PluginConfig
delete:
Remove a PluginConfig
"""
queryset = PluginConfig.objects.all()
serializer_class = PluginSerializers.PluginConfigSerializer
plugin_api_urls = [
# Detail views for a single PluginConfig item
url(r'^(?P<pk>\d+)/', include([
url(r'^.*$', PluginDetail.as_view(), name='api-plugin-detail'),
])),
# Anything else
url(r'^.*$', PluginList.as_view(), name='api-plugin-list'),
]
| """
JSON API for the plugin app
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import url
from django.utils.translation import ugettext_lazy as _
from rest_framework import generics
from plugin.models import PluginConfig
import plugin.serializers as PluginSerializers
class PluginList(generics.ListAPIView):
""" API endpoint for list of PluginConfig objects
- GET: Return a list of all PluginConfig objects
"""
serializer_class = PluginSerializers.PluginConfigSerializer
queryset = PluginConfig.objects.all()
ordering_fields = [
'key',
'name',
'active',
]
ordering = [
'key',
]
search_fields = [
'key',
'name',
]
plugin_api_urls = [
# Anything else
url(r'^.*$', PluginList.as_view(), name='api-plugin-list'),
]
| mit | Python |
f97fef9125d7fac502720ac6c7222bf7c8101ae4 | Add ultrasonic sensor from Max | westpark/robotics | piwars/sensors/ultrasonic.py | piwars/sensors/ultrasonic.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os, sys
import RPi.GPIO as GPIO
import time
from ..core import config
from ..core import logging
THRESHOLD_MM = 10
THRESHOLD_SECS = 0.1
SPEED_OF_SOUND = 343 * 100 * 10 # mm/s
TOLERANCE_MM = 30
class Sensor(object):
def __init__(self, trigger_pin, echo_pin, name):
self.name = name
self.trigger_pin = trigger_pin
self.echo_pin = echo_pin
def initialise_GPIO(self):
"""Initialise the GPIO
We're going to be using BCM numbering
"""
GPIO.setmode(GPIO.BCM)
def initialise_pins(self):
"""Intialise the GPIO pins we're using
Set up the trigger pin as an output and the echo
pin as an input
"""
GPIO.setup(self.trigger_pin, GPIO.OUT)
GPIO.setup(self.echo_pin, GPIO.IN)
def steady_trigger(self):
"""Steady the trigger mechanism
The sensor needs the trigger line to be low for a short period
before actually starting to operate. We're waiting for two seconds
but it's pretty much arbitrary
"""
GPIO.output(self.trigger_pin, False)
time.sleep(2)
def pulse_trigger(self):
"""Pulse the trigger line
Set the trigger line high for a millisecond. This is what the
sensor is expecting to initiate a reading.
"""
GPIO.output(self.trigger_pin, True)
time.sleep(0.00001)
GPIO.output(self.trigger_pin, False)
def wait_for_echo(self):
"""Wait for the echo return from the sensor
Once the trigger line is pulsed, the sensor sends out a sound
wave and waits for it to bounce back from a nearby object. To
indicate back to us how long that bounce took, the sensor sets
the echo line high for the same amount of time as it waited
for the bounce to return.
"""
#
# Wait for the echo line to go high
#
t0 = start = time.time()
while GPIO.input(self.echo_pin) == 0:
start = time.time()
if start - t0 > THRESHOLD_SECS:
print("Waited for more than %s seconds" % THRESHOLD_SECS)
return start - t0
#
# Now start the clock and wait for it to go low again
#
end = start
while GPIO.input(self.echo_pin) == 1:
end = time.time()
if end - start > THRESHOLD_SECS:
return end - start
#
# The difference between the two timestamps is the number
# of seconds the bounce took to return to the sensor.
#
return end - start
def convert_delay_to_distance(self, delay_secs):
"""Convert the number of seconds delay into a distance from an object
The number of seconds represents the time it took a sound wave
to travel to a nearby object and back. We know the speed of sound
(in mm/s in our case). We obtain the distance by multiplying the
time taken by the speed. This gives us the distance in mm *both there
and back*. To obtain the distance, we divide this distance by 2.
"""
total_distance = delay_secs * SPEED_OF_SOUND
outbound_distance_mm = total_distance / 2
adjusted_distance_mm = outbound_distance_mm - TOLERANCE_MM
return adjusted_distance_mm
def find_distance_mm(self):
"""Find the distance in mm to a nearby object
Pulse the trigger, wait for the delay to be signalled
on the echo line, and calculate the distance in mm.
"""
self.pulse_trigger()
delay_secs = self.wait_for_echo()
#print("Delay: %3.6f" % delay_secs)
return self.convert_delay_to_distance(delay_secs)
if __name__ == '__main__':
sensor1 = Sensor(23, 24, "Right")
sensor2 = Sensor(15, 18,"Left")
sensors = [sensor1, sensor2]
for sensor in sensors:
sensor.initialise_GPIO()
sensor.initialise_pins()
sensor.steady_trigger()
try:
while True:
for s in sensors:
distance_mm = s.find_distance_mm()
print(s.name, "%5.2fmm" % distance_mm)
if distance_mm < THRESHOLD_MM:
print ("stop")
break
time.sleep(0.1)
finally:
GPIO.cleanup() | # -*- coding: utf-8 -*-
from ..core import config
from ..core import logging
| mit | Python |
7f1542bc52438e6c9796e776603553d7f5a9df7f | Debug in importing deleted module. | tgquintela/pySpatialTools,tgquintela/pySpatialTools | pySpatialTools/utils/util_classes/__init__.py | pySpatialTools/utils/util_classes/__init__.py |
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
|
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from general_mapper import General1_1Mapper
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
| mit | Python |
73ae36396b7562233b06083412b81d86ce487177 | Update rockets.py | cblgh/tenyks-contrib,kyleterry/tenyks-contrib | src/tenyksscripts/scripts/rockets.py | src/tenyksscripts/scripts/rockets.py | import datetime
import requests
import time
def run(data, settings):
if data["payload"] not in ["nextlaunch", "next launch", "smooth baby rocket", "WHOOOOOOOOOOOOOSH"]:
return
launches = requests.get("https://launchlibrary.net/1.2/launch", params={"next": 1, "mode": "verbose"}).json()
if not launches["count"]:
return "No launches scheduled"
launch = launches["launches"][0]
delta = datetime.timedelta(seconds=launch["netstamp"] - int(time.time()))
return "Next launch: {name}. When: {time} (in {delta})".format(
name=launch["name"],
time=launch["net"],
delta=delta
)
| import datetime
import requests
import time
def run(data, settings):
if data["payload"] != "nextlaunch":
return
launches = requests.get("https://launchlibrary.net/1.2/launch", params={"next": 1, "mode": "verbose"}).json()
if not launches["count"]:
return "No launches scheduled"
launch = launches["launches"][0]
delta = datetime.timedelta(seconds=launch["netstamp"] - int(time.time()))
return "Next launch: {name}. When: {time} (in {delta})".format(
name=launch["name"],
time=launch["net"],
delta=delta
)
| mit | Python |
01288bb6785e3eeb8daa1328901cd5827e95b2fd | Update problem_27.py | bruckhaus/challenges,bruckhaus/challenges,bruckhaus/challenges,bruckhaus/challenges | python_challenges/project_euler/problem_27.py | python_challenges/project_euler/problem_27.py | import os
import sys
current_path = os.path.dirname(os.path.abspath(__file__))
lib_path = os.path.join(current_path, '..')
sys.path.append(lib_path)
from lib.prime import Prime
class QuadraticPrimes:
"""
Quadratic primes
Problem 27
Euler discovered the remarkable quadratic formula:
n^2 + n + 41
It turns out that the formula will produce 40 primes for the consecutive values n = 0 to 39.
However, when n = 40, 402 + 40 + 41 = 40(40 + 1) + 41 is divisible by 41,
and certainly when n = 41, 41^2 + 41 + 41 is clearly divisible by 41.
The incredible formula n^2 - 79n + 1601 was discovered,
which produces 80 primes for the consecutive values n = 0 to 79.
The product of the coefficients, -79 and 1601, is -126479.
Considering quadratics of the form:
n^2 + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n
e.g. |11| = 11 and |-4| = 4
Find the product of the coefficients, a and b, for the quadratic expression
that produces the maximum number of primes for consecutive values of n, starting with n = 0.
"""
def __init__(self):
self.series = []
self.coefficients = (0, 0)
def find(self, limit=1000):
longest = 0
for a in range(-limit, limit):
for b in range(-limit, limit):
length = len(self.prime_series(a, b))
if length > longest:
longest = length
self.coefficients = (a, b)
self.show_diagnostics()
product = self.coefficients[0] * self.coefficients[1]
return product
def prime_series(self, a, b):
q = QuadraticPrimes
self.series = []
x = 0
value = q.quadratic_value(a, b, x)
while Prime.is_prime(value):
self.series.append(value)
x += 1
value = q.quadratic_value(a, b, x)
return self.series
@staticmethod
def quadratic_value(a, b, x):
return x ** 2 + a * x + b
def show_diagnostics(self):
if __name__ == '__main__':
print "best coefficients:", self.coefficients, ", length:", len(self.series), ", series:", self.series
if __name__ == '__main__':
q = QuadraticPrimes().find(1000)
print "The product of the coefficients that produce the maximum number of primes is", q
| import os
import sys
current_path = os.path.dirname(os.path.abspath(__file__))
lib_path = os.path.join(current_path, '..')
sys.path.append(lib_path)
from lib.prime import Prime
class QuadraticPrimes:
"""
Quadratic primes
Problem 27
Euler discovered the remarkable quadratic formula:
n^2 + n + 41
It turns out that the formula will produce 40 primes for the consecutive values n = 0 to 39.
However, when n = 40, 402 + 40 + 41 = 40(40 + 1) + 41 is divisible by 41,
and certainly when n = 41, 41^2 + 41 + 41 is clearly divisible by 41.
The incredible formula n^2 - 79n + 1601 was discovered,
which produces 80 primes for the consecutive values n = 0 to 79.
The product of the coefficients, -79 and 1601, is -126479.
Considering quadratics of the form:
n^2 + an + b, where |a| < 1000 and |b| < 1000
where |n| is the modulus/absolute value of n
e.g. |11| = 11 and |-4| = 4
Find the product of the coefficients, a and b, for the quadratic expression
that produces the maximum number of primes for consecutive values of n, starting with n = 0.
"""
def __init__(self):
self.series = []
self.coefficients = (0, 0)
def find(self, limit=1000):
longest = 0
for a in range(-limit, limit):
for b in range(-limit, limit):
length = len(self.prime_series(a, b))
if length > longest:
longest = length
self.coefficients = (a, b)
self.show_diagnostics()
product = self.coefficients[0] * self.coefficients[1]
return product
def prime_series(self, a, b):
q = QuadraticPrimes
self.series = []
x = 0
value = q.quadratic_value(a, b, x)
while Prime.is_prime(value):
self.series.append(value)
x += 1
value = q.quadratic_value(a, b, x)
return self.series
@staticmethod
def quadratic_value(a, b, x):
return x ** 2 + a * x + b
def show_diagnostics(self):
if __name__ == '__main__':
print "best coefficients:", self.coefficients, ", length:", len(self.series), ", series:", self.series
if __name__ == '__main__':
q = QuadraticPrimes().find(1000)
print "The product of the coefficients that produce the maximum number of primes is", q
| mit | Python |
00697aeabb356c4aa3c653f9b3dfe1b619eb4dca | remove comment | compas-dev/compas | src/compas_ghpython/artists/_primitiveartist.py | src/compas_ghpython/artists/_primitiveartist.py | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas_ghpython.artists._artist import BaseArtist
__all__ = ["PrimitiveArtist"]
class PrimitiveArtist(BaseArtist):
"""Base class for artists for geometry primitives.
Parameters
----------
primitive: :class:`compas.geometry.Primitive`
The instance of the primitive.
color : 3-tuple, optional
The RGB color specification of the object.
Attributes
----------
primitive: :class:`compas.geometry.Primitive`
A reference to the geometry of the primitive.
name : str
The name of the primitive.
color : tuple
The RGB components of the base color of the primitive.
"""
def __init__(self, primitive, color=None):
super(PrimitiveArtist, self).__init__()
self.primitive = primitive
self.color = color
@property
def name(self):
"""str : Reference to the name of the primitive."""
return self.primitive.name
@name.setter
def name(self, name):
self.primitive.name = name
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
pass
| from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from compas_ghpython.artists._artist import BaseArtist
__all__ = ["PrimitiveArtist"]
class PrimitiveArtist(BaseArtist):
"""Base class for artists for geometry primitives.
Parameters
----------
primitive: :class:`compas.geometry.Primitive`
The instance of the primitive.
color : 3-tuple, optional
The RGB color specification of the object.
Attributes
----------
primitive: :class:`compas.geometry.Primitive`
A reference to the geometry of the primitive.
name : str
The name of the primitive.
color : tuple
The RGB components of the base color of the primitive.
"""
def __init__(self, primitive, color=None):
super(PrimitiveArtist, self).__init__()
self.primitive = primitive
self.color = color
@property
def name(self):
"""str : Reference to the name of the primitive."""
# if hasattr(self.primitive, 'name'):
return self.primitive.name
@name.setter
def name(self, name):
self.primitive.name = name
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
pass
| mit | Python |
9b31d7135f0d1c8a434da5bce609fde2bb313974 | use to correct incorrect timestamp | ver228/tierpsy-tracker,ver228/tierpsy-tracker,ver228/tierpsy-tracker | tierpsy/debugging/add_default_attrs.py | tierpsy/debugging/add_default_attrs.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 8 16:19:07 2017
@author: ajaver
"""
import glob
import os
import tables
from tierpsy.helper.misc import RESERVED_EXT
from tierpsy.helper.params import set_unit_conversions, read_unit_conversions
from tierpsy import DFLT_PARAMS_PATH, DFLT_PARAMS_FILES
from tierpsy.helper.params import TrackerParams
#script to correct a previous bug in how the expected_fps, microns_per_pixel are saved.
params = TrackerParams(os.path.join(DFLT_PARAMS_PATH, '_TEST.json'))
expected_fps = params.p_dict['expected_fps']
microns_per_pixel = params.p_dict['microns_per_pixel']
#main_dir = '/Volumes/behavgenom_archive$/Adam/screening'
#fnames = glob.glob(os.path.join(main_dir, '**', '*.hdf5'), recursive=True)
#dname = '/Volumes/behavgenom_archive$/Ida/test_3/**/*.hdf5'
dname = '/Volumes/behavgenom_archive$/Ida/LoopBio_rig/180222_blue_light/3/**/*.hdf5'
fnames = glob.glob(dname, recursive=True)
masked_files = [x for x in fnames if not any(x.endswith(ext) for ext in RESERVED_EXT)]
skeletons_files = [x for x in fnames if x.endswith('_skeletons.hdf5')]
def change_attrs(fname, field_name):
print(os.path.basename(fname))
read_unit_conversions(fname)
with tables.File(fname, 'r+') as fid:
group_to_save = fid.get_node(field_name)
set_unit_conversions(group_to_save,
expected_fps=expected_fps,
microns_per_pixel=microns_per_pixel)
read_unit_conversions(fname)
#for fname in masked_files:
# change_attrs(fname, '/mask')
for fname in skeletons_files:
change_attrs(fname, '/trajectories_data') | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon May 8 16:19:07 2017
@author: ajaver
"""
import glob
import os
import tables
from tierpsy.helper.misc import RESERVED_EXT
from tierpsy.helper.params import set_unit_conversions, read_unit_conversions
from tierpsy import DFLT_PARAMS_PATH, DFLT_PARAMS_FILES
from tierpsy.helper.params import TrackerParams
#script to correct a previous bug in how the expected_fps, microns_per_pixel are saved.
params = TrackerParams(os.path.join(DFLT_PARAMS_PATH, '_TEST.json'))
expected_fps = params.p_dict['expected_fps']
microns_per_pixel = params.p_dict['microns_per_pixel']
#main_dir = '/Volumes/behavgenom_archive$/Adam/screening'
#fnames = glob.glob(os.path.join(main_dir, '**', '*.hdf5'), recursive=True)
dname = '/Volumes/behavgenom_archive$/Ida/test_3/**/*.hdf5'
fnames = glob.glob(dname, recursive=True)
masked_files = [x for x in fnames if not any(x.endswith(ext) for ext in RESERVED_EXT)]
skeletons_files = [x for x in fnames if x.endswith('_skeletons.hdf5')]
def change_attrs(fname, field_name):
print(os.path.basename(fname))
read_unit_conversions(fname)
with tables.File(fname, 'r+') as fid:
group_to_save = fid.get_node(field_name)
set_unit_conversions(group_to_save,
expected_fps=expected_fps,
microns_per_pixel=microns_per_pixel)
read_unit_conversions(fname)
for skeletons_file in masked_files:
change_attrs(skeletons_file, '/mask')
| mit | Python |
7fd709ebd32764a15b26ec0e8086ef69080dd4cc | Use ggrep instead of grep for pcre on mac os | beni55/pre-commit,pre-commit/pre-commit,pre-commit/pre-commit,Teino1978-Corp/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,philipgian/pre-commit,barrysteyn/pre-commit,dnephin/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,Lucas-C/pre-commit,Lucas-C/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,dnephin/pre-commit,beni55/pre-commit,dnephin/pre-commit,pre-commit/pre-commit,Teino1978-Corp/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,beni55/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,Lucas-C/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,barrysteyn/pre-commit,Lucas-C/pre-commit,beni55/pre-commit,dnephin/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,pre-commit/pre-commit | pre_commit/languages/pcre.py | pre_commit/languages/pcre.py | from __future__ import unicode_literals
from sys import platform
from pre_commit.languages.helpers import file_args_to_stdin
from pre_commit.util import shell_escape
ENVIRONMENT_DIR = None
def install_environment(repo_cmd_runner, version='default'):
"""Installation for pcre type is a noop."""
raise AssertionError('Cannot install pcre repo.')
def run_hook(repo_cmd_runner, hook, file_args):
grep_command = 'grep -H -n -P'
if platform == 'darwin':
grep_command = 'ggrep -H -n -P'
# For PCRE the entry is the regular expression to match
return repo_cmd_runner.run(
[
'xargs', '-0', 'sh', '-c',
# Grep usually returns 0 for matches, and nonzero for non-matches
# so we flip it here.
'! {0} {1} $@'.format(grep_command, shell_escape(hook['entry'])),
'--',
],
stdin=file_args_to_stdin(file_args),
retcode=None,
encoding=None,
)
| from __future__ import unicode_literals
from pre_commit.languages.helpers import file_args_to_stdin
from pre_commit.util import shell_escape
ENVIRONMENT_DIR = None
def install_environment(repo_cmd_runner, version='default'):
"""Installation for pcre type is a noop."""
raise AssertionError('Cannot install pcre repo.')
def run_hook(repo_cmd_runner, hook, file_args):
# For PCRE the entry is the regular expression to match
return repo_cmd_runner.run(
[
'xargs', '-0', 'sh', '-c',
# Grep usually returns 0 for matches, and nonzero for non-matches
# so we flip it here.
'! grep -H -n -P {0} $@'.format(shell_escape(hook['entry'])),
'--',
],
stdin=file_args_to_stdin(file_args),
retcode=None,
encoding=None,
)
| mit | Python |
efbea8320518d94c8c7eeb23a90d8e97a4727fb6 | make sure test can find the test-management-command | zefciu/django-extensions,nikolas/django-extensions,jpadilla/django-extensions,rodo/django-extensions,jpadilla/django-extensions,artscoop/django-extensions,zefciu/django-extensions,JoseTomasTocino/django-extensions,haakenlid/django-extensions,mandx/django-extensions,kevgathuku/django-extensions,barseghyanartur/django-extensions,mandx/django-extensions,jpadilla/django-extensions,bionikspoon/django-extensions,ewjoachim/django-extensions,t1m0thy/django-extensions,haakenlid/django-extensions,maroux/django-extensions,barseghyanartur/django-extensions,haakenlid/django-extensions,dpetzold/django-extensions,dpetzold/django-extensions,nikolas/django-extensions,JoseTomasTocino/django-extensions,barseghyanartur/django-extensions,lamby/django-extensions,artscoop/django-extensions,helenst/django-extensions,bionikspoon/django-extensions,ewjoachim/django-extensions,github-account-because-they-want-it/django-extensions,frewsxcv/django-extensions,linuxmaniac/django-extensions,zefciu/django-extensions,levic/django-extensions,t1m0thy/django-extensions,helenst/django-extensions,Moulde/django-extensions,lamby/django-extensions,levic/django-extensions,Moulde/django-extensions,joeyespo/django-extensions,github-account-because-they-want-it/django-extensions,levic/django-extensions,ewjoachim/django-extensions,rodo/django-extensions,dpetzold/django-extensions,joeyespo/django-extensions,nikolas/django-extensions,kevgathuku/django-extensions,helenst/django-extensions,gvangool/django-extensions,ctrl-alt-d/django-extensions,maroux/django-extensions,atchariya/django-extensions,VishvajitP/django-extensions,gvangool/django-extensions,rodo/django-extensions,kevgathuku/django-extensions,marctc/django-extensions,maroux/django-extensions,VishvajitP/django-extensions,ctrl-alt-d/django-extensions,frewsxcv/django-extensions,frewsxcv/django-extensions,linuxmaniac/django-extensions,gvangool/django-extensions,djan
go-extensions/django-extensions,ctrl-alt-d/django-extensions,artscoop/django-extensions,django-extensions/django-extensions,linuxmaniac/django-extensions,joeyespo/django-extensions,lamby/django-extensions,marctc/django-extensions,t1m0thy/django-extensions,mandx/django-extensions,atchariya/django-extensions,bionikspoon/django-extensions,django-extensions/django-extensions,github-account-because-they-want-it/django-extensions,Moulde/django-extensions,atchariya/django-extensions,VishvajitP/django-extensions,JoseTomasTocino/django-extensions,marctc/django-extensions | django_extensions/tests/management_command.py | django_extensions/tests/management_command.py | # -*- coding: utf-8 -*-
import logging
from cStringIO import StringIO
from django.core.management import call_command
from django.test import TestCase
class MockLoggingHandler(logging.Handler):
""" Mock logging handler to check for expected logs. """
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class CommandTest(TestCase):
def test_error_logging(self):
# Ensure command errors are properly logged and reraised
from django_extensions.management.base import logger
logger.addHandler(MockLoggingHandler())
from django.conf import settings
org_apps = None
apps = list(settings.INSTALLED_APPS)
if not 'django_extensions.tests' in apps:
apps.append('django_extensions.tests')
self.assertRaises(Exception, call_command, 'error_raising_command')
handler = logger.handlers[0]
self.assertEqual(len(handler.messages['error']), 1)
if org_apps:
settings.INSTALLED_APPS = org_apps
class ShowTemplateTagsTests(TestCase):
def test_some_output(self):
out = StringIO()
call_command('show_templatetags', stdout=out)
output = out.getvalue()
# Once django_extension is installed during tests it should appear with
# its templatetags
self.assertIn('django_extensions', output)
# let's check at least one
self.assertIn('truncate_letters', output)
| # -*- coding: utf-8 -*-
import logging
from cStringIO import StringIO
from django.core.management import call_command
from django.test import TestCase
class MockLoggingHandler(logging.Handler):
""" Mock logging handler to check for expected logs. """
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class CommandTest(TestCase):
def test_error_logging(self):
# Ensure command errors are properly logged and reraised
from django_extensions.management.base import logger
logger.addHandler(MockLoggingHandler())
self.assertRaises(Exception, call_command, 'error_raising_command')
handler = logger.handlers[0]
self.assertEqual(len(handler.messages['error']), 1)
class ShowTemplateTagsTests(TestCase):
def test_some_output(self):
out = StringIO()
call_command('show_templatetags', stdout=out)
output = out.getvalue()
# Once django_extension is installed during tests it should appear with
# its templatetags
self.assertIn('django_extensions', output)
# let's check at least one
self.assertIn('truncate_letters', output)
| mit | Python |
9ed095a64679ad8899ad6706487e05152ded8db1 | Add DJANGO_SETTINGS_MODULE to wsgi environment settings | SeattleAttic/HedyNet,akjohnson/HedyNet,SeattleAttic/HedyNet,SeattleAttic/HedyNet,akjohnson/HedyNet,SeattleAttic/HedyNet,akjohnson/HedyNet,akjohnson/HedyNet | HedyNet/HedyNet/wsgi.py | HedyNet/HedyNet/wsgi.py | """
WSGI config for HedyNet project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.insert(0, SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "HedyNet.settings.development")
environment_settings = (
"DJANGO_SETTINGS_MODULE",
"DATABASE_NAME",
"DATABASE_USER",
"DATABASE_PASSWORD",
"SECRET_KEY",
)
def application(environ, start_response):
if "VIRTUALENV_PATH" in environ:
path.insert(0, environ["VIRTUALENV_PATH"])
for key in environment_settings:
if key in environ:
os.environ[key] = str(environ[key])
import django.core.handlers.wsgi
_application = django.core.handlers.wsgi.WSGIHandler()
return _application(environ, start_response)
| """
WSGI config for HedyNet project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.insert(0, SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "jajaja.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "HedyNet.settings.development")
environment_settings = (
"DATABASE_NAME",
"DATABASE_USER",
"DATABASE_PASSWORD",
"SECRET_KEY",
)
def application(environ, start_response):
if "VIRTUALENV_PATH" in environ:
path.insert(0, environ["VIRTUALENV_PATH"])
for key in environment_settings:
if key in environ:
os.environ[key] = str(environ[key])
import django.core.handlers.wsgi
_application = django.core.handlers.wsgi.WSGIHandler()
return _application(environ, start_response)
| apache-2.0 | Python |
1097d67534c2a34ce8a381af2814abfe23e4086e | Enable ndimage on 64-bit systems. | lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt,scipy/scipy-svn,scipy/scipy-svn,scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,lesserwhirls/scipy-cwt | Lib/ndimage/__init__.py | Lib/ndimage/__init__.py | # Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
from filters import *
from fourier import *
from interpolation import *
from measurements import *
from morphology import *
from info import __doc__
from numpy.testing import ScipyTest
test = ScipyTest().test
| # Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
if numpy.int_ == numpy.int64:
raise ImportError, "ndimage does not yet support 64-bit systems"
from filters import *
from fourier import *
from interpolation import *
from measurements import *
from morphology import *
from info import __doc__
from numpy.testing import ScipyTest
test = ScipyTest().test
| bsd-3-clause | Python |
6b015573e6d087eb53e932ae3ba71311170472bf | Remove print | geoneric/starling,geoneric/starling | starling/flask/error_handler/json.py | starling/flask/error_handler/json.py | from flask import jsonify
from werkzeug.exceptions import HTTPException
def response(
code,
description):
"""
Format a response
:param int code: HTTP error code
:param str description: Error message
:return: Tuple of a wrapped JSON snippet and the error code
:rtype: Tuple of :py:class:`flask.Response` containing a JSON snippet,
and the error code
The JSON snippet is formatted like this:
.. code-block:: json
{
"status_code": 404,
"message": "The requested URL was not found on the server"
}
"""
payload = jsonify({
"status_code": code,
"message": description
})
# print(type(payload))
return payload, code
def http_exception_error_handler(
exception):
"""
Handle HTTP exception
:param werkzeug.exceptions.HTTPException exception: Raised exception
A response is returned, as formatted by the :py:func:`response` function.
"""
assert issubclass(type(exception), HTTPException), type(exception)
assert hasattr(exception, "code")
assert hasattr(exception, "description")
return response(exception.code, exception.description)
| from flask import jsonify
from werkzeug.exceptions import HTTPException
def response(
code,
description):
"""
Format a response
:param int code: HTTP error code
:param str description: Error message
:return: Tuple of a wrapped JSON snippet and the error code
:rtype: Tuple of :py:class:`flask.Response` containing a JSON snippet,
and the error code
The JSON snippet is formatted like this:
.. code-block:: json
{
"status_code": 404,
"message": "The requested URL was not found on the server"
}
"""
payload = jsonify({
"status_code": code,
"message": description
})
print(type(payload))
return payload, code
def http_exception_error_handler(
exception):
"""
Handle HTTP exception
:param werkzeug.exceptions.HTTPException exception: Raised exception
A response is returned, as formatted by the :py:func:`response` function.
"""
assert issubclass(type(exception), HTTPException), type(exception)
assert hasattr(exception, "code")
assert hasattr(exception, "description")
return response(exception.code, exception.description)
| mit | Python |
62a76827ecf7c148101b62925dea04f63709012a | Update command to work with sublime 3 | RomuloOliveira/dot-files,RomuloOliveira/unix-files,RomuloOliveira/dot-files | sublime/User/update_user_settings.py | sublime/User/update_user_settings.py | import json
import urllib
import sublime
import sublime_plugin
GIST_URL = 'https://raw.githubusercontent.com/RomuloOliveira/dot-files/master/sublime/User/Preferences.sublime-settings' # noqa
class UpdateUserSettingsCommand(sublime_plugin.TextCommand):
def run(self, edit):
gist_settings = self._get_settings_from_gist(GIST_URL)
sublime_settings = sublime.load_settings(
'Preferences.sublime-settings'
)
self._update_settings(gist_settings, sublime_settings)
@staticmethod
def _get_settings_from_gist(url):
try:
response = urllib.request.urlopen(url)
settings = json.loads(response.read().decode('utf-8'))
except (urllib.error.URLError, ValueError) as e:
sublime.error_message('Could not retrieve settings: {}'.format(e))
raise
return settings
@staticmethod
def _update_settings(settings_dict, sublime_settings):
for key, value in settings_dict.items():
sublime_settings.set(key, value)
sublime.save_settings('Preferences.sublime-settings')
sublime.status_message('Settings updated')
| import json
import urllib2
import sublime
import sublime_plugin
GIST_URL = u'https://raw.githubusercontent.com/RomuloOliveira/dot-files/master/sublime/User/Preferences.sublime-settings' # noqa
class UpdateUserSettingsCommand(sublime_plugin.TextCommand):
def run(self, edit):
gist_settings = self._get_settings_from_gist(GIST_URL)
sublime_settings = sublime.load_settings(
'Preferences.sublime-settings'
)
self._update_settings(gist_settings, sublime_settings)
@staticmethod
def _get_settings_from_gist(url):
try:
response = urllib2.urlopen(url)
settings = json.loads(response.read())
except (urllib2.URLError, ValueError) as e:
sublime.error_message('Could not retrieve settings: {}'.format(e))
raise
return settings
@staticmethod
def _update_settings(settings_dict, sublime_settings):
for key, value in settings_dict.items():
sublime_settings.set(key, value)
sublime.save_settings('Preferences.sublime-settings')
sublime.status_message('Settings updated')
| apache-2.0 | Python |
abc932da4a65ef36de8367cdba91df4cb98e0e7f | Fix check digit calculation to handle 0 | poliquin/brazilnum | brazilnum/cei.py | brazilnum/cei.py | #!/usr/bin/env python
import re
import random
from operator import mul
"""
Functions for working with Brazilian CEI identifiers.
"""
NONDIGIT = re.compile(r'[^0-9]')
CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4]
def clean_cei(cei):
"""Takes a CEI and turns it into a string of only numbers."""
return NONDIGIT.sub('', str(cei))
def validate_cei(cei):
"""Check whether CEI is valid."""
cei = clean_cei(cei)
# all complete CEI are 12 digits long
if len(cei) != 12:
return False
digits = [int(k) for k in cei] # identifier digits
return _cei_check(digits[:-1]) == digits[-1]
def cei_check_digit(cei):
"""Find check digit needed to make a CEI valid."""
cei = clean_cei(cei)
if len(cei) < 11:
raise ValueError('CEI must have at least 11 digits: {0}'.format(cei))
digits = [int(k) for k in cei[:12]]
return _cei_check(digits)
def format_cei(cei):
"""Applies typical 00.000.00000/00 formatting to CEI."""
fmt = '{0}.{1}.{2}/{3}'
cei = clean_cei(cei)
return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:])
def pad_cei(cei, validate=True):
"""Takes a CEI that probably had leading zeros and pads it."""
cei = clean_cei(cei)
cei = '%0.012i' % int(cei)
if validate and not validate_cei(cei):
raise ValueError('Invalid CEI: {0}'.format(cei))
return cei
def random_cei(formatted=True):
"""Create a random, valid CEI identifier."""
uf = random.randint(11, 53)
stem = random.randint(100000000, 999999999)
cei = '{0}{1}{2}'.format(uf, stem, cei_check_digit(stem))
if formatted:
return format_cei(cei)
return cei
def _cei_check(digits):
"""Calculate check digit from iterable of integers."""
digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits))
modulo = (sum(divmod(digsum % 100, 10)) % 10)
if modulo == 0:
return 0
return 10 - modulo
| #!/usr/bin/env python
import re
import random
from operator import mul
"""
Functions for working with Brazilian CEI identifiers.
"""
NONDIGIT = re.compile(r'[^0-9]')
CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4]
def clean_cei(cei):
"""Takes a CEI and turns it into a string of only numbers."""
return NONDIGIT.sub('', str(cei))
def validate_cei(cei):
"""Check whether CEI is valid."""
cei = clean_cei(cei)
# all complete CEI are 12 digits long
if len(cei) != 12:
return False
digits = [int(k) for k in cei] # identifier digits
# validate the check digit
digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1]))
check = 10 - (sum(divmod(digsum % 100, 10)) % 10)
return check == digits[-1]
def cei_check_digit(cei):
"""Find check digit needed to make a CEI valid."""
cei = clean_cei(cei)
if len(cei) < 11:
raise ValueError('CEI must have at least 11 digits: {0}'.format(cei))
digits = [int(k) for k in cei[:12]]
# find the check digit
digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits))
return 10 - (sum(divmod(digsum % 100, 10)) % 10)
def format_cei(cei):
"""Applies typical 00.000.00000/00 formatting to CEI."""
fmt = '{0}.{1}.{2}/{3}'
cei = clean_cei(cei)
return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:])
def pad_cei(cei, validate=True):
"""Takes a CEI that probably had leading zeros and pads it."""
cei = clean_cei(cei)
cei = '%0.012i' % int(cei)
if validate and not validate_cei(cei):
raise ValueError('Invalid CEI: {0}'.format(cei))
return cei
def random_cei(formatted=True):
"""Create a random, valid CEI identifier."""
uf = random.randint(11, 53)
stem = random.randint(100000000, 999999999)
cei = '{0}{1}{2}'.format(uf, stem, cei_check_digit(stem))
if formatted:
return format_cei(cei)
return cei
| mit | Python |
9a83a07dbf16198b67c80c4846bdeba5342e5374 | fix related_name for FB profile | colbypalmer/cp-broker | broker/models.py | broker/models.py | from django.db import models
from django.contrib.auth.models import User
from django.dispatch.dispatcher import receiver
from django_facebook.models import FacebookModel
from django.db.models.signals import post_save
from django_facebook.utils import get_user_model, get_profile_model
class Service(models.Model):
name = models.CharField(max_length=75)
url = models.URLField(blank=True, null=True)
class Connection(models.Model):
user = models.ForeignKey(User)
provider = models.CharField(max_length=75)
uid = models.IntegerField(null=True, blank=True)
token = models.CharField(max_length=255)
secret = models.CharField(max_length=255, null=True, blank=True)
username = models.CharField(max_length=100, null=True, blank=True)
is_active = models.BooleanField(default=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now_add=True, auto_now=True, editable=False)
def __unicode__(self):
if not self.username:
return '{}: {}'.format(self.provider, self.uid)
else:
return '{}: {}'.format(self.provider, self.username)
class FacebookProfile(FacebookModel):
user = models.OneToOneField(User, related_name='profile')
@receiver(post_save)
def create_profile(sender, instance, created, **kwargs):
"""Create a matching profile whenever a user object is created."""
if sender == get_user_model():
user = instance
profile_model = get_profile_model()
if profile_model == FacebookProfile and created:
profile, new = FacebookProfile.objects.get_or_create(user=instance) | from django.db import models
from django.contrib.auth.models import User
from django.dispatch.dispatcher import receiver
from django_facebook.models import FacebookModel
from django.db.models.signals import post_save
from django_facebook.utils import get_user_model, get_profile_model
class Service(models.Model):
name = models.CharField(max_length=75)
url = models.URLField(blank=True, null=True)
class Connection(models.Model):
user = models.ForeignKey(User)
provider = models.CharField(max_length=75)
uid = models.IntegerField(null=True, blank=True)
token = models.CharField(max_length=255)
secret = models.CharField(max_length=255, null=True, blank=True)
username = models.CharField(max_length=100, null=True, blank=True)
is_active = models.BooleanField(default=True)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now_add=True, auto_now=True, editable=False)
def __unicode__(self):
if not self.username:
return '{}: {}'.format(self.provider, self.uid)
else:
return '{}: {}'.format(self.provider, self.username)
class FacebookProfile(FacebookModel):
user = models.OneToOneField(User, related_name='Facebook Profile')
@receiver(post_save)
def create_profile(sender, instance, created, **kwargs):
"""Create a matching profile whenever a user object is created."""
if sender == get_user_model():
user = instance
profile_model = get_profile_model()
if profile_model == FacebookProfile and created:
profile, new = FacebookProfile.objects.get_or_create(user=instance) | mit | Python |
839632f923ccd6691dc7135c5a49d66e1b0e3721 | Add copyright notice | prasannav7/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core | src/ggrc/migrations/versions/20151203153139_297131e22e28_add_final_state_to_request_status_and_.py | src/ggrc/migrations/versions/20151203153139_297131e22e28_add_final_state_to_request_status_and_.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: urban@reciprocitylabs.com
# Maintained By: urban@reciprocitylabs.com
"""Add final state to request status and rename Unstarted to Open
Revision ID: 297131e22e28
Revises: 18cbdd3a7fd9
Create Date: 2015-12-03 15:31:39.979333
"""
# revision identifiers, used by Alembic.
revision = '297131e22e28'
down_revision = '504f541411a5'
from alembic import op
def upgrade():
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Unstarted","In Progress","Finished","Verified","Open","Final") NOT NULL;""")
op.execute("""UPDATE requests SET status="Open" WHERE status="Unstarted";""")
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Open","In Progress","Finished","Verified","Final") NOT NULL;""")
def downgrade():
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Open","In Progress","Finished","Verified","Final","Unstarted") NOT NULL;""")
op.execute("""UPDATE requests SET status="Unstarted" WHERE status="Open";""")
op.execute("""UPDATE requests SET status="Finished" WHERE status="Final";""")
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Unstarted","In Progress","Finished","Verified") NOT NULL;""")
|
"""Add final state to request status and rename Unstarted to Open
Revision ID: 297131e22e28
Revises: 18cbdd3a7fd9
Create Date: 2015-12-03 15:31:39.979333
"""
# revision identifiers, used by Alembic.
revision = '297131e22e28'
down_revision = '504f541411a5'
from alembic import op
def upgrade():
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Unstarted","In Progress","Finished","Verified","Open","Final") NOT NULL;""")
op.execute("""UPDATE requests SET status="Open" WHERE status="Unstarted";""")
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Open","In Progress","Finished","Verified","Final") NOT NULL;""")
def downgrade():
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Open","In Progress","Finished","Verified","Final","Unstarted") NOT NULL;""")
op.execute("""UPDATE requests SET status="Unstarted" WHERE status="Open";""")
op.execute("""UPDATE requests SET status="Finished" WHERE status="Final";""")
op.execute("""ALTER TABLE requests CHANGE status status ENUM("Unstarted","In Progress","Finished","Verified") NOT NULL;""")
| apache-2.0 | Python |
e84d6231538fe0cf587648c0b9e301b3b0399db7 | Support structured_code.__contains__ | angr/angr,angr/angr,angr/angr | angr/knowledge_plugins/structured_code/manager.py | angr/knowledge_plugins/structured_code/manager.py | from typing import TYPE_CHECKING
from .. import KnowledgeBasePlugin
if TYPE_CHECKING:
from angr.knowledge_base import KnowledgeBase
from angr.analyses.decompiler.structured_codegen import StructuredCodeGenerator
class StructuredCodeManager(KnowledgeBasePlugin):
def __init__(self, kb):
self._kb = kb # type: KnowledgeBase
self._codegens = {}
def _normalize_key(self, item):
if type(item) is not tuple:
raise TypeError("Structured code can only be queried by tuples of (func, flavor)")
if type(item[0]) is str:
item = (self._kb.labels.lookup(item[0]), *item[1:])
return item
def __getitem__(self, item) -> 'StructuredCodeGenerator':
return self._codegens[self._normalize_key(item)]
def __setitem__(self, key, value):
self._codegens[self._normalize_key(key)] = value
def __contains__(self, key):
return self._normalize_key(key) in self._codegens
def copy(self):
raise NotImplementedError
KnowledgeBasePlugin.register_default('structured_code', StructuredCodeManager)
| from typing import TYPE_CHECKING
from .. import KnowledgeBasePlugin
if TYPE_CHECKING:
from angr.knowledge_base import KnowledgeBase
from angr.analyses.decompiler.structured_codegen import StructuredCodeGenerator
class StructuredCodeManager(KnowledgeBasePlugin):
def __init__(self, kb):
self._kb = kb # type: KnowledgeBase
self._codegens = {}
def _normalize_key(self, item):
if type(item) is not tuple:
raise TypeError("Structured code can only be queried by tuples of (func, flavor)")
if type(item[0]) is str:
item = (self._kb.labels.lookup(item[0]), *item[1:])
return item
def __getitem__(self, item) -> 'StructuredCodeGenerator':
return self._codegens[self._normalize_key(item)]
def __setitem__(self, key, value):
self._codegens[self._normalize_key(key)] = value
def copy(self):
raise NotImplementedError
KnowledgeBasePlugin.register_default('structured_code', StructuredCodeManager)
| bsd-2-clause | Python |
86449f40f288bf360cb239104e62e3b1acc9f1f8 | Bump to v1.0.0 | gisce/enerdata | enerdata/__init__.py | enerdata/__init__.py | __author__ = 'ecarreras'
__version__ = '1.0.0'
| __author__ = 'ecarreras'
__version__ = '0.29.0'
| mit | Python |
1a7c9d4734b09b398ae1778fa400a6d53def227a | Replace a query with an if statement in category_tree(). | dokterbob/satchmo,ringemup/satchmo,twidi/satchmo,twidi/satchmo,dokterbob/satchmo,ringemup/satchmo,Ryati/satchmo,Ryati/satchmo | satchmo/shop/templatetags/satchmo_category.py | satchmo/shop/templatetags/satchmo_category.py | from django.template import Library
from satchmo.product.models import Category
try:
from xml.etree.ElementTree import Element, SubElement, tostring
except ImportError:
from elementtree.ElementTree import Element, SubElement, tostring
register = Library()
def recurse_for_children(current_node, parent_node, show_empty=True):
child_count = current_node.child.count()
if show_empty or child_count > 0 or current_node.product_set.count() > 0:
temp_parent = SubElement(parent_node, 'li')
attrs = {'href': current_node.get_absolute_url()}
link = SubElement(temp_parent, 'a', attrs)
link.text = current_node.name
if child_count > 0:
new_parent = SubElement(temp_parent, 'ul')
children = current_node.child.all()
for child in children:
recurse_for_children(child, new_parent)
def category_tree():
"""
Creates an unnumbered list of the categories. For example:
<ul>
<li>Books
<ul>
<li>Science Fiction
<ul>
<li>Space stories</li>
<li>Robot stories</li>
</ul>
</li>
<li>Non-fiction</li>
</ul>
</ul>
"""
root = Element("ul")
for cats in Category.objects.filter(parent__isnull=True):
recurse_for_children(cats, root)
return tostring(root, 'utf-8')
register.simple_tag(category_tree)
| from django.template import Library
from satchmo.product.models import Category
try:
from xml.etree.ElementTree import Element, SubElement, tostring
except ImportError:
from elementtree.ElementTree import Element, SubElement, tostring
register = Library()
def recurse_for_children(current_node, parent_node, show_empty=True):
child_count = current_node.child.count()
if show_empty or child_count > 0 or current_node.product_set.count() > 0:
temp_parent = SubElement(parent_node, 'li')
attrs = {'href': current_node.get_absolute_url()}
link = SubElement(temp_parent, 'a', attrs)
link.text = current_node.name
if child_count > 0:
new_parent = SubElement(temp_parent, 'ul')
children = current_node.child.all()
for child in children:
recurse_for_children(child, new_parent)
def category_tree():
"""
Creates an unnumbered list of the categories. For example:
<ul>
<li>Books
<ul>
<li>Science Fiction
<ul>
<li>Space stories</li>
<li>Robot stories</li>
</ul>
</li>
<li>Non-fiction</li>
</ul>
</ul>
"""
root = Element("ul")
for cats in Category.objects.all():
if not cats.parent:
recurse_for_children(cats, root)
return tostring(root, 'utf-8')
register.simple_tag(category_tree)
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.