commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
e30e5e9780cfe674a70856609ad6010056936263 | Use requests instead of urllib.request | kanosaki/PicDump,kanosaki/PicDump | picdump/webadapter.py | picdump/webadapter.py |
import requests
class WebAdapter:
def __init__(self):
self.cookies = {}
def get(self, urllike):
res = requests.get(str(urllike), cookies=self.cookies)
self.cookies = res.cookies
return res.text
|
import urllib.request
class WebAdapter:
def get(self, urllike):
url = self.mk_url(urllike)
try:
res = urllib.request.urlopen(url)
return res.read()
except Exception as e:
raise e
def open(self, urllike):
url = self.mk_url(urllike)
try:
return urllib.request.urlopen(url)
except Exception as e:
raise e
def mk_url(self, urllike):
return str(urllike)
| mit | Python |
a35abfda8af01f3c5bab4f4122060b630c118cac | Use ros-logging instead of print | bit-bots/bitbots_behaviour | bitbots_head_behavior/src/bitbots_head_behavior/actions/pattern_search.py | bitbots_head_behavior/src/bitbots_head_behavior/actions/pattern_search.py | import math
from dynamic_stack_decider.abstract_action_element import AbstractActionElement
class PatternSearch(AbstractActionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(PatternSearch, self).__init__(blackboard, dsd, parameters)
self.index = 0
self.pattern = self.blackboard.config['search_pattern']
def perform(self, reevaluate=False):
head_pan, head_tilt = self.pattern[int(self.index)]
# Convert to radians
head_pan = head_pan / 180.0 * math.pi
head_tilt = head_tilt / 180.0 * math.pi
rospy.logdebug_throttle_identical(1, f"Searching at {head_pan}, {head_tilt}")
self.blackboard.head_capsule.send_motor_goals(head_pan, head_tilt, 1.5, 1.5)
# Increment index
self.index = (self.index + 0.2) % len(self.pattern)
| import math
from dynamic_stack_decider.abstract_action_element import AbstractActionElement
class PatternSearch(AbstractActionElement):
def __init__(self, blackboard, dsd, parameters=None):
super(PatternSearch, self).__init__(blackboard, dsd, parameters)
self.index = 0
self.pattern = self.blackboard.config['search_pattern']
def perform(self, reevaluate=False):
head_pan, head_tilt = self.pattern[int(self.index)]
# Convert to radians
head_pan = head_pan / 180.0 * math.pi
head_tilt = head_tilt / 180.0 * math.pi
print("Searching at {}, {}".format(head_pan, head_tilt))
self.blackboard.head_capsule.send_motor_goals(head_pan, head_tilt, 1.5, 1.5)
# Increment index
self.index = (self.index + 0.2) % len(self.pattern)
| bsd-3-clause | Python |
80e7fbff75a51d9dea8716f74d7d06fb01155704 | Add python3 compat boilerplate | thaim/ansible,thaim/ansible | lib/ansible/plugins/test/mathstuff.py | lib/ansible/plugins/test/mathstuff.py | # (c) 2016, Ansible, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
__metaclass__ = type
from __future__ import (absolute_import, division, print_function)
def issubset(a, b):
return set(a) <= set(b)
def issuperset(a, b):
return set(a) >= set(b)
class TestModule:
''' Ansible math jinja2 tests '''
def tests(self):
return {
# set theory
'issubset': issubset,
'issuperset': issuperset,
}
| # (c) 2016, Ansible, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
__metaclass__ = type
def issubset(a, b):
return set(a) <= set(b)
def issuperset(a, b):
return set(a) >= set(b)
class TestModule(object):
''' Ansible math jinja2 tests '''
def tests(self):
return {
# set theory
'issubset': issubset,
'issuperset': issuperset,
}
| mit | Python |
6d7cf3575e02212ec68d16338a055b372ae6a9f5 | Fix tabs | mithro/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,mithro/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware,cr1901/HDMI2USB-litex-firmware | gateware/git_info.py | gateware/git_info.py | import binascii
import os
import subprocess
import sys
from litex.gen.fhdl import *
from litex.soc.interconnect.csr import *
def git_root():
if sys.platform == "win32":
# Git on Windows is likely to use Unix-style paths (`/c/path/to/repo`),
# whereas directories passed to Python should be Windows-style paths
# (`C:/path/to/repo`) (because Python calls into the Windows API).
# `cygpath` converts between the two.
git = subprocess.Popen(
"git rev-parse --show-toplevel",
cwd=os.path.dirname(__file__),
stdout=subprocess.PIPE,
)
path = subprocess.check_output(
"cygpath -wf -",
stdin=git.stdout,
)
git.wait()
return path.decode('ascii').strip()
else:
return subprocess.check_output(
"git rev-parse --show-toplevel",
shell=True,
cwd=os.path.dirname(__file__),
).decode('ascii').strip()
def git_commit():
data = subprocess.check_output(
"git rev-parse HEAD",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
return binascii.unhexlify(data)
def git_describe():
return subprocess.check_output(
"git describe --dirty",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
def git_status():
return subprocess.check_output(
"git status --short",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
class GitInfo(Module, AutoCSR):
def __init__(self):
commit = sum(int(x) << (i*8) for i, x in enumerate(reversed(git_commit())))
self.commit = CSRStatus(160)
# FIXME: This should be a read-only Memory object
#extradata = [ord(x) for x in "\0".join([
# "https://github.com/timvideos/HDMI2USB-misoc-firmware.git",
# git_describe(),
# git_status(),
# "",
# ])]
#self.extradata = CSRStatus(len(extradata)*8)
self.comb += [
self.commit.status.eq(commit),
# self.extradata.status.eq(extradata),
]
| import binascii
import os
import subprocess
import sys
from litex.gen.fhdl import *
from litex.soc.interconnect.csr import *
def git_root():
if sys.platform == "win32":
# Git on Windows is likely to use Unix-style paths (`/c/path/to/repo`),
# whereas directories passed to Python should be Windows-style paths
# (`C:/path/to/repo`) (because Python calls into the Windows API).
# `cygpath` converts between the two.
git = subprocess.Popen(
"git rev-parse --show-toplevel",
cwd=os.path.dirname(__file__),
stdout=subprocess.PIPE,
)
path = subprocess.check_output(
"cygpath -wf -",
stdin=git.stdout,
)
git.wait()
return path.decode('ascii').strip()
else:
return subprocess.check_output(
"git rev-parse --show-toplevel",
shell=True,
cwd=os.path.dirname(__file__),
).decode('ascii').strip()
def git_commit():
data = subprocess.check_output(
"git rev-parse HEAD",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
return binascii.unhexlify(data)
def git_describe():
return subprocess.check_output(
"git describe --dirty",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
def git_status():
return subprocess.check_output(
"git status --short",
shell=True,
cwd=git_root(),
).decode('ascii').strip()
class GitInfo(Module, AutoCSR):
def __init__(self):
commit = sum(int(x) << (i*8) for i, x in enumerate(reversed(git_commit())))
self.commit = CSRStatus(160)
# FIXME: This should be a read-only Memory object
#extradata = [ord(x) for x in "\0".join([
# "https://github.com/timvideos/HDMI2USB-misoc-firmware.git",
# git_describe(),
# git_status(),
# "",
# ])]
#self.extradata = CSRStatus(len(extradata)*8)
self.comb += [
self.commit.status.eq(commit),
# self.extradata.status.eq(extradata),
]
| bsd-2-clause | Python |
bf6f985d4746a42979184cca70daa039df484ab0 | Update __init__.py | pv2b/zonetruck | zonetruck/__init__.py | zonetruck/__init__.py | import dns.resolver
def main():
answers = dns.resolver.query('dnspython.org', 'MX')
for rdata in answers:
print 'Host', rdata.exchange, 'has preference', rdata.preference
| import dnspython
def main():
print ("Hello, world!")
| mit | Python |
8fec29601080c2cc0357b8d3882eb63d9d8cbe15 | modify test unit | jhsrcmh/AlgoRun | suffix_tree/test_suffix_tree.py | suffix_tree/test_suffix_tree.py | #!/usr/bin/python
#encoding=utf-8
'''
Authored by jhsrcmh in NCI
-----------------------------
<strong> Wolf's MIS</strong>
-----------------------------
'''
import unittest
import codecs
from suffix_tree import SuffixTree
class SuffixTreeTest(unittest.TestCase):
def test_empty_string(self):
st = SuffixTree('')
self.assertEqual(st.find_substring('not there'), -1)
self.assertEqual(st.find_substring(''), -1)
self.assertFalse(st.has_substring('not there'))
self.assertFalse(st.has_substring(''))
def test_repeated_string(self):
st = SuffixTree("aaa")
self.assertEqual(st.find_substring('a'), 0)
self.assertEqual(st.find_substring('aa'), 0)
self.assertEqual(st.find_substring('aaa'), 0)
self.assertEqual(st.find_substring('b'), -1)
self.assertTrue(st.has_substring('a'))
self.assertTrue(st.has_substring('aa'))
self.assertTrue(st.has_substring('aaa'))
def test_text_string(self):
f = codecs.open("test.txt", encoding='utf-8')
st = SuffixTree(f.read())
self.assertTrue(st.has_substring(u'a'))
def test_chinese_string(self):
st = SuffixTree(u"才高八斗")
self.assertTrue(st.has_substring(u'高'))
self.assertFalse(st.has_substring(u'豆豆'))
def test_chinese_text(self):
st = SuffixTree(codecs.open("test.txt", encoding="utf-8").read())
self.assertTrue(st.find_substring(u'概括性总结'))
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
#encoding=utf-8
'''
Authored by jhsrcmh in NCI
-----------------------------
<strong> Wolf's MIS</strong>
-----------------------------
'''
import unittest
import codecs
from suffix_tree import SuffixTree
class SuffixTreeTest(unittest.TestCase):
def test_empty_string(self):
st = SuffixTree('')
self.assertEqual(st.find_substring('not there'), -1)
self.assertEqual(st.find_substring(''), -1)
self.assertFalse(st.has_substring('not there'))
self.assertFalse(st.has_substring(''))
def test_repeated_string(self):
st = SuffixTree("aaa")
self.assertEqual(st.find_substring('a'), 0)
self.assertEqual(st.find_substring('aa'), 0)
self.assertEqual(st.find_substring('aaa'), 0)
self.assertEqual(st.find_substring('b'), -1)
self.assertTrue(st.has_substring('a'))
self.assertTrue(st.has_substring('aa'))
self.assertTrue(st.has_substring('aaa'))
def test_text_string(self):
f = codecs.open("test.txt", encoding='utf-8')
st = SuffixTree(f.read())
self.assertTrue(st.has_substring(u'谢'))
def test_chinese_string(self):
st = SuffixTree(u"才高八斗")
self.assertTrue(st.has_substring(u'高'))
self.assertFalse(st.has_substring(u'豆豆'))
def test_chinese_text(self):
st = SuffixTree(codecs.open("test.txt", encoding="utf-8").read())
self.assertTrue(st.find_substring(u'概括性总结'))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
7da8d040d5020809495f828ba9ea7bd14393af3a | Add `nullable` property to DataField template | chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano | serrano/resources/templates.py | serrano/resources/templates.py | # DataField core fields and properties
DataCategory = {
'fields': [':pk', 'name', 'order', 'parent'],
'related': {
'parent': {
'fields': [':pk', 'name', 'order'],
}
}
}
DataField = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category', 'app_name', 'model_name', 'field_name',
'modified', 'published', 'archived', 'operators',
'simple_type', 'internal_type', 'data_modified', 'enumerable',
'searchable', 'unit', 'plural_unit', 'nullable'
],
'key_map': {
'plural_name': 'get_plural_name',
'plural_unit': 'get_plural_unit',
'operators': 'operator_choices',
},
'related': {
'category': DataCategory,
},
}
DataConcept = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category', 'order', 'modified', 'published', 'archived',
'formatter_name', 'queryview'
],
'key_map': {
'plural_name': 'get_plural_name',
},
'related': {
'category': DataCategory,
},
}
DataConceptField = {
'fields': ['alt_name', 'alt_plural_name'],
'key_map': {
'alt_name': 'name',
'alt_plural_name': 'get_plural_name',
},
}
DataContext = {
'fields': [':pk', ':local', 'language'],
'exclude': ['user', 'session_key'],
}
DataView = {
'exclude': ['user', 'session_key'],
}
| # DataField core fields and properties
DataCategory = {
'fields': [':pk', 'name', 'order', 'parent'],
'related': {
'parent': {
'fields': [':pk', 'name', 'order'],
}
}
}
DataField = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category', 'app_name', 'model_name', 'field_name',
'modified', 'published', 'archived', 'operators',
'simple_type', 'internal_type', 'data_modified', 'enumerable',
'searchable', 'unit', 'plural_unit'
],
'key_map': {
'plural_name': 'get_plural_name',
'plural_unit': 'get_plural_unit',
'operators': 'operator_choices',
},
'related': {
'category': DataCategory,
},
}
DataConcept = {
'fields': [
':pk', 'name', 'plural_name', 'description', 'keywords',
'category', 'order', 'modified', 'published', 'archived',
'formatter_name', 'queryview'
],
'key_map': {
'plural_name': 'get_plural_name',
},
'related': {
'category': DataCategory,
},
}
DataConceptField = {
'fields': ['alt_name', 'alt_plural_name'],
'key_map': {
'alt_name': 'name',
'alt_plural_name': 'get_plural_name',
},
}
DataContext = {
'fields': [':pk', ':local', 'language'],
'exclude': ['user', 'session_key'],
}
DataView = {
'exclude': ['user', 'session_key'],
}
| bsd-2-clause | Python |
a3d1d323c943c2bef763850dfa57adb1413abc32 | fix static url | ktmud/david,ktmud/david,ktmud/david | david/ext/views/static.py | david/ext/views/static.py | # -*- coding: utf-8 -*-
import os
from flask import url_for, current_app, json
from david.lib.cache import lc
from config import APP_ROOT, STATIC_ROOT, DEBUG, SITE_ROOT
class LazyStatic(object):
def __init__(self, path):
self.path = path
def __get__(self, obj, type):
return static_url(self.path)
def lazy_static_url(filename):
return LazyStatic(filename)
def static_url(filename):
if DEBUG:
return url_for('static', filename=filename)
return STATIC_ROOT + _hashed_filename(filename)
def admin_static_url(filename):
return SITE_ROOT + url_for('admin.static', filename=filename).replace('/', '', 1)
def urlmap(*filenames):
ret = {}
for f in filenames:
fname = f
if not f.endswith('.js') and not f.endswith('.css'):
fname = 'js/' + f + '.js'
ret[f] = static_url(fname)
return ret
def _hashed_filename(filename):
if filename in hashmap:
if '.' in filename:
basename, ext = filename.rsplit('.', 1)
return '%s_%s.%s' % (basename, hashmap[filename], ext)
return filename + '_' + hashmap[filename]
return filename
def inline_static(filename):
pass
STATIC_HASH_JSON = os.path.join(APP_ROOT, 'david/static/dist/hash.json')
hashmap = {}
def load_hashmap():
hashmap.update(json.load(open(STATIC_HASH_JSON)))
if not DEBUG:
load_hashmap()
| # -*- coding: utf-8 -*-
import os
from flask import url_for, current_app, json
from david.lib.cache import lc
from config import APP_ROOT, STATIC_ROOT, DEBUG, SITE_ROOT
class LazyStatic(object):
def __init__(self, path):
self.path = path
def __get__(self, obj, type):
return static_url(self.path)
def lazy_static_url(filename):
return LazyStatic(filename)
def static_url(filename):
if DEBUG:
return url_for('static', filename=filename)
return STATIC_ROOT + _hashed_filename(filename)
def admin_static_url(filename):
return SITE_ROOT + url_for('admin.static', filename=filename).replace('/', '', 1)
def urlmap(*filenames):
ret = {}
for f in filenames:
fname = f
if not f.endswith('.js') and not f.endswith('.css'):
fname = 'js/' + f + '.js'
ret[f] = static_url(fname)
return ret
def _hashed_filename(filename):
if filename in hashmap:
return
return os.path.join('/', filename)
def inline_static(filename):
pass
STATIC_HASH_JSON = os.path.join(APP_ROOT, 'david/static/dist/hash.json')
hashmap = {}
def load_hashmap():
hashmap = json.load(open(STATIC_HASH_JSON))
if not DEBUG:
load_hashmap()
| mit | Python |
4cedc5b496041d5d7ba3632302bc77d89c59f44b | Tweak how to handle fastq file name for 2 pass mapping | dgaston/ddb-ngsflow,dgaston/ddbio-ngsflow | ddb_ngsflow/rna/bowtie.py | ddb_ngsflow/rna/bowtie.py | """
.. module:: cufflinks
:platform: Unix, OSX
:synopsis: A module of methods for working with the bowtie alignment program
into additional formats.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def add_additional_options(command_list, config, flags):
if 'local' in flags:
command_list.append("--local")
return command_list
def bowtie_unpaired(job, config, name, samples, flags):
"""Align RNA-Seq data to a reference using Bowtie2
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples info and config dictionary.
:type samples: dict.
:returns: str -- The output vcf file name.
"""
output = "{}.bowtie.sam".format(name)
logfile = "{}.bowtie.log".format(name)
if "2-pass" in flags:
samples[name]['unmapped_fastq'] = samples[name]['fastq1']
command = ("{}".format(config['bowtie']['bin']),
"-x {}".format(config['bowtie']['index']),
"-p {}".format(config['bowtie']['num_cores']),
"-U {}".format(samples[name]['fastq1']),
"-S {}".format(output)
)
command = add_additional_options(command, config, flags)
job.fileStore.logToMaster("Bowtie Command: {}\n".format(command))
pipeline.run_and_log_command(" ".join(command), logfile)
return output
def bowtie_paired(job, config, name, samples, flags):
"""Align RNA-Seq data to a reference using Bowtie2
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples info and config dictionary.
:type samples: dict.
:returns: str -- The output vcf file name.
"""
output = "{}.bowtie.sam".format(name)
logfile = "{}.bowtie.log".format(name)
command = ("{}".format(config['bowtie']['bin']),
"-x {}".format(config['bowtie']['index']),
"-p {}".format(config['bowtie']['num_cores']),
"-1 {}".format(samples[name]['fastq1']),
"-2 {}".format(samples[name]['fastq2']),
"-S {}".format(output)
)
command = add_additional_options(command, config, flags)
job.fileStore.logToMaster("Bowtie Command: {}\n".format(command))
pipeline.run_and_log_command(" ".join(command), logfile)
return output
| """
.. module:: cufflinks
:platform: Unix, OSX
:synopsis: A module of methods for working with the bowtie alignment program
into additional formats.
.. moduleauthor:: Daniel Gaston <daniel.gaston@dal.ca>
"""
from ddb_ngsflow import pipeline
def add_additional_options(command_list, config, flags):
if 'local' in flags:
command_list.append("--local")
return command_list
def bowtie_unpaired(job, config, name, samples, flags):
"""Align RNA-Seq data to a reference using Bowtie2
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples info and config dictionary.
:type samples: dict.
:returns: str -- The output vcf file name.
"""
output = "{}.bowtie.sam".format(name)
logfile = "{}.bowtie.log".format(name)
command = ("{}".format(config['bowtie']['bin']),
"-x {}".format(config['bowtie']['index']),
"-p {}".format(config['bowtie']['num_cores']),
"-U {}".format(samples[name]['fastq1']),
"-S {}".format(output)
)
command = add_additional_options(command, config, flags)
job.fileStore.logToMaster("Bowtie Command: {}\n".format(command))
pipeline.run_and_log_command(" ".join(command), logfile)
return output
def bowtie_paired(job, config, name, samples, flags):
"""Align RNA-Seq data to a reference using Bowtie2
:param config: The configuration dictionary.
:type config: dict.
:param name: sample name.
:type name: str.
:param samples: The samples info and config dictionary.
:type samples: dict.
:returns: str -- The output vcf file name.
"""
output = "{}.bowtie.sam".format(name)
logfile = "{}.bowtie.log".format(name)
command = ("{}".format(config['bowtie']['bin']),
"-x {}".format(config['bowtie']['index']),
"-p {}".format(config['bowtie']['num_cores']),
"-1 {}".format(samples[name]['fastq1']),
"-2 {}".format(samples[name]['fastq1']),
"-S {}".format(output)
)
command = add_additional_options(command, config, flags)
job.fileStore.logToMaster("Bowtie Command: {}\n".format(command))
pipeline.run_and_log_command(" ".join(command), logfile)
return output
| mit | Python |
379ffea89716a515e85de9cdaab2bc9c47a68fc4 | Add LineWidthNode | vorburger/mcedit2,vorburger/mcedit2 | src/mcedit2/rendering/scenegraph/misc.py | src/mcedit2/rendering/scenegraph/misc.py | """
misc
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from OpenGL import GL
from mcedit2.rendering.scenegraph.rendernode import RenderstateRenderNode, RenderNode
from mcedit2.rendering.scenegraph.scenenode import Node
log = logging.getLogger(__name__)
class PolygonModeRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_POLYGON_BIT)
GL.glPolygonMode(self.sceneNode.face, self.sceneNode.mode)
def exit(self):
GL.glPopAttrib()
class PolygonModeNode(Node):
RenderNodeClass = PolygonModeRenderNode
def __init__(self, face, mode):
super(PolygonModeNode, self).__init__()
self.face = face
self.mode = mode
class LineWidthRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_LINE_BIT)
GL.glLineWidth(self.sceneNode.lineWidth)
def exit(self):
GL.glPopAttrib()
class LineWidthNode(Node):
RenderNodeClass = LineWidthRenderNode
def __init__(self, lineWidth):
super(LineWidthNode, self).__init__()
self.lineWidth = lineWidth
class ClearRenderNode(RenderNode):
def drawSelf(self):
color = self.sceneNode.clearColor
if color is None:
GL.glClear(GL.GL_DEPTH_BUFFER_BIT)
else:
GL.glClearColor(*color)
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
class ClearNode(Node):
RenderNodeClass = ClearRenderNode
def __init__(self, clearColor=(0, 0, 0, 1)):
super(ClearNode, self).__init__()
self.clearColor = clearColor
| """
misc
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from OpenGL import GL
from mcedit2.rendering.scenegraph.rendernode import RenderstateRenderNode, RenderNode
from mcedit2.rendering.scenegraph.scenenode import Node
log = logging.getLogger(__name__)
class PolygonModeRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_POLYGON_BIT)
GL.glPolygonMode(self.sceneNode.face, self.sceneNode.mode)
def exit(self):
GL.glPopAttrib()
class PolygonModeNode(Node):
RenderNodeClass = PolygonModeRenderNode
def __init__(self, face, mode):
super(PolygonModeNode, self).__init__()
self.face = face
self.mode = mode
class ClearRenderNode(RenderNode):
def drawSelf(self):
color = self.sceneNode.clearColor
if color is None:
GL.glClear(GL.GL_DEPTH_BUFFER_BIT)
else:
GL.glClearColor(*color)
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
class ClearNode(Node):
RenderNodeClass = ClearRenderNode
def __init__(self, clearColor=(0, 0, 0, 1)):
super(ClearNode, self).__init__()
self.clearColor = clearColor
| bsd-3-clause | Python |
ad36a4a6ae8376a779f9feb08adfb2ca4a59dbb4 | add \x0c to whitespace characters, as per html5 standard | starrify/scrapy,eLRuLL/scrapy,pawelmhm/scrapy,finfish/scrapy,rolando/scrapy,dangra/scrapy,scrapy/scrapy,wujuguang/scrapy,eLRuLL/scrapy,darkrho/scrapy-scrapy,Parlin-Galanodel/scrapy,shaform/scrapy,finfish/scrapy,ssteo/scrapy,pablohoffman/scrapy,Ryezhang/scrapy,kmike/scrapy,kmike/scrapy,shaform/scrapy,ssteo/scrapy,umrashrf/scrapy,rolando/scrapy,wujuguang/scrapy,dangra/scrapy,pablohoffman/scrapy,pablohoffman/scrapy,scrapy/scrapy,rolando-contrib/scrapy,ssteo/scrapy,Ryezhang/scrapy,dangra/scrapy,elacuesta/scrapy,Ryezhang/scrapy,umrashrf/scrapy,umrashrf/scrapy,starrify/scrapy,Parlin-Galanodel/scrapy,taito/scrapy,ArturGaspar/scrapy,rolando-contrib/scrapy,darkrho/scrapy-scrapy,ArturGaspar/scrapy,elacuesta/scrapy,Parlin-Galanodel/scrapy,pawelmhm/scrapy,taito/scrapy,rolando-contrib/scrapy,pawelmhm/scrapy,elacuesta/scrapy,finfish/scrapy,darkrho/scrapy-scrapy,shaform/scrapy,wujuguang/scrapy,kmike/scrapy,rolando/scrapy,starrify/scrapy,ArturGaspar/scrapy,taito/scrapy,scrapy/scrapy,eLRuLL/scrapy | scrapy/linkextractors/regex.py | scrapy/linkextractors/regex.py | import re
from six.moves.urllib.parse import urljoin
from w3lib.html import remove_tags, replace_entities, replace_escape_chars, get_base_url
from scrapy.link import Link
from .sgml import SgmlLinkExtractor
linkre = re.compile(
"<a\s.*?href=(\"[.#]+?\"|\'[.#]+?\'|[^\s]+?)(>|\s.*?>)(.*?)<[/ ]?a>",
re.DOTALL | re.IGNORECASE)
def clean_link(link_text):
"""Remove leading and trailing whitespace and punctuation"""
return link_text.strip("\t\r\n '\"\x0c")
class RegexLinkExtractor(SgmlLinkExtractor):
"""High performant link extractor"""
def _extract_links(self, response_text, response_url, response_encoding, base_url=None):
def clean_text(text):
return replace_escape_chars(remove_tags(text.decode(response_encoding))).strip()
def clean_url(url):
clean_url = ''
try:
clean_url = urljoin(base_url, replace_entities(clean_link(url.decode(response_encoding))))
except ValueError:
pass
return clean_url
if base_url is None:
base_url = get_base_url(response_text, response_url, response_encoding)
links_text = linkre.findall(response_text)
return [Link(clean_url(url).encode(response_encoding),
clean_text(text))
for url, _, text in links_text]
| import re
from six.moves.urllib.parse import urljoin
from w3lib.html import remove_tags, replace_entities, replace_escape_chars, get_base_url
from scrapy.link import Link
from .sgml import SgmlLinkExtractor
linkre = re.compile(
"<a\s.*?href=(\"[.#]+?\"|\'[.#]+?\'|[^\s]+?)(>|\s.*?>)(.*?)<[/ ]?a>",
re.DOTALL | re.IGNORECASE)
def clean_link(link_text):
"""Remove leading and trailing whitespace and punctuation"""
return link_text.strip("\t\r\n '\"")
class RegexLinkExtractor(SgmlLinkExtractor):
"""High performant link extractor"""
def _extract_links(self, response_text, response_url, response_encoding, base_url=None):
def clean_text(text):
return replace_escape_chars(remove_tags(text.decode(response_encoding))).strip()
def clean_url(url):
clean_url = ''
try:
clean_url = urljoin(base_url, replace_entities(clean_link(url.decode(response_encoding))))
except ValueError:
pass
return clean_url
if base_url is None:
base_url = get_base_url(response_text, response_url, response_encoding)
links_text = linkre.findall(response_text)
return [Link(clean_url(url).encode(response_encoding),
clean_text(text))
for url, _, text in links_text]
| bsd-3-clause | Python |
d531d03dfa19d1e785d71ac28e010531eefbca84 | Add TODO | davidgasquez/kaggle-airbnb | scripts/generate_submission.py | scripts/generate_submission.py | #!/usr/bin/env python
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from sklearn.ensemble import BaggingClassifier
from utils.io import generate_submission
def main():
path = '../data/processed/'
prefix = 'processed_'
suffix = '1'
train_users = pd.read_csv(path + prefix + 'train_users.csv' + suffix)
test_users = pd.read_csv(path + prefix + 'test_users.csv' + suffix)
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
xgb = XGBClassifier(
max_depth=5,
learning_rate=0.3,
n_estimators=10,
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
# TODO: Fit bagging with different datasets and warm start True
clf = BaggingClassifier(xgb, random_state=42)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder, name='bagging')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from sklearn.ensemble import BaggingClassifier
from utils.io import generate_submission
def main():
path = '../data/processed/'
prefix = 'processed_'
suffix = '1'
train_users = pd.read_csv(path + prefix + 'train_users.csv' + suffix)
test_users = pd.read_csv(path + prefix + 'test_users.csv' + suffix)
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
xgb = XGBClassifier(
max_depth=5,
learning_rate=0.3,
n_estimators=10,
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
clf = BaggingClassifier(xgb, random_state=42)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder, name='bagging')
if __name__ == '__main__':
main()
| mit | Python |
a31d5f88f3761e2b77300a2c882424c19baae9b3 | improve publishing of joint states | bit-bots/bitbots_misc,bit-bots/bitbots_misc,bit-bots/bitbots_misc | bitbots_bringup/scripts/motor_goals_viz_helper.py | bitbots_bringup/scripts/motor_goals_viz_helper.py | #!/usr/bin/env python3
import argparse
import rospy
from sensor_msgs.msg import JointState
from bitbots_msgs.msg import JointCommand
from humanoid_league_msgs.msg import Animation
JOINT_NAMES = ['LHipYaw', 'LHipRoll', 'LHipPitch', 'LKnee', 'LAnklePitch', 'LAnkleRoll', 'RHipYaw', 'RHipRoll', 'RHipPitch', 'RKnee', 'RAnklePitch', 'RAnkleRoll', 'LShoulderRoll', 'LShoulderPitch', 'LElbow', 'RShoulderRoll', 'RShoulderPitch', 'RElbow', 'HeadPan', 'HeadTilt']
class MotorVizHelper:
    """Republish motor goal commands as sensor_msgs/JointState messages.

    This lets visualisation tools consume commanded joint positions on the
    standard ``joint_states`` topic without real hardware being present.
    """

    def __init__(self):
        # get rid of additional ROS args when used in launch file
        args0 = rospy.myargv()
        parser = argparse.ArgumentParser()
        parser.add_argument("--walking", "-w", help="Directly get walking motor goals", action="store_true")
        parser.add_argument("--animation", "-a", help="Directly get animation motor goals", action="store_true")
        args = parser.parse_args(args0[1:])
        rospy.init_node("motor_viz_helper", anonymous=False)
        self.joint_publisher = rospy.Publisher('joint_states', JointState, queue_size=1, tcp_nodelay=True)
        # Optional extra sources; the DynamixelController topic is always subscribed.
        if args.walking:
            rospy.Subscriber("walking_motor_goals", JointCommand, self.joint_command_cb, queue_size=1, tcp_nodelay=True)
        if args.animation:
            rospy.Subscriber("animation_motor_goals", JointCommand, self.animation_cb, queue_size=1, tcp_nodelay=True)
        rospy.Subscriber("/DynamixelController/command", JointCommand, self.joint_command_cb, queue_size=1, tcp_nodelay=True)
        # Start from an all-zero pose for every joint in JOINT_NAMES.
        self.joint_state_msg = JointState()
        self.joint_state_msg.header.stamp = rospy.Time.now()
        self.joint_state_msg.name = JOINT_NAMES
        self.joint_state_msg.position = [0] * 20
        self.joint_publisher.publish(self.joint_state_msg)
        # Republish the cached (possibly updated by callbacks) state at
        # 100 Hz until the node shuts down; __init__ never returns before that.
        rate = rospy.Rate(100)
        while not rospy.is_shutdown():
            self.joint_state_msg.header.stamp = rospy.Time.now()
            self.joint_publisher.publish(self.joint_state_msg)
            rate.sleep()

    def joint_command_cb(self, msg: JointCommand):
        """Merge commanded positions into the cached JointState, matched by joint name."""
        self.joint_state_msg.header = msg.header
        self.joint_state_msg.name = JOINT_NAMES
        for i in range(len(msg.joint_names)):
            name = msg.joint_names[i]
            self.joint_state_msg.position[JOINT_NAMES.index(name)] = msg.positions[i]

    def animation_cb(self, msg: Animation):
        """Replace the cached JointState with the first keyframe of the animation."""
        self.joint_state_msg.header = msg.header
        self.joint_state_msg.name = msg.position.joint_names
        self.joint_state_msg.position = msg.position.points[0].positions
helper = MotorVizHelper()
| #!/usr/bin/env python3
import argparse
import rospy
from sensor_msgs.msg import JointState
from bitbots_msgs.msg import JointCommand
from humanoid_league_msgs.msg import Animation
class MotorVizHelper:
def __init__(self):
# get rid of addional ROS args when used in launch file
args0 = rospy.myargv()
parser = argparse.ArgumentParser()
parser.add_argument("--walking", "-w", help="Directly get walking motor goals", action="store_true")
parser.add_argument("--animation", "-a", help="Directly get animation motor goals", action="store_true")
args = parser.parse_args(args0[1:])
rospy.init_node("motor_viz_helper", anonymous=False)
self.joint_state_msg = JointState()
self.joint_publisher = rospy.Publisher('joint_states', JointState, queue_size=1, tcp_nodelay=True)
if args.walking:
rospy.Subscriber("walking_motor_goals", JointCommand, self.joint_command_cb, queue_size=1, tcp_nodelay=True)
elif args.animation:
rospy.Subscriber("animation", Animation, self.animation_cb, queue_size=1, tcp_nodelay=True)
else:
rospy.Subscriber("/DynamixelController/command", JointCommand, self.joint_command_cb, queue_size=1, tcp_nodelay=True)
rate = rospy.Rate(100)
while not rospy.is_shutdown():
self.joint_state_msg.header.stamp = rospy.Time.now()
self.joint_publisher.publish(self.joint_state_msg)
rate.sleep()
def joint_command_cb(self, msg:JointCommand):
self.joint_state_msg.header = msg.header
self.joint_state_msg.name = msg.joint_names
self.joint_state_msg.position = msg.positions
def animation_cb(self, msg:Animation):
self.joint_state_msg.header = msg.header
self.joint_state_msg.name = msg.position.joint_names
self.joint_state_msg.position = msg.position.points[0].positions
helper = MotorVizHelper()
| mit | Python |
86ff28441a23762d30cbab9843a7abeb67bfd028 | Improve printing in bucky's DebugClient | mistio/mist.monitor,mistio/mist.monitor | src/mist/bucky_extras/clients/debug_client.py | src/mist/bucky_extras/clients/debug_client.py | import sys
import time
import datetime
from bucky.client import Client
from bucky.names import statname
class DebugClient(Client):
    """Bucky client that pretty-prints every metric instead of forwarding it.

    Each sample is written to stdout (or to ``out_path`` when set) together
    with its timestamp and the age of the sample relative to "now".
    """

    # Optional output file path; falls back to sys.stdout when None.
    out_path = None

    def __init__(self, cfg, pipe):
        super(DebugClient, self).__init__(pipe)
        if self.out_path:
            self.stdout = open(self.out_path, 'w')
        else:
            self.stdout = sys.stdout

    def send(self, host, name, value, tstamp):
        # Only print samples accepted by the (overridable) filter hook.
        if self.filter(host, name, value, tstamp):
            self.write(host, name, value, tstamp)

    def filter(self, host, name, value, tstamp):
        """Hook for subclasses; the base implementation accepts everything."""
        return True

    def write(self, host, name, value, tstamp):
        """Format one sample as '<time> (<age>s) <target> <value>' and flush."""
        target = statname(host, name)
        dtime = datetime.datetime.fromtimestamp(tstamp)
        time_lbl = dtime.strftime('%y%m%d %H:%M:%S')
        # tstamp - time.time() is negative for samples from the past.
        self.stdout.write('%s (%.1fs) %s %r\n' % (time_lbl,
                                                  tstamp - time.time(),
                                                  target, value))
        self.stdout.flush()
| import sys
from bucky.client import Client
class DebugClient(Client):
out_path = None
def __init__(self, cfg, pipe):
super(DebugClient, self).__init__(pipe)
if self.out_path:
self.stdout = open(self.out_path, 'w')
else:
self.stdout = sys.stdout
def send(self, host, name, value, time):
if self.filter(host, name, value, time):
self.write(host, name, value, time)
def filter(self, host, name, value, time):
return True
def write(self, host, name, value, time):
self.stdout.write('%s %s %s %s\n' % (host, name, value, time))
self.stdout.flush()
| apache-2.0 | Python |
b50eb2f28aa4248e73c817fa4d73d23d0e9abcc1 | fix @ Sat Sep 27 21:44:01 EDT 2014 | f825f5242ed81a32cd04e5269665f40a/libmyolinux,f825f5242ed81a32cd04e5269665f40a/libmyolinux,f825f5242ed81a32cd04e5269665f40a/libmyolinux,f825f5242ed81a32cd04e5269665f40a/libmyolinux | src/pi/leds.py | src/pi/leds.py | #!/usr/bin/env python
# leds.py
# Drive two LEDs on Raspberry Pi header pins 7 and 12 (BOARD numbering).
import RPi.GPIO as GPIO
import time

GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.cleanup()
GPIO.setup(7, GPIO.OUT)
GPIO.setup(12, GPIO.OUT)
# Repeatedly set both pins high, one second apart (pins are never set LOW,
# so after the first pass both LEDs simply stay on).
while True:
    GPIO.output(7, GPIO.HIGH)
    time.sleep(1)
    GPIO.output(12, GPIO.HIGH)
    time.sleep(1)
# NOTE(review): unreachable — the loop above never exits, so cleanup never runs.
GPIO.cleanup()
# leds.py
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.cleanup()
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.OUT)
while True:
GPIO.output(7, GPIO.HIGH)
time.sleep(1)
GPIO.output(11, GPIO.HIGH)
time.sleep(1)
GPIO.output(12, GPIO.HIGH)
time.sleep(1)
GPIO.cleanup() | mit | Python |
4ed52f75df03ef52e93fa7897cbfbd01f9d20190 | use matplotlib's triplot instead of pydec's | whereisravi/pydec,alejospina/pydec,ryanbressler/pydec,pkuwwt/pydec,DongliangGao/pydec,DongliangGao/pydec,alejospina/pydec,pkuwwt/pydec,whereisravi/pydec,wangregoon/pydec,ryanbressler/pydec,wangregoon/pydec | Examples/MakeMesh/make_mesh.py | Examples/MakeMesh/make_mesh.py | """
Reads ascii vertex and element files, writes a pydec mesh and displays it
"""
import scipy
from pydec import SimplicialMesh, write_mesh, read_mesh
from matplotlib.pylab import triplot, show
vertices = scipy.loadtxt("v.txt")
elements = scipy.loadtxt("s.txt",dtype='int32') - 1
mymesh = SimplicialMesh(vertices=vertices,indices=elements)
write_mesh("square_8.xml",mymesh,format='basic')
rmesh = read_mesh("square_8.xml")
triplot(rmesh.vertices[:,0], rmesh.vertices[:,1], rmesh.indices)
show()
| """
Reads ascii vertex and element files, writes a pydec mesh and displays it
"""
import scipy
from pydec import *
from matplotlib.pylab import show
vertices = scipy.loadtxt("v.txt")
elements = scipy.loadtxt("s.txt",dtype='int32') - 1
mymesh = SimplicialMesh(vertices=vertices,indices=elements)
write_mesh("square_8.xml",mymesh,format='basic')
rmesh = read_mesh("square_8.xml")
triplot(rmesh.vertices,rmesh.indices)
show()
| bsd-3-clause | Python |
f6e58c512b53db1443e5cfe6ad8ba6cbaf5b3726 | Bump to version 0.30.0 | reubano/meza,reubano/meza,reubano/tabutils,reubano/tabutils,reubano/tabutils,reubano/meza | meza/__init__.py | meza/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza
~~~~
Provides methods for reading and processing data from tabular formatted files
Attributes:
CURRENCIES [tuple(unicode)]: Currency symbols to remove from decimal
strings.
ENCODING (str): Default file encoding.
DEFAULT_DATETIME (obj): Default datetime object
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from datetime import datetime as dt
__version__ = '0.30.0'
__title__ = 'meza'
__package_name__ = 'meza'
__author__ = 'Reuben Cummings'
__description__ = 'A Python toolkit for processing tabular data'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CURRENCIES = ('$', '£', '€')
ENCODING = 'utf-8'
DEFAULT_DATETIME = dt(9999, 12, 31, 0, 0, 0)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
meza
~~~~
Provides methods for reading and processing data from tabular formatted files
Attributes:
CURRENCIES [tuple(unicode)]: Currency symbols to remove from decimal
strings.
ENCODING (str): Default file encoding.
DEFAULT_DATETIME (obj): Default datetime object
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from datetime import datetime as dt
__version__ = '0.29.6'
__title__ = 'meza'
__package_name__ = 'meza'
__author__ = 'Reuben Cummings'
__description__ = 'A Python toolkit for processing tabular data'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
CURRENCIES = ('$', '£', '€')
ENCODING = 'utf-8'
DEFAULT_DATETIME = dt(9999, 12, 31, 0, 0, 0)
| mit | Python |
869ba7c298a56ff49d81516d7c5374ebbf3a0949 | Add activity for assign/unassign | hongliang5623/sentry,TedaLIEz/sentry,wujuguang/sentry,ifduyue/sentry,mvaled/sentry,gencer/sentry,mvaled/sentry,wong2/sentry,zenefits/sentry,ifduyue/sentry,hongliang5623/sentry,wujuguang/sentry,gencer/sentry,fotinakis/sentry,ewdurbin/sentry,beeftornado/sentry,Natim/sentry,daevaorn/sentry,nicholasserra/sentry,korealerts1/sentry,jean/sentry,jean/sentry,alexm92/sentry,daevaorn/sentry,mvaled/sentry,mvaled/sentry,imankulov/sentry,zenefits/sentry,kevinlondon/sentry,jean/sentry,ifduyue/sentry,1tush/sentry,kevinastone/sentry,pauloschilling/sentry,fotinakis/sentry,ngonzalvez/sentry,korealerts1/sentry,JamesMura/sentry,korealerts1/sentry,looker/sentry,songyi199111/sentry,jean/sentry,boneyao/sentry,mitsuhiko/sentry,drcapulet/sentry,BuildingLink/sentry,JTCunning/sentry,looker/sentry,Natim/sentry,felixbuenemann/sentry,BayanGroup/sentry,ifduyue/sentry,mitsuhiko/sentry,imankulov/sentry,ewdurbin/sentry,ngonzalvez/sentry,drcapulet/sentry,ngonzalvez/sentry,JackDanger/sentry,fuziontech/sentry,kevinastone/sentry,looker/sentry,1tush/sentry,beeftornado/sentry,TedaLIEz/sentry,fuziontech/sentry,looker/sentry,fotinakis/sentry,JamesMura/sentry,BuildingLink/sentry,ewdurbin/sentry,zenefits/sentry,JamesMura/sentry,kevinlondon/sentry,felixbuenemann/sentry,nicholasserra/sentry,JackDanger/sentry,looker/sentry,vperron/sentry,beeftornado/sentry,boneyao/sentry,zenefits/sentry,BuildingLink/sentry,alexm92/sentry,JamesMura/sentry,BayanGroup/sentry,jean/sentry,felixbuenemann/sentry,vperron/sentry,BuildingLink/sentry,pauloschilling/sentry,vperron/sentry,gg7/sentry,wong2/sentry,mvaled/sentry,fuziontech/sentry,Kryz/sentry,ifduyue/sentry,gg7/sentry,JTCunning/sentry,JTCunning/sentry,zenefits/sentry,alexm92/sentry,daevaorn/sentry,hongliang5623/sentry,gencer/sentry,wong2/sentry,kevinastone/sentry,gg7/sentry,nicholasserra/sentry,drcapulet/sentry,gencer/sentry,1tush/sentry,gencer/sentry,pauloschilling/sentry,songyi199111/sentry,BuildingLi
nk/sentry,daevaorn/sentry,BayanGroup/sentry,Natim/sentry,songyi199111/sentry,JackDanger/sentry,JamesMura/sentry,wujuguang/sentry,Kryz/sentry,imankulov/sentry,mvaled/sentry,boneyao/sentry,Kryz/sentry,TedaLIEz/sentry,kevinlondon/sentry,fotinakis/sentry | src/sentry/templatetags/sentry_activity.py | src/sentry/templatetags/sentry_activity.py | """
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import logging
from django import template
from django.utils.html import escape, urlize, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
from sentry.utils.avatar import get_gravatar_url
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.NOTE: 'left a note',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
Activity.CREATE_ISSUE: u'created an issue on {provider:s} titled <a href="{location:s}">{title:s}</a>',
Activity.FIRST_SEEN: 'first saw this event',
Activity.ASSIGNED: 'assigned this event to {user:s}',
Activity.UNASSIGNED: 'unassigned this event',
Activity.RELEASE: 'saw a new release: {version:s}',
}
@register.filter
def render_activity(item):
    """Render an Activity row as an HTML snippet for the group activity feed.

    Returns None (renders nothing) for activities without a group or with an
    unknown type; otherwise returns HTML marked safe for template output.
    """
    if not item.group:
        # not implemented
        return

    try:
        action_str = ACTIVITY_ACTION_STRINGS[item.type]
    except KeyError:
        logging.warning('Unknown activity type present: %s', item.type)
        return

    if item.type == Activity.CREATE_ISSUE:
        # CREATE_ISSUE templates carry provider/location/title in item.data.
        # NOTE(review): the ASSIGNED and RELEASE templates also contain
        # {}-placeholders but are not .format()ed here — confirm upstream.
        action_str = action_str.format(**item.data)

    output = ''
    if item.user:
        user = item.user
        name = user.first_name or user.email
        output += '<span class="avatar"><img src="%s"></span> ' % (get_gravatar_url(user.email, size=20),)
        output += '<strong>%s</strong> %s' % (escape(name), action_str)
    else:
        # System-generated activity (no acting user).
        output += '<span class="avatar sentry"></span> '
        output += 'The system %s' % (action_str,)

    output += ' <span class="sep">—</span> <span class="time">%s</span>' % (timesince(item.datetime),)

    if item.type == Activity.NOTE:
        # Note text is user input: escape first, then linkify and paragraph-ize.
        output += linebreaks(urlize(escape(item.data['text'])))

    return mark_safe(output)
| """
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import logging
from django import template
from django.utils.html import escape, urlize, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
from sentry.utils.avatar import get_gravatar_url
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.NOTE: 'left a note',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
Activity.CREATE_ISSUE: u'created an issue on {provider:s} titled <a href="{location:s}">{title:s}</a>',
Activity.FIRST_SEEN: 'first saw this event',
Activity.RELEASE: 'saw a new release: {version:s}',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
try:
action_str = ACTIVITY_ACTION_STRINGS[item.type]
except KeyError:
logging.warning('Unknown activity type present: %s', item.type)
return
if item.type == Activity.CREATE_ISSUE:
action_str = action_str.format(**item.data)
output = ''
if item.user:
user = item.user
name = user.first_name or user.email
output += '<span class="avatar"><img src="%s"></span> ' % (get_gravatar_url(user.email, size=20),)
output += '<strong>%s</strong> %s' % (escape(name), action_str)
else:
output += '<span class="avatar sentry"></span> '
output += 'The system %s' % (action_str,)
output += ' <span class="sep">—</span> <span class="time">%s</span>' % (timesince(item.datetime),)
if item.type == Activity.NOTE:
output += linebreaks(urlize(escape(item.data['text'])))
return mark_safe(output)
| bsd-3-clause | Python |
252784ccf5a381f5a611ca74cc486f74218b9ce6 | disable tests on gcc11 (#26593) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/cppgsl/package.py | var/spack/repos/builtin/packages/cppgsl/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cppgsl(CMakePackage):
    """C++ Guideline Support Library"""

    homepage = "https://github.com/Microsoft/GSL"
    url = "https://github.com/Microsoft/GSL/archive/v2.0.0.tar.gz"
    git = "https://github.com/Microsoft/GSL.git"

    version('3.1.0', sha256='d3234d7f94cea4389e3ca70619b82e8fb4c2f33bb3a070799f1e18eef500a083')
    version('master', branch='master')
    version('2.1.0', sha256='ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d')
    version('2.0.0', sha256='6cce6fb16b651e62711a4f58e484931013c33979b795d1b1f7646f640cfa9c8e')
    version('1.0.0', sha256='9694b04cd78e5b1a769868f19fdd9eea2002de3d4c3a81a1b769209364543c36')

    # Choice of C++ standard, passed through to CMake in cmake_args().
    variant('cxxstd',
            default='14',
            values=('14', '17'),
            multi=False,
            description='Use the specified C++ standard when building.')

    depends_on('cmake@3.1.3:', type='build')

    def cmake_args(self):
        """Map the cxxstd variant to GSL_CXX_STANDARD and only enable the
        test suite when ``spack install --test`` was requested."""
        return [
            self.define_from_variant('GSL_CXX_STANDARD', 'cxxstd'),
            self.define('GSL_TEST', self.run_tests)
        ]
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cppgsl(CMakePackage):
"""C++ Guideline Support Library"""
homepage = "https://github.com/Microsoft/GSL"
url = "https://github.com/Microsoft/GSL/archive/v2.0.0.tar.gz"
git = "https://github.com/Microsoft/GSL.git"
version('3.1.0', sha256='d3234d7f94cea4389e3ca70619b82e8fb4c2f33bb3a070799f1e18eef500a083')
version('master', branch='master')
version('2.1.0', sha256='ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d')
version('2.0.0', sha256='6cce6fb16b651e62711a4f58e484931013c33979b795d1b1f7646f640cfa9c8e')
version('1.0.0', sha256='9694b04cd78e5b1a769868f19fdd9eea2002de3d4c3a81a1b769209364543c36')
variant('cxxstd',
default='14',
values=('14', '17'),
multi=False,
description='Use the specified C++ standard when building.')
depends_on('cmake@3.1.3:', type='build')
def cmake_args(self):
args = [
'-DGSL_CXX_STANDARD={0}'.format(self.spec.variants['cxxstd'].value)
]
return args
| lgpl-2.1 | Python |
6d5cd24480a53ea2bf75f1663de707cb84cda380 | add version 0.8.0 (#23717) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-neo/package.py | var/spack/repos/builtin/packages/py-neo/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNeo(PythonPackage):
    """Neo is a package for representing electrophysiology data in Python,
    together with support for reading a wide range of neurophysiology
    file formats"""

    homepage = "http://neuralensemble.org/neo"
    pypi = "neo/neo-0.4.1.tar.gz"

    version('0.8.0', sha256='3382a37b24a384006238b72981f1e9259de9bfa71886f8ed564d35d254ace458')
    version('0.5.2', sha256='1de436b7d5e72a5b4f1baa68bae5b790624a9ac44b2673811cb0b6ef554d3f8b')
    version('0.4.1', sha256='a5a4f3aa31654d52789f679717c9fb622ad4f59b56d227dca490357b9de0a1ce')

    # setuptools is build-only; numpy and quantities are needed both to
    # build and at run time.
    depends_on('py-setuptools', type='build')
    depends_on('py-numpy@1.7.1:', type=('build', 'run'))
    depends_on('py-quantities@0.9.0:', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNeo(PythonPackage):
"""Neo is a package for representing electrophysiology data in Python,
together with support for reading a wide range of neurophysiology
file formats"""
homepage = "http://neuralensemble.org/neo"
pypi = "neo/neo-0.4.1.tar.gz"
version('0.5.2', sha256='1de436b7d5e72a5b4f1baa68bae5b790624a9ac44b2673811cb0b6ef554d3f8b')
version('0.4.1', sha256='a5a4f3aa31654d52789f679717c9fb622ad4f59b56d227dca490357b9de0a1ce')
depends_on('py-setuptools', type='build')
depends_on('py-numpy@1.7.1:', type=('build', 'run'))
depends_on('py-quantities@0.9.0:', type=('build', 'run'))
| lgpl-2.1 | Python |
d07e988fbcd7698a7776e71aecb7eaabdbdb05be | Fix py-pox build recipe (#14078) | LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-pox/package.py | var/spack/repos/builtin/packages/py-pox/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPox(PythonPackage):
    """Utilities for filesystem exploration and automated builds."""

    homepage = "https://github.com/uqfoundation/pox"
    url = "https://pypi.io/packages/source/p/pox/pox-0.2.5.tar.gz"

    version('0.2.5', sha256='2b53fbdf02596240483dc2cb94f94cc21252ad1b1858c7b1c151afeec9022cc8')
    version('0.2.3', sha256='d3e8167a1ebe08ae56262a0b9359118d90bc4648cd284b5d10ae240343100a75')
    version('0.2.2', sha256='c0b88e59ef0e4f2fa4839e11bf90d2c32d6ceb5abaf01f0c8138f7558e6f87c1')
    version('0.2.1', sha256='580bf731fee233c58eac0974011b5bf0698efb7337b0a1696d289043b4fcd7f4')

    depends_on('python@2.5:2.8,3.1:')
    depends_on('py-setuptools@0.6:', type='build')

    def url_for_version(self, version):
        """Pick the archive extension per release: 0.2.4 and later ship as
        .tar.gz, older releases only as .zip."""
        url = "https://pypi.io/packages/source/p/pox/"
        if version >= Version('0.2.4'):
            url += 'pox-{0}.tar.gz'
        else:
            url += 'pox-{0}.zip'

        url = url.format(version)
        return url
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPox(PythonPackage):
"""Utilities for filesystem exploration and automated builds."""
homepage = "https://github.com/uqfoundation/pox"
url = "https://pypi.io/packages/source/p/pox/pox-0.2.3.zip"
version('0.2.3', sha256='d3e8167a1ebe08ae56262a0b9359118d90bc4648cd284b5d10ae240343100a75')
version('0.2.2', sha256='c0b88e59ef0e4f2fa4839e11bf90d2c32d6ceb5abaf01f0c8138f7558e6f87c1')
version('0.2.1', sha256='580bf731fee233c58eac0974011b5bf0698efb7337b0a1696d289043b4fcd7f4')
depends_on('python@2.5:2.8,3.1:')
depends_on('py-setuptools@0.6:', type='build')
| lgpl-2.1 | Python |
6c05c8c91dcb7b631df932e19f363f67049e0872 | Fix ValueError: path is on mount 'X:', start on mount 'D:' on Windows | josephcslater/scipy,perimosocordiae/scipy,Eric89GXL/scipy,dominicelse/scipy,rgommers/scipy,dominicelse/scipy,e-q/scipy,andyfaff/scipy,Stefan-Endres/scipy,jjhelmus/scipy,aeklant/scipy,jor-/scipy,aeklant/scipy,apbard/scipy,nmayorov/scipy,jakevdp/scipy,Stefan-Endres/scipy,jjhelmus/scipy,josephcslater/scipy,scipy/scipy,aarchiba/scipy,nmayorov/scipy,ilayn/scipy,nmayorov/scipy,pbrod/scipy,matthew-brett/scipy,ilayn/scipy,befelix/scipy,person142/scipy,e-q/scipy,aeklant/scipy,mdhaber/scipy,grlee77/scipy,jor-/scipy,matthew-brett/scipy,jakevdp/scipy,befelix/scipy,gfyoung/scipy,person142/scipy,aarchiba/scipy,Stefan-Endres/scipy,endolith/scipy,endolith/scipy,dominicelse/scipy,apbard/scipy,vigna/scipy,andyfaff/scipy,rgommers/scipy,jamestwebber/scipy,gfyoung/scipy,rgommers/scipy,nmayorov/scipy,person142/scipy,vigna/scipy,lhilt/scipy,anntzer/scipy,anntzer/scipy,anntzer/scipy,Stefan-Endres/scipy,aeklant/scipy,jakevdp/scipy,scipy/scipy,Eric89GXL/scipy,perimosocordiae/scipy,mdhaber/scipy,aarchiba/scipy,lhilt/scipy,endolith/scipy,scipy/scipy,person142/scipy,mdhaber/scipy,gertingold/scipy,endolith/scipy,perimosocordiae/scipy,matthew-brett/scipy,jjhelmus/scipy,matthew-brett/scipy,grlee77/scipy,Eric89GXL/scipy,Stefan-Endres/scipy,befelix/scipy,pbrod/scipy,zerothi/scipy,rgommers/scipy,ilayn/scipy,Eric89GXL/scipy,pbrod/scipy,andyfaff/scipy,WarrenWeckesser/scipy,jakevdp/scipy,josephcslater/scipy,WarrenWeckesser/scipy,pizzathief/scipy,tylerjereddy/scipy,jjhelmus/scipy,zerothi/scipy,josephcslater/scipy,jamestwebber/scipy,gertingold/scipy,zerothi/scipy,gfyoung/scipy,pbrod/scipy,lhilt/scipy,mdhaber/scipy,WarrenWeckesser/scipy,apbard/scipy,andyfaff/scipy,gertingold/scipy,arokem/scipy,aarchiba/scipy,arokem/scipy,zerothi/scipy,mdhaber/scipy,andyfaff/scipy,endolith/scipy,ilayn/scipy,vigna/scipy,arokem/scipy,jakevdp/scipy,ilayn/scipy,nmayorov/scipy,perimosocordiae/scipy,anntzer/scipy,grl
ee77/scipy,zerothi/scipy,jor-/scipy,ilayn/scipy,perimosocordiae/scipy,dominicelse/scipy,gertingold/scipy,tylerjereddy/scipy,aarchiba/scipy,lhilt/scipy,lhilt/scipy,dominicelse/scipy,jamestwebber/scipy,e-q/scipy,gfyoung/scipy,arokem/scipy,e-q/scipy,pbrod/scipy,person142/scipy,jamestwebber/scipy,endolith/scipy,tylerjereddy/scipy,anntzer/scipy,gfyoung/scipy,matthew-brett/scipy,Eric89GXL/scipy,scipy/scipy,scipy/scipy,Eric89GXL/scipy,tylerjereddy/scipy,pizzathief/scipy,apbard/scipy,pizzathief/scipy,pbrod/scipy,arokem/scipy,jor-/scipy,rgommers/scipy,befelix/scipy,grlee77/scipy,WarrenWeckesser/scipy,vigna/scipy,vigna/scipy,josephcslater/scipy,perimosocordiae/scipy,jamestwebber/scipy,zerothi/scipy,befelix/scipy,gertingold/scipy,scipy/scipy,tylerjereddy/scipy,aeklant/scipy,jor-/scipy,Stefan-Endres/scipy,apbard/scipy,WarrenWeckesser/scipy,WarrenWeckesser/scipy,jjhelmus/scipy,andyfaff/scipy,pizzathief/scipy,pizzathief/scipy,e-q/scipy,grlee77/scipy,mdhaber/scipy,anntzer/scipy | scipy/special/_precompute/expn_asy.py | scipy/special/_precompute/expn_asy.py | """Precompute the polynomials for the asymptotic expansion of the
generalized exponential integral.
Sources
-------
[1] NIST, Digital Library of Mathematical Functions,
http://dlmf.nist.gov/8.20#ii
"""
from __future__ import division, print_function, absolute_import
import os
import warnings
try:
# Can remove when sympy #11255 is resolved; see
# https://github.com/sympy/sympy/issues/11255
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
import sympy
from sympy import Poly
x = sympy.symbols('x')
except ImportError:
pass
def generate_A(K):
    """Return the list [A_0, ..., A_K] of sympy polynomials built by the
    recurrence

        A_{k+1}(x) = (1 - 2*k*x) * A_k(x) + x*(x + 1) * A_k'(x),   A_0 = 1.

    These are the coefficient polynomials for the asymptotic expansion of
    the generalized exponential integral (DLMF 8.20(ii), cited in the
    module docstring).  The result has K + 1 entries.
    """
    A = [Poly(1, x)]
    for k in range(K):
        A.append(Poly(1 - 2*k*x, x)*A[k] + Poly(x*(x + 1))*A[k].diff())
    return A
WARNING = """\
/* This file was automatically generated by _precompute/expn_asy.py.
* Do not edit it manually!
*/
"""
def main():
    """Regenerate ../cephes/expn.h with the first 13 expansion polynomials."""
    print(__doc__)
    fn = os.path.join('..', 'cephes', 'expn.h')

    K = 12
    A = generate_A(K)

    # Write to a temporary file first and rename at the end, so a partially
    # generated header never replaces the existing one.
    with open(fn + '.new', 'w') as f:
        f.write(WARNING)
        f.write("#define nA {}\n".format(len(A)))
        for k, Ak in enumerate(A):
            # Emit each polynomial's coefficients evaluated to 18 digits.
            tmp = ', '.join([str(x.evalf(18)) for x in Ak.coeffs()])
            f.write("double A{}[] = {{{}}};\n".format(k, tmp))
        tmp = ", ".join(["A{}".format(k) for k in range(K + 1)])
        f.write("double *A[] = {{{}}};\n".format(tmp))
        tmp = ", ".join([str(Ak.degree()) for Ak in A])
        f.write("int Adegs[] = {{{}}};\n".format(tmp))
    os.rename(fn + '.new', fn)
if __name__ == "__main__":
main()
| """Precompute the polynomials for the asymptotic expansion of the
generalized exponential integral.
Sources
-------
[1] NIST, Digital Library of Mathematical Functions,
http://dlmf.nist.gov/8.20#ii
"""
from __future__ import division, print_function, absolute_import
import os
import warnings
try:
# Can remove when sympy #11255 is resolved; see
# https://github.com/sympy/sympy/issues/11255
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
import sympy
from sympy import Poly
x = sympy.symbols('x')
except ImportError:
pass
def generate_A(K):
A = [Poly(1, x)]
for k in range(K):
A.append(Poly(1 - 2*k*x, x)*A[k] + Poly(x*(x + 1))*A[k].diff())
return A
WARNING = """\
/* This file was automatically generated by {}.
* Do not edit it manually!
*/
""".format(os.path.relpath(__file__, '..'))
def main():
print(__doc__)
fn = os.path.join('..', 'cephes', 'expn.h')
K = 12
A = generate_A(K)
with open(fn + '.new', 'w') as f:
f.write(WARNING)
f.write("#define nA {}\n".format(len(A)))
for k, Ak in enumerate(A):
tmp = ', '.join([str(x.evalf(18)) for x in Ak.coeffs()])
f.write("double A{}[] = {{{}}};\n".format(k, tmp))
tmp = ", ".join(["A{}".format(k) for k in range(K + 1)])
f.write("double *A[] = {{{}}};\n".format(tmp))
tmp = ", ".join([str(Ak.degree()) for Ak in A])
f.write("int Adegs[] = {{{}}};\n".format(tmp))
os.rename(fn + '.new', fn)
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
f3d4c9a485abe9b92894fadc4d4db7f075b4d4e1 | Update python.py | vadimkantorov/wigwam | wigs/python.py | wigs/python.py | class python(Wig):
git_uri = 'https://github.com/python/cpython'
tarball_uri = 'https://github.com/python/cpython/archive/v$RELEASE_VERSION$.tar.gz'
last_release_version = 'v2.7.13'
dependencies = ['openssl', 'ncurses', 'readline', 'zlib', 'bz2']
| class python(Wig):
git_uri = 'https://github.com/python/cpython'
tarball_uri = 'https://github.com/python/cpython/archive/v$RELEASE_VERSION$.tar.gz'
last_release_version = 'v2.7.13'
dependencies = ['openssl', 'ncurses']
| mit | Python |
bd8a31c675e86f52db2fe473be22e4b497ab5bd1 | test on kvdb key count to get how many diary entries | bambooom/OMOOC2py,bambooom/OMOOC2py | _src/om2py5w/5wex0/index.wsgi | _src/om2py5w/5wex0/index.wsgi | # -*- coding: utf-8 -*-
#!/usr/bin/env python
# author: bambooom
'''
MyDiary Web Application
Open web browser and access http://bambooomdiary.sinaapp.com/
You can read the old diary and input new diary
'''
from bottle import Bottle, request, route, run, template
import sae
import sae.kvdb
from time import localtime, strftime
app = Bottle()
kv = sae.kvdb.Client()
log = []
def read_diary_all(count):
    """Collect every stored diary entry into the module-level ``log`` list.

    ``write_diary`` stores each entry under key ``str(i)`` starting at 1 and
    returns the next free index, so ``count`` is that next free index and the
    stored keys are "1" .. str(count - 1).

    Bug fix: the previous version did ``for i in count`` (TypeError — an int
    is not iterable) and fetched ``kv.get(count)`` with the same non-string
    key on every pass instead of iterating over the stored keys.
    """
    for i in range(1, count):
        # kvdb keys are strings (see write_diary); fetch in insertion order.
        log.append(kv.get(str(i)))
    return log
def write_diary(newdiary,count):
    """Store one diary entry under key str(count) and return the next index.

    Each stored value is a dict {'time': <formatted local time>,
    'diary': <entry text>}.  Callers thread the returned counter into the
    next write_diary call.
    """
    # key must be str()
    countkey = str(count)
    edit_time = strftime("%Y %b %d %H:%M:%S", localtime())
    diary = {'time':edit_time, 'diary':newdiary}
    kv.set(countkey,diary)
    count += 1
    return count
# f = open('diary log.txt','a+')
# f.write('%s %s\n' % (edit_time, newdiary))
# f.close()
count = write_diary("hello world",1)
count = write_diary("hello world again",count)
print count
#write_diary("hello world 2","hh2")
#print read_diary_bykey(str(2))
#print read_diary("taghh2")
#@app.route('/')
#def start():
# log = read_diary()
# return template("diarysae", diarylog=log)
#@app.route('/', method='POST')
#def input_new():
# newdiary = request.forms.get('newdiary')
# write_diary(newdiary)
# log = read_diary()
# return template("diarysae", diarylog=log)
#application = sae.create_wsgi_app(app) | # -*- coding: utf-8 -*-
#!/usr/bin/env python
# author: bambooom
'''
MyDiary Web Application
Open web browser and access http://bambooomdiary.sinaapp.com/
You can read the old diary and input new diary
'''
from bottle import Bottle, request, route, run, template
import sae
import sae.kvdb
from time import localtime, strftime
app = Bottle()
kv = sae.kvdb.Client()
log = []
def read_diary_bykey(count):
# f = open('diary log.txt','a+')
# return f.read()
log.append(kv.get(count))
return log
def write_diary(newdiary,count):
# key must be str()
count = str(count)
edit_time = strftime("%Y %b %d %H:%M:%S", localtime())
diary = {'time':edit_time, 'diary':newdiary}
kv.set(count,diary)
# f = open('diary log.txt','a+')
# f.write('%s %s\n' % (edit_time, newdiary))
# f.close()
write_diary("hello world",1)
write_diary("hello world again",2)
#write_diary("hello world 2","hh2")
print read_diary_bykey(str(1))
print read_diary_bykey(str(2))
#print read_diary("taghh2")
#@app.route('/')
#def start():
# log = read_diary()
# return template("diarysae", diarylog=log)
#@app.route('/', method='POST')
#def input_new():
# newdiary = request.forms.get('newdiary')
# write_diary(newdiary)
# log = read_diary()
# return template("diarysae", diarylog=log)
#application = sae.create_wsgi_app(app) | mit | Python |
e7e6cc998ab8ed5d1d1330c7f13c10e84826e131 | Add the new fields in the alembic script | kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud,kushaldas/autocloud | alembic/versions/1868e29e8306_add_compose_details_and_compose_job_.py | alembic/versions/1868e29e8306_add_compose_details_and_compose_job_.py | """add compose details and compose job details table
Revision ID: 1868e29e8306
Revises: 55932e5d6b3f
Create Date: 2016-04-26 18:28:54.865917
"""
# revision identifiers, used by Alembic.
revision = '1868e29e8306'
down_revision = '55932e5d6b3f'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the compose_details and compose_job_details tables.

    Fix: the previous revision of this script was missing commas after
    several ``sa.Column(...)`` arguments, which made the module a
    SyntaxError and the migration unrunnable.
    """
    op.create_table(
        'compose_details',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('date', sa.DateTime(), nullable=False),
        sa.Column('compose_id', sa.String(length=255), nullable=False),
        sa.Column('respin', sa.String(length=255), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('passed', sa.Integer(), nullable=True, default=0),
        sa.Column('failed', sa.Integer(), nullable=True, default=0),
        sa.Column('status', sa.String(length=255), nullable=True),
        sa.Column('created_on', sa.DateTime(), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=False),
        sa.Column('location', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table(
        'compose_job_details',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('arch', sa.String(length=255), nullable=True),
        sa.Column('compose_id', sa.String(length=255), nullable=False),
        sa.Column('created_on', sa.DateTime(), nullable=False),
        sa.Column('family', sa.String(length=255), nullable=True),
        sa.Column('image_url', sa.String(length=255), nullable=False),
        sa.Column('last_updated', sa.DateTime(), nullable=True),
        sa.Column('output', sa.Text(), nullable=False),
        sa.Column('release', sa.String(length=255), nullable=True),
        sa.Column('status', sa.String(length=255), nullable=False),
        sa.Column('subvariant', sa.String(length=255), nullable=False),
        sa.Column('user', sa.String(length=255), nullable=False),
        sa.Column('image_format', sa.String(length=255), nullable=False),
        sa.Column('image_type', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
def downgrade():
op.drop_table('compose_details')
os.drop_table('compose_job_details')
pass
| """add compose details and compose job details table
Revision ID: 1868e29e8306
Revises: 55932e5d6b3f
Create Date: 2016-04-26 18:28:54.865917
"""
# revision identifiers, used by Alembic.
revision = '1868e29e8306'
down_revision = '55932e5d6b3f'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('compose_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('date', sa.DateTime(), nullable=False),
sa.Column('compose_id', sa.String(length=255), nullable=False),
sa.Column('type', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
'compose_job_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('arch', sa.String(length=255), nullable=True)
sa.Column('compose_id', sa.String(length=255), nullable=False),
sa.Column('created_on', sa.DateTime(), nullable=False),
sa.Column('family', sa.String(length=255), nullable=True)
sa.Column('image_url', sa.String(length=255), nullable=False),
sa.Column('last_updated', sa.DateTime(), nullable=True),
sa.Column('output', sa.Text(), nullable=False),
sa.Column('release', sa.String(length=255), nullable=True)
sa.Column('status', sa.String(length=255), nullable=False)
sa.Column('subvariant', sa.String(length=255), nullable=False)
sa.Column('user', sa.String(length=255), nullable=False),
sa.Column('image_format', sa.String(length=255), nullable=False),
sa.Column('image_type', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
pass
def downgrade():
op.drop_table('compose_details')
os.drop_table('compose_job_details')
pass
| agpl-3.0 | Python |
6f2adf06d9f0385305472f200af06d869db3e9b3 | Create some base views which call manager's synchronise function every time data is retrieved | remarkablerocket/django-vend,remarkablerocket/django-vend | django_vend/core/views.py | django_vend/core/views.py | from django.contrib.auth.mixins import LoginRequiredMixin
class VendAuthMixin(LoginRequiredMixin):
def get_queryset(self):
retailer = self.request.user.vendprofile.retailer
return self.model.objects.filter(retailer=retailer)
class VendAuthSingleObjectSyncMixin(VendAuthMixin):
def get_object(self):
retailer = self.request.user.vendprofile.retailer
pk = self.kwargs.get(self.pk_url_kwarg)
self.model.objects.synchronise(retailer, pk)
return super(VendAuthSingleObjectSyncMixin, self).get_object()
class VendAuthCollectionSyncMixin(VendAuthMixin):
def get_queryset(self):
retailer = self.request.user.vendprofile.retailer
self.model.objects.synchronise(retailer)
return super(VendAuthCollectionSyncMixin, self).get_queryset()
| from django.shortcuts import render
# Create your views here.
| bsd-3-clause | Python |
a406b42ab8f1cd2c7b53a3cbacc30d7fd5a0e820 | Update to 0.0.96 | KerkhoffTechnologies/django-connectwise,KerkhoffTechnologies/django-connectwise | djconnectwise/__init__.py | djconnectwise/__init__.py | # -*- coding: utf-8 -*-
VERSION = (0, 0, 96, 'alpha')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
| # -*- coding: utf-8 -*-
VERSION = (0, 0, 95, 'alpha')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
| mit | Python |
b78ea2493b4fb6bea60ceb48237394b99ee8bcdd | fix unittest import in accessors | lixun910/pysal,hasecbinusr/pysal,TaylorOshan/pysal,jlaura/pysal,ljwolf/pysal_core,hasecbinusr/pysal,ljwolf/pysal,TaylorOshan/pysal,lixun910/pysal,ljwolf/pysal,pysal/pysal,hasecbinusr/pysal,ljwolf/pysal,sjsrey/pysal,pastephens/pysal,ljwolf/pysal_core,sjsrey/pysal_core,sjsrey/pysal_core,jlaura/pysal,pastephens/pysal,pedrovma/pysal,TaylorOshan/pysal,weikang9009/pysal,lixun910/pysal,sjsrey/pysal_core,jlaura/pysal,lanselin/pysal,pastephens/pysal,ljwolf/pysal_core | pysal/contrib/geotable/ops/tests/test_accessors.py | pysal/contrib/geotable/ops/tests/test_accessors.py | from ....pdio import read_files as rf
from .._accessors import __all__ as to_test
import unittest as ut
class Test_Accessors(ut.TestCase):
def test_area(self):
raise
def test_bbox(self):
raise
def test_bounding_box(self):
raise
def test_centroid(self):
raise
def test_holes(self):
raise
def test_len(self):
raise
def test_parts(self):
raise
def test_perimeter(self):
raise
def test_segments(self):
raise
def test_vertices(self):
raise
| from ....pdio import read_files as rf
from .._accessors import __all__ as to_test
from unittest as ut
class Test_Accessors(ut.TestCase):
def test_area(self):
raise
def test_bbox(self):
raise
def test_bounding_box(self):
raise
def test_centroid(self):
raise
def test_holes(self):
raise
def test_len(self):
raise
def test_parts(self):
raise
def test_perimeter(self):
raise
def test_segments(self):
raise
def test_vertices(self):
raise
| bsd-3-clause | Python |
a1590b4b91bab0174edb99e9b54d8a47e008ebc3 | update version | globocom/GloboNetworkAPI-client-python | networkapiclient/__init__.py | networkapiclient/__init__.py | # -*- coding:utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MAJOR_VERSION = '0'
MINOR_VERSION = '6'
PATCH_VERSION = '20'
VERSION = '.'.join((MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION,))
| # -*- coding:utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
MAJOR_VERSION = '0'
MINOR_VERSION = '6'
PATCH_VERSION = '19'
VERSION = '.'.join((MAJOR_VERSION, MINOR_VERSION, PATCH_VERSION,))
| apache-2.0 | Python |
a78bc40c8f3548477ea0c42a92214132c380792a | resolve traceback on add followers and channels | dfang/odoo,hip-odoo/odoo,hip-odoo/odoo,ygol/odoo,ygol/odoo,dfang/odoo,dfang/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,hip-odoo/odoo,hip-odoo/odoo,ygol/odoo,dfang/odoo,dfang/odoo,hip-odoo/odoo,hip-odoo/odoo,dfang/odoo | addons/calendar/wizard/mail_invite.py | addons/calendar/wizard/mail_invite.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
from odoo.addons.calendar.models.calendar import get_real_ids
class MailInvite(models.TransientModel):
_inherit = 'mail.wizard.invite'
@api.model
def default_get(self, fields):
""" In case someone clicked on 'invite others' wizard in the followers widget, transform virtual ids in real ids """
if 'default_res_id' in self._context:
self = self.with_context(default_res_id=get_real_ids(self._context['default_res_id']))
result = super(MailInvite, self).default_get(fields)
if 'res_id' in result:
result['res_id'] = get_real_ids(result['res_id'])
return result
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
from odoo.addons.calendar.models.calendar import get_real_ids
class MailInvite(models.TransientModel):
_inherit = 'mail.wizard.invite'
def default_get(self, fields):
""" In case someone clicked on 'invite others' wizard in the followers widget, transform virtual ids in real ids """
if 'default_res_id' in self._context:
self = self.with_context(default_res_id=get_real_ids(self._context['default_res_id']))
result = super(MailInvite, self).default_get(fields)
if 'res_id' in result:
result['res_id'] = get_real_ids(result['res_id'])
return result
| agpl-3.0 | Python |
64b3cbae56f68ce9786ca8553e9ed2565102368a | clean up resolve logic using collections.Iterable (sapling split of 0848c2ad8b375e6a93f40eb9e6a24f5e4c85495e) | manasapte/pants,fkorotkov/pants,manasapte/pants,dturner-tw/pants,TansyArron/pants,dturner-tw/pants,areitz/pants,pombredanne/pants,tdyas/pants,slyphon/pants,digwanderlust/pants,qma/pants,benjyw/pants,dgomez10/xanon,foursquare/pants,scode/pants,landism/pants,mateor/pants,15Dkatz/pants,twitter/pants,UnrememberMe/pants,tejal29/pants,dbentley/pants,baroquebobcat/pants,qma/pants,foursquare/pants,landism/pants,ity/pants,pombredanne/pants,landism/pants,sid-kap/pants,kslundberg/pants,slyphon/pants,kwlzn/pants,kslundberg/pants,ericzundel/pants,jessrosenfield/pants,TansyArron/pants,kslundberg/pants,twitter/pants,kwlzn/pants,peiyuwang/pants,fkorotkov/pants,jtrobec/pants,jtrobec/pants,areitz/pants,megaserg/pants,peiyuwang/pants,ity/pants,dturner-tw/pants,sid-kap/pants,di0spyr0s/pants,kslundberg/pants,mateor/pants,lahosken/pants,ity/pants,ericzundel/pants,slyphon/pants,Gabriel439/pants,pantsbuild/pants,sameerparekh/pants,peiyuwang/pants,Gabriel439/pants,ericzundel/pants,pgroudas/pants,tdyas/pants,wisechengyi/pants,foursquare/pants,dgomez10/xanon,baroquebobcat/pants,gmalmquist/pants,baroquebobcat/pants,pantsbuild/pants,twitter/pants,kwlzn/pants,tejal29/pants,tdyas/pants,ericzundel/pants,jessrosenfield/pants,pantsbuild/pants,ity/pants,scode/pants,jtrobec/pants,square/pants,jtrobec/pants,qma/pants,laurentgo/pants,foursquare/pants,tdyas/pants,sameerparekh/pants,kslundberg/pants,qma/pants,fkorotkov/pants,dbentley/pants,dturner-tw/pants,megaserg/pants,digwanderlust/pants,Gabriel439/pants,mateor/pants,baroquebobcat/pants,Gabriel439/pants,dturner-tw/pants,manasapte/pants,pombredanne/pants,megaserg/pants,kwlzn/pants,laurentgo/pants,megaserg/pants,UnrememberMe/pants,pombredanne/pants,lahosken/pants,kwlzn/pants,digwanderlust/pants,sameerparekh/pants,square/pants,mateor/pants,di0spyr0s/pants,15Dkatz/pants,di0spyr0s/pants,di0spyr0s/pants,db
entley/pants,sameerparekh/pants,baroquebobcat/pants,jtrobec/pants,mateor/pants,wisechengyi/pants,dbentley/pants,digwanderlust/pants,pombredanne/pants,pgroudas/pants,Gabriel439/pants,sid-kap/pants,sid-kap/pants,wisechengyi/pants,dgomez10/xanon,areitz/pants,ericzundel/pants,15Dkatz/pants,wisechengyi/pants,lahosken/pants,foursquare/pants,15Dkatz/pants,sameerparekh/pants,jsirois/pants,benjyw/pants,landism/pants,jessrosenfield/pants,dbentley/pants,cevaris/pants,sid-kap/pants,kslundberg/pants,fkorotkov/pants,manasapte/pants,mateor/pants,laurentgo/pants,twitter/pants,sid-kap/pants,pombredanne/pants,baroquebobcat/pants,15Dkatz/pants,Gabriel439/pants,15Dkatz/pants,scode/pants,mateor/pants,lahosken/pants,cevaris/pants,cevaris/pants,tejal29/pants,scode/pants,dgomez10/xanon,manasapte/pants,qma/pants,wisechengyi/pants,scode/pants,fkorotkov/pants,foursquare/pants,dbentley/pants,areitz/pants,landism/pants,foursquare/pants,jessrosenfield/pants,megaserg/pants,twitter/pants,areitz/pants,dgomez10/xanon,landism/pants,UnrememberMe/pants,sameerparekh/pants,UnrememberMe/pants,peiyuwang/pants,cevaris/pants,foursquare/pants,jsirois/pants,megaserg/pants,Gabriel439/pants,slyphon/pants,laurentgo/pants,areitz/pants,landism/pants,peiyuwang/pants,tdyas/pants,tdyas/pants,twitter/pants,wisechengyi/pants,jsirois/pants,dgomez10/xanon,cevaris/pants,jessrosenfield/pants,tdyas/pants,jessrosenfield/pants,lahosken/pants,gmalmquist/pants,TansyArron/pants,fkorotkov/pants,pantsbuild/pants,dbentley/pants,tejal29/pants,peiyuwang/pants,mateor/pants,scode/pants,pantsbuild/pants,wisechengyi/pants,dgomez10/xanon,pgroudas/pants,jtrobec/pants,TansyArron/pants,di0spyr0s/pants,manasapte/pants,landism/pants,dgomez10/xanon,sameerparekh/pants,pgroudas/pants,ericzundel/pants,megaserg/pants,UnrememberMe/pants,qma/pants,benjyw/pants,pantsbuild/pants,ity/pants,digwanderlust/pants,UnrememberMe/pants,fkorotkov/pants,peiyuwang/pants,sid-kap/pants,gmalmquist/pants,gmalmquist/pants,baroquebobcat/pants,twitter/pants,benjyw/pants,d
i0spyr0s/pants,kwlzn/pants,cevaris/pants,dgomez10/xanon,UnrememberMe/pants,tdyas/pants,TansyArron/pants,lahosken/pants,kslundberg/pants,pombredanne/pants,twitter/pants,15Dkatz/pants,di0spyr0s/pants,twitter/pants,gmalmquist/pants,gmalmquist/pants,baroquebobcat/pants,TansyArron/pants,dturner-tw/pants,lahosken/pants,tdyas/pants,ity/pants,benjyw/pants,baroquebobcat/pants,square/pants,benjyw/pants,TansyArron/pants,laurentgo/pants,pgroudas/pants,ericzundel/pants,dgomez10/xanon,jtrobec/pants,ericzundel/pants,slyphon/pants,qma/pants,UnrememberMe/pants,dturner-tw/pants,laurentgo/pants,benjyw/pants,slyphon/pants,15Dkatz/pants,pgroudas/pants,ity/pants,dgomez10/xanon,areitz/pants,cevaris/pants,manasapte/pants,square/pants,UnrememberMe/pants,scode/pants,gmalmquist/pants,slyphon/pants,foursquare/pants,lahosken/pants,fkorotkov/pants,wisechengyi/pants,kwlzn/pants,wisechengyi/pants,digwanderlust/pants,tejal29/pants,peiyuwang/pants,jessrosenfield/pants,pantsbuild/pants | src/python/twitter/pants/targets/util.py | src/python/twitter/pants/targets/util.py | # ==================================================================================================
# Copyright 2013 Foursquare Labs, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
__author__ = 'Ryan Williams'
from collections import Iterable
from twitter.common.lang import Compatibility
from twitter.pants.targets.pants_target import Pants
def resolve(arg, clazz=Pants):
"""Wraps strings in Pants() targets, for BUILD file convenience.
- single string literal gets wrapped in Pants() target
- single object is left alone
- list of strings and other miscellaneous objects gets its strings wrapped in Pants() targets
"""
if isinstance(arg, Compatibility.string):
# Strings get wrapped in a given class (default Pants).
return clazz(arg)
elif isinstance(arg, Iterable):
# If arg is iterable, recurse on its elements.
return [resolve(dependency, clazz=clazz) for dependency in arg]
else:
# NOTE(ryan): if arg is a non-iterable object, just return it. Ideally we'd check isinstance(arg, Target) here, but
# some things that Targets depend on are not themselves subclasses of Target, notably JarDependencies.
return arg
| # ==================================================================================================
# Copyright 2013 Foursquare Labs, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
__author__ = 'Ryan Williams'
from twitter.common.lang import Compatibility
from twitter.pants.targets.pants_target import Pants
def resolve(arg, clazz=Pants):
"""Wraps strings in Pants() targets, for BUILD file convenience.
- single string literal gets wrapped in Pants() target
- single object is left alone
- list of strings and other miscellaneous objects gets its strings wrapped in Pants() targets
"""
if isinstance(arg, Compatibility.string):
# Strings get wrapped in a given class (default Pants).
return clazz(arg)
# NOTE(ryan): if arg is a non-iterable object, just return it. Ideally we'd check isinstance(arg, Target) here, but
# some things that Targets depend on are not themselves subclasses of Target, notably JarDependencies.
try:
[ e for e in arg ]
except TypeError:
return arg
# If arg is in fact iterable, recurse on its elements.
return [resolve(dependency, clazz=clazz) for dependency in arg]
| apache-2.0 | Python |
555cda8a6d9d696b8999a7a728111edd36835a73 | Fix unit tests [WAL-1670] | opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur | src/waldur_mastermind/marketplace/views.py | src/waldur_mastermind/marketplace/views.py | from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from waldur_core.core import views as core_views
from waldur_core.structure import permissions as structure_permissions
from . import serializers, models, filters
class BaseMarketplaceView(core_views.ActionsViewSet):
lookup_field = 'uuid'
filter_backends = (DjangoFilterBackend,)
update_permissions = \
partial_update_permissions = \
destroy_permissions = \
[structure_permissions.is_owner]
class ServiceProviderViewSet(BaseMarketplaceView):
queryset = models.ServiceProvider.objects.all()
serializer_class = serializers.ServiceProviderSerializer
filter_class = filters.ServiceProviderFilter
class CategoryViewSet(core_views.ActionsViewSet):
queryset = models.Category.objects.all()
serializer_class = serializers.CategorySerializer
lookup_field = 'uuid'
filter_backends = (DjangoFilterBackend,)
create_permissions = \
update_permissions = \
partial_update_permissions = \
destroy_permissions = \
[structure_permissions.is_staff]
class OfferingViewSet(BaseMarketplaceView):
queryset = models.Offering.objects.all()
serializer_class = serializers.OfferingSerializer
filter_class = filters.OfferingFilter
class ScreenshotViewSet(BaseMarketplaceView):
queryset = models.Screenshots.objects.all()
serializer_class = serializers.ScreenshotSerializer
filter_class = filters.ScreenshotFilter
| from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from waldur_core.core import views as core_views
from waldur_core.structure import filters as structure_filters
from waldur_core.structure import permissions as structure_permissions
from . import serializers, models, filters
class BaseMarketplaceView(core_views.ActionsViewSet):
lookup_field = 'uuid'
filter_backends = (DjangoFilterBackend,)
update_permissions = \
partial_update_permissions = \
destroy_permissions = \
[structure_permissions.is_owner]
class ServiceProviderViewSet(BaseMarketplaceView):
queryset = models.ServiceProvider.objects.all()
serializer_class = serializers.ServiceProviderSerializer
filter_class = filters.ServiceProviderFilter
class CategoryViewSet(core_views.ActionsViewSet):
queryset = models.Category.objects.all()
serializer_class = serializers.CategorySerializer
lookup_field = 'uuid'
filter_backends = (DjangoFilterBackend,)
create_permissions = \
update_permissions = \
partial_update_permissions = \
destroy_permissions = \
[structure_permissions.is_staff]
class OfferingViewSet(BaseMarketplaceView):
queryset = models.Offering.objects.all()
serializer_class = serializers.OfferingSerializer
lookup_field = 'uuid'
filter_backends = (structure_filters.GenericRoleFilter, DjangoFilterBackend)
filter_class = filters.OfferingFilter
class ScreenshotViewSet(BaseMarketplaceView):
queryset = models.Screenshots.objects.all()
serializer_class = serializers.ScreenshotSerializer
filter_class = filters.ScreenshotFilter
| mit | Python |
4e09732945d7d81e48ac43be7e51637ba27d3a92 | Update help messages | soslan/passgen | src/passgen.py | src/passgen.py | import string
import random
import argparse
def passgen(length=12):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser(
description="Generate strong random password."
)
parser.add_argument("-l", "--length",
help="the number of characters to generate "
"for each password",
type=int, default=12)
parser.add_argument("-n", "--number",
help="how many passwords to generate",
type=int, default=10)
args = parser.parse_args()
for _ in range(args.number):
print passgen(args.length)
| import string
import random
import argparse
def passgen(length=12):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("-l", "--length",
help="the number of characters to generate",
type=int, default=12)
parser.add_argument("-n", "--number",
help="how many passwords to generate",
type=int, default=10)
args = parser.parse_args()
for _ in range(args.number):
print passgen(args.length)
| mit | Python |
b38f465e512f9b7e79935c156c60ef56d6122387 | Order HTTP methods in constant. | playpauseandstop/aiohttp-middlewares,playpauseandstop/aiohttp-middlewares | aiohttp_middlewares/constants.py | aiohttp_middlewares/constants.py | """
=============================
aiohttp_middlewares.constants
=============================
Collection of constants for ``aiohttp_middlewares`` project.
"""
#: Set of idempotent HTTP methods
IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'})
#: Set of non-idempotent HTTP methods
NON_IDEMPOTENT_METHODS = frozenset({'DELETE', 'PATCH', 'POST', 'PUT'})
| """
=============================
aiohttp_middlewares.constants
=============================
Collection of constants for ``aiohttp_middlewares`` project.
"""
#: Set of idempotent HTTP methods
IDEMPOTENT_METHODS = frozenset({'GET', 'HEAD', 'OPTIONS', 'TRACE'})
#: Set of non-idempotent HTTP methods
NON_IDEMPOTENT_METHODS = frozenset({'POST', 'PUT', 'PATCH', 'DELETE'})
| bsd-3-clause | Python |
d2ec1eca12ee2f6356a96009e65f60b1975f9ecd | Fix typo in message | trungdong/cookiecutter-django,ryankanno/cookiecutter-django,ryankanno/cookiecutter-django,pydanny/cookiecutter-django,trungdong/cookiecutter-django,trungdong/cookiecutter-django,pydanny/cookiecutter-django,pydanny/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,pydanny/cookiecutter-django,ryankanno/cookiecutter-django | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py | from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.generic import DetailView, RedirectView, UpdateView
User = get_user_model()
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
slug_field = "username"
slug_url_kwarg = "username"
user_detail_view = UserDetailView.as_view()
class UserUpdateView(LoginRequiredMixin, UpdateView):
model = User
fields = ["name"]
def get_success_url(self):
return reverse("users:detail", kwargs={"username": self.request.user.username})
def get_object(self):
return User.objects.get(username=self.request.user.username)
def form_valid(self, form):
messages.add_message(
self.request, messages.INFO, _("Information successfully updated")
)
return super().form_valid(form)
user_update_view = UserUpdateView.as_view()
class UserRedirectView(LoginRequiredMixin, RedirectView):
permanent = False
def get_redirect_url(self):
return reverse("users:detail", kwargs={"username": self.request.user.username})
user_redirect_view = UserRedirectView.as_view()
| from django.contrib import messages
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.views.generic import DetailView, RedirectView, UpdateView
User = get_user_model()
class UserDetailView(LoginRequiredMixin, DetailView):
model = User
slug_field = "username"
slug_url_kwarg = "username"
user_detail_view = UserDetailView.as_view()
class UserUpdateView(LoginRequiredMixin, UpdateView):
model = User
fields = ["name"]
def get_success_url(self):
return reverse("users:detail", kwargs={"username": self.request.user.username})
def get_object(self):
return User.objects.get(username=self.request.user.username)
def form_valid(self, form):
messages.add_message(
self.request, messages.INFO, _("Infos successfully updated")
)
return super().form_valid(form)
user_update_view = UserUpdateView.as_view()
class UserRedirectView(LoginRequiredMixin, RedirectView):
permanent = False
def get_redirect_url(self):
return reverse("users:detail", kwargs={"username": self.request.user.username})
user_redirect_view = UserRedirectView.as_view()
| bsd-3-clause | Python |
fbb2c05aef76c02094c13f5edeaecd9b7428ff11 | Update UI preferences model (dict) | Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend | alignak_backend/models/uipref.py | alignak_backend/models/uipref.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'dict',
'ui': {
'title': "Preference's dictionary",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Resource information of host
"""
def get_name():
"""
Get name of this resource
:return: name of this resource
:rtype: str
"""
return 'uipref'
def get_schema():
"""
Schema structure of this resource
:return: schema dictionnary
:rtype: dict
"""
return {
'allow_unknown': True,
'schema': {
'type': {
'type': 'string',
'ui': {
'title': "Preference's type",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'user': {
'type': 'string',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': ''
},
'data': {
'type': 'list',
'ui': {
'title': "User name",
'visible': True,
'orderable': True,
'searchable': True,
"format": None
},
'default': []
},
}
}
| agpl-3.0 | Python |
5e2e06dcdc973663cba0677c185e5efdd4a01b8c | Use lxml to pretty-print cached XML. minidom inserts too many spaces! | redtoad/python-amazon-product-api,redtoad/python-amazon-product-api,redtoad/python-amazon-product-api | amazonproduct/contrib/caching.py | amazonproduct/contrib/caching.py | import os
import tempfile
from lxml import etree
try: # make it python2.4 compatible!
from hashlib import md5
except ImportError: # pragma: no cover
from md5 import new as md5
from amazonproduct.api import API
DEFAULT_CACHE_DIR = tempfile.mkdtemp(prefix='amzn_')
class ResponseCachingAPI (API):
"""
This API stores each response from Amazon in an XML file and uses these for
subsequent requests. File are name with a hash based on submitted parameters
in URL (excluding Timestamp and Signature).
Using this class is an excellent idea during development!
This class is based on code by Dmitry Chaplinsky
https://gist.github.com/657174
"""
def __init__(self, access_key_id, secret_access_key, locale,
cachedir=DEFAULT_CACHE_DIR, **kwargs):
"""
:param cachedir: Path to directory containing cached responses.
"""
API.__init__(self, access_key_id, secret_access_key, locale, **kwargs)
self.cache = cachedir
if self.cache and not os.path.isdir(self.cache):
os.mkdir(self.cache)
def _fetch(self, url):
if self.cache:
path = os.path.join(self.cache, '%s.xml' % self.get_hash(url))
# if response was fetched previously, use that one
if os.path.isfile(path):
return open(path)
# fetch original response from Amazon
resp = API._fetch(self, url)
if self.cache:
fp = open(path, 'w+')
fp.write(etree.tostring(etree.parse(resp), pretty_print=True))
fp.seek(0)
return fp
return resp
@staticmethod
def get_hash(url):
"""
Calculate hash value for request based on URL.
"""
cachename = "&".join([chunk for chunk in url.split('&')
if chunk.find('Timestamp') != 0 and chunk.find('Signature') != 0])
return md5(cachename).hexdigest()
| import os
import tempfile
import xml.dom.minidom
try: # make it python2.4 compatible!
from hashlib import md5
except ImportError: # pragma: no cover
from md5 import new as md5
from amazonproduct.api import API
DEFAULT_CACHE_DIR = tempfile.mkdtemp(prefix='amzn_')
class ResponseCachingAPI (API):
"""
This API stores each response from Amazon in an XML file and uses these for
subsequent requests. File are name with a hash based on submitted parameters
in URL (excluding Timestamp and Signature).
Using this class is an excellent idea during development!
This class is based on code by Dmitry Chaplinsky
https://gist.github.com/657174
"""
def __init__(self, access_key_id, secret_access_key, locale,
cachedir=DEFAULT_CACHE_DIR, **kwargs):
"""
:param cachedir: Path to directory containing cached responses.
"""
API.__init__(self, access_key_id, secret_access_key, locale, **kwargs)
self.cache = cachedir
if self.cache and not os.path.isdir(self.cache):
os.mkdir(self.cache)
def _fetch(self, url):
if self.cache:
path = os.path.join(self.cache, '%s.xml' % self.get_hash(url))
# if response was fetched previously, use that one
if os.path.isfile(path):
return open(path)
# fetch original response from Amazon
resp = API._fetch(self, url)
if self.cache:
fp = open(path, 'w+')
node = xml.dom.minidom.parseString(resp.read())
fp.write(node.toprettyxml())
#from lxml import etree
#fp.write(etree.tostring(etree.parse(resp), pretty_print=True))
fp.seek(0)
return fp
return resp
@staticmethod
def get_hash(url):
"""
Calculate hash value for request based on URL.
"""
cachename = "&".join([chunk for chunk in url.split('&')
if chunk.find('Timestamp') != 0 and chunk.find('Signature') != 0])
return md5(cachename).hexdigest()
| bsd-3-clause | Python |
ad1e6e26184bcc7219ac715bbf435bfdd26aa458 | Fix indenting | nettitude/PoshC2,nettitude/PoshC2,nettitude/PoshC2,nettitude/PoshC2,nettitude/PoshC2,nettitude/PoshC2 | CookieDecrypter.py | CookieDecrypter.py | #!/usr/bin/python
from Colours import Colours
from Core import decrypt
from DB import get_keys
import os, sys, re
file = open(sys.argv[1], "r")
result = get_keys()
if result:
for line in file:
if re.search("SessionID", line):
for i in result:
try:
value = decrypt(i[0], line.split('=')[1])
print (Colours.GREEN + "Success with Key %s - %s" % (i[0],value))
except:
print (Colours.RED + "Failed with Key %s" % i[0])
| #!/usr/bin/python
from Colours import Colours
from Core import decrypt
from DB import get_keys
import os, sys, re
file = open(sys.argv[1], "r")
result = get_keys()
for line in file:
if re.search("SessionID", line):
if result:
for i in result:
try:
value = decrypt(i[0], line.split('=')[1])
print (Colours.GREEN + "Success with Key %s - %s" % (i[0],value))
except:
print (Colours.RED + "Failed with Key %s" % i[0])
| bsd-3-clause | Python |
53b9eff3ffc1768d3503021e7248351e24d59af7 | Fix test http server, change to echo back request body | chop-dbhi/django-webhooks,pombredanne/django-webhooks,pombredanne/django-webhooks,chop-dbhi/django-webhooks | tests/httpd.py | tests/httpd.py | import BaseHTTPServer
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
def do_POST(self):
content_type = self.headers.getheader('content-type')
content_length = int(self.headers.getheader('content-length'))
self.send_response(200)
self.send_header('Content-Type', content_type)
self.send_header('Content-Length', str(content_length))
self.end_headers()
self.wfile.write(self.rfile.read(content_length))
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('0.0.0.0', 8328), Handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
| import SimpleHTTPServer
import BaseHTTPServer
class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_POST(s):
s.send_response(200)
s.end_headers()
if __name__ == '__main__':
server_class = BaseHTTPServer.HTTPServer
httpd = server_class(('0.0.0.0', 8328), Handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.server_close()
| bsd-2-clause | Python |
698902a817ca081addc6cb71978c17cd8cf42a7b | fix error value output | zshimanchik/unconditioned-reflexes | NeuralNetwork/NeuralNetwork.py | NeuralNetwork/NeuralNetwork.py | from Layer import InputLayer, Layer, Readiness
class NeuralNetwork:
def __init__(self, shape):
self.layers = []
self.shape = shape
self.time = 0
self.input_layer = InputLayer(shape[0])
self.middle_layer = Layer(shape[1], self.input_layer.neurons)
self.input_layer.listeners.append(self.middle_layer)
self.output_layer = Layer(shape[2], self.middle_layer.neurons)
self.middle_layer.listeners.append(self.output_layer)
self.layers.append(self.input_layer)
self.layers.append(self.middle_layer)
self.layers.append(self.output_layer)
self._reset_layers_states()
def __len__(self):
return len(self.shape)
def __getitem__(self, i):
return self.layers[i]
def calculate(self, x):
"""
calculate vector x, if random value is set, add to result vector value (random()*2-1)*random_value
:param x: input vector
:param random_value: random range added to result vector
:return: result of network calculation
"""
self.input_layer.input_values = x
done = False
while not done:
done = True
for layer in self:
if layer.ready_to_calculate == Readiness.READY:
layer.calculate()
done = False
self._reset_layers_states()
return self.output_layer.get_output_values()
def _reset_layers_states(self):
for layer in self:
layer.reset_state()
def teach_by_sample(self, database, teach_value=0.5):
err = 0
for inp, out in database:
net_out = self.calculate(inp)
self.output_layer.teach_output_layer_by_sample(teach_value, out)
# teach middle layers
self.middle_layer.teach_middle_layer(teach_value)
for layer in self:
layer.commit_teach()
err += sum([abs(no-o) for no, o in zip(net_out, out)])
return err / len(database)
| from Layer import InputLayer, Layer, Readiness
class NeuralNetwork:
def __init__(self, shape):
self.layers = []
self.shape = shape
self.time = 0
self.input_layer = InputLayer(shape[0])
self.middle_layer = Layer(shape[1], self.input_layer.neurons)
self.input_layer.listeners.append(self.middle_layer)
self.output_layer = Layer(shape[2], self.middle_layer.neurons)
self.middle_layer.listeners.append(self.output_layer)
self.layers.append(self.input_layer)
self.layers.append(self.middle_layer)
self.layers.append(self.output_layer)
self._reset_layers_states()
def __len__(self):
return len(self.shape)
def __getitem__(self, i):
return self.layers[i]
def calculate(self, x):
"""
calculate vector x, if random value is set, add to result vector value (random()*2-1)*random_value
:param x: input vector
:param random_value: random range added to result vector
:return: result of network calculation
"""
self.input_layer.input_values = x
done = False
while not done:
done = True
for layer in self:
if layer.ready_to_calculate == Readiness.READY:
layer.calculate()
done = False
self._reset_layers_states()
return self.output_layer.get_output_values()
def _reset_layers_states(self):
for layer in self:
layer.reset_state()
def teach_by_sample(self, database, teach_value=0.5):
err = 0
for inp, out in database:
net_out = self.calculate(inp)
self.output_layer.teach_output_layer_by_sample(teach_value, out)
# teach middle layers
self.middle_layer.teach_middle_layer(teach_value)
for layer in self:
layer.commit_teach()
err += sum([abs(no-o) for no, o in zip(net_out, out)])
return err
| mit | Python |
b5235a67948da28fca6faf09337cb909c1f1825e | Read input | swank-rats/roboter-software | PythonTests/WebsocketClient.py | PythonTests/WebsocketClient.py | from ws4py.client.threadedclient import WebSocketClient
import Adafruit_BBIO.GPIO as GPIO
class DummyClient(WebSocketClient):
def opened(self):
#GPIO.setup("USR3", GPIO.OUT)
#GPIO.setup("USR2", GPIO.IN)
for i in range(0, 200, 25):
self.send("#" * i)
def closed(self, code, reason=None):
# GPIO.cleanup()
print "Closed down", code, reason
def received_message(self, m):
print m
if len(m) == 175:
self.close(reason='Bye bye')
if len(m) == 100:
GPIO.setup("USR3", GPIO.OUT)
GPIO.output("USR3", GPIO.HIGH)
GPIO.cleanup()
print "should be HIGH"
if __name__ == '__main__':
try:
ws = DummyClient('ws://echo.websocket.org', protocols=['http-only', 'chat'])
ws.connect()
ws.run_forever()
except KeyboardInterrupt:
ws.close() | from ws4py.client.threadedclient import WebSocketClient
import Adafruit_BBIO.GPIO as GPIO
class DummyClient(WebSocketClient):
def opened(self):
#GPIO.setup("USR3", GPIO.OUT)
#GPIO.setup("USR2", GPIO.IN)
for i in range(0, 200, 25):
self.send("#" * i)
def closed(self, code, reason=None):
# GPIO.cleanup()
print "Closed down", code, reason
def received_message(self, m):
print m
if len(m) == 175:
self.close(reason='Bye bye')
if len(m) == 100:
GPIO.setup("P8_10", GPIO.OUT)
GPIO.output("P8_10", GPIO.HIGH)
GPIO.cleanup()
if __name__ == '__main__':
try:
ws = DummyClient('ws://echo.websocket.org', protocols=['http-only', 'chat'])
ws.connect()
ws.run_forever()
except KeyboardInterrupt:
ws.close() | mit | Python |
6d91240f3416e85bf525e72c0151964206912b57 | Use https URLs for openstreetmap.org. The oauth endpoint must change soon, the rest is good practice | python-social-auth/social-core,python-social-auth/social-core | social_core/backends/openstreetmap.py | social_core/backends/openstreetmap.py | """
OpenStreetMap OAuth support.
This adds support for OpenStreetMap OAuth service. An application must be
registered first on OpenStreetMap and the settings
SOCIAL_AUTH_OPENSTREETMAP_KEY and SOCIAL_AUTH_OPENSTREETMAP_SECRET
must be defined with the corresponding values.
More info: https://wiki.openstreetmap.org/wiki/OAuth
"""
from xml.dom import minidom
from .oauth import BaseOAuth1
class OpenStreetMapOAuth(BaseOAuth1):
"""OpenStreetMap OAuth authentication backend"""
name = 'openstreetmap'
AUTHORIZATION_URL = 'https://www.openstreetmap.org/oauth/authorize'
REQUEST_TOKEN_URL = 'https://www.openstreetmap.org/oauth/request_token'
ACCESS_TOKEN_URL = 'https://www.openstreetmap.org/oauth/access_token'
EXTRA_DATA = [
('id', 'id'),
('avatar', 'avatar'),
('account_created', 'account_created')
]
def get_user_details(self, response):
"""Return user details from OpenStreetMap account"""
return {
'username': response['username'],
'email': '',
'fullname': '',
'first_name': '',
'last_name': ''
}
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
response = self.oauth_request(
access_token, 'https://api.openstreetmap.org/api/0.6/user/details'
)
try:
dom = minidom.parseString(response.content)
except ValueError:
return None
user = dom.getElementsByTagName('user')[0]
try:
avatar = dom.getElementsByTagName('img')[0].getAttribute('href')
except IndexError:
avatar = None
return {
'id': user.getAttribute('id'),
'username': user.getAttribute('display_name'),
'account_created': user.getAttribute('account_created'),
'avatar': avatar
}
| """
OpenStreetMap OAuth support.
This adds support for OpenStreetMap OAuth service. An application must be
registered first on OpenStreetMap and the settings
SOCIAL_AUTH_OPENSTREETMAP_KEY and SOCIAL_AUTH_OPENSTREETMAP_SECRET
must be defined with the corresponding values.
More info: http://wiki.openstreetmap.org/wiki/OAuth
"""
from xml.dom import minidom
from .oauth import BaseOAuth1
class OpenStreetMapOAuth(BaseOAuth1):
"""OpenStreetMap OAuth authentication backend"""
name = 'openstreetmap'
AUTHORIZATION_URL = 'http://www.openstreetmap.org/oauth/authorize'
REQUEST_TOKEN_URL = 'http://www.openstreetmap.org/oauth/request_token'
ACCESS_TOKEN_URL = 'http://www.openstreetmap.org/oauth/access_token'
EXTRA_DATA = [
('id', 'id'),
('avatar', 'avatar'),
('account_created', 'account_created')
]
def get_user_details(self, response):
"""Return user details from OpenStreetMap account"""
return {
'username': response['username'],
'email': '',
'fullname': '',
'first_name': '',
'last_name': ''
}
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
response = self.oauth_request(
access_token, 'http://api.openstreetmap.org/api/0.6/user/details'
)
try:
dom = minidom.parseString(response.content)
except ValueError:
return None
user = dom.getElementsByTagName('user')[0]
try:
avatar = dom.getElementsByTagName('img')[0].getAttribute('href')
except IndexError:
avatar = None
return {
'id': user.getAttribute('id'),
'username': user.getAttribute('display_name'),
'account_created': user.getAttribute('account_created'),
'avatar': avatar
}
| bsd-3-clause | Python |
51457d7c55a1e8e713d4407533e0b2159dd759c3 | Improve display of validation error messages. [SDESK-5444] (#1977) | petrjasek/superdesk-core,petrjasek/superdesk-core,superdesk/superdesk-core,petrjasek/superdesk-core,superdesk/superdesk-core,petrjasek/superdesk-core,superdesk/superdesk-core,superdesk/superdesk-core | superdesk/macros/validate_for_publish.py | superdesk/macros/validate_for_publish.py | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk import get_resource_service, config
from superdesk.validation import ValidationError
from apps.publish.content.common import ITEM_PUBLISH
from flask_babel import lazy_gettext
import json
def validate_for_publish(item, **kwargs):
doc = get_resource_service('archive').find_one(req=None, _id=item[config.ID_FIELD])
validate_item = {'act': ITEM_PUBLISH, 'type': doc['type'], 'validate': doc}
validation_errors = get_resource_service('validate').create([validate_item], fields=True)
if validation_errors[0][0]:
raise ValidationError(json.dumps(validation_errors[0][0]))
return item
name = 'Validate for Publish'
label = lazy_gettext('Validate for Publish')
callback = validate_for_publish
access_type = 'frontend'
action_type = 'direct'
| # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from superdesk import get_resource_service, config
from superdesk.validation import ValidationError
from apps.publish.content.common import ITEM_PUBLISH
from flask_babel import lazy_gettext
def validate_for_publish(item, **kwargs):
doc = get_resource_service('archive').find_one(req=None, _id=item[config.ID_FIELD])
validate_item = {'act': ITEM_PUBLISH, 'type': doc['type'], 'validate': doc}
validation_errors = get_resource_service('validate').create([validate_item], fields=True)
if validation_errors[0][0]:
raise ValidationError(validation_errors[0][0])
return item
name = 'Validate for Publish'
label = lazy_gettext('Validate for Publish')
callback = validate_for_publish
access_type = 'frontend'
action_type = 'direct'
| agpl-3.0 | Python |
8e0680013bd2b4791129aac386d48be54bef3a5b | clear command - debug logging even if log parameter is false | lukas-linhart/pageobject | pageobject/commands/clear.py | pageobject/commands/clear.py | from selenium.webdriver.common.keys import Keys
def clear(self, log=True, press_enter=False):
"""
Clear the page object.
:param bool log: whether to log or not (defualt is True)
:param bool press_enter: whether to press enter key after
the element is cleared (defualt is False)
:returns: `self`
:rtype: `PageObjectBase` instance
:raises NoSuchElementException: if the element cannot be found
:raises InvalidSelectorException: if the selector is invalid
or doesn't select an element
"""
if log:
self.logger.info('clearing page object {}'.format(self._log_id_short))
self.logger.debug('clearing page object; {}'.format(self._log_id_long))
self.webelement.clear()
if log:
self.logger.info('page object {} cleared'.format(self._log_id_short))
self.logger.debug('page object cleared; {}'.format(self._log_id_long))
if press_enter:
self.webelement.send_keys(Keys.ENTER)
if log:
self.logger.info('"enter" key sent to page object {}'.format(self._log_id_short))
self.logger.debug('"enter" key sent to page object; {}'.format(self._log_id_long))
return self
| from selenium.webdriver.common.keys import Keys
def clear(self, log=True, press_enter=False):
"""
Clear the page object.
:param bool log: whether to log or not (defualt is True)
:param bool press_enter: whether to press enter key after
the element is cleared (defualt is False)
:returns: `self`
:rtype: `PageObjectBase` instance
:raises NoSuchElementException: if the element cannot be found
:raises InvalidSelectorException: if the selector is invalid
or doesn't select an element
"""
if log:
self.logger.info('clearing page object {}'.format(self._log_id_short))
self.logger.debug('clearing page object; {}'.format(self._log_id_long))
self.webelement.clear()
if log:
self.logger.info('page object {} cleared'.format(self._log_id_short))
self.logger.debug('page object cleared; {}'.format(self._log_id_long))
if press_enter:
self.webelement.send_keys(Keys.ENTER)
if log:
self.logger.info('"enter" key sent to page object {}'.format(self._log_id_short))
self.logger.debug('"enter" key sent to page object; {}'.format(self._log_id_long))
return self
| mit | Python |
b9143462c004af7d18a66fa92ad94585468751b9 | Change IRFieldClassic to use 'encoded_str_type' | kata198/indexedredis,kata198/indexedredis | IndexedRedis/fields/classic.py | IndexedRedis/fields/classic.py | # Copyright (c) 2017 Timothy Savannah under LGPL version 2.1. See LICENSE for more information.
#
# fields.classic - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
#
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes, encoded_str_type
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, valueType=encoded_str_type, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
| # Copyright (c) 2017 Timothy Savannah under LGPL version 2.1. See LICENSE for more information.
#
# fields.classic - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
#
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
from . import IRField, IR_NULL_STRINGS, irNull
from ..compat_str import tobytes
class IRClassicField(IRField):
'''
IRClassicField - The IRField type which behaves like the "classic" IndexedRedis string-named fields.
This will store and retrieve data encoding into the default encoding (@see IndexedRedis.compat_str.setDefaultIREncoding)
and have a default value of empty string.
'''
CAN_INDEX = True
def __init__(self, name='', hashIndex=False):
IRField.__init__(self, name=name, hashIndex=hashIndex, defaultValue='')
def __new__(self, name='', hashIndex=False):
return IRField.__new__(self, name)
# vim: set ts=8 shiftwidth=8 softtabstop=8 noexpandtab :
| lgpl-2.1 | Python |
b6b59d93a8b290bebea99c4c7d095cf7ab91cc77 | fix #88 | rowhit/gitfs,PressLabs/gitfs,bussiere/gitfs,ksmaheshkumar/gitfs,PressLabs/gitfs | gitfs/utils/args.py | gitfs/utils/args.py | import os
import grp
import getpass
import tempfile
class Args(object):
def __init__(self, parser):
self.DEFAULTS = {
"repos_path": self.get_repos_path,
"user": self.get_current_user,
"group": self.get_current_group,
"foreground": True,
"branch": "master",
"upstream": "origin",
"allow_other": False,
"allow_root": False,
"author_name": "Presslabs",
"author_email": "git@presslabs.com",
"commiter_name": "Presslabs",
"commiter_email": "git@presslabs.com",
"max_size": 10 * 1024 * 1024,
"max_offset": 10 * 1024 * 1024,
"fetch_timeout": 5,
"merge_timeout": 2,
}
self.config = self.build_config(parser.parse_args())
def build_config(self, args):
if args.o:
for arg in args.o.split(","):
if "=" in arg:
item, value = arg.split("=")
if value == "True":
value = True
if value == "False":
value = False
setattr(args, item, value)
args = self.set_defaults(args)
return args
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.__dict__['config'], attr)
def set_defaults(self, args):
for option, value in self.DEFAULTS.iteritems():
if not hasattr(args, option):
if callable(value):
value = value()
setattr(args, option, value)
return args
def get_current_group(self):
gid = os.getegid()
return grp.getgrgid(gid).gr_name
def get_current_user(self):
return getpass.getuser()
def get_repos_path(self):
return tempfile.mkdtemp()
| import os
import grp
import getpass
import tempfile
class Args(object):
def __init__(self, parser):
self.DEFAULTS = {
"repos_path": self.get_repos_path(),
"user": self.get_current_user(),
"group": self.get_current_group(),
"foreground": True,
"branch": "master",
"upstream": "origin",
"allow_other": False,
"allow_root": False,
"author_name": "Presslabs",
"author_email": "git@presslabs.com",
"commiter_name": "Presslabs",
"commiter_email": "git@presslabs.com",
"max_size": 10 * 1024 * 1024,
"max_offset": 10 * 1024 * 1024,
"fetch_timeout": 5,
"merge_timeout": 2,
}
self.config = self.build_config(parser.parse_args())
def build_config(self, args):
args = self.set_defaults(args)
if args.o:
for arg in args.o.split(","):
if "=" in arg:
item, value = arg.split("=")
if value == "True":
value = True
if value == "False":
value = False
setattr(args, item, value)
return args
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.__dict__['config'], attr)
def set_defaults(self, args):
for option, value in self.DEFAULTS.iteritems():
setattr(args, option, value)
return args
def get_current_group(self):
gid = os.getegid()
return grp.getgrgid(gid).gr_name
def get_current_user(self):
return getpass.getuser()
def get_repos_path(self):
return tempfile.mkdtemp()
| apache-2.0 | Python |
e599e01b7602399bf3e13fcad67f1195f6dea2ed | fix after API change | Senseg/robotframework,userzimmermann/robotframework-python3,userzimmermann/robotframework-python3,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,Senseg/robotframework | doc/libraries/lib2html.py | doc/libraries/lib2html.py | #!/usr/bin/env python
"""Usage: lib2html.py [ library | all ]
Libraries:
BuiltIn (bu)
Collections (co)
Dialogs (di)
OperatingSystem (op)
Screenshot (sc)
String (st)
Telnet (te)
"""
import sys
import os
import re
ROOT = os.path.normpath(os.path.join(os.path.abspath(__file__),'..','..','..'))
sys.path.insert(0, os.path.join(ROOT,'src'))
from robot.libdoc import libdoc
LIBRARIES = {}
for line in __doc__.splitlines():
res = re.search(' (\w+) \((\w\w)\)', line)
if res:
name, alias = res.groups()
LIBRARIES[name.lower()] = LIBRARIES[alias] = name
def create_libdoc(name):
ipath = os.path.join(ROOT,'src','robot','libraries',name+'.py')
opath = os.path.join(ROOT,'doc','libraries',name+'.html')
libdoc(ipath, opath)
if __name__ == '__main__':
try:
name = sys.argv[1].lower()
if name == 'all':
for name in sorted(set(LIBRARIES.values())):
create_libdoc(name)
else:
create_libdoc(LIBRARIES[name])
except (IndexError, KeyError):
print __doc__
| #!/usr/bin/env python
"""Usage: lib2html.py [ library | all ]
Libraries:
BuiltIn (bu)
Collections (co)
Dialogs (di)
OperatingSystem (op)
Screenshot (sc)
String (st)
Telnet (te)
"""
import sys
import os
import re
ROOT = os.path.normpath(os.path.join(os.path.abspath(__file__),'..','..','..'))
sys.path.insert(0, os.path.join(ROOT,'src'))
from robot.libdoc import libdoc
LIBRARIES = {}
for line in __doc__.splitlines():
res = re.search(' (\w+) \((\w\w)\)', line)
if res:
name, alias = res.groups()
LIBRARIES[name.lower()] = LIBRARIES[alias] = name
def create_libdoc(name):
ipath = os.path.join(ROOT,'src','robot','libraries',name+'.py')
opath = os.path.join(ROOT,'doc','libraries',name+'.html')
libdoc(ipath, output=opath)
if __name__ == '__main__':
try:
name = sys.argv[1].lower()
if name == 'all':
for name in sorted(set(LIBRARIES.values())):
create_libdoc(name)
else:
create_libdoc(LIBRARIES[name])
except (IndexError, KeyError):
print __doc__
| apache-2.0 | Python |
621549a0bd4c5d077bf3cebb75158c34ee6aa7d4 | change useraccount/passwd | macauleycheng/AOS_OF_Example,macauleycheng/AOS_OF_Example | 000-Netconf/01-AddController/edit-config-controller.py | 000-Netconf/01-AddController/edit-config-controller.py | from ncclient import manager
import ncclient
import xml.etree.ElementTree as ET
host = "192.168.1.1"
username="netconfuser"
password="netconfuser"
#username="root"
#password="root"
##NOTE:
# below two colum shall change to switch CPU MAC
# <id>00:00:70:72:cf:dc:9d:b2</id>
# <datapath-id>00:00:70:72:cf:dc:9d:b2</datapath-id>
config_xml="""
<config>
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>capable-switch-1</id>
<logical-switches>
<switch>
<id>00:00:70:72:cf:dc:9e:da</id>
<datapath-id>00:00:70:72:cf:dc:9e:da</datapath-id>
<controllers>
<controller>
<id>192.168.10.105:6633</id>
<ip-address>192.168.10.105</ip-address>
<port>6633</port>
</controller>
</controllers>
</switch>
</logical-switches>
</capable-switch>
</config>
"""
with manager.connect_ssh(host=host, port=830, username=username, password=password, hostkey_verify=False ) as m:
#print m.server_capabilities
print m.edit_config(target='running',
config=config_xml,
default_operation='merge',
error_option='stop-on-error')
print m.get_config(source='running').data_xml
| from ncclient import manager
import ncclient
import xml.etree.ElementTree as ET
host = "192.168.1.1"
username="root"
password="root"
##NOTE:
# below two colum shall change to switch CPU MAC
# <id>00:00:70:72:cf:dc:9d:b2</id>
# <datapath-id>00:00:70:72:cf:dc:9d:b2</datapath-id>
config_xml="""
<config>
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>capable-switch-1</id>
<logical-switches>
<switch>
<id>00:00:70:72:cf:dc:9d:b2</id>
<datapath-id>00:00:70:72:cf:dc:9d:b2</datapath-id>
<controllers>
<controller>
<id>192.168.10.105:6633</id>
<ip-address>192.168.10.105</ip-address>
<port>6633</port>
</controller>
</controllers>
</switch>
</logical-switches>
</capable-switch>
</config>
"""
with manager.connect_ssh(host=host, port=830, username=username, password=password, hostkey_verify=False ) as m:
#print m.server_capabilities
print m.edit_config(target='running',
config=config_xml,
default_operation='merge',
error_option='stop-on-error')
print m.get_config(source='running').data_xml
| apache-2.0 | Python |
aa349dfaf6159227c5871c16a6ae6b0f9cad4713 | use change_plan and dont' save twice | dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | corehq/apps/accounting/management/commands/make_domain_enterprise_level.py | corehq/apps/accounting/management/commands/make_domain_enterprise_level.py | from django.core.management import BaseCommand
from corehq import Domain
from corehq.apps.accounting.exceptions import NewSubscriptionError
from corehq.apps.accounting.models import (
BillingAccount,
SoftwarePlanEdition,
SoftwarePlanVersion,
Subscription,
BillingAccountType)
class Command(BaseCommand):
help = ('Create a billing account and an enterprise level subscription '
'for the given domain')
def handle(self, *args, **options):
if len(args) != 1:
print "Invalid arguments: %s" % str(args)
return
domain = Domain.get_by_name(args[0])
if not domain:
print "Invalid domain name: %s" % args[0]
return
plan_version, subscription = Subscription.get_subscribed_plan_by_domain(domain.name)
if plan_version.plan.edition == SoftwarePlanEdition.ENTERPRISE:
print "Domain %s is already enterprise level" % domain.name
return
if subscription:
subscription.change_plan(self.enterprise_plan_version)
else:
try:
self.make_new_enterprise_subscription(domain)
except NewSubscriptionError as e:
print e.message
return
print 'Domain %s has been upgraded to enterprise level.' % domain.name
def make_new_enterprise_subscription(self, domain):
account, _ = BillingAccount.get_or_create_account_by_domain(
domain.name,
account_type=BillingAccountType.CONTRACT,
created_by="management command",
)
Subscription.new_domain_subscription(
account,
domain.name,
self.enterprise_plan_version,
)
@property
def enterprise_plan_version(self):
return SoftwarePlanVersion.objects.filter(
plan__edition=SoftwarePlanEdition.ENTERPRISE
)[0]
| from django.core.management import BaseCommand
from corehq import Domain
from corehq.apps.accounting.exceptions import NewSubscriptionError
from corehq.apps.accounting.models import (
BillingAccount,
SoftwarePlanEdition,
SoftwarePlanVersion,
Subscription,
BillingAccountType)
class Command(BaseCommand):
help = ('Create a billing account and an enterprise level subscription '
'for the given domain')
def handle(self, *args, **options):
if len(args) != 1:
print "Invalid arguments: %s" % str(args)
return
domain = Domain.get_by_name(args[0])
if not domain:
print "Invalid domain name: %s" % args[0]
return
plan_version, subscription = Subscription.get_subscribed_plan_by_domain(domain.name)
if plan_version.plan.edition == SoftwarePlanEdition.ENTERPRISE:
print "Domain %s is already enterprise level" % domain.name
return
if subscription:
subscription.plan_version = self.enterprise_plan_version
else:
try:
self.make_new_enterprise_subscription(domain)
except NewSubscriptionError as e:
print e.message
return
subscription.is_active = True
subscription.save()
print 'Domain %s has been upgraded to enterprise level.' % domain.name
def make_new_enterprise_subscription(self, domain):
account, _ = BillingAccount.get_or_create_account_by_domain(
domain.name,
account_type=BillingAccountType.CONTRACT,
created_by="management command",
)
return Subscription.new_domain_subscription(
account,
domain.name,
self.enterprise_plan_version,
)
@property
def enterprise_plan_version(self):
return SoftwarePlanVersion.objects.filter(
plan__edition=SoftwarePlanEdition.ENTERPRISE
)[0]
| bsd-3-clause | Python |
98941b6670bca96b56e3ffe2f69a0acbc5692fa0 | Use link without variable in wsgi.py to fix cookiecutter generation | r0x73/django-template,r0x73/django-template,r0x73/django-template | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/wsgi.py | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/wsgi.py | """
WSGI config for {{ cookiecutter.project_slug }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/stable/howto/deployment/wsgi/
"""
import os
from {{ cookiecutter.project_slug }} import get_project_root_path, import_env_vars
import_env_vars(os.path.join(get_project_root_path(), 'envdir'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ cookiecutter.project_slug }}.settings.base")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| """
WSGI config for {{ cookiecutter.project_slug }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/{{ docs_version }}/howto/deployment/wsgi/
"""
import os
from {{ cookiecutter.project_slug }} import get_project_root_path, import_env_vars
import_env_vars(os.path.join(get_project_root_path(), 'envdir'))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ cookiecutter.project_slug }}.settings.base")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| mit | Python |
effa5f84fc93ced38ad9e5d3b0a16bea2d3914ef | Allow column to be a property | makinacorpus/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,johan--/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,camillemonchicourt/Geotrek | caminae/common/templatetags/field_verbose_name.py | caminae/common/templatetags/field_verbose_name.py | from django import template
from django.db.models.fields.related import FieldDoesNotExist
register = template.Library()
def field_verbose_name(obj, field):
"""Usage: {{ object|get_object_field }}"""
try:
return obj._meta.get_field(field).verbose_name
except FieldDoesNotExist:
a = getattr(obj, '%s_verbose_name' % field)
if a is None:
raise
return unicode(a)
register.filter(field_verbose_name)
register.filter('verbose', field_verbose_name)
| from django import template
register = template.Library()
def field_verbose_name(obj, field):
"""Usage: {{ object|get_object_field }}"""
return obj._meta.get_field(field).verbose_name
register.filter(field_verbose_name)
register.filter('verbose', field_verbose_name)
| bsd-2-clause | Python |
5f91ca32ae3d8fccb916fdf06bd6f2f546e6032c | Update dev | thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie | genie/__version__.py | genie/__version__.py | __version__ = "8.0.0-dev"
| __version__ = "7.0.0-dev"
| mit | Python |
1943daaa02b4d314c7aa722a05ae77b5586b46f1 | Add a little explanation on usage | Klumhru/boost-python-bullet,Klumhru/boost-python-bullet,Klumhru/boost-python-bullet | linear_math_tests/test_motionstate.py | linear_math_tests/test_motionstate.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_motionstate
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class MyMotionState(bullet.btMotionState):
def __init__(self):
self.transform = bullet.btTransform.identity
def setWorldTransform(self, t):
self.transform = t
def getWorldTransform(self, t):
# Assign basis and origin as theres no way to assign by reference
# as intended in the native method
t.set_basis(self.transform.basis)
t.set_origin(self.transform.origin)
class TestMotionState(unittest.TestCase):
def setUp(self):
self.m = bullet.btDefaultMotionState()
def test_ctor(self):
self.m = bullet.btDefaultMotionState()
self.m = bullet.btDefaultMotionState(bullet.btTransform.identity)
self.m = bullet.btDefaultMotionState(bullet.btTransform.identity,
bullet.btTransform.identity)
self.m = MyMotionState()
def test_virtual(self):
self.m = bullet.btMotionState()
t1 = bullet.btTransform()
self.assertRaises(RuntimeError, self.m.getWorldTransform, t1)
def test_implemented(self):
self.m = MyMotionState()
t1 = bullet.btTransform(bullet.btQuaternion.identity,
bullet.btVector3(0, 10, 0))
print('t1', t1)
self.m.getWorldTransform(t1)
self.assertEqual(t1, bullet.btTransform.identity)
def tearDown(self):
del self.m
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
linear_math_tests.test_motionstate
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import math
import bullet
class MyMotionState(bullet.btMotionState):
def __init__(self):
self.transform = bullet.btTransform.identity
def setWorldTransform(self, t):
self.transform = t
def getWorldTransform(self, t):
t.set_basis(self.transform.basis)
t.set_origin(self.transform.origin)
class TestMotionState(unittest.TestCase):
def setUp(self):
self.m = bullet.btDefaultMotionState()
def test_ctor(self):
self.m = bullet.btDefaultMotionState()
self.m = bullet.btDefaultMotionState(bullet.btTransform.identity)
self.m = bullet.btDefaultMotionState(bullet.btTransform.identity,
bullet.btTransform.identity)
self.m = MyMotionState()
def test_virtual(self):
self.m = bullet.btMotionState()
t1 = bullet.btTransform()
self.assertRaises(RuntimeError, self.m.getWorldTransform, t1)
def test_implemented(self):
self.m = MyMotionState()
t1 = bullet.btTransform(bullet.btQuaternion.identity,
bullet.btVector3(0, 10, 0))
print('t1', t1)
self.m.getWorldTransform(t1)
self.assertEqual(t1, bullet.btTransform.identity)
def tearDown(self):
del self.m
| mit | Python |
ac500b059ac02ff5a104a25d2f5ff9f4848f536d | set default re subprocess timeout to 0.1, since that should be quite enough. | ProgVal/Limnoria-test,ProgVal/Limnoria-test,frumiousbandersnatch/supybot-code,Ban3/Limnoria,haxwithaxe/supybot,buildbot/supybot,Ban3/Limnoria,jeffmahoney/supybot | plugins/String/config.py | plugins/String/config.py | ###
# Copyright (c) 2003-2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('String', True)
String = conf.registerPlugin('String')
conf.registerGroup(String, 'levenshtein')
conf.registerGlobalValue(String.levenshtein, 'max',
registry.PositiveInteger(256, """Determines the maximum size of a string
given to the levenshtein command. The levenshtein command uses an O(m*n)
algorithm, which means that with strings of length 256, it can take 1.5
seconds to finish; with strings of length 384, though, it can take 4
seconds to finish, and with strings of much larger lengths, it takes more
and more time. Using nested commands, strings can get quite large, hence
this variable, to limit the size of arguments passed to the levenshtein
command."""))
conf.registerGroup(String, 're')
conf.registerGlobalValue(String.re, 'timeout',
registry.PositiveFloat(0.1, """Determines the maximum time, in seconds, that
a regular expression is given to execute before being terminated. Since
there is a possibility that user input for the re command can cause it to
eat up large amounts of ram or cpu time, it's a good idea to keep this
low. Most normal regexps should not take very long at all."""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| ###
# Copyright (c) 2003-2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.conf as conf
import supybot.registry as registry
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('String', True)
String = conf.registerPlugin('String')
conf.registerGroup(String, 'levenshtein')
conf.registerGlobalValue(String.levenshtein, 'max',
registry.PositiveInteger(256, """Determines the maximum size of a string
given to the levenshtein command. The levenshtein command uses an O(m*n)
algorithm, which means that with strings of length 256, it can take 1.5
seconds to finish; with strings of length 384, though, it can take 4
seconds to finish, and with strings of much larger lengths, it takes more
and more time. Using nested commands, strings can get quite large, hence
this variable, to limit the size of arguments passed to the levenshtein
command."""))
conf.registerGroup(String, 're')
conf.registerGlobalValue(String.re, 'timeout',
registry.PositiveFloat(5, """Determines the maximum time, in seconds, that
a regular expression is given to execute before being terminated. Since
there is a possibility that user input for the re command can cause it to
eat up large amounts of ram or cpu time, it's a good idea to keep this
low. Most normal regexps should not take very long at all."""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| bsd-3-clause | Python |
c481c935af42e88c0e8688ad995247b37cc96f6a | fix anyurl plugin's title parser and add googlebot UA | jkent/jkent-pybot,jrspruitt/jkent-pybot | plugins/anyurl_plugin.py | plugins/anyurl_plugin.py | # -*- coding: utf-8 -*-
# vim: set ts=4 et
import cgi
import requests
from six.moves.html_parser import HTMLParser
from plugin import *
content_types = (
'text/html',
'text/xml',
'application/xhtml+xml',
'application/xml'
)
class TitleParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.match = False
self.title = ''
def handle_starttag(self, tag, attrs):
if tag == 'meta':
og_title = False
for attr in attrs:
if attr == ('property', 'og:title'):
og_title = True
if og_title:
for attr in attrs:
if attr[0] == 'content':
self.title = attr[1]
self.match = True if not self.title and tag == 'title' else False
def handle_data(self, data):
if self.match:
self.title = data.strip()
self.match = False
class Plugin(BasePlugin):
default_priority = 1
@hook
def any_url(self, msg, domain, url):
headers = {
'User-Agent': 'Mozilla/5.0 AppleWebKit/537.36 (KHTML, like Gecko; compatible; Googlebot/2.1; +http://www.google.com/bot.html) Safari/537.36'
}
r = requests.get(url, stream=True, headers=headers)
content_type, params = cgi.parse_header(r.headers['Content-Type'])
if not content_type in content_types:
return
r.encoding = 'utf-8'
if 'charset' in params:
r.encoding = params['charset'].strip("'\"")
parser = TitleParser()
for line in r.iter_lines(chunk_size=1024, decode_unicode=True):
parser.feed(line)
if parser.title:
break
msg.reply('\x031,0URL\x03 %s' % parser.title)
| # -*- coding: utf-8 -*-
# vim: set ts=4 et
import cgi
import requests
from six.moves.html_parser import HTMLParser
from plugin import *
content_types = (
'text/html',
'text/xml',
'application/xhtml+xml',
'application/xml'
)
class TitleParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.match = False
self.title = ''
def handle_starttag(self, tag, attrs):
if tag == 'meta':
og_title = False
for attr in attrs:
if attr == ('property', 'og:title'):
og_title = True
if og_title:
for attr in attrs:
if attr[0] == 'content':
self.title = attr[1]
self.match = True if not self.title and tag == 'title' else False
def handle_data(self, data):
if self.match:
self.title = data
self.match = False
class Plugin(BasePlugin):
default_priority = 1
@hook
def any_url(self, msg, domain, url):
r = requests.get(url, stream=True)
content_type, params = cgi.parse_header(r.headers['Content-Type'])
if not content_type in content_types:
return
r.encoding = 'utf-8'
if 'charset' in params:
r.encoding = params['charset'].strip("'\"")
parser = TitleParser()
for line in r.iter_lines(chunk_size=1024, decode_unicode=True):
parser.feed(line)
if parser.title:
break
print(parser.title)
msg.reply('\x031,0URL\x03 %s' % parser.title)
| mit | Python |
30e9368203c96900f5cc6dc993b1201a6face9bf | test for a logged in user implemented | ndraper2/old-learning-journal | features/steps.py | features/steps.py | from lettuce import before, after, world, step
import datetime
import os
from contextlib import closing
from journal import connect_db
from journal import DB_SCHEMA
from journal import INSERT_ENTRY
from pyramid import testing
TEST_DSN = 'dbname=test_learning_journal user=ndraper2'
settings = {'db': TEST_DSN}
INPUT_BTN = '<input type="submit" value="Share" name="Share"/>'
@world.absorb
def make_an_entry():
entry_data = {
'title': 'Hello there',
'text': 'This is a post',
}
response = app.get('/add', params=entry_data, status='3*')
return response
@world.absorb
def login_helper(username, password, app):
"""encapsulate app login for reuse in tests
Accept all status codes so that we can make assertions in tests
"""
login_data = {'username': username, 'password': password}
return app.post('/login', params=login_data, status='*')
@before.all
def init_db():
with closing(connect_db(settings)) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@after.all
def clear_db(total):
with closing(connect_db(settings)) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@after.each_scenario
def clear_entries(scenario):
with closing(connect_db(settings)) as db:
db.cursor().execute("DELETE FROM entries")
db.commit()
@before.each_scenario
def app(scenario):
from journal import main
from webtest import TestApp
os.environ['DATABASE_URL'] = TEST_DSN
app = main()
world.test_app = TestApp(app)
@step('a journal home page')
def get_home_page(step):
response = world.test_app.get('/')
assert response.status_code == 200
actual = response.body
expected = 'No entries here so far'
assert expected in actual
# @step('I click on the entry link')
# def go_to_detail(step):
# response = world.test_app.get('/1')
# assert response.status_code == 200
# actual = response.body
# for expected in entry[:2]:
# assert expected in actual
@step('a logged in user')
def a_logged_in_user(step):
username, password = ('admin', 'secret')
app = world.test_app
redirect = login_helper(username, password, app)
assert redirect.status_code == 302
response = redirect.follow()
assert response.status_code == 200
actual = response.body
assert INPUT_BTN in actual
| from lettuce import before, after, world, step
import datetime
import os
from contextlib import closing
from journal import connect_db
from journal import DB_SCHEMA
from journal import INSERT_ENTRY
from pyramid import testing
TEST_DSN = 'dbname=test_learning_journal user=ndraper2'
settings = {'db': TEST_DSN}
@world.absorb
def make_an_entry():
entry_data = {
'title': 'Hello there',
'text': 'This is a post',
}
response = app.get('/add', params=entry_data, status='3*')
return response
@before.all
def init_db():
with closing(connect_db(settings)) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@after.all
def clear_db(total):
with closing(connect_db(settings)) as db:
db.cursor().execute("DROP TABLE entries")
db.commit()
@after.each_scenario
def clear_entries(scenario):
with closing(connect_db(settings)) as db:
db.cursor().execute("DELETE FROM entries")
db.commit()
@before.each_scenario
def app(scenario):
from journal import main
from webtest import TestApp
os.environ['DATABASE_URL'] = TEST_DSN
app = main()
world.test_app = TestApp(app)
@step('a journal home page')
def get_home_page(step):
response = world.test_app.get('/')
assert response.status_code == 200
actual = response.body
expected = 'No entries here so far'
assert expected in actual
# @step('I click on the entry link')
# def go_to_detail(step):
# response = world.test_app.get('/1')
# assert response.status_code == 200
# actual = response.body
# for expected in entry[:2]:
# assert expected in actual
| mit | Python |
bdb78cd1bb13981a20ecb0cf9eb981d784c95b0e | Update form to handle home_lon and home_lat | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | fellowms/forms.py | fellowms/forms.py | from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"home_lon",
"home_lat",
"inauguration_year",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
fields = '__all__'
| from django.forms import ModelForm, widgets
from .models import Fellow, Event, Expense, Blog
class FellowForm(ModelForm):
class Meta:
model = Fellow
exclude = [
"inauguration_year",
"mentor",
]
class EventForm(ModelForm):
class Meta:
model = Event
exclude = [
"status",
"budget_approve",
]
# We don't want to expose fellows' data
# so we will request the email
# and match on the database.
labels = {
'fellow': 'Fellow',
'url': "Event's homepage url",
'name': "Event's name",
}
class ExpenseForm(ModelForm):
class Meta:
model = Expense
exclude = [
'id',
'status',
]
class BlogForm(ModelForm):
class Meta:
model = Blog
fields = '__all__'
| bsd-3-clause | Python |
7451f70fc86ab2c18b0debddde5638c5dcd34b2f | Revise doc string with complexity | bowen0701/algorithms_data_structures | alg_dijkstra_shortest_path.py | alg_dijkstra_shortest_path.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def dijkstra(w_graph_d, start_vertex):
"""Dijkstra algorithm for singel-source shortest path problem
in a "weighted" graph.
Time complexity for graph G(V, W): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
distance_d = {v: np.inf for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
min_pq.insert([0, start_vertex])
distance_d[start_vertex] = 0
visited_d[start_vertex] = True
while min_pq.heap_size > 0:
k, v = min_pq.extract_min()
visited_d[v] = True
for v_neighbor in w_graph_d[v].keys():
if (not visited_d[v_neighbor] and
distance_d[v_neighbor] >
distance_d[v] + w_graph_d[v][v_neighbor]):
distance_d[v_neighbor] = distance_d[v] + w_graph_d[v][v_neighbor]
previous_d[v_neighbor] = v
min_pq.insert([distance_d[v_neighbor], v_neighbor])
return distance_d, previous_d, visited_d
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': 5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Dijkstra shortest path from {}:'.format(start_vertex))
distance_d, previous_d, visited_d = (
dijkstra(w_graph_d, start_vertex))
print('distance_d: {}'.format(distance_d))
print('previous_d: {}'.format(previous_d))
print('visited_d: {}'.format(visited_d))
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def dijkstra(w_graph_d, start_vertex):
"""Dijkstra algorithm for "weighted" graph.
Finds shortest path in a weighted graph from a particular
node to all vertices that are reachable from it.
"""
min_pq = MinPriorityQueue()
distance_d = {v: np.inf for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
min_pq.insert([0, start_vertex])
distance_d[start_vertex] = 0
visited_d[start_vertex] = True
while min_pq.heap_size > 0:
k, v = min_pq.extract_min()
visited_d[v] = True
for v_neighbor in w_graph_d[v].keys():
if (not visited_d[v_neighbor] and
distance_d[v_neighbor] >
distance_d[v] + w_graph_d[v][v_neighbor]):
distance_d[v_neighbor] = distance_d[v] + w_graph_d[v][v_neighbor]
previous_d[v_neighbor] = v
min_pq.insert([distance_d[v_neighbor], v_neighbor])
return distance_d, previous_d, visited_d
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': 5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Dijkstra shortest path from {}:'.format(start_vertex))
distance_d, previous_d, visited_d = (
dijkstra(w_graph_d, start_vertex))
print('distance_d: {}'.format(distance_d))
print('previous_d: {}'.format(previous_d))
print('visited_d: {}'.format(visited_d))
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
ca2b02d551e9bb4c8625ae79f7878892673fa731 | Add CommCare, CommTrack filters for DomainES | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq | corehq/apps/es/domains.py | corehq/apps/es/domains.py | from .es_query import HQESQuery
from . import filters
class DomainES(HQESQuery):
index = 'domains'
@property
def builtin_filters(self):
return [
real_domains,
commcare_domains,
commconnect_domains,
commtrack_domains,
created,
] + super(DomainES, self).builtin_filters
def real_domains():
return filters.term("is_test", False)
def commcare_domains():
return filters.AND(filters.term("commconnect_enabled", False),
filters.term("commtrack_enabled", False))
def commconnect_domains():
return filters.term("commconnect_enabled", True)
def commtrack_domains():
return filters.term("commtrack_enabled", True)
def created(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date_created', gt, gte, lt, lte)
| from .es_query import HQESQuery
from . import filters
class DomainES(HQESQuery):
index = 'domains'
@property
def builtin_filters(self):
return [
real_domains,
commconnect_domains,
created,
] + super(DomainES, self).builtin_filters
def real_domains():
return filters.term("is_test", False)
def commconnect_domains():
return filters.term("commconnect_enabled", True)
def created(gt=None, gte=None, lt=None, lte=None):
return filters.date_range('date_created', gt, gte, lt, lte)
| bsd-3-clause | Python |
c6ba9edaab10492e8d24db8c28771489331b22eb | Fix spacing in adminendpoints | ollien/Timpani,ollien/Timpani,ollien/Timpani | timpani/webserver/endpoints/adminendpoints.py | timpani/webserver/endpoints/adminendpoints.py | import flask
import os.path
import json
from ... import blog
import uuid
import magic
import mimetypes
from .. import webhelpers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
UPLOAD_LOCATION = os.path.abspath(os.path.join(FILE_LOCATION, "../../../static/uploads"))
blueprint = flask.Blueprint("adminEndpoints", __name__)
#Returns a JSON Object based on whether or not the user is logged in.
@blueprint.route("/delete_post/<int:postId>", methods = ["POST"])
def deletePost(postId):
session = webhelpers.checkForSession()
if session != None:
blog.deletePost(postId)
return json.dumps({"error": 0})
else:
return json.dumps({"error": 1}), 403
#Returns a JSON Object based on whether or not the user is logged in, or if it's an invalid file type.
@blueprint.route("/upload_image", methods = ["POST"])
def uploadImage():
ACCEPTED_FORMATS = ["image/jpeg", "image/png", "image/gif"]
session = webhelpers.checkForSession()
if session != None:
image = flask.request.files["image"]
mime = magic.from_buffer(image.stream.read(), mime = True)
image.stream.seek(0,0)
if type(mime) == bytes:
mime = mime.decode()
if mime in ACCEPTED_FORMATS:
extension = mimetypes.guess_extension(mime)
print(extension)
fileName = "%s%s" % (uuid.uuid4().hex, extension)
image.save(os.path.join(UPLOAD_LOCATION, fileName))
return json.dumps({"error": 0, "url": os.path.join("/static/uploads", fileName)})
else:
return json.dumps({"error": 2}), 400
else:
return json.dumps({"error": 1}), 403
| import flask
import os.path
import json
from ... import blog
import uuid
import magic
import mimetypes
from .. import webhelpers
FILE_LOCATION = os.path.abspath(os.path.dirname(__file__))
UPLOAD_LOCATION = os.path.abspath(os.path.join(FILE_LOCATION, "../../../static/uploads"))
blueprint = flask.Blueprint("adminEndpoints", __name__)
#Returns a JSON Object based on whether or not the user is logged in.
@blueprint.route("/delete_post/<int:postId>", methods = ["POST"])
def deletePost(postId):
session = webhelpers.checkForSession()
if session != None:
blog.deletePost(postId)
return json.dumps({"error": 0})
else:
return json.dumps({"error": 1}), 403
#Returns a JSON Object based on whether or not the user is logged in, or if it's an invalid file type.
@blueprint.route("/upload_image", methods = ["POST"])
def uploadImage():
ACCEPTED_FORMATS = ["image/jpeg", "image/png", "image/gif"]
session = webhelpers.checkForSession()
if session != None:
image = flask.request.files["image"]
mime = magic.from_buffer(image.stream.read(), mime = True)
image.stream.seek(0,0)
if type(mime) == bytes:
mime = mime.decode()
if mime in ACCEPTED_FORMATS:
extension = mimetypes.guess_extension(mime)
print(extension)
fileName = "%s%s" % (uuid.uuid4().hex, extension)
image.save(os.path.join(UPLOAD_LOCATION, fileName))
return json.dumps({"error": 0, "url": os.path.join("/static/uploads", fileName)})
else:
return json.dumps({"error": 2}), 400
else:
return json.dumps({"error": 1}), 403
| mit | Python |
10e86be9e8d97f2373ed9aee5925460c28f6782b | make isort happy | pennlabs/penn-mobile-server,pennlabs/penn-mobile-server | cron/save_laundry_data.py | cron/save_laundry_data.py | #!/usr/bin/env python
# Add the following line into the labs crontab.
# */15 * * * * /home/labs/penn-mobile-server/cron/save_laundry_data.py
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if True:
import server
server.laundry.save_data()
| #!/usr/bin/env python
# Add the following line into the labs crontab.
# */15 * * * * /home/labs/penn-mobile-server/cron/save_laundry_data.py
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import server # noqa
server.laundry.save_data()
| mit | Python |
f904dce3006184b464d414c0cafe930b8c10a95b | Update P03_writingExcel fixed warnings by changing depreciated methods | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter12/P03_writingExcel.py | books/AutomateTheBoringStuffWithPython/Chapter12/P03_writingExcel.py | # This program uses the OpenPyXL module to manipulate Excel documents
# Creating and Saving Excel Documents
import openpyxl
wb = openpyxl.Workbook()
print(wb.sheetnames)
sheet = wb.active
print(sheet.title)
sheet.title = "Spam Bacon Eggs Sheet"
print(wb.sheetnames)
wb = openpyxl.load_workbook("example.xlsx")
sheet = wb.active
sheet.title = "Spam Spam Spam"
wb.save("example_copy.xlsx")
# Creating and Removing Sheets
wb = openpyxl.Workbook()
print(wb.sheetnames)
wb.create_sheet()
print(wb.sheetnames)
wb.create_sheet(index=0, title="First Sheet")
print(wb.sheetnames)
wb.create_sheet(index=2, title="Middle Sheet")
print(wb.sheetnames)
wb.remove(wb["Middle Sheet"])
wb.remove(wb["Sheet1"])
print(wb.sheetnames)
| # This program uses the OpenPyXL module to manipulate Excel documents
# Creating and Saving Excel Documents
import openpyxl
wb = openpyxl.Workbook()
print(wb.sheetnames)
sheet = wb.active
print(sheet.title)
sheet.title = "Spam Bacon Eggs Sheet"
print(wb.sheetnames)
wb = openpyxl.load_workbook("example.xlsx")
sheet = wb.active
sheet.title = "Spam Spam Spam"
wb.save("example_copy.xlsx")
# Creating and Removing Sheets
wb = openpyxl.Workbook()
print(wb.get_sheet_names())
wb.create_sheet()
print(wb.get_sheet_names())
wb.create_sheet(index=0, title="First Sheet")
print(wb.get_sheet_names())
wb.create_sheet(index=2, title="Middle Sheet")
print(wb.get_sheet_names())
wb.remove_sheet(wb.get_sheet_by_name("Middle Sheet"))
wb.remove_sheet(wb.get_sheet_by_name("Sheet1"))
print(wb.get_sheet_names())
| mit | Python |
c34b1d8e2d78641fbfbf7cc644a57dde04b2e6d3 | Update 03_Proximity_Indicator.py | userdw/RaspberryPi_3_Starter_Kit | 03_Proximity_Indicator/03_Proximity_Indicator/03_Proximity_Indicator.py | 03_Proximity_Indicator/03_Proximity_Indicator/03_Proximity_Indicator.py | import MCP3202
import wiringpi,time,os
from time import strftime
wiringpi.wiringPiSetup()
wiringpi.pinMode(1,1)
wiringpi.pinMode(21,1)
def translate(value,leftMin,leftMax,rightMin,rightMax):
# Figure out how 'wide' each range is
leftSpan = leftMax - leftMin
rightSpan = rightMax - rightMin
# Convert the left range into a 0-1 range (float)
valueScaled = float(value - leftMin) / float(leftSpan)
# Convert the 0-1 range into a value in the right range.
return rightMin + (valueScaled * rightSpan)
try:
while 1:
os.system('clear')
value1= MCP3202.readADC(0) # range data 0 - vref (volt)
map=translate(value1,0,4096,255,0)
print "Proximity Sensor"
print "Curent Distance : ",int(value1),int(map)
print ""
print "Press CTRL+C to exit"
if map <= 200:
wiringpi.digitalWrite(1,1)
wiringpi.digitalWrite(21,1)
time.sleep(map/1000)
# Write 0 (Low) / 1 (High) to Buzzer and LED
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
time.sleep(map/1000)
else:
# Write 1 (High) / 0 (Low) to Buzzer and Led
wiringpi.digitalWrite(1,1)
wiringpi.digitalWrite(21,1)
time.sleep(map/500)
# Write 0 (Low) / 1 (High) to Buzzer and LED
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
time.sleep(map/500)
except KeyboardInterrupt:
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
print "exit"
| import MCP3202
import wiringpi,time,os
from time import strftime
wiringpi.wiringPiSetup()
wiringpi.pinMode(1,1)
wiringpi.pinMode(21,1)
def translate(value,leftMin,leftMax,rightMin,rightMax):
# Figure out how 'wide' each range is
leftSpan = leftMax - leftMin
rightSpan = rightMax - rightMin
# Convert the left range into a 0-1 range (float)
valueScaled = float(value - leftMin) / float(leftSpan)
# Convert the 0-1 range into a value in the right range.
return rightMin + (valueScaled * rightSpan)
try:
while 1:
os.system('clear')
value1= MCP3202.readADC(0) # range data 0 - vref (volt)
map=translate(value1,0,4096,255,0)
print "Proximity Sensor"
print "Curent Distance : ",int(value1),int(map)
print ""
print "Press CTRL+C to exit"
if map <= 200:
wiringpi.digitalWrite(1,1)
wiringpi.digitalWrite(21,1)
time.sleep(map/1000)
# Write 0 (Low) / 1 (High) to Buzzer and LED
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
time.sleep(map/1000)
else:
# Write 1 (High) / 0 (Low) to Buzzer and Led
wiringpi.digitalWrite(1,1)
wiringpi.digitalWrite(21,1)
time.sleep(map/500)
# Write 0 (Low) / 1 (High) to Buzzer and LED
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
time.sleep(map/500)
except KeyboardInterrupt:
wiringpi.digitalWrite(1,0)
wiringpi.digitalWrite(21,0)
print "exit"
| mit | Python |
f6216cdd9be3db07c6f2271d7f0bbe39efa39766 | Use custom formatter in orgviz.cli | tkf/orgviz | orgviz/cli.py | orgviz/cli.py | """
OrgViz command line interface.
"""
import argparse
import textwrap
class Formatter(argparse.RawDescriptionHelpFormatter,
argparse.ArgumentDefaultsHelpFormatter):
pass
def get_parser(commands):
"""
Generate argument parser given a list of subcommand specifications.
:type commands: list of (str, function, function)
:arg commands:
Each element must be a tuple ``(name, adder, runner)``.
:param name: subcommand
:param adder: a function takes one object which is an instance
of :class:`argparse.ArgumentParser` and add
arguments to it
:param runner: a function takes keyword arguments which must be
specified by the arguments parsed by the parser
defined by `adder`. Docstring of this function
will be the description of the subcommand.
"""
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers()
for (name, adder, runner) in commands:
subp = subparsers.add_parser(
name,
formatter_class=Formatter,
description=runner.__doc__ and textwrap.dedent(runner.__doc__))
adder(subp)
subp.set_defaults(func=runner)
return parser
def main(args=None):
from orgviz import web
parser = get_parser([web.command])
ns = parser.parse_args(args=args)
applyargs = lambda func, **kwds: func(**kwds)
applyargs(**vars(ns))
if __name__ == '__main__':
main()
| """
OrgViz command line interface.
"""
def get_parser(commands):
"""
Generate argument parser given a list of subcommand specifications.
:type commands: list of (str, function, function)
:arg commands:
Each element must be a tuple ``(name, adder, runner)``.
:param name: subcommand
:param adder: a function takes one object which is an instance
of :class:`argparse.ArgumentParser` and add
arguments to it
:param runner: a function takes keyword arguments which must be
specified by the arguments parsed by the parser
defined by `adder`. Docstring of this function
will be the description of the subcommand.
"""
import argparse
import textwrap
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers()
for (name, adder, runner) in commands:
subp = subparsers.add_parser(
name,
formatter_class=argparse.RawDescriptionHelpFormatter,
description=runner.__doc__ and textwrap.dedent(runner.__doc__))
adder(subp)
subp.set_defaults(func=runner)
return parser
def main(args=None):
from orgviz import web
parser = get_parser([web.command])
ns = parser.parse_args(args=args)
applyargs = lambda func, **kwds: func(**kwds)
applyargs(**vars(ns))
if __name__ == '__main__':
main()
| mit | Python |
71e1e09f750eac6cdf8ebb718190f07ae560f6e6 | Fix message | houqp/floyd-cli,mckayward/floyd-cli,mckayward/floyd-cli,houqp/floyd-cli | floyd/cli/auth.py | floyd/cli/auth.py | import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
def login():
"""
Log into Floyd via Auth0.
"""
cli_info_url = "{}/welcome".format(floyd.floyd_web_host)
click.confirm('Authentication token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please copy and paste the token here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
| import click
import webbrowser
import floyd
from floyd.client.auth import AuthClient
from floyd.manager.auth_config import AuthConfigManager
from floyd.model.access_token import AccessToken
from floyd.log import logger as floyd_logger
@click.command()
def login():
"""
Log into Floyd via Auth0.
"""
cli_info_url = "{}/cli".format(floyd.floyd_web_host)
click.confirm('Access token page will now open in your browser. Continue?', abort=True, default=True)
webbrowser.open(cli_info_url)
access_code = click.prompt('Please paste the code here', type=str, hide_input=True)
user = AuthClient().get_user(access_code)
access_token = AccessToken(username=user.username,
token=access_code)
AuthConfigManager.set_access_token(access_token)
floyd_logger.info("Login Successful")
@click.command()
def logout():
"""
Logout of Floyd.
"""
AuthConfigManager.purge_access_token()
| apache-2.0 | Python |
3e3c885a42ac1422e6e1be763c4fb8cb58d51595 | remove print | 1ookup/shadowsocks,wan-qy/shadowsocks,ellasafy/shadowsocks,wsjmnh/shadowsocks,ITJesse/shadowsocks,weddge/shadowsocks,Peterpig/shadowsocks,zswang/shadowsocks,monalisir/shadowsocks,murmuryu/shadowsocks,Axure/shadowsocks-1,YoungGit/shadowsocks,Yexiaoxing/shadowsocks,vinceyuan/shadowsocks,Martinho0330/shadowsocks,karrra/shadowsocks,JexCheng/shadowsocks,fo0nikens/shadowsocks,sapchen/shadowsocks,meowlab/shadowsocks-comment,lzz5235/shadowsocks,BeetMan/shadowsocks,zippera/shadowsocks,MarlinL/shadowsocks,snow9312/shadowsocks,newcastlecy/shadowsocks,smartwsw/shadowsocks,paulzhousz/shadowsocks,Axure/shadowsocks-1,54Pany/shadowsocks,swordyang/shadowsocks,abbshr/shadowsocks,aCoder2013/shadowsocks,bartley-le/shadowsocks,Licshee/shadowsocks,kcyeu/shadowsocks,reorx/shadowsocks,m1a0yu3/shadowsocks,brettweir/shadowsocks,lovels/shadowsocks,outsinre/shadowsocks,zhaofengli/shadowsocks,heatonnobu/shadowsocks,kimw/shadowsocks,chobitly/shadowsocks,Velkan/shadowsocks-1,yalewoosoft/shadowsocks,dmiedema/shadowsocks,zhiying8710/shadowsocks,yuexy/shadowsocks,XiaoKa741/shadowsocks,hxdyxd/shadowsocks,congmomo/shadowsocks,baolfire/shadowsocks,WorkingOfTimtohyZhang/shadowsocks,qingxp9/shadowsocks,gameboy709494/shadowsocks,tangtaijia/shadowsocks,ergobot/shadowsocks,ghmajx/shadowsocks,yeyuexia/shadowsocks,icetoggle/shadowsocks,KagamiChan/shadowsocks,toxicming/shadowsocks,spyofchina/shadowsocks,muyexi/shadowsocks,ListFranz/shadowsocks,hellocreep/shadowsocks,zicai/shadowsocks,Long-live-shadowsocks/shadowsocks,HavoStrean/shadowsocks,MrLYC/shadowsocks,dmiedema/shadowsocks,eintr/shadowsocks,zhujiangang/shadowsocks,maintiendrai/shadowsocks,Blunt1991/shadowsocks,falseen/shadowsocks,hzy87email/shadowsocks,a358003542/shadowsocks,drewet/shadowsocks,wangxin01/shadowsocks,EvilCult/shadowsocks,husless/shadowsocks,yh453926638/shadowsocks,allweax/shadowsocks,shenfei/shadowsocks,zzvv/shadowsocks,ChengDaHaI/shadowsocks,cgcgbcbc/shadowsocks,jabez1314/shadowsock
s-1,miter/shadowsocks,ditupao/shadowsocks,catinred2/shadowsocks,luckypoem/shadowsocks,houkanshan/shadowsocks,venj/shadowsocks,bygit/shadowsocks,meizhoubao/shadowsocks,kimifetch/shadowsocks,falseen/shadowsocks,sapchen/shadowsocks,Xiaolang0/shadowsocks,pjq/shadowsocks,AwwCookies/shadowsocks,denglj/shadowsocks,v42me/shadowsocks,plus1s/shadowsocks-py-mu,kimifetch/shadowsocks,newcastlecy/shadowsocks-1,hugoxia/shadowsocks,sweet3c/shadowsocks,jwu/shadowsocks,niepan1/shadowsocks,raptorz/shadowsocks,xjxuvup/shadowsocks,mike-BV/shadowsocks,jaredbonobos/shadowsocks,alayii/shadowsocks,marquisthunder/shadowsocks,jessiejea/shadowsocks,VinceZK/shadowsocks,StarDuster/shadowsocks,TaburisSAMA/shadowsocks,spyofchina/shadowsocks,wuwen5/shadowsocks,cynricshu/shadowsocks,shaneZhang/shadowsocks,jzmq/shadowsocks,lamkakyun/shadowsocks,ghostwang/shadowsocks,xjtdy888/shadowsocks,Lessica/shadowsocks,cqqccqc/shadowsocks,cornerot/shadowsocks,MingruiHua/shadowsocks,XakepSDK/shadowsocks,dak/shadowsocks,warjiang/shadowsocks,faint32/shadowsocks,Roy1993sun/shadowsocks,jfojfo/shadowsocks,mthli/shadowsocks,simple88/shadowsocks,Xarrow/shadowsocks,jik1992/fork-shadowsocks,tsunli/shadowsocks,LiuWenJu/shadowsocks,lixingcong/shadowsocks_analysis,Julyyq/shadowsocks,rainbownight/shadowsocks,babykiss4ever/shadowsocks,wingerted/shadowsocks,jorik041/shadowsocks-1,bradparks/shadowsocks,Galaxy-yy/shadowsocks,jiangzhouq/shadowsocks,Mediaeater/shadowsocks,chitanda/shadowsocks,crawlersick/shadowsocks,jasonbu/shadowsocks,aCoder2013/shadowsocks,Zozoz/shadowsocks,HsingPeng/shadowsocks,MRunFoss/shadowsocks,leotso/shadowsocks,dxq-git/shadowsocks,zhangwengame/shadowsocks,gitmithy/shadowsocks,zhenglaizhang/shadowsocks,suzp1984/shadowsocks,blueyi/shadowsocks,jwu/shadowsocks,sinperwolf/shadowsocks,BeMxself/shadowsocks,assad2012/shadowsocks,phisiart/shadowsocks,li77leprince/shadowsocks,gechdcb/shadowsocks,noblocknet/shadowsocks,touzi/shadowsocks,sntitan/shadowsocks,skyling/shadowsocks,notyal/shadowsocks,qinmenghua/shadowsocks,
jiang42/shadowsocks,w1ndy/shadowsocks,haoerloveyou/shadowsocks,zilongshanren/shadowsocks,xwv/shadowsocks,zhaofengli/shadowsocks,coder-chenzhi/shadowsocks,cameronjacobson/shadowsocks,husless/shadowsocks,jmpews/shadowsocks,xh4n3/shadowsocks,magic282/shadowsocks,xh4n3/shadowsocks,kafuuchino/shadowsocks,wuwen5/shadowsocks,Roy1993sun/shadowsocks,wandergis/shadowsocks,tegusi/shadowsocks,navyjeff/shadowsocks,catscarlet/shadowsocks,sspanel/shadowsocks,cheny95/shadowsocks,w2040w/shadowsocks,Wujerry/shadowsocks,LinEvil/shadowsocks,gahoo/shadowsocks,yangwe1/shadowsocks,TheWaWaR/shadowsocks,terrychenism/shadowsocks,john123951/shadowsocks,michalliu/shadowsocks,fangdingjun/shadowsocks,zcchen/shadowsocks,bravo-t/shadowsocks,ultraseven/shadowsocks,DavidXZK/shadowsocks-1,zimhy/shadowsocks,Sigma-Algebra/ss,kunki/shadowsocks,agentmario/shadowsocks,jessiejea/shadowsocks,RoyLING/shadowsocks,kaolalotree/shadowsocks,hxx0215/shadowsocks,fengyqf/shadowsocks,eehello/shadowsocks,li4li5li6/shadowsocks-1,xialincn/shadowsocks,starvii/Canopus,handaoliang/shadowsocks,klamtlne/shadowsocks,Benyjuice/shadowsocks,coderyi/shadowsocks,rongdede/ss,0ps/shadowsocks,zhongpei/shadowsocks,ajpaulson/shadowsocks,jswxdzc/shadowsocks,egrcc/shadowsocks_analysis,shl3807/shadowsocks,kigawas/shadowsocks-learning,tedaz/shadowsocks,renzhn/shadowsocks,suclogger/shadowsocks,paomian/shadowsocks,Galaxy-yy/shadowsocks,Kenshinhu/shadowsocks,shiyanhui/shadowsocks,Julyyq/shadowsocks,monokoo/shadowsocks,notyal/shadowsocks,yuexy/shadowsocks,crvv/shadowsocks,levythu/shadowsocks,luyi7338/shadowsocks,LazyClutch/shadowsocks,shiyemin/shadowsocks,lovely3x/shadowsocks,zhufenggood/shadowsocks,yan9yu/shadowsocks,cdsama/shadowsocks,tinyao/shadowsocks,happyforhappy/shadowsocks,7uk0n/shadowsocks,galaxy001/shadowsocks,v42me/shadowsocks,coder-chenzhi/shadowsocks,yeyuexia/shadowsocks,jbaginski/shadowsocks,lepture/shadowsocks,RobberPhex/shadowsocks,JackonYang/shadowsocks,righthandabacus/shadowsocks,h404bi/shadowsocks,qingxp9/shadowsocks,uptown1
919/shadowsocks,lovezz/shadowsocks,tnndwc/shadowsocks,CalvinNeo/shadowsocks,bgame/shadowsocks,uptown1919/shadowsocks,imWildCat/shadowsocks,magikpns/shadowsocks,aquasky111/shadowsocks,lialosiu/shadowsocks,nickleefly/shadowsocks,billypon/shadowsocks,lzwjava/shadowsocks,Jeromefromcn/shadowsocks,paomian/shadowsocks,kaneawk/shadowsocks,googlehim/shadowsocks,pinetum/shadowsocks,zspsky/manyuser,a358003542/shadowsocks,smaty1/shadowsocks-1,stableShip/shadowsocks,easton402/Disguise-Shadowsocks,Sigma-Algebra/ssmanyuser,k1995/shadowsocks,GOGOsu/shadowsocks,CatMe0w/shadowsocks,hymRedemption/shadowsocks,ITJesse/shadowsocks,Lchiffon/shadowsocks,xiaoshaozi52/shadowsocks,xyguo/shadowsocks,k1995/shadowsocks,lexuszhi1990/shadowsocks,flwh/shadowsocks,shadowsocksR-private/shadowsocksR,whuhzx/shadowsocks,ruikong/shadowsocks,Lautitia/shadowsocks,zhaojunmo/shadowsocks,monycn/shadowsocks,jilir/shadowsocks,tylinux/shadowsocks,llych/shadowsocks,CannonFotter/shadowsocks,ypwanghh/shadowsocks,shaunstanislaus/shadowsocks,shidao-fm/shadowsocks,sntitan/shadowsocks,shiyanhui/shadowsocks,shuihuo/shadowsocks,taizilongxu/shadowsocks,Lchiffon/shadowsocks,guyskk/shadowsocks-1,ypwanghh/shadowsocks,gengen1988/ssstudy,halfelf/shadowsocks,AmberWhiteSky/shadowsocks,EveCoffee/shadowsocks,ovear/shadowsocks,crazygold/shadowsocks-rm,zilongshanren/shadowsocks,yangwe1/shadowsocks,scutdk/shadowsocks-1,undeaht0918/shadowsocks,sjq597/shadowsocks,kenwang76/shadowsocks,66CCFF/shadowsocks,DropFan/shadowsocks,DIYgod/shadowsocks,abhishekgahlot/shadowsocks,XyuWang/shadowsocks,cgcgbcbc/shadowsocks,geligaoli/shadowsocks,GeekTheRipper/shadowsocks,nlfox/shadowsocks,Aaron1992/shadowsocks-final,shines77/shadowsocks,nnjpp/shadowsocks,Ineluctable/shadowsocks,antinucleon/shadowsocks,qida/shadowsocks,moria/shadowsocks,linkdesu/shadowsocks,begeeben/shadowsocks,kitakamiooi/shadowsocks,mengskysama/shadowsocks-rm,ligenlive/shadowsocks,Node-X/shadowsocks,liangliangyy/shadowsocks,freeznet/shadowsocks,cyandata/shadowsocks,MrLYC/shadowsocks,
fdzh/shadowsocks,BeetMan/shadowsocks,Blunt1991/shadowsocks,Frederick888/shadowsocks,imcaffrey/shadowsocks,dzhuang/shadowsocks-1,djyde/shadowsocks,liuyix/shadowsocks,shidao-fm/shadowsocks,glzjin/shadowsocks,marvinlee/shadowsocks,murusu/shadowsocks,guiyubj/shadowsocks,mrmign/shadowsocks,xqq/shadowsocks,orsonwang/shadowsocks,Aaron1992/shadowsocks-final,joyechan/shadowsocks,GeekTheRipper/shadowsocks,cheny95/shadowsocks,zicai/shadowsocks,qin-nz/shadowsocks,cameronjacobson/shadowsocks,geekdada/shadowsocks,kookxiang/shadowsocks,chunqian/shadowsocks,MingruiHua/shadowsocks,EveCoffee/shadowsocks,jiagang/shadowsocks,hgh123/shadowsocks,qiqian/shadowsocks,LoongWin/shadowsocks,oukaitou/shadowsocks,gogozs/shadowsocks,handaoliang/shadowsocks,mewiteor/shadowsocks,loveward/yingsuo,ghostwang/shadowsocks,WeShadowsocks/shadowsocks,Geek000/shadowsocks,jeffcao/shadowsocks,renfufei/shadowsocks,john123951/shadowsocks,murmuryu/shadowsocks,oyido/shadowsocks,soliury/shadowsocks,WeShadowsocks/shadowsocks,hazytint/shadowsocks,qida/shadowsocks,navyjeff/shadowsocks,CecilHarvey/shadowsocks,abv76/shadowsocks,lecason/shadowsocks,tobegit3hub/shadowsocks,gdey/shadowsocks,CasparLi/shadowsocks_analysis,lovezz/shadowsocks,shuzi0/ss,zhuxiucai/shadowsocks,chenrenyi/shadowsocks,Alexoner/shadowsocks,orvice/shadowsocks,Dietr1ch/shadowsocks,hgh123/shadowsocks,wb14123/shadowsocks,ethanyoung/shadowsocks,gogoout/shadowsocks,lovels/shadowsocks,hjf9259/shadowsocks,Andy-Amoy/shadowsocks,IronXiao/shadowsocks,raptorz/shadowsocks,coderyi/shadowsocks,hyanwang/shadowsocks,alpenliebe/shadowsocks,xtypebee/shadowsocks,zhuanyi/shadowsocks,cqqccqc/shadowsocks,tesla4321/shadowsocks,sntitan/shadowsocks_analysis,esdeathlove/shadowsocks,cityofEmbera/shadowsocks,GregoryShen/shadowsocks,Antsypc/shadowsocks,hyanwang/shadowsocks,ghmajx/shadowsocks,lixingcong/shadowsocks_analysis,JianfuLi/shadowsocks,qizhihere/shadowsocks,yuxiang-zhou/shadowsocks,YoungGit/shadowsocks,WaybackMachine/shadowsocks,smy116/shadowsocks,oyido/shadowsocks,anyfo
rever/shadowsocks,insionng/shadowsocks,xiaoshaozi52/shadowsocks,kenwang76/shadowsocks,qiuai/shadowsocks,kuangyeheng/shadowsocks,purelandbb/shadowsocks_analysis,jorik041/shadowsocks-1,troyliu0105/shadowsocks,todaylover/shadowsocks,idf-archive/shadowsocks,kH0d0r/shadowsocks,smilezino/shadowsocks,touhou-gensokyo/shadowsocks,Geek000/shadowsocks,hjie/shadowsocks,jiachenwang/shadowsocks,vashstorm/shadowsocks,huueikmz/shadowsocks,ExiaHan/shadowsocks,panpan-zhang/shadowsocks,Laforeta/shadowsocks,vanish87/shadowsocks,wangyu190810/shadowsocks,imcaffrey/shadowsocks,kid143/shadowsocks,copyliu/shadowsocks,cornerot/shadowsocks,Eagles2F/shadowsocks,dayed/shadowsocks-1,weisk/shadowsocks,flyingghost/shadowsocks,NichoZhang/shadowsocks,KublaikhanGeek/shadowsocks,tommyZZM/shadowsocks,caengcjd/shadowsocks,rhlass/shadowsocks,JackonYang/shadowsocks,soliury/shadowsocks,apanly/shadowsocks,bctnry/shadowsocks,baolfire/shadowsocks,supertanglang/shadowsocks,zpzgone/shadowsocks-1,avastms/shadowsocks,swordyang/shadowsocks,t3573393/shadowsocks,tsunli/shadowsocks,bybyby/shadowsocks,wciq1208/shadowsocks,trigged/shadowsocks,blueyi/shadowsocks,zeronat/shadowsocks,bparafina/shadowsocks,linkdesu/shadowsocks,wesley1001/shadowsocks_analysis,kaneawk/shadowsocksr,waynehuge/shadowsocks,trigged/shadowsocks,Lingku/shadowsocks,pnjgyl/shadowsocks,BMan-L/shadowsocks,zeronat/shadowsocks,tobegit3hub/shadowsocks,erizhang/shadowsocks,panpan-zhang/shadowsocks,PeterDing/shadowsocks,0xwindows/shadowsocks,yalewoosoft/shadowsocks,fangcode/shadowsocks,lj3lj3/shadowsocks,Sneezry/shadowsocks,kingname/shadowsocks,egrcc/shadowsocks,WaybackMachine/shadowsocks,i5ting/shadowsocks,vg0x00/shadowsocks,haha1903/shadowsocks,jbaginski/shadowsocks,lery3510/shadowsocks,nan86150/shadowsocks,sorz/shadowsocks,TigerHix/shadowsocks,xuzhenglun/shadowsocks,odopsha/shadowsocks,zswang/shadowsocks,weittor/shadowsocks,lasting-yang/shadowsocks,WillGhost/shadowsocks,WANG-lp/shadowsocks,hellofwy/shadowsocks,pobizhe/shadowsocks,morlay/shadowsocks,purpl
ewall1206/shadowsocks,nteplov/example-plugin,takaaptech/shadowsocks,ywjno/shadowsocks,cmzz/shadowsocks,undeaht0918/shadowsocks,kingcc/shadowsocks,chitanda/shadowsocks,pluspku/ss,CareF/shadowsocks,aq2004723/shadowsocks,zwpaper/shadowsocks,kuangyeheng/shadowsocks,tonyseek/shadowsocks,blog2i2j/shadowsocks_analysis,mage3k/shadowsocks,sakuyaa/shadowsocks,Baoyx007/shadowsocks,sntitan/shadowsocks_analysis,tonyseek/shadowsocks,Laforeta/shadowsocks,iVanlIsh/shadowsocks,BuGoNee/shadowsocks,faint32/shadowsocks,ExiaHan/shadowsocks,crccw/shadowsocks,goodjob1114/shadowsocks,mawentao007/s_copy_s,Sigma-Algebra/ssmanyuser,purelandbb/shadowsocks_analysis,wangyu190810/shadowsocks,jiagang/shadowsocks,darouwan/shadowsocks,levythu/shadowsocks,adison/shadowsocks,Zozoz/shadowsocks,Austinpb/shadowsocks,B1gtang/shadowsocks,littlebearz/shadowsocks,shadowsocksR-private/shadowsocksR,zhangwengame/shadowsocks,hjz15k6/shadowsocks,stableShip/shadowsocks,Vayn/shadowsocks,sinuos/shadowsocks,331164885/shadowsocks,jabez1314/shadowsocks-1,StarDuster/shadowsocks,wewea/shadowsocks,vinceyuan/shadowsocks,flyoverGu/shadowsocks,vgecko/shadowsocks,LiuWenJu/shadowsocks,rongdede/ss,XyuWang/shadowsocks,Dauth/shadowsocks,littlebearz/shadowsocks,hanqi7012/shadowsocks,7demo/shadowsocks,mchome/shadowsocks,rainbownight/shadowsocks,assad2012/shadowsocks,suclogger/shadowsocks,anyforever/shadowsocks,surajx/shadowsocks,bbiao/shadowsocks,dyzdyz010/shadowsocks,agentmario/shadowsocks,0ps/shadowsocks,oday0311/shadowsocks,SatanWoo/shadowsocks,lexchou/shadowsocks,joyechan/shadowsocks,hanguofeng/shadowsocks,Uh-huh-Philip/shadowsocks,maintiendrai/shadowsocks,stkevintan/shadowsocks,wangbo5759/shadowsocks,wangxin01/shadowsocks,lazybios/shadowsocks,sspanel/shadowsocks,superlucky8848/shadowsocks,pnjgyl/shadowsocks,shawnsschen/shadowsocks,karrra/shadowsocks,aim16/shadowsocks0,hangim/shadowsocks,wswplay/shadowsocks,John-Lin/shadowsocks,ivanberry/shadowsocks,zhangf911/shadowsocks,ChiChou/shadowsocks,ibigbug/shadowsocks,HavoStrean/shadow
socks,haha1903/shadowsocks,zyingp/shadowsocks,dyzdyz010/shadowsocks,qdk0901/shadowsocks,MayQ/shadowsocks,nickleefly/shadowsocks,hjhjw1991/shadowsocks,billypon/shadowsocks,wb14123/shadowsocks,ahvonenj/shadowsocks,QGB/shadowsocks,banbanchs/shadowsocks,jfojfo/shadowsocks.breakwa11,willxiang/shadowsocks,BWITS/shadowsocks,Evegen55/shadowsocks,llych/shadowsocks,Sneezry/shadowsocks,aulphar/shadowsocks,rongdede/shadowsocks,kang000feng/shadowsocks,jcpwfloi/shadowsocks,kyokuki/shadowsocks,xjz19901211/shadowsocks,miaoski/shadowsocks,marvinlee/shadowsocks,glasslion/shadowsocks,marquisthunder/shadowsocks,murdercdh/shadowsocks,beni55/shadowsocks,liguangsheng/shadowsocks,airbreather/shadowsocks,airbai/shadowsocks,shoo-be-doo/shadowsocks,tbronchain/shadowsocks,qhwa/shadowsocks,haleylu/shadowsocks,dxq-git/shadowsocks,ShadowPower/shadowsocks,cczhong11/shadowsocks,FuckUpGFW/shadowsocks,kikyous/shadowsocks,tbronchain/shadowsocks,warmchang/shadowsocks,suzp1984/shadowsocks,kikyous/shadowsocks,lxiange/shadowsocks,ethanyoung/shadowsocks,noblocknet/shadowsocks,orvice/shadowsocks,cat9/shadowsocks,yh453926638/shadowsocks,jerryz1982/shadowsocks,Mooxe000/shadowsocks,ttkx/shadowsocks,vashstorm/shadowsocks,RockerFlower/shadowsocks,zimhy/shadowsocks,adeindie/shadowsocks,zhuxiucai/shadowsocks,alphabity/shadowsocks,whuhzx/shadowsocks,trentswd/shadowsocks,Justin-lu/shadowsocks,blog2i2j/shadowsocks_analysis,SamWanng/shadowsocks,touzi/shadowsocks,murdercdh/shadowsocks,peterzky/shadowsocks,hazytint/shadowsocks,smpetrey/shadowsocks,renyinew/shadowsocks,ddmax/shadowsocks,chenhao890411/shadowsocks,galaxy001/shadowsocks,erizhang/shadowsocks,Andy-Amoy/shadowsocks,qizhihere/shadowsocks,free2000fly/shadowsocks,h1994st/shadowsocks,ixfan/shadowsocks,shonenada/shadowsocks,jerryz1982/shadowsocks,shuihuo/shadowsocks,dongyuwei/shadowsocks,ianisme/shadowsocks,hanguofeng/shadowsocks,kigawas/shadowsocks-learning,RichardMei/shadowsocks,akar1nchan/shadowsocks,kaneawk/shadowsocksr,mrmign/shadowsocks,hzqim/mengskysama,seri
ka00/shadowsocks,bctnry/shadowsocks,loulijun/shadowsocks,shiyemin/shadowsocks,jackjm/shadowsocks,0xwindows/shadowsocks,zankard/shadowsocks,NamedGod/shadowsocks,whilu/shadowsocks,shliujing/shadowsocks,kunki/shadowsocks,starvii/Canopus,aquasky111/shadowsocks,lulucici/shadowsocks,w2040w/shadowsocks,bparafina/shadowsocks,kcyeu/shadowsocks,WorkingOfTimtohyZhang/shadowsocks,wanghaisheng/shadowsocks,sweet3c/shadowsocks,hitrust/shadowsocksbak,030io/shadowsocks,pfctgeorge/shadowsocks,hxdyxd/shadowsocks,TimonPeng/shadowsocks,hxddh/shadowsocks,touhou-gensokyo/shadowsocks,ywjno/shadowsocks,xieshenglin/shadowsocks,yygcom/shadowsocks,dak/shadowsocks,amsuny/shadowsocks,udo-tech-team/shadowsocks_analysis,gitchs/shadowsocks,x54621/shadowsocks,ratazzi/shadowsocks,iq72/shadowsocks,skyline75489/shadowsocks,tiiime/shadowsocks,fangcode/shadowsocks,csuideal/shadowsocks,sakuyaa/shadowsocks,tfhavingfun/shadowsocks,rankun203/shadowsocks,ecode/shadowsocks,monalisir/shadowsocks,ajpaulson/shadowsocks,shoo-be-doo/shadowsocks,olivererwang/shadowsocks,shawnsschen/shadowsocks,gaotongfei/shadowsocks,yxxyun/shadowsocks,GIANTCRAB/shadowsocks,koiszzz/shadowsocks,waynehuge/shadowsocks,AwwCookies/shadowsocks,lsyiverson/shadowsocks,yuluo-ding/shadowsocks,SinTi/shadowsocks,Rand01ph/shadowsocks,yeweishuai/shadowsocks-1,alayii/shadowsocks,Martinho0330/shadowsocks,danny200309/shadowsocks,MetSystem/shadowsocks,RoyLING/shadowsocks,gengen1988/ssstudy,gogozs/shadowsocks,tuxlinuxien/shadowsocks,wewea/shadowsocks,toooonyy/shadowsocks,NamedGod/shadowsocks,i5ting/shadowsocks,KublaikhanGeek/shadowsocks,nolouch/shadowsocks,zwpaper/shadowsocks,yygcom/shadowsocks,sharpwhisper/shadowsocks,yu19930123/shadowsocks-shadowsocks,smaty1/shadowsocks-1,licess/shadowsocks,flwh/shadowsocks,lucifersun/shadowsocks,NanaLich/shadowsocks,NitroXenon/shadowsocks,stallman-cui/shadowsocks_analysis,Z4Tech/shadowsocks,hellocreep/shadowsocks,shadowsocksr-backup/shadowsocksr,sinuos/shadowsocks,udo-tech-team/shadowsocks-1,zishell/shadowsocks,lixi
ngcong/shadowsocks,langyapojun/shadowsocks,zhaojunmo/shadowsocks,lovewitty/shadowsocks,deliangyang/shadowsocks-1,liujianpc/shadowsocks,7uk0n/shadowsocks,lxiange/shadowsocks,drakeet/shadowsocks,zcchen/shadowsocks,moria/shadowsocks,zxjcarrot/shadowsocks,glzjin/shadowsocks,quericy/shadowsocks,caengcjd/shadowsocks,ddmax/shadowsocks,insionng/shadowsocks,XiaoKa741/shadowsocks,zippera/shadowsocks,aim16/shadowsocks,La-Volpe/shadowsocks,flrngel/shadowsocks,glasslion/shadowsocks,Yexiaoxing/shadowsocks,chunqian/shadowsocks,Ivicel/shadowsocks,sjq597/shadowsocks,zhfish/shadowsocks,kaolalotree/shadowsocks,jellyshen/shadowsocks,tesla4321/shadowsocks,MRunFoss/shadowsocks,yasinn/shadowsocks,darknessomi/shadowsocks,vitamincpp/shadowsocks,gotbannedcode/shadowsocks,magikpns/shadowsocks,Justin-lu/shadowsocks,abhishekgahlot/shadowsocks,skykiny/shadowsocks,jeremiahyan/shadowsocks,leonzhouwei/you-know-ss,qinalei/shadowsocks2,cityofEmbera/shadowsocks,qiuai/shadowsocks,jerryling315/shadowsocks_analysis,lululau/shadowsocks,meizhoubao/shadowsocks,BeMxself/shadowsocks,mewiteor/shadowsocks,ShionRyuu/shadowsocks,phunterlau/shadowsocks,h1994st/shadowsocks,SamWanng/shadowsocks,OrangeJuicePlease/shadowsocks,gitHubOffical/shadowsocks_analysis,pinetum/shadowsocks,Jeromefromcn/shadowsocks,flyoverGu/shadowsocks,lsyiverson/shadowsocks,allweax/shadowsocks,Wujerry/shadowsocks,srekcah/shadowsocks,Stan-Lin-/shadowsocks,murusu/shadowsocks,tinyao/shadowsocks,ufownl/shadowsocks,GeassDB/shadowsocks,sutun2008/shadowsocks,FuriousSlade/shadowsocks,tuxlinuxien/shadowsocks,klamtlne/shadowsocks,serika00/shadowsocks,WillGhost/shadowsocks,leizongmin/shadowsocks,gdey/shadowsocks,liamchzh/shadowsocks,9hao/shadowsocks,synacks/shadowsocks-1,CareF/shadowsocks,followmaster/shadowsocks,witcxc/shadowsocks,MarlinL/shadowsocks,Mucid/shadowsocks,zhongpei/shadowsocks,superlucky8848/shadowsocks,pittacus/shadowsocks,liangliangyy/shadowsocks,netnetnet2/shadowsocks,Velkan/shadowsocks-1,ivanberry/shadowsocks,morlay/shadowsocks,gongshw/s
hadowsocks,wangtai/shadowsocks-1,quericy/shadowsocks,126ium/shadowsocks,shaobozi/shadowsocks,sage417/shadowsocks,Qixingchen/shadowsocks_analysis,AirScr/shadowsocks,lovejatps/shadowsocks,toxicming/shadowsocks,itplanes/shadowsocks,michaelhe/shadowsocks,tangtaijia/shadowsocks,ilovintit/shadowsocks,tommyZZM/shadowsocks,striges/shadowsocks,pandada8/shadowsocks,freedomkk-qfeng/shadowsocks,joesonw/shadowsocks,peterzky/shadowsocks_analysis,ayumilove/shadowsocks,csersoft/shadowsocks,momo173/Shadowsocks,banre123/shadowsocks,muyexi/shadowsocks,evshiron/shadowsocks,Stan-Lin-/shadowsocks,Antsypc/shadowsocks,halfelf/shadowsocks,crvv/shadowsocks,zankard/shadowsocks,IronXiao/shadowsocks,kH0d0r/shadowsocks,imWildCat/shadowsocks,newcastlecy/shadowsocks,YueHonghui/shadowsocks,rankun203/shadowsocks,cat9/shadowsocks,SHAU-LOK/shadowsocks,hjie/shadowsocks,taizilongxu/shadowsocks,justfbt/shadowsocks,jchbh-duplicate/shadowsocks,lj3lj3/shadowsocks,xidui/shadowsocks,pittacus/shadowsocks,adison/shadowsocks,JLHwung/shadowsocks-org,taoger/shadowsocks,luyi7338/shadowsocks,freeznet/shadowsocks,KeviGary/shadowsocks,GregoryShen/shadowsocks,albertgh/shadowsocks,Z4Tech/shadowsocks,freedomkk-qfeng/shadowsocks,dayed/shadowsocks-1,jqk6/shadowsocks,xtypebee/shadowsocks,fengyie007/shadowsocks,tedaz/shadowsocks,126ium/shadowsocks,zhanghonglang/shadowsocks,shliujing/shadowsocks,hqlyz/shadowsocks,LeiZeng/shadowsocks,mthli/shadowsocks,geminiwen/shadowsocks,naturs/shadowsocks,noe132/shadowsocks,hellofwy/shadowsocks,warmchang/shadowsocks,dihihi/ssb,youkochan/shadowsocks-analysis,janstk/shadowsocks,arakashic/shadowsocks,apanly/shadowsocks,Spacecup/shadowsocks,B1gtang/shadowsocks,Evegen55/shadowsocks,rocxer/shadowsocks,xjxuvup/shadowsocks,hanqi7012/shadowsocks,yan9yu/shadowsocks,intermezzo-fr/shadowsocks,aim16/shadowsocks,ruikong/shadowsocks,guyskk/shadowsocks,snowmap/shadowsocks,ergobot/shadowsocks,TOSPIO/shadowsocks,madwiki/shadowsocks,synacks/shadowsocks-1,Sigma-Algebra/ss,wangzhe0417/shadowsocks,Lautitia/shado
wsocks,kaneawk/shadowsocks,zhengnanlee/shadowsocks,John-Shaw/shadowsocks,shl3807/shadowsocks,hashmaparraylist/shadowsocks,Icenowy/shadowsocks,Nymphet/shadowsocks,mengskysama/shadowsocks-rm,licheedev/shadowsocks,xlaser/shadowsocks,vg0x00/shadowsocks,bygit/shadowsocks,Ricardo666666/shadowsocks,jfojfo/shadowsocks.breakwa11,yuchuanzhen/shadowsocks,copyliu/shadowsocks,MoguCloud/shadowsocks,tsdl2013/shadowsocks,RichardMei/shadowsocks,ibigbug/shadowsocks,CalvinNeo/shadowsocks,Lucnsy/shadowsocks,edwardtoday/shadowsocks,MarshallChen/shadowsocks,Velkan/shadowsocks-2,gotbannedcode/shadowsocks,kid551/shadowsocks,kid143/shadowsocks,YueHonghui/shadowsocks,geligaoli/shadowsocks-1,babykiss4ever/shadowsocks,miyouzi/ch-docker-ssr,liujianpc/shadowsocks,331164885/shadowsocks,letup/shadowsocks,FireTercel/shadowsocks,Silianbo/shadowsocks,chobitly/shadowsocks,jamesmarva/shadowsocks,yeweishuai/shadowsocks-1,free2000fly/shadowsocks,goodbest/shadowsocks,mage3k/shadowsocks,shuzi0/ss,darouwan/shadowsocks,loveward/yingsuo,airbai/shadowsocks,shaunstanislaus/shadowsocks,mj340522/shadowsocks,TaburisSAMA/shadowsocks,li77leprince/shadowsocks,windygu/shadowsocks,MetSystem/shadowsocks,trotyl/shadowsocks,hzfywkj/shadowsocks,suntopo/shadowsocks,huntzhan/shadowsocks,sharelinuxs/shadowsocks,ntfreedom/neverendshadowsocks,wumch/shadowsocks,FuriousSlade/shadowsocks,jiangzhouq/shadowsocks,leonzhouwei/you-know-ss,intermezzo-fr/shadowsocks,jy1989/shadowsocks,egenchen/shadowsocks,Anfernee1/shadowsocks,jmpews/shadowsocks,htfy96/shadowsocks,peterzky/shadowsocks_analysis,John-Shaw/shadowsocks,mxd1971/-ssh-root-2a02-4780-1-1--1-e01,zhujunsan/shadowsocks,hxx0215/shadowsocks,ChengDaHaI/shadowsocks,ultimate010/shadowsocks,dongyuwei/shadowsocks,LimitlessYou/shadowsocks,xqq/shadowsocks,hengqujushi/shadowsocks,letup/shadowsocks,liberize/shadowsocks,toooonyy/shadowsocks,Linusp/shadowsocks,light0x/ss,jilir/shadowsocks,csersoft/shadowsocks,liguangsheng/shadowsocks,ukoinobita/shadowsocks,reorx/shadowsocks,kigawas/shadowsocks,
HsingPeng/shadowsocks,wsjmnh/shadowsocks,smy116/shadowsocks,oglops/shadowsocks,WinterXMQ/shadowsocks,falseen/shadowsocks_analysis,NanaLich/shadowsocks,slurdge/shadowsocks,orsonwang/shadowsocks,justfbt/shadowsocks,nlfox/shadowsocks,bwinhwang/shadowsocks,netnetnet2/shadowsocks,smartwsw/shadowsocks,jchbh-duplicate/shadowsocks,kid551/shadowsocks-1,renfufei/shadowsocks,fengyqf/shadowsocks,abv76/shadowsocks,Frederick888/shadowsocks,samuel-liyi/shadowsocks,iVanlIsh/shadowsocks,DIYgod/shadowsocks,wanghaisheng/shadowsocks,bolabola/shadowsocks,ratazzi/shadowsocks,kmalloc/shadowsocks,KagamiChan/shadowsocks,Eagles2F/shadowsocks,ufownl/shadowsocks,instmy/shadowsocks,DropFan/shadowsocks-1,drewet/shadowsocks,MoguCloud/shadowsocks,ntfreedom/neverendshadowsocks,smilezino/shadowsocks,lancetw/shadowsocks,FuckUpGFW/shadowsocks,66CCFF/shadowsocks,gclove/shadowsocks,nolouch/shadowsocks,cielpy/shadowsocks,w1ndy/shadowsocks,lengyue524/shadowsocks,zhanghonglang/shadowsocks,hotoo/shadowsocks,newcastlecy/shadowsocks-1,pihao/shadowsocks,LazyClutch/shadowsocks,takaaptech/shadowsocks,fishky/shadowsocks,shuzi0/ssr,liamchzh/shadowsocks,Dauth/shadowsocks,hucaloof/shadowsocks,faynwol/shadowsocks,shell909090/shadowsocks,xmoeproject/shadowsocks,slurdge/shadowsocks,odopsha/shadowsocks,efateva/shadowsocks,fangdingjun/shadowsocks,ShadowPower/shadowsocks,ttkx/shadowsocks,Tinysymphony/shadowsocks,wangtai/shadowsocks-1,naturs/shadowsocks,ChiChou/shadowsocks,jik1992/fork-shadowsocks,waytai/shadowsocks,zishell/shadowsocks,ixfan/shadowsocks,shonenada/shadowsocks,qhwa/shadowsocks,oday0311/shadowsocks,followmaster/shadowsocks,bolabola/shadowsocks,JoshOY/shadowsocks,synacks/shadowsocks,huntzhan/shadowsocks,CecilHarvey/shadowsocks,ditupao/shadowsocks,brettweir/shadowsocks,chendeben/shadowsocks5,niepan1/shadowsocks,codevlabs/shadowsocks,Voidly/shadowsocks,gy6221/shadowsocks,olivererwang/shadowsocks,light0x/ss,DannyVim/shadowsocks,Mediaeater/shadowsocks,Xarrow/shadowsocks,jasonbu/shadowsocks,beizhiying7/shadowsocks,
Nappp/shadowsocks,lialosiu/shadowsocks,vgecko/shadowsocks,dyhpoon/shadowsocks,udo-tech-team/shadowsocks-1,cczhong11/shadowsocks,qinmenghua/shadowsocks,hzy87email/shadowsocks,tylinux/shadowsocks,outsinre/shadowsocks,geminiwen/shadowsocks,TheWaWaR/shadowsocks,yangchaogit/shadowsocks,Jamsdid8/shadowsocks,samuel-liyi/shadowsocks,SwordYork/shadowsocks,surajx/shadowsocks,LoongWin/shadowsocks,liuquansheng47/shadowsocks,guyskk/shadowsocks,mchome/shadowsocks,plus1s/shadowsocks-py-mu,Qixingchen/shadowsocks_analysis,Mooxe000/shadowsocks,bartley-le/shadowsocks,luckypoem/shadowsocks,xyguo/shadowsocks,zhuanyi/shadowsocks,pandada8/shadowsocks,geligaoli/shadowsocks-1,zhangyubaka/Shadowsocks,xjtdy888/shadowsocks,udo-tech-team/shadowsocks_analysis,dzhuang/shadowsocks-1,guojiubo/shadowsocks,joesonw/shadowsocks,gaotongfei/shadowsocks,sorz/shadowsocks,xialincn/shadowsocks,chenfengyuan/shadowsocks,qdk0901/shadowsocks,CountMurphy/shadowsocks,michaelhe/shadowsocks,DreaminginCodeZH/shadowsocks,bbiao/shadowsocks,cynricshu/shadowsocks,zhujunsan/shadowsocks,Ricardo666666/shadowsocks,htfy96/shadowsocks,monokoo/shadowsocks,dyhpoon/shadowsocks,lexuszhi1990/shadowsocks,esdeathlove/shadowsocks,shines77/shadowsocks,abba18/shadowsocks,dihihi/ssb,madwiki/shadowsocks,elfland/Utils-shadowsocks,dengit/shadowsocks,astonfu/shadowsocks,catscarlet/shadowsocks,wrwcmaster/Shadowsocks,srekcah/shadowsocks,weittor/shadowsocks,tegusi/shadowsocks,lucifersun/shadowsocks,Austinpb/shadowsocks,zhiying8710/shadowsocks,shaobozi/shadowsocks,liberize/shadowsocks,Benyjuice/shadowsocks,xuzhenglun/shadowsocks,jerryling315/shadowsocks_analysis,hustnn/shadowsocks,aulphar/shadowsocks,yangchaogit/shadowsocks,SinTi/shadowsocks,fishky/shadowsocks,airbreather/shadowsocks,Licshee/shadowsocks,cataska/shadowsocks,Uh-huh-Philip/shadowsocks,tnndwc/shadowsocks,houkanshan/shadowsocks,sharelinuxs/shadowsocks,qiqian/shadowsocks,zhangf911/shadowsocks,edwardtoday/shadowsocks,dwhdorg/shadowsocks,dayed/shadowsocks,messense/shadowsocks,lamkakyun/
shadowsocks,KeviGary/shadowsocks,zzliujianbo/shadowsocks,d4smart/shadowsocks,ilovintit/shadowsocks,opapa/shadowsocks,lucienevans/shadowsocks,lovejatps/shadowsocks,jasonslyvia/shadowsocks,WencongXiao/shadowsocks,halfcrazy/shadowsocks,fhaoquan/shadowsocks,vanish87/shadowsocks,udo-tech-team/shadowsocks-4,ListFranz/shadowsocks,cyandata/shadowsocks,Lingku/shadowsocks,kmalloc/shadowsocks,DaiYue/shadowsocks,SatanWoo/shadowsocks,kid551/shadowsocks-1,followmaster/shadowsocks_analysis,catinred2/shadowsocks,hengqujushi/shadowsocks,rhlass/shadowsocks,jellyshen/shadowsocks,Jamsdid8/shadowsocks,hucaloof/shadowsocks,gccpacman/shadowsocks,gameboy709494/shadowsocks,ethanluoyc/shadowsocks,ecode/shadowsocks,tiiime/shadowsocks,CasparLi/shadowsocks_analysis,kimw/shadowsocks,michalliu/shadowsocks,LinEvil/shadowsocks,CannonFotter/shadowsocks,ZeroClover/shadowsocksr,sage417/shadowsocks,GOGOsu/shadowsocks,goodbest/shadowsocks,sinperwolf/shadowsocks,cataska/shadowsocks,renzhn/shadowsocks,beni55/shadowsocks,luo3555/shadowsocks-bak,naruto900814/shadowsocks,kimiWu/shadowsocks,GIANTCRAB/shadowsocks,it-andy-hou/shadowsocks,lulucici/shadowsocks,sunng87/shadowsocks,liushuaikobe/shadowsocks,taoger/shadowsocks,lovewitty/shadowsocks,lery3510/shadowsocks,pein0119/shadowsocks,DuPupu/shadowsocks,nteplov/example-plugin,huueikmz/shadowsocks,Neverly/shadowsocks,mj340522/shadowsocks,hanx11/shadowsocks,heimonsy/shadowsocks,pein0119/shadowsocks,striges/shadowsocks,alpenliebe/shadowsocks,CatTail/shadowsocks,nickyfoto/shadowsocks,caoqitong826/shadowsocks,tsdl2013/shadowsocks,thanatoskira/shadowsocks,gccpacman/shadowsocks,hjf9259/shadowsocks,chendeben/shadowsocks5,firestar/shadowsocks,TOSPIO/shadowsocks,FireTercel/shadowsocks,jy1989/shadowsocks,egenchen/shadowsocks,LimitlessYou/shadowsocks,xubeiyan/shadowsocks,sutun2008/shadowsocks,ultimate010/shadowsocks,zspsky/manyuser,ukoinobita/shadowsocks,kingcc/shadowsocks,xiangpeng/shadowsocks,djyde/shadowsocks,jcpwfloi/shadowsocks,cnbeining/shadowsocks,trentswd/shadowsock
s,ninjadq/shadowsocks,whilu/shadowsocks,oukaitou/shadowsocks,shenfei/shadowsocks,kafuuchino/shadowsocks,amsuny/shadowsocks,JexCheng/shadowsocks,x54621/shadowsocks,irmowan/shadowsocks,hzfywkj/shadowsocks,wshenx/shadowsocks,aisk/shadowsocks,kang000feng/shadowsocks,chaonet/shadowsocks,weddge/shadowsocks,zhfish/shadowsocks,Methol/shadowsocks,zackmore/shadowsocks,faynwol/shadowsocks,h404bi/shadowsocks,lengyue524/shadowsocks,albertgh/shadowsocks,ligenlive/shadowsocks,magic282/shadowsocks,kitakamiooi/shadowsocks,elfland/Utils-shadowsocks,jaredbonobos/shadowsocks,sunflyer/shadowsocks,monycn/shadowsocks,codeworm96/shadowsocks,blacklin/shadowsocks,rongdede/shadowsocks,Mucid/shadowsocks,synacks/shadowsocks,hjhjw1991/shadowsocks,Anebrithien/ss-last,xlaser/shadowsocks,Lucnsy/shadowsocks,falseen/shadowsocks_analysis,adeindie/shadowsocks,DannyVim/shadowsocks,nan86150/shadowsocks,happyforhappy/shadowsocks,cynhard/shadowsocks,QthCN/shadowsocks,mobdim/shadowsocks,cnbeining/shadowsocks,wesley1001/shadowsocks,shell909090/shadowsocks,AirScr/shadowsocks,vitamincpp/shadowsocks,xieshenglin/shadowsocks,Xiaolang0/shadowsocks,Ineluctable/shadowsocks,lzz5235/shadowsocks,hxddh/shadowsocks,chaonet/shadowsocks,PaytonShaw/shadowsocks1,gitHubOffical/shadowsocks_analysis,leizongmin/shadowsocks,guyskk/shadowsocks-1,hangim/shadowsocks,flrngel/shadowsocks,hotoo/shadowsocks,bgame/shadowsocks,sdingex/shadowsocks,ayumilove/shadowsocks,scutdk/shadowsocks-1,copooper/shadowsocks,licess/shadowsocks,QGB/shadowsocks,sdingex/shadowsocks,skyling/shadowsocks,EvilCult/shadowsocks,mawentao007/s_copy_s,panzer13/shadowsocks,Dietr1ch/shadowsocks,tfhavingfun/shadowsocks,Minghi/shadowsocks,hilllinux/shadowsocks,efateva/shadowsocks,jswxdzc/shadowsocks,lululau/shadowsocks,meowlab/shadowsocks-comment,QthCN/shadowsocks,paulzhousz/shadowsocks,Neverly/shadowsocks,eintr/shadowsocks,t3573393/shadowsocks,hzqim/mengskysama,ellasafy/shadowsocks,mxd1971/-ssh-root-2a02-4780-1-1--1-e01,La-Volpe/shadowsocks,drakeet/shadowsocks,yu199301
23/shadowsocks-shadowsocks,bingwen/shadowsocks,crccw/shadowsocks,qin-nz/shadowsocks,Velkan/shadowsocks-2,crawlersick/shadowsocks,pihao/shadowsocks,udo-tech-team/shadowsocks-4,wciq1208/shadowsocks,wingerted/shadowsocks,easton402/Disguise-Shadowsocks,firestar/shadowsocks,wesley1001/shadowsocks_analysis,geekdada/shadowsocks,bwinhwang/shadowsocks,Wizmann/shadowsocks,zzvv/shadowsocks,lzwjava/shadowsocks,zhengnanlee/shadowsocks,hilllinux/shadowsocks,bradparks/shadowsocks,begeeben/shadowsocks,miaoski/shadowsocks,diors-t/shadowsocks,RockerFlower/shadowsocks,Rand01ph/shadowsocks,guiyubj/shadowsocks,BuGoNee/shadowsocks,chenfengyuan/shadowsocks,willxiang/shadowsocks,nickyfoto/shadowsocks,androiddream/shadowsocks,dwhdorg/shadowsocks,jasonslyvia/shadowsocks,egrcc/shadowsocks_analysis,qingfeng/shadowsocks,jin5354/shadowsocks,momo173/Shadowsocks,koiszzz/shadowsocks,fo0nikens/shadowsocks,oglops/shadowsocks,DaiYue/shadowsocks,haoerloveyou/shadowsocks,030io/shadowsocks,goodjob1114/shadowsocks,lowtalker/shadowsocks,gogoout/shadowsocks,nfjinjing/shadowsocks,cielpy/shadowsocks,ragnraok/shadowsocks,terrychenism/shadowsocks,geligaoli/shadowsocks,DavidXZK/shadowsocks-1,luo3555/shadowsocks-bak,Parareru/shadowsocks,idf-archive/shadowsocks,hitrust/shadowsocksbak,gitchs/shadowsocks,wesley1001/shadowsocks,arakashic/shadowsocks,naruto900814/shadowsocks,yaoliyc/shadowsocks,lasting-yang/shadowsocks,test1845/shadowsocks,gechdcb/shadowsocks,evshiron/shadowsocks,opapa/shadowsocks,jeremiahyan/shadowsocks,skykiny/shadowsocks,sunng87/shadowsocks,purplewall1206/shadowsocks,DropFan/shadowsocks,guojiubo/shadowsocks,xubeiyan/shadowsocks,Nappp/shadowsocks,Voidly/shadowsocks,sharpwhisper/shadowsocks,yasinn/shadowsocks,crazygold/shadowsocks-rm,witcxc/shadowsocks,abbshr/shadowsocks,ovear/shadowsocks,ethanluoyc/shadowsocks,Tinysymphony/shadowsocks,janstk/shadowsocks,qingfeng/shadowsocks,WencongXiao/shadowsocks,yxxyun/shadowsocks,mobdim/shadowsocks,skypigr/shadowsocks,fuxiuyin/shadowsocks,kimiWu/shadowsocks,7demo
/shadowsocks,jfojfo/shadowsocks,jeffcao/shadowsocks,wshenx/shadowsocks,liuquansheng47/shadowsocks,aim16/shadowsocks0,gitmithy/shadowsocks,zhujiangang/shadowsocks,shadowsocksr-backup/shadowsocksr,lucienevans/shadowsocks,simple88/shadowsocks,noe132/shadowsocks,googlehim/shadowsocks,fusijie/shadowsocks,MayQ/shadowsocks,PeterDing/shadowsocks,wangadong/shadowsocks,Tonyce/shadowsocks,fuxiuyin/shadowsocks,panzer13/shadowsocks,hetong007/shadowsocks,Peterpig/shadowsocks,hetong007/shadowsocks,ShionRyuu/shadowsocks,phunterlau/shadowsocks,venj/shadowsocks,jamesmarva/shadowsocks,liushuaikobe/shadowsocks,abba18/shadowsocks,funtion/shadowsocks-1,xmoeproject/shadowsocks,John-Lin/shadowsocks,zyingp/shadowsocks,righthandabacus/shadowsocks,avastms/shadowsocks,CasparLi/shadowsocks,diors-t/shadowsocks,ushuz/shadowsocks,Silianbo/shadowsocks,WinterXMQ/shadowsocks,miter/shadowsocks,JoeCao/shadowsocks,lazybios/shadowsocks,jackjm/shadowsocks,blacklin/shadowsocks,wangadong/shadowsocks,NitroXenon/shadowsocks,fengyie007/shadowsocks,langyapojun/shadowsocks,instmy/shadowsocks,DropFan/shadowsocks-1,gclove/shadowsocks,RobberPhex/shadowsocks,zackmore/shadowsocks,snow9312/shadowsocks,supertanglang/shadowsocks,BWITS/shadowsocks,ragnraok/shadowsocks,youkochan/shadowsocks-analysis,warjiang/shadowsocks,xiangpeng/shadowsocks,pobizhe/shadowsocks,cdsama/shadowsocks,hqlyz/shadowsocks,pluspku/ss,wangzhe0417/shadowsocks,zpzgone/shadowsocks-1,it-andy-hou/shadowsocks,Kenshinhu/shadowsocks,baalchina/shadowsocks,kyokuki/shadowsocks,54Pany/shadowsocks,CatTail/shadowsocks,BMan-L/shadowsocks,alphabity/shadowsocks,SHAU-LOK/shadowsocks,Spacecup/shadowsocks,yaoliyc/shadowsocks,WANG-lp/shadowsocks,d4smart/shadowsocks,lowtalker/shadowsocks,jin5354/shadowsocks,TimonPeng/shadowsocks,zjsxzy/shadowsocks,skypigr/shadowsocks,aq2004723/shadowsocks,nnjpp/shadowsocks,todaylover/shadowsocks,hugoxia/shadowsocks,MarshallChen/shadowsocks,gongshw/shadowsocks,heimonsy/shadowsocks,bybyby/shadowsocks,baalchina/shadowsocks,stallman-cui/sha
dowsocks_analysis,irmowan/shadowsocks,loulijun/shadowsocks,yuluo-ding/shadowsocks,followmaster/shadowsocks_analysis,denglj/shadowsocks,wan-qy/shadowsocks,IndeedPlusPlus/shadowsocks,Coxxs/shadowsocks,antinucleon/shadowsocks,zimhy/shadowsocks_analysis,akar1nchan/shadowsocks,egrcc/shadowsocks,Ivicel/shadowsocks,xidui/shadowsocks,Anfernee1/shadowsocks,GeassDB/shadowsocks,fhaoquan/shadowsocks,zhangyubaka/Shadowsocks,ultraseven/shadowsocks,LeeRisk/shadowsocks,zxjcarrot/shadowsocks,CatMe0w/shadowsocks,wrwcmaster/Shadowsocks,windygu/shadowsocks,forfuns/shadowsocks,hanx11/shadowsocks,hymRedemption/shadowsocks,JoshOY/shadowsocks,banre123/shadowsocks,hi54yt/shadowsocks,Alexoner/shadowsocks,hustnn/shadowsocks,androiddream/shadowsocks,stkevintan/shadowsocks,miyouzi/ch-docker-ssr,1ookup/shadowsocks,darknessomi/shadowsocks,zhenglaizhang/shadowsocks,Icenowy/shadowsocks,astonfu/shadowsocks,cmzz/shadowsocks,otoil/shadowsocks,shaneZhang/shadowsocks,pjq/shadowsocks,ushuz/shadowsocks,banbanchs/shadowsocks,zjsxzy/shadowsocks,CasparLi/shadowsocks,Vayn/shadowsocks,deliangyang/shadowsocks-1,wandergis/shadowsocks,DuPupu/shadowsocks,eehello/shadowsocks,iq72/shadowsocks,CountMurphy/shadowsocks,LeeRisk/shadowsocks,SwordYork/shadowsocks,thanatoskira/shadowsocks,otoil/shadowsocks,javenfeng/shadowsocks,mthli/shadowsocks_analysis,rocxer/shadowsocks,snowmap/shadowsocks,Linusp/shadowsocks,peterzky/shadowsocks,pfctgeorge/shadowsocks,lancetw/shadowsocks,csuideal/shadowsocks,nfjinjing/shadowsocks,fusijie/shadowsocks,suntopo/shadowsocks,Tonyce/shadowsocks,smartdj/shadowsocks,icetoggle/shadowsocks,funtion/shadowsocks-1,zxteloiv/shadowsocks,bingwen/shadowsocks,jiang42/shadowsocks,itplanes/shadowsocks,kid551/shadowsocks,dengit/shadowsocks,liuyix/shadowsocks,beizhiying7/shadowsocks,chenhao890411/shadowsocks,caoqitong826/shadowsocks,zhufenggood/shadowsocks,ianisme/shadowsocks,hashmaparraylist/shadowsocks,Anebrithien/ss-last,jiachenwang/shadowsocks,trotyl/shadowsocks,qinalei/shadowsocks2,Methol/shadowsocks,Ind
eedPlusPlus/shadowsocks,sunflyer/shadowsocks,messense/shadowsocks,jqk6/shadowsocks,lepture/shadowsocks,aisk/shadowsocks,AmberWhiteSky/shadowsocks,danny200309/shadowsocks,gahoo/shadowsocks,VinceZK/shadowsocks,gy6221/shadowsocks,lovely3x/shadowsocks,mike-BV/shadowsocks,lixingcong/shadowsocks,xjz19901211/shadowsocks,Wizmann/shadowsocks,weisk/shadowsocks,zzliujianbo/shadowsocks,DreaminginCodeZH/shadowsocks,zxteloiv/shadowsocks,Baoyx007/shadowsocks,licheedev/shadowsocks,xwv/shadowsocks,cynhard/shadowsocks,JianfuLi/shadowsocks,kookxiang/shadowsocks,smartdj/shadowsocks,wswplay/shadowsocks,yuxiang-zhou/shadowsocks,lexchou/shadowsocks,Minghi/shadowsocks,LeiZeng/shadowsocks,ahvonenj/shadowsocks,shuzi0/ssr,leotso/shadowsocks,codeworm96/shadowsocks,waytai/shadowsocks,haleylu/shadowsocks,JoeCao/shadowsocks,PaytonShaw/shadowsocks1,javenfeng/shadowsocks,test1845/shadowsocks,m1a0yu3/shadowsocks,congmomo/shadowsocks,yuchuanzhen/shadowsocks,zimhy/shadowsocks_analysis,ninjadq/shadowsocks,Parareru/shadowsocks,smpetrey/shadowsocks,jzmq/shadowsocks,Node-X/shadowsocks,skyline75489/shadowsocks,renyinew/shadowsocks,Coxxs/shadowsocks,lecason/shadowsocks,copooper/shadowsocks,bravo-t/shadowsocks,troyliu0105/shadowsocks,heatonnobu/shadowsocks,kigawas/shadowsocks,dayed/shadowsocks,hjz15k6/shadowsocks,flyingghost/shadowsocks,li4li5li6/shadowsocks-1,mthli/shadowsocks_analysis,kingname/shadowsocks,phisiart/shadowsocks,wumch/shadowsocks,wangbo5759/shadowsocks,9hao/shadowsocks,NichoZhang/shadowsocks,XakepSDK/shadowsocks,halfcrazy/shadowsocks,TigerHix/shadowsocks,codevlabs/shadowsocks,fdzh/shadowsocks,OrangeJuicePlease/shadowsocks,JLHwung/shadowsocks-org,chenrenyi/shadowsocks,hi54yt/shadowsocks,Long-live-shadowsocks/shadowsocks,Nymphet/shadowsocks,Lessica/shadowsocks,forfuns/shadowsocks | shadowsocks/encrypt_rc4_md5.py | shadowsocks/encrypt_rc4_md5.py | #!/usr/bin/env python
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import hashlib
def create_cipher(alg, key, iv, op, key_as_bytes=0, d=None, salt=None,
i=1, padding=1):
md5 = hashlib.md5()
md5.update(key)
md5.update(iv)
rc4_key = md5.digest()
import M2Crypto.EVP
return M2Crypto.EVP.Cipher('rc4', rc4_key, '', op, key_as_bytes=0,
d='md5', salt=None, i=1, padding=1)
| #!/usr/bin/env python
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import hashlib
def create_cipher(alg, key, iv, op, key_as_bytes=0, d=None, salt=None,
i=1, padding=1):
md5 = hashlib.md5()
md5.update(key)
md5.update(iv)
rc4_key = md5.digest()
print len(rc4_key)
import M2Crypto.EVP
return M2Crypto.EVP.Cipher('rc4', rc4_key, '', op, key_as_bytes=0,
d='md5', salt=None, i=1, padding=1)
| apache-2.0 | Python |
2a662d04caf68629fe357b45450e5be3f950f03e | remove typo | gopythongo/gopythongo,gopythongo/gopythongo | src/py/gopythongo/shared/aptly_base.py | src/py/gopythongo/shared/aptly_base.py | # -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from typing import Any, List
import configargparse
import gopythongo.shared.aptly_args as _aptly_args
from gopythongo.stores import BaseStore
from gopythongo.utils import ErrorMessage, highlight
from gopythongo.utils.debversion import DebianVersion
from gopythongo.versioners import BaseVersioner
class AptlyBaseVersioner(BaseVersioner):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def add_args(self, parser: configargparse.ArgumentParser) -> None:
_aptly_args.add_shared_args(parser)
def validate_args(self, args: configargparse.Namespace) -> None:
_aptly_args.validate_shared_args(args)
def query_repo_versions(self, query: str, args: configargparse.Namespace, *,
allow_fallback_version: bool=False) -> List[DebianVersion]:
raise NotImplementedError("Each subclass of AptlyBaseVersioner must implement query_repo_versions")
def read(self, args: configargparse.Namespace) -> str:
versions = self.query_repo_versions(args.aptly_query, args, allow_fallback_version=True)
if not versions:
raise ErrorMessage("The Aptly Versioner was unable to find a base version using the specified query '%s'. "
"If the query is correct, you should specify a fallback version using %s." %
(highlight(args.aptly_query), highlight("--fallback-version")))
return str(versions[-1])
class AptlyBaseStore(BaseStore):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def add_args(self, parser: configargparse.ArgumentParser) -> None:
_aptly_args.add_shared_args(parser)
def validate_args(self, args: configargparse.Namespace) -> None:
_aptly_args.validate_shared_args(args)
| # -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from typing import Any, List
import configargparse
import gopythongo.shared.aptly_args as _aptly_args
from gopythongo.stores import BaseStore
from gopythongo.utils import ErrorMessage, highlight
from gopythongo.utils.debversion import DebianVersion
from gopythongo.versioners import BaseVersioner
class AptlyBaseVersioner(BaseVersioner):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
def add_args(self, parser: configargparse.ArgumentParser) -> None:
_aptly_args.add_shared_args(parser)
def validate_args(self, args: configargparse.Namespace) -> None:
_aptly_args.validate_shared_args(args)
def query_repo_versions(self, query: str, args: configargparse.Namespace, *,
allow_fallback_version: bool=False) -> List[DebianVersion]:
raise NotImplementedError("Each subclass of AptlyBaseVersioner must implement query_repo_versions")
def read(self, args: configargparse.Namespace) -> str:
versions = self.query_repo_versions(args.aptly_query, args, allow_fallback_version=True)
if not versions:
raise ErrorMessage("The Aptly Versioner was unable to find a base version using the specified query '%s'. "
"If the query is correct, you should specify a fallback version using %s." %
(highlight(args.aptly_query), highlight("--fallback-version")))
return str(versions[-1])
class AptlyBaseStore(BaseStore):
def __init__(self, *args: Any, **kwargs: Any) -> None:
super()..__init__(*args, **kwargs)
def add_args(self, parser: configargparse.ArgumentParser) -> None:
_aptly_args.add_shared_args(parser)
def validate_args(self, args: configargparse.Namespace) -> None:
_aptly_args.validate_shared_args(args)
| mpl-2.0 | Python |
54e618db2ef5fd226c5514dd15d8f792fd0d8320 | test org role as child for team 400s | snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx | awx/main/tests/unit/api/test_views.py | awx/main/tests/unit/api/test_views.py | import mock
import pytest
from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
from awx.api.views import (
ApiV1RootView,
TeamRolesList,
)
from awx.main.models import (
User,
)
@pytest.fixture
def mock_response_new(mocker):
m = mocker.patch('awx.api.views.Response.__new__')
m.return_value = m
return m
class TestApiV1RootView:
def test_get_endpoints(self, mocker, mock_response_new):
endpoints = [
'authtoken',
'ping',
'config',
#'settings',
'me',
'dashboard',
'organizations',
'users',
'projects',
'teams',
'credentials',
'inventory',
'inventory_scripts',
'inventory_sources',
'groups',
'hosts',
'job_templates',
'jobs',
'ad_hoc_commands',
'system_job_templates',
'system_jobs',
'schedules',
'notification_templates',
'notifications',
'labels',
'unified_job_templates',
'unified_jobs',
'activity_stream',
]
view = ApiV1RootView()
ret = view.get(mocker.MagicMock())
assert ret == mock_response_new
data_arg = mock_response_new.mock_calls[0][1][1]
for endpoint in endpoints:
assert endpoint in data_arg
@pytest.mark.parametrize("url", ["/team/1/roles", "/role/1/teams"])
def test_team_roles_list_post_org_roles(url):
with mock.patch('awx.api.views.Role.objects.get', create=True) as role_get, \
mock.patch('awx.api.views.ContentType.objects.get_for_model', create=True) as ct_get:
role_mock = mock.MagicMock()
role_mock.content_type = 1
role_get.return_value = role_mock
ct_get.return_value = 1
factory = APIRequestFactory()
view = TeamRolesList.as_view()
request = factory.post(url, {'id':1}, format="json")
force_authenticate(request, User(username="root", is_superuser=True))
response = view(request)
response.render()
assert response.status_code == 400
assert 'cannot assign' in response.content
| import pytest
from awx.api.views import (
ApiV1RootView,
)
@pytest.fixture
def mock_response_new(mocker):
m = mocker.patch('awx.api.views.Response.__new__')
m.return_value = m
return m
class TestApiV1RootView:
def test_get_endpoints(self, mocker, mock_response_new):
endpoints = [
'authtoken',
'ping',
'config',
#'settings',
'me',
'dashboard',
'organizations',
'users',
'projects',
'teams',
'credentials',
'inventory',
'inventory_scripts',
'inventory_sources',
'groups',
'hosts',
'job_templates',
'jobs',
'ad_hoc_commands',
'system_job_templates',
'system_jobs',
'schedules',
'notification_templates',
'notifications',
'labels',
'unified_job_templates',
'unified_jobs',
'activity_stream',
]
view = ApiV1RootView()
ret = view.get(mocker.MagicMock())
assert ret == mock_response_new
data_arg = mock_response_new.mock_calls[0][1][1]
for endpoint in endpoints:
assert endpoint in data_arg
| apache-2.0 | Python |
26526e6860ad28588c77cd9c9ea376a636da53e8 | debug for method name. | rockers7414/xmusic-crawler,rockers7414/xmusic | daemon/database/artistrepo.py | daemon/database/artistrepo.py | import logging
from decorator.injectdbsession import inject_db_session
from .entity import Artist
from sqlalchemy.orm import lazyload
@inject_db_session()
class ArtistRepo:
logger = logging.getLogger(__name__)
def get_artists_by_page(self, index, offset):
query = self._session.query(Artist).options(lazyload("albums")).order_by(
Artist.name).limit(offset).offset((index - 1) * offset)
return query.all()
def get_artists_list(self):
query = self._session.query(Artist).options(
lazyload("albums")).order_by(Artist.name)
return query.all()
def get_artist(self, artist_name):
query = self._session.query(Artist).options(lazyload("albums")).filter(
Artist.name == artist_name)
return query.all()
def save(self, artist):
try:
self._session.add(artist)
self._session.flush()
self._session.commit()
except:
self._session.rollback()
raise
| import logging
from decorator.injectdbsession import inject_db_session
from .entity import Artist
from sqlalchemy.orm import lazyload
@inject_db_session()
class ArtistRepo:
logger = logging.getLogger(__name__)
def get_artists_by_pagecpp(self, index, offset):
query = self._session.query(Artist).options(lazyload("albums")).order_by(
Artist.name).limit(offset).offset((index - 1) * offset)
return query.all()
def get_artists_list(self):
query = self._session.query(Artist).options(
lazyload("albums")).order_by(Artist.name)
return query.all()
def get_artist(self, artist_name):
query = self._session.query(Artist).options(lazyload("albums")).filter(
Artist.name == artist_name)
return query.all()
def save(self, artist):
try:
self._session.add(artist)
self._session.flush()
self._session.commit()
except:
self._session.rollback()
raise
| apache-2.0 | Python |
ae1642842fb945013741d576022c85eba49033d8 | update Croydon import script for parl.2017-06-08 | DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_croydon.py | polling_stations/apps/data_collection/management/commands/import_croydon.py | from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter
class Command(BaseXpressDCCsvInconsistentPostcodesImporter):
council_id = 'E09000008'
addresses_name = 'parl.2017-06-08/Version 2/Democracy_Club__08June2017 (16).tsv'
stations_name = 'parl.2017-06-08/Version 2/Democracy_Club__08June2017 (16).tsv'
elections = ['parl.2017-06-08']
csv_delimiter = '\t'
def station_record_to_dict(self, record):
"""
File supplied contained obviously inaccurate point
remove it and fall back to geocoding
"""
if record.polling_place_id == '8700':
record = record._replace(polling_place_easting = '0')
record = record._replace(polling_place_northing = '0')
return super().station_record_to_dict(record)
| """
Import Harrow
"""
from time import sleep
from django.contrib.gis.geos import Point
from data_collection.management.commands import BaseCsvStationsCsvAddressesImporter
from data_finder.helpers import geocode, geocode_point_only, PostcodeError
from addressbase.models import Address
class Command(BaseCsvStationsCsvAddressesImporter):
"""
Imports the Polling Station data from Harrow Council
"""
private = True
council_id = 'E09000015'
addresses_name = 'addresses.csv'
stations_name = 'stations.csv'
csv_delimiter = ','
elections = [
'ref.2016-06-23'
]
def station_record_to_dict(self, record):
address = record.situation_of_polling_station
while "\n\n" in address:
address = address.replace("\n\n", "\n").strip()
location = None
location_data = None
if record.postcode_if_available:
try:
postcode = record.postcode_if_available.strip()
postcode = postcode.replace('\n', '')
if len(postcode) > 5:
location_data = geocode_point_only(postcode)
except PostcodeError:
pass
if location_data:
location = Point(
location_data['wgs84_lon'],
location_data['wgs84_lat'],
srid=4326)
desc = record.description_of_persons_entitled_to_vote
district = desc.split('-')[0].strip()
return {
'internal_council_id': district,
'polling_district_id': district,
'postcode' : None,
'address' : address,
'location' : location
}
def address_record_to_dict(self, record):
address = record.address
return {
'address' : address,
'postcode' : record.postcode.strip(),
'polling_station_id': record.district
}
| bsd-3-clause | Python |
42d5553878673dcbc45e7146164c86f418312152 | Add models to Admin | Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters | tools/admin.py | tools/admin.py | from django.contrib import admin
from .models import Character,CharacterNotes,CharacterNotesAnswer
# Register your models here.
@admin.register(Character)
class CharacterAdmin(admin.ModelAdmin):
list_display = ('name','owner',)
readonly_fields = ('owner',)
@admin.register(CharacterNotes)
class CharacterNotesAdmin(admin.ModelAdmin):
list_display = ('question','is_active',)
@admin.register(CharacterNotesAnswer)
class CharacterNotesAnswerAdmin(admin.ModelAdmin):
list_display = ('character','question','answer',)
readonly_fields = ('character','question','answer',)
| from django.contrib import admin
# Register your models here.
| mit | Python |
29b8b4604460dc54331f0f10a93a6bb6803b3af3 | fix example of plot_RGB_colourspaces_gamuts | colour-science/colour | colour/examples/plotting/examples_volume_plots.py | colour/examples/plotting/examples_volume_plots.py | # -*- coding: utf-8 -*-
"""
Showcases colour models volume and gamut plotting examples.
"""
import numpy as np
from colour.plotting import (plot_RGB_colourspaces_gamuts, plot_RGB_scatter,
colour_style)
from colour.utilities import message_box
message_box('Colour Models Volume and Gamut Plots')
colour_style()
message_box(('Plotting "ITU-R BT.709" RGB colourspace volume in "CIE xyY" '
'colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', ), reference_colourspace='CIE xyY')
print('\n')
message_box(('Comparing "ITU-R BT.709" and "ACEScg" RGB colourspaces volume '
'in "CIE L*a*b*" colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', 'ACEScg'),
reference_colourspace='CIE Lab',
face_colours=(None, (0.25, 0.25, 0.25)),
edge_colours=(None, (0.25, 0.25, 0.25)),
edge_alpha=(1.0, 0.1),
face_alpha=(1.0, 0.0))
print('\n')
message_box(('Plotting "ACEScg" colourspaces values in "CIE L*a*b*" '
'colourspace.'))
RGB = np.random.random((32, 32, 3))
plot_RGB_scatter(
RGB,
'ACEScg',
reference_colourspace='CIE Lab',
colourspaces=('ACEScg', 'ITU-R BT.709'),
face_colours=((0.25, 0.25, 0.25), None),
edge_colours=((0.25, 0.25, 0.25), None),
edge_alpha=(0.1, 0.5),
face_alpha=(0.1, 0.5),
grid_face_colours=(0.1, 0.1, 0.1),
grid_edge_colours=(0.1, 0.1, 0.1),
grid_edge_alpha=0.5,
grid_face_alpha=0.1)
| # -*- coding: utf-8 -*-
"""
Showcases colour models volume and gamut plotting examples.
"""
import numpy as np
from colour.plotting import (plot_RGB_colourspaces_gamuts, plot_RGB_scatter,
colour_style)
from colour.utilities import message_box
message_box('Colour Models Volume and Gamut Plots')
colour_style()
message_box(('Plotting "ITU-R BT.709" RGB colourspace volume in "CIE xyY" '
'colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', ), reference_colourspace='CIE xyY')
print('\n')
message_box(('Comparing "ITU-R BT.709" and "ACEScg" RGB colourspaces volume '
'in "CIE L*a*b*" colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', 'ACEScg'),
reference_colourspace='CIE Lab',
style={
'face_colours': (None, (0.25, 0.25, 0.25)),
'edge_colours': (None, (0.25, 0.25, 0.25)),
'edge_alpha': (1.0, 0.1),
'face_alpha': (1.0, 0.0)
})
print('\n')
message_box(('Plotting "ACEScg" colourspaces values in "CIE L*a*b*" '
'colourspace.'))
RGB = np.random.random((32, 32, 3))
plot_RGB_scatter(
RGB,
'ACEScg',
reference_colourspace='CIE Lab',
colourspaces=('ACEScg', 'ITU-R BT.709'),
face_colours=((0.25, 0.25, 0.25), None),
edge_colours=((0.25, 0.25, 0.25), None),
edge_alpha=(0.1, 0.5),
face_alpha=(0.1, 0.5),
grid_face_colours=(0.1, 0.1, 0.1),
grid_edge_colours=(0.1, 0.1, 0.1),
grid_edge_alpha=0.5,
grid_face_alpha=0.1)
| bsd-3-clause | Python |
11f933e986dd9e2c62b852ca38a37f959c10145e | Fix FindDepotToolsInPath not working in some cases | shaochangbin/crosswalk,rakuco/crosswalk,RafuCater/crosswalk,DonnaWuDongxia/crosswalk,tomatell/crosswalk,mrunalk/crosswalk,jpike88/crosswalk,dreamsxin/crosswalk,stonegithubs/crosswalk,weiyirong/crosswalk-1,siovene/crosswalk,baleboy/crosswalk,crosswalk-project/crosswalk,fujunwei/crosswalk,ZhengXinCN/crosswalk,minggangw/crosswalk,axinging/crosswalk,RafuCater/crosswalk,darktears/crosswalk,qjia7/crosswalk,Shouqun/crosswalk,heke123/crosswalk,axinging/crosswalk,marcuspridham/crosswalk,marcuspridham/crosswalk,axinging/crosswalk,baleboy/crosswalk,marcuspridham/crosswalk,XiaosongWei/crosswalk,PeterWangIntel/crosswalk,dreamsxin/crosswalk,rakuco/crosswalk,xzhan96/crosswalk,baleboy/crosswalk,ZhengXinCN/crosswalk,baleboy/crosswalk,mrunalk/crosswalk,jondong/crosswalk,pozdnyakov/crosswalk,bestwpw/crosswalk,seanlong/crosswalk,huningxin/crosswalk,marcuspridham/crosswalk,minggangw/crosswalk,Pluto-tv/crosswalk,mrunalk/crosswalk,seanlong/crosswalk,amaniak/crosswalk,bestwpw/crosswalk,lincsoon/crosswalk,mrunalk/crosswalk,Shouqun/crosswalk,minggangw/crosswalk,fujunwei/crosswalk,lincsoon/crosswalk,lincsoon/crosswalk,seanlong/crosswalk,fujunwei/crosswalk,zeropool/crosswalk,seanlong/crosswalk,XiaosongWei/crosswalk,heke123/crosswalk,jpike88/crosswalk,stonegithubs/crosswalk,kurli/crosswalk,baleboy/crosswalk,amaniak/crosswalk,weiyirong/crosswalk-1,rakuco/crosswalk,siovene/crosswalk,jondwillis/crosswalk,xzhan96/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,rakuco/crosswalk,hgl888/crosswalk-efl,chinakids/crosswalk,jpike88/crosswalk,DonnaWuDongxia/crosswalk,PeterWangIntel/crosswalk,darktears/crosswalk,qjia7/crosswalk,jondwillis/crosswalk,Bysmyyr/crosswalk,shaochangbin/crosswalk,PeterWangIntel/crosswalk,tedshroyer/crosswalk,chinakids/crosswalk,chuan9/crosswalk,alex-zhang/crosswalk,crosswalk-project/crosswalk-efl,dreamsxin/crosswalk,fujunwei/crosswalk,zliang7/crosswalk,crosswalk-project/crosswalk,fujunwei
/crosswalk,wuhengzhi/crosswalk,hgl888/crosswalk-efl,baleboy/crosswalk,weiyirong/crosswalk-1,chuan9/crosswalk,Bysmyyr/crosswalk,qjia7/crosswalk,pk-sam/crosswalk,axinging/crosswalk,huningxin/crosswalk,stonegithubs/crosswalk,tedshroyer/crosswalk,darktears/crosswalk,amaniak/crosswalk,myroot/crosswalk,tomatell/crosswalk,myroot/crosswalk,heke123/crosswalk,Pluto-tv/crosswalk,leonhsl/crosswalk,TheDirtyCalvinist/spacewalk,minggangw/crosswalk,zliang7/crosswalk,jpike88/crosswalk,hgl888/crosswalk-efl,PeterWangIntel/crosswalk,mrunalk/crosswalk,zeropool/crosswalk,heke123/crosswalk,Shouqun/crosswalk,pk-sam/crosswalk,weiyirong/crosswalk-1,baleboy/crosswalk,rakuco/crosswalk,crosswalk-project/crosswalk,zeropool/crosswalk,stonegithubs/crosswalk,jondong/crosswalk,seanlong/crosswalk,chuan9/crosswalk,dreamsxin/crosswalk,kurli/crosswalk,PeterWangIntel/crosswalk,pk-sam/crosswalk,tomatell/crosswalk,pk-sam/crosswalk,pozdnyakov/crosswalk,pk-sam/crosswalk,myroot/crosswalk,xzhan96/crosswalk,shaochangbin/crosswalk,Shouqun/crosswalk,jondwillis/crosswalk,RafuCater/crosswalk,huningxin/crosswalk,hgl888/crosswalk,kurli/crosswalk,XiaosongWei/crosswalk,hgl888/crosswalk,rakuco/crosswalk,shaochangbin/crosswalk,tedshroyer/crosswalk,dreamsxin/crosswalk,amaniak/crosswalk,crosswalk-project/crosswalk,DonnaWuDongxia/crosswalk,zeropool/crosswalk,kurli/crosswalk,siovene/crosswalk,darktears/crosswalk,zliang7/crosswalk,xzhan96/crosswalk,alex-zhang/crosswalk,tomatell/crosswalk,huningxin/crosswalk,TheDirtyCalvinist/spacewalk,xzhan96/crosswalk,XiaosongWei/crosswalk,marcuspridham/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,bestwpw/crosswalk,weiyirong/crosswalk-1,alex-zhang/crosswalk,Bysmyyr/crosswalk,zeropool/crosswalk,chinakids/crosswalk,RafuCater/crosswalk,siovene/crosswalk,XiaosongWei/crosswalk,Pluto-tv/crosswalk,wuhengzhi/crosswalk,wuhengzhi/crosswalk,weiyirong/crosswalk-1,dreamsxin/crosswalk,huningxin/crosswalk,bestwpw/crosswalk,RafuCater/crosswalk,wuhengzhi/crosswalk,Bysmyyr/crosswalk,wuhengzhi/crosswalk,fuju
nwei/crosswalk,chuan9/crosswalk,baleboy/crosswalk,Bysmyyr/crosswalk,TheDirtyCalvinist/spacewalk,tomatell/crosswalk,pk-sam/crosswalk,weiyirong/crosswalk-1,zliang7/crosswalk,stonegithubs/crosswalk,hgl888/crosswalk,pozdnyakov/crosswalk,xzhan96/crosswalk,minggangw/crosswalk,amaniak/crosswalk,siovene/crosswalk,xzhan96/crosswalk,tomatell/crosswalk,minggangw/crosswalk,lincsoon/crosswalk,Bysmyyr/crosswalk,leonhsl/crosswalk,zliang7/crosswalk,chuan9/crosswalk,heke123/crosswalk,Pluto-tv/crosswalk,fujunwei/crosswalk,lincsoon/crosswalk,RafuCater/crosswalk,tedshroyer/crosswalk,jpike88/crosswalk,bestwpw/crosswalk,leonhsl/crosswalk,qjia7/crosswalk,Pluto-tv/crosswalk,amaniak/crosswalk,hgl888/crosswalk,siovene/crosswalk,ZhengXinCN/crosswalk,ZhengXinCN/crosswalk,ZhengXinCN/crosswalk,stonegithubs/crosswalk,bestwpw/crosswalk,crosswalk-project/crosswalk,tomatell/crosswalk,Shouqun/crosswalk,jondong/crosswalk,rakuco/crosswalk,leonhsl/crosswalk,zeropool/crosswalk,marcuspridham/crosswalk,minggangw/crosswalk,jondwillis/crosswalk,axinging/crosswalk,qjia7/crosswalk,crosswalk-project/crosswalk,alex-zhang/crosswalk,jondwillis/crosswalk,chinakids/crosswalk,kurli/crosswalk,tedshroyer/crosswalk,marcuspridham/crosswalk,TheDirtyCalvinist/spacewalk,hgl888/crosswalk-efl,Pluto-tv/crosswalk,seanlong/crosswalk,crosswalk-project/crosswalk-efl,dreamsxin/crosswalk,jondong/crosswalk,alex-zhang/crosswalk,darktears/crosswalk,DonnaWuDongxia/crosswalk,PeterWangIntel/crosswalk,TheDirtyCalvinist/spacewalk,zeropool/crosswalk,Pluto-tv/crosswalk,zliang7/crosswalk,ZhengXinCN/crosswalk,DonnaWuDongxia/crosswalk,myroot/crosswalk,bestwpw/crosswalk,zliang7/crosswalk,hgl888/crosswalk,jondong/crosswalk,heke123/crosswalk,crosswalk-project/crosswalk,jpike88/crosswalk,DonnaWuDongxia/crosswalk,marcuspridham/crosswalk,heke123/crosswalk,shaochangbin/crosswalk,hgl888/crosswalk,axinging/crosswalk,ZhengXinCN/crosswalk,crosswalk-project/crosswalk,tedshroyer/crosswalk,mrunalk/crosswalk,siovene/crosswalk,amaniak/crosswalk,xzhan96/crosswal
k,kurli/crosswalk,crosswalk-project/crosswalk-efl,jondong/crosswalk,hgl888/crosswalk-efl,pozdnyakov/crosswalk,crosswalk-project/crosswalk-efl,huningxin/crosswalk,crosswalk-project/crosswalk-efl,darktears/crosswalk,chinakids/crosswalk,crosswalk-project/crosswalk-efl,qjia7/crosswalk,rakuco/crosswalk,heke123/crosswalk,axinging/crosswalk,leonhsl/crosswalk,alex-zhang/crosswalk,jondong/crosswalk,hgl888/crosswalk,tedshroyer/crosswalk,XiaosongWei/crosswalk,lincsoon/crosswalk,wuhengzhi/crosswalk,alex-zhang/crosswalk,PeterWangIntel/crosswalk,pozdnyakov/crosswalk,pozdnyakov/crosswalk,minggangw/crosswalk,leonhsl/crosswalk,hgl888/crosswalk,DonnaWuDongxia/crosswalk,crosswalk-project/crosswalk-efl,myroot/crosswalk,chuan9/crosswalk,stonegithubs/crosswalk,shaochangbin/crosswalk,chuan9/crosswalk,XiaosongWei/crosswalk,zliang7/crosswalk,hgl888/crosswalk-efl,RafuCater/crosswalk,hgl888/crosswalk-efl,darktears/crosswalk,jpike88/crosswalk,TheDirtyCalvinist/spacewalk,myroot/crosswalk,Shouqun/crosswalk,chinakids/crosswalk,darktears/crosswalk,jondong/crosswalk,jondwillis/crosswalk,pk-sam/crosswalk,leonhsl/crosswalk,jondwillis/crosswalk | tools/utils.py | tools/utils.py | #!/usr/bin/env python
''' This script provides utils for python scripts in cameo.
'''
import os
import sys
import subprocess
def TryAddDepotToolsToPythonPath():
  """Append the depot_tools directory found on PATH (if any) to sys.path."""
  location = FindDepotToolsInPath()
  if location is not None:
    sys.path.append(location)
def FindDepotToolsInPath():
  """Return the first PATH entry whose last component is 'depot_tools'.

  An entry with a trailing directory separator is normalized first so that
  e.g. '/opt/depot_tools/' still matches.  Returns None when nothing matches.
  """
  for entry in os.getenv('PATH').split(os.path.pathsep):
    if os.path.basename(entry) == '':
      # A trailing separator makes basename() empty; strip it off.
      entry = os.path.dirname(entry)
    if os.path.basename(entry) == 'depot_tools':
      return entry
  return None
def IsWindows():
  """True when running on native Windows or under Cygwin."""
  return sys.platform.startswith('win') or sys.platform == 'cygwin'


def IsLinux():
  """True when running on a Linux platform."""
  return sys.platform[:5] == 'linux'


def IsMac():
  """True when running on macOS."""
  return sys.platform[:6] == 'darwin'
def GitExe():
  """Return the git launcher name for the host platform.

  Windows hosts use the 'git.bat' wrapper shipped with depot_tools.
  """
  return 'git.bat' if IsWindows() else 'git'
def GetCommandOutput(command, cwd=None):
  """Run *command* (optionally in *cwd*) and return combined stdout/stderr.

  Raises Exception when the command exits with a non-zero status; the
  message contains the command line and its captured output.
  """
  child = subprocess.Popen(command, stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT, bufsize=1, cwd=cwd)
  captured, _ = child.communicate()
  if child.returncode:
    raise Exception('%s: %s' % (subprocess.list2cmdline(command), captured))
  return captured
| #!/usr/bin/env python
''' This script provides utils for python scripts in cameo.
'''
import os
import sys
import subprocess
def TryAddDepotToolsToPythonPath():
  """Append the depot_tools directory found on PATH (if any) to sys.path."""
  depot_tools = FindDepotToolsInPath()
  if depot_tools:
    sys.path.append(depot_tools)
def FindDepotToolsInPath():
  """Return the first PATH entry named 'depot_tools', or None.

  NOTE(review): an entry with a trailing directory separator has an empty
  basename and will not match here (e.g. '/opt/depot_tools/').
  """
  paths = os.getenv('PATH').split(os.path.pathsep)
  for path in paths:
    if os.path.basename(path) == 'depot_tools':
      return path
  return None
def IsWindows():
  """True on native Windows or under Cygwin."""
  return sys.platform == 'cygwin' or sys.platform.startswith('win')
def IsLinux():
  """True on a Linux platform."""
  return sys.platform.startswith('linux')
def IsMac():
  """True on macOS."""
  return sys.platform.startswith('darwin')
def GitExe():
  """Return the git launcher name ('git.bat' wrapper on Windows)."""
  if IsWindows():
    return 'git.bat'
  else:
    return 'git'
def GetCommandOutput(command, cwd=None):
  """Run *command* (optionally in *cwd*) and return combined stdout/stderr.

  Raises Exception when the command exits non-zero; the message includes
  the command line and its captured output.
  """
  proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT, bufsize=1,
                          cwd=cwd)
  output = proc.communicate()[0]
  result = proc.returncode
  if result:
    raise Exception('%s: %s' % (subprocess.list2cmdline(command), output))
  return output
| bsd-3-clause | Python |
2fe47e3b26b3fc888de4dba004cb04d31a2eb97d | Remove unnecessary getter | furbrain/tingbot-python | tingbot/platform_specific/sdl_wrapper.py | tingbot/platform_specific/sdl_wrapper.py | import pygame
import os
from ..graphics import Surface,color_map
button_callback = None
class Wrapper(Surface):
    """Desktop mock of the device: window with the bot artwork, a 320x240
    screen area, and four clickable buttons along the top edge."""

    def __init__(self):
        self.surface = pygame.display.set_mode((470, 353))
        bot_image = pygame.image.load(
            os.path.join(os.path.dirname(__file__), 'bot.png'))
        self.surface.blit(bot_image, (0, 0))
        # The simulated screen is a 320x240 region inside the artwork.
        self.screen = self.surface.subsurface((60, 40, 320, 240))
        # One Button per hardware button, left edges along the top row.
        self.buttons = [
            Button(self.surface.subsurface(left, 0, 22, 12), index)
            for index, left in enumerate((60, 100, 320, 360))
        ]
class Button(Surface):
    """One clickable on-screen button of the simulated device.

    Registers itself as a hit area so that mouse clicks landing inside its
    rectangle are routed to click().
    """
    def __init__(self,surface,number):
        # Imported lazily here (presumably to avoid a circular import with
        # ..input at module load time -- confirm).
        from ..input import hit_areas,HitArea
        self.number = number
        surface.fill(color_map['white'])
        #register our button as something clickable
        self.surface = surface
        hit_areas.append(HitArea(pygame.Rect(surface.get_abs_offset(),surface.get_size()),self.click))
    def click(self,xy,action):
        """Handle a press ('down') or release ('up') at position *xy*.

        Draws a simple pressed/released state and forwards the event to the
        module-level button_callback, if one is registered.
        """
        if action=='down':
            (w,h) = self.surface.get_size()
            # Shift the filled area down by 20% of the height to look pressed.
            self.surface.fill((0,0,0,0),(0,0,w,h*0.2))
            self.surface.fill(color_map['white'],(0,h*0.2,w,h))
            if button_callback:
                button_callback(self.number,'down')
        elif action=='up':
            self.surface.fill(color_map['white'])
            if button_callback:
                button_callback(self.number,'up')
def fixup_window():
    """Initialise pygame, build the simulator window, return its screen area."""
    pygame.init()
    return Wrapper().screen
def fixup_env():
    """No environment tweaks are required for the desktop simulator."""
    return None
def register_button_callback(callback):
    '''
    Register *callback* to be notified of simulated button events.

    callback(button_index, action)
    button_index is a zero-based index that identifies which button has been pressed
    action is either 'down', or 'up'.
    The callback might not be called on the main thread.
    '''
    # Stored in a module-level global; Button.click reads it on each event.
    global button_callback
    button_callback = callback
| import pygame
import os
from ..graphics import Surface,color_map
button_callback = None
class Wrapper(Surface):
def __init__(self):
self.surface = pygame.display.set_mode((470, 353))
background = pygame.image.load(os.path.join(os.path.dirname(__file__), 'bot.png'))
self.surface.blit(background,(0,0))
self.screen = self.surface.subsurface((60,40,320,240))
xPositions = (60, 100, 320, 360)
self.buttons = []
for x in range(4):
self.buttons.append(Button(self.surface.subsurface(xPositions[x],0,22,12),x))
def get_screen(self):
return self.screen
class Button(Surface):
def __init__(self,surface,number):
from ..input import hit_areas,HitArea
self.number = number
surface.fill(color_map['white'])
#register our button as something clickable
self.surface = surface
hit_areas.append(HitArea(pygame.Rect(surface.get_abs_offset(),surface.get_size()),self.click))
def click(self,xy,action):
if action=='down':
(w,h) = self.surface.get_size()
self.surface.fill((0,0,0,0),(0,0,w,h*0.2))
self.surface.fill(color_map['white'],(0,h*0.2,w,h))
if button_callback:
button_callback(self.number,'down')
elif action=='up':
self.surface.fill(color_map['white'])
if button_callback:
button_callback(self.number,'up')
def fixup_window():
pygame.init()
wrapper = Wrapper()
return wrapper.get_screen()
def fixup_env():
pass
def register_button_callback(callback):
'''
callback(button_index, action)
button_index is a zero-based index that identifies which button has been pressed
action is either 'down', or 'up'.
The callback might not be called on the main thread.
Currently only 'down' is implemented.
'''
global button_callback
button_callback = callback
| bsd-2-clause | Python |
caf4e1f1e6ff9eac2750920fc4c5b5596b97c3a6 | Update Input_csv_file.py | MounikaVanka/bme590hrm,MounikaVanka/bme590hrm | Code/Input_csv_file.py | Code/Input_csv_file.py |
def read_in(filename='ecg_data.csv'):
    """Read an ECG recording from a two-column CSV file.

    :param filename: path to a CSV file with a one-line header followed by
        ``time,voltage`` rows (defaults to ``ecg_data.csv`` for backward
        compatibility)
    :return: tuple ``(time, voltage)`` of 1-D numpy float arrays
    """
    import numpy
    # skip_header=1 drops the column-name row; genfromtxt parses the rest
    # directly into a float array, so no manual csv parsing is needed.
    dat = numpy.genfromtxt(filename, delimiter=',', skip_header=1)
    time = dat[:, 0]
    voltage = dat[:, 1]
    return time, voltage
def read_in():
import csv
import numpy
"""
Opens the ecg CSV file
:param readCSV: pointer to the file
:param times: the time from the signal
:param Voltage: the voltage from the signal
"""
# with open('ecg_data.csv') as csvfile:
# read_csv = csv.reader(csvfile, delimiter=',')
# header_line = next(read_csv)
#
# time = numpy.array([])
# voltage = numpy.array([])
#
#
# for row in read_csv:
# time1 = row[0]
# voltage1 = row[1]
#
# # time = numpy.append(time, time1)
# # voltage = numpy.append(voltage, voltage1)
dat = numpy.genfromtxt('ecg_data.csv', delimiter=',', skip_header=1, )
time = dat[:, 0]
voltage = dat[:, 1]
# for element in time:
# #parts=element.split(',')
# time = tim.astype('Float64')
# time.astype()
#
# #for element in voltage:
# #parts1=element.split(',')
# voltage = voltage.astype('Float64')
return time, voltage
| mit | Python |
079c86d124961242b89b692d5e57964babf8c45e | Add initial support for the --quiet option. Remove unused imports. Fix arguments passed to Qt : only the remaining arguments should be passed. | thomasdeniau/pyfauxfur,thomasdeniau/pyfauxfur | PyMorphogenesis.py | PyMorphogenesis.py | #!/usr/bin/env python
# encoding: utf-8
"""
PyMorphogenesis.py
Created by Olivier Le Floch on 2009-03-17.
Program written by Thomas Deniau and Olivier Le Floch.
Copyright (c) 2009. All rights reserved.
"""
import sys
from optparse import OptionParser
from PyQt4 import QtCore, QtGui
from MainWindow import MainWindow
from Controller import Controller
program = 'PyMorphogenesis'
version = 'Version 0.1, written by Thomas Deniau and Olivier Le Floch (c) 2009'
def main(argv=None):
    """Parse command-line options and run the Qt application.

    Python 2 / PyQt4 entry point: simulation parameters go to the
    Controller; leftover (non-option) arguments are handed to Qt.

    NOTE(review): *argv* is normalized below but parse_args() still reads
    sys.argv implicitly -- pass args to parse_args() if callers are meant
    to inject their own argument list; confirm intent.
    """
    if argv is None:
        argv = sys.argv
    parser = OptionParser()
    parser.add_option(
        '-V', '--version', dest='version', default=False,
        action="store_true",
        help="show version information and exit")
    parser.add_option(
        '-q', '--quiet', dest='quiet', default=False, action='store_true',
        help='be quiet when running')
    parser.add_option(
        '-s', dest='D_s', type="float", default=0.04,
        help="reaction rate parameter [default: %default]", metavar='RATE')
    parser.add_option(
        '-a', dest='D_a', type="float", default=0.25,
        help="diffusion rate parameter for a [default: %default]", metavar='RATE')
    parser.add_option(
        '-b', dest='D_b', type="float", default=0.0625,
        help="diffusion rate parameter for b [default: %default]", metavar='RATE')
    parser.add_option(
        '-d', dest='beta_i', type="float", default=12,
        help="decay rate for b [default: %default]", metavar='DECAY')
    parser.add_option(
        '-x', '--width', dest='width', type="int", default=20,
        help="width of the generated texture [default: %default]", metavar='WIDTH')
    parser.add_option(
        '-y', '--height', dest='height', type="int", default=20,
        help="height of the generated texture [default: %default]", metavar='HEIGHT')
    (options, args) = parser.parse_args()
    if options.version:
        print program + ', ' + version
        quit()
    # Only the remaining (non-option) arguments are passed on to Qt.
    app = QtGui.QApplication(args)
    window = MainWindow()
    controller = Controller(window)
    controller.awake()
    controller.setOptions(options)
    window.show()
    app.exec_()
if __name__ == "__main__":
import psyco
psyco.full()
sys.exit(main())
| #!/usr/bin/env python
# encoding: utf-8
"""
PyMorphogenesis.py
Created by Olivier Le Floch on 2009-03-17.
Program written by Thomas Deniau and Olivier Le Floch.
Copyright (c) 2009. All rights reserved.
"""
import sys
import math
from optparse import OptionParser
from PyQt4 import QtCore, QtGui
from MainWindow import MainWindow
from Controller import Controller
from MorphogenesisImageData import MorphogenesisImageData
program = 'PyMorphogenesis'
version = 'Version 0.1, written by Thomas Deniau and Olivier Le Floch (c) 2009'
def main(argv=None):
if argv is None:
argv = sys.argv
parser = OptionParser()
parser.add_option(
'-V', '--version', dest='version', default=False,
action="store_true",
help="show version information and exit", metavar='FILE')
parser.add_option(
'-s', dest='D_s', type="float", default=0.04,
help="reaction rate parameter [default: %default]", metavar='RATE')
parser.add_option(
'-a', dest='D_a', type="float", default=0.25,
help="diffusion rate parameter for a [default: %default]", metavar='RATE')
parser.add_option(
'-b', dest='D_b', type="float", default=0.0625,
help="diffusion rate parameter for b [default: %default]", metavar='RATE')
parser.add_option(
'-d', dest='beta_i', type="float", default=12,
help="decay rate for b [default: %default]", metavar='DECAY')
parser.add_option(
'-x', '--width', dest='width', type="int", default=20,
help="width of the generated texture [default: %default]", metavar='WIDTH')
parser.add_option(
'-y', '--height', dest='height', type="int", default=20,
help="height of the generated texture [default: %default]", metavar='HEIGHT')
(options, args) = parser.parse_args()
if options.version:
print program + ', ' + version
quit()
app = QtGui.QApplication(sys.argv)
window = MainWindow()
controller = Controller(window)
controller.awake()
controller.setOptions(options)
window.show()
app.exec_()
if __name__ == "__main__":
import psyco
psyco.full()
sys.exit(main())
| bsd-3-clause | Python |
11f3320413b9746d75470e623809e5e28ebbc712 | Fix python run_webkit_tests wrapper script to work correctly regardless of whether you're running cygwin python or win32 python and regardless of where you're invoking the script from. | wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser | webkit/tools/layout_tests/run_webkit_tests.py | webkit/tools/layout_tests/run_webkit_tests.py | #!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/WebKitTools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
  """Exec new-run-webkit-tests under this interpreter, forcing --chromium
  and forwarding all of our command-line arguments."""
  src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
  script = os.path.join(src_dir, 'third_party', 'WebKit', 'WebKitTools',
                        'Scripts', 'new-run-webkit-tests')
  cmd = [sys.executable, script]
  if '--chromium' not in sys.argv:
    cmd.append('--chromium')
  cmd.extend(sys.argv[1:])
  return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/WebKitTools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = sys.argv[0];
if sys.platform != 'cygwin':
src_dir = os.path.abspath(sys.argv[0])
print src_dir
src_dir=os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.dirname(src_dir)))))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "WebKitTools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
1086652e37d8f0d23199e9d2f7ed0be51e17c905 | change the update time | elixirhub/events-portal-scraping-scripts | ScheduleAddData.py | ScheduleAddData.py | __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import EventsPortal
import sys
def scheduleUpdateSolr(sourceUrl, patternUrl, solrUrl):
    """Run EventsPortal.addDataToSolrFromUrl every 60 minutes, forever.

    :param sourceUrl: URL the events are scraped from
    :param patternUrl: base URL used to resolve event links
    :param solrUrl: Solr core the data is pushed to
    Blocks until interrupted (Ctrl-C / SystemExit).
    """
    sched = BlockingScheduler()
    sched.add_job(EventsPortal.addDataToSolrFromUrl, 'interval', minutes=60,
                  args=[sourceUrl, patternUrl, solrUrl])
    try:
        # BlockingScheduler.start() does not return until shutdown, so the
        # old keep-alive loop after it was unreachable (and referenced a
        # 'time' module that was never imported).  Wrapping start() itself
        # gives the clean Ctrl-C exit the original code intended.
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
if __name__ == '__main__':
scheduleUpdateSolr("http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents-portal.org/events",
"139.162.217.53:8983/solr/eventsportal/"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| __author__ = 'chuqiao'
from apscheduler.schedulers.blocking import BlockingScheduler
import EventsPortal
import sys
def scheduleUpdateSolr(sourceUrl,patternUrl,solrUrl):
"""
"""
# logger.info('***Starting update every hour***')
sched = BlockingScheduler()
sched.add_job(EventsPortal.addDataToSolrFromUrl, 'interval', minutes= 1, args=[sourceUrl,patternUrl,solrUrl])
sched.start()
try:
# Keeps the main thread alive.
while True:
time.sleep(20)
except (KeyboardInterrupt, SystemExit):
pass
if __name__ == '__main__':
scheduleUpdateSolr("http://bioevents-portal.org/eventsfull/test?state=published&field_type_tid=All",
"http://bioevents-portal.org/events",
"139.162.217.53:8983/solr/eventsportal/"
)
# scheduleUpdateSolr(sys.argv[1],sys.argv[2])
| mit | Python |
6522a63d7431e5cddc59a8b04b32c3c3cdcb8352 | disable a test on skipif_circleci | sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs,sdpython/ensae_teaching_cs | _unittests/ut_dnotebooks/test_nb_coverage_2018_2019.py | _unittests/ut_dnotebooks/test_nb_coverage_2018_2019.py | # -*- coding: utf-8 -*-
"""
@brief test log(time=88s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.ipythonhelper import test_notebook_execution_coverage
from pyquickhelper.pycode import (
add_missing_development_version, ExtTestCase,
skipif_circleci)
import ensae_teaching_cs
class TestNotebookCov_2018_2019(ExtTestCase):
    """Executes the 2018-2019 teaching notebooks and measures their coverage."""

    def setUp(self):
        # Ensure development versions of the helper packages are importable.
        add_missing_development_version(["pymyinstall", "pyensae", "jyquickhelper"],
                                        __file__, hide=True)

    @skipif_circleci("stuck")
    def test_notebook_2018_2019(self):
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")

        def valid(cell):
            # Skip cells containing these snippets (presumably examples that
            # raise deliberately in class -- confirm against the notebooks).
            if "nuplet[1] = 5" in cell:
                return False
            if "dico[0] ##" in cell:
                return False
            if "dico[ [4,6] ] = 6" in cell:
                return False
            return True

        self.assertTrue(ensae_teaching_cs is not None)
        folder = os.path.join(os.path.dirname(__file__),
                              "..", "..", "_doc", "notebooks", "notebook_eleves", "2018-2019")
        test_notebook_execution_coverage(__file__, "", folder, valid=valid,
                                         this_module_name="ensae_teaching_cs", fLOG=fLOG,
                                         copy_files=['titanic.csv/titanic.csv'])
if __name__ == "__main__":
unittest.main()
| # -*- coding: utf-8 -*-
"""
@brief test log(time=88s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from pyquickhelper.ipythonhelper import test_notebook_execution_coverage
from pyquickhelper.pycode import add_missing_development_version, ExtTestCase
import ensae_teaching_cs
class TestNotebookCov_2018_2019(ExtTestCase):
def setUp(self):
add_missing_development_version(["pymyinstall", "pyensae", "jyquickhelper"],
__file__, hide=True)
def test_notebook_2018_2019(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
def valid(cell):
if "nuplet[1] = 5" in cell:
return False
if "dico[0] ##" in cell:
return False
if "dico[ [4,6] ] = 6" in cell:
return False
return True
self.assertTrue(ensae_teaching_cs is not None)
folder = os.path.join(os.path.dirname(__file__),
"..", "..", "_doc", "notebooks", "notebook_eleves", "2018-2019")
test_notebook_execution_coverage(__file__, "", folder, valid=valid,
this_module_name="ensae_teaching_cs", fLOG=fLOG,
copy_files=['titanic.csv/titanic.csv'])
if __name__ == "__main__":
unittest.main()
| mit | Python |
81229e13aa3c9c8ef277bfb24a615c367164545c | Add multi cursor support, close #21 | johyphenel/sublime-expand-region,johyphenel/sublime-expand-region,aronwoost/sublime-expand-region | ExpandRegion.py | ExpandRegion.py | import sublime, sublime_plugin, os
try:
import expand_region_handler
except:
from . import expand_region_handler
class ExpandRegionCommand(sublime_plugin.TextCommand):
    """Expand every selection (multi-cursor aware) to the next enclosing
    syntactic region computed by expand_region_handler."""
    def run(self, edit, debug=False):
        extension = ""
        if (self.view.file_name()):
            name, fileex = os.path.splitext(self.view.file_name())
            extension = fileex[1:]
        # The buffer does not change while expanding, so read it once
        # instead of once per selection (the old code re-read the whole
        # buffer inside the loop).
        string = self.view.substr(sublime.Region(0, self.view.size()))
        for region in self.view.sel():
            result = expand_region_handler.expand(string, region.begin(),
                                                  region.end(), extension)
            if result:
                self.view.sel().add(sublime.Region(result["start"], result["end"]))
                if debug:
                    print("startIndex: {0}, endIndex: {1}, type: {2}".format(result["start"], result["end"], result["type"]))
try:
import expand_region_handler
except:
from . import expand_region_handler
class ExpandRegionCommand(sublime_plugin.TextCommand):
def run(self, edit, debug=False):
extension = ""
if (self.view.file_name()):
name, fileex = os.path.splitext(self.view.file_name())
extension = fileex[1:]
region = self.view.sel()[0]
string = self.view.substr(sublime.Region(0, self.view.size()))
start = region.begin()
end = region.end()
result = expand_region_handler.expand(string, start, end, extension)
if result:
self.view.sel().add(sublime.Region(result["start"], result["end"]))
if debug:
print("startIndex: {0}, endIndex: {1}, type: {2}".format(result["start"], result["end"], result["type"])) | mit | Python |
91ff0fcb40d5d5318b71f0eb4b0873fb470265a0 | Add downgrade started applications migration | loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin,loomchild/jenca-puffin,loomchild/puffin | migrations/versions/f0c9c797c230_populate_application_settings_with_.py | migrations/versions/f0c9c797c230_populate_application_settings_with_.py | """populate application_settings with started apps
Revision ID: f0c9c797c230
Revises: 31850461ed3
Create Date: 2017-02-16 01:02:02.951573
"""
# revision identifiers, used by Alembic.
revision = 'f0c9c797c230'
down_revision = '31850461ed3'
from alembic import op
import sqlalchemy as sa
from puffin.core import docker, applications
def upgrade():
    """Mark every currently running application as started in its settings."""
    for record in docker.get_all_running_applications():
        applications.set_application_started(record[0], record[1], True)
def downgrade():
    """Clear the started flag for every application recorded as started."""
    for record in applications.get_all_started_applications():
        applications.set_application_started(record[0], record[1], False)
| """populate application_settings with started apps
Revision ID: f0c9c797c230
Revises: 31850461ed3
Create Date: 2017-02-16 01:02:02.951573
"""
# revision identifiers, used by Alembic.
revision = 'f0c9c797c230'
down_revision = '31850461ed3'
from alembic import op
import sqlalchemy as sa
from puffin.core import docker, applications
def upgrade():
running_applications = docker.get_all_running_applications()
for running_application in running_applications:
user = running_application[0]
application = running_application[1]
applications.set_application_started(user, application, True)
def downgrade():
pass
| agpl-3.0 | Python |
ee4ab950e2a6748af9aefea0dd1cd8f227d6bca8 | Fix linting issues | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0099_update_program_model.py | accelerator/migrations/0099_update_program_model.py | # Generated by Django 2.2.28 on 2022-04-20 13:05
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
    """Add the optional hubspot_url and program_image fields to Program."""
    dependencies = [
        ('accelerator', '0098_update_startup_update_20220408_0441'),
    ]
    operations = [
        # Optional link to a HubSpot resource (per the field name).
        migrations.AddField(
            model_name='program',
            name='hubspot_url',
            field=models.URLField(blank=True, null=True),
        ),
        # Optional image; upload_to='' stores uploads at the media root.
        migrations.AddField(
            model_name='program',
            name='program_image',
            field=models.ImageField(null=True, upload_to=''),
        ),
    ]
| # Generated by Django 2.2.28 on 2022-04-20 13:05
from django.db import (
migrations,
models,
)
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0098_update_startup_update_20220408_0441'),
]
operations = [
migrations.AddField(
model_name='program',
name='hubspot_url',
field=models.URLField(blank=True, null=True),
),
migrations.AddField(
model_name='program',
name='program_image',
field=models.ImageField(null=True, upload_to=''),
),
] | mit | Python |
beae821d6997bf8257673c391ed6a1fe7f38c682 | change sequence | yasokada/python-151127-7segLed_IPadrDisplay | IPadrDisplay.py | IPadrDisplay.py | from util7SegLED import info7seg_init, info7seg_on, info7seg_allOff
from util7SegLED import info7seg_decimalPoint
from util7SegLED import info7seg_onOff
from util7SegLED import info7seg_onDecimalPointOff
from utilNetworkIP import NetworkIP_get_ipAddress_eth0
import time
'''
v0.2 2015/11/28
- disp IP address using utilNetworkIP
- add disp_ipAddress()
- extract method to disp_0_9_DP()
- use all off function
v0.1 2015/11/28
- display 0..9 and decimal point
'''
def disp_0_9_DP(intvl):
    """Cycle the display through the digits 0-9 and the decimal point.

    Each of the 11 glyphs stays on for *intvl* seconds.
    """
    for glyph in range(11):
        info7seg_onOff(glyph, intvl)
def disp_ipAddress(ipadr, intvl_sec):
    """Show the address string one character at a time on the 7-seg display.

    Digits are shown as themselves; a '.' lights the decimal point.  Each
    character stays on for *intvl_sec* seconds.
    """
    for ch in ipadr:
        if "." in ch:
            info7seg_onDecimalPointOff(intvl_sec)
        else:
            info7seg_onOff(int(ch), intvl_sec)
# --- main script ---
# Fetch the eth0 address once; the display loop below reuses it.
ipadr = NetworkIP_get_ipAddress_eth0()
info7seg_init()
disp_0_9_DP(0.5) # initial test
# Blink the decimal point three times to signal start-up.
for loop in range(0,3):
    info7seg_onDecimalPointOff(0.5)
info7seg_allOff()
time.sleep(0.5)
# Repeatedly show the IP address, blanking the display between repetitions.
while True:
    disp_ipAddress(ipadr, 0.5)
    info7seg_allOff()
    time.sleep(2.0)
| from util7SegLED import info7seg_init, info7seg_on, info7seg_allOff
from util7SegLED import info7seg_decimalPoint
from util7SegLED import info7seg_onOff
from util7SegLED import info7seg_onDecimalPointOff
from utilNetworkIP import NetworkIP_get_ipAddress_eth0
import time
'''
v0.2 2015/11/28
- add disp_ipAddress()
- extract method to disp_0_9_DP()
- use all off function
v0.1 2015/11/28
- display 0..9 and decimal point
'''
def disp_0_9_DP(intvl):
for idx in range(0, 11):
info7seg_onOff(idx, intvl)
def disp_ipAddress(ipadr, intvl_sec):
for idx in range(0, len(ipadr)):
if "." in ipadr[idx]:
info7seg_onDecimalPointOff(intvl_sec)
else:
val = int(ipadr[idx])
info7seg_onOff(val, intvl_sec)
ipadr = NetworkIP_get_ipAddress_eth0()
info7seg_init()
for loop in range(0,3):
info7seg_onDecimalPointOff(0.5)
info7seg_allOff()
time.sleep(0.5)
disp_ipAddress(ipadr, 0.5)
disp_0_9_DP(0.5)
| mit | Python |
3d2dc3b0d4116e6a875b8f8d8878e0e82639a1d1 | update document | cnits/PyUtil | Lib/CPyMongo.py | Lib/CPyMongo.py | from pymongo import MongoClient
try:
from urllib.parse import quote_plus
except Exception as e:
from urllib import quote_plus
class CPyMongo:
    """Thin convenience wrapper around a pymongo database handle."""

    def __init__(self, db_name, user=None, password=None, host=None, port=None):
        """Connect to MongoDB and bind self.dbm to database *db_name*.

        Host defaults to 'localhost' and port to 27017; when both *user*
        and *password* are given an authenticated URI is used (the
        password is percent-encoded).
        """
        if host is None or host == "":
            host = 'localhost'
        if port is None or port == "":
            port = 27017
        try:
            if user is None or password is None:
                self.dbm = MongoClient(host, port)[db_name]
            else:
                self.dbm = MongoClient("mongodb://" + user + ":" + quote_plus(password) + "@" + str(host) + ":" + str(port))[db_name]
        except Exception as ex:
            # NOTE(review): connection errors are only printed and leave
            # self.dbm unset, so callers fail later with AttributeError.
            # Kept as-is to preserve existing best-effort behaviour.
            print(str(ex))

    def find(self, collection, _filter):
        """Return all documents of *collection* matching *_filter* as a list.

        A None filter matches every document.
        """
        if _filter is None:
            _filter = {}
        return list(self.dbm[collection].find(_filter))

    def find_one(self, collection, _filter):
        """Return the first document matching *_filter*, or None."""
        return self.dbm[collection].find_one(_filter)

    def save(self, collection, data, multiple=False):
        """Insert *data* (a single document, or a list when *multiple*)."""
        if multiple is False:
            return self.dbm[collection].insert_one(data)
        else:
            return self.dbm[collection].insert_many(data)

    def update(self, collection, _filter, update, multiple=False, upsert=False):
        """Apply *update* to the first (or all, when *multiple*) matches."""
        if multiple is False:
            return self.dbm[collection].update_one(_filter, update, upsert)
        else:
            return self.dbm[collection].update_many(_filter, update, upsert)

    def delete(self, collection, _filter, multiple=False):
        """Delete the first (or all, when *multiple*) matching documents."""
        if multiple is False:
            return self.dbm[collection].delete_one(_filter)
        else:
            return self.dbm[collection].delete_many(_filter)

    def count(self, collection, _filter):
        """Return the number of documents matching *_filter*."""
        return self.dbm[collection].count(_filter)

    def distinct(self, collection, key, _filter):
        """Return the distinct values of *key* among matching documents."""
        return self.dbm[collection].distinct(key, _filter)

    def drop(self, collection):
        """Drop *collection* from the database."""
        self.dbm[collection].drop()
| from pymongo import MongoClient
try:
from urllib.parse import quote_plus
except Exception:
from urllib import quote_plus
class CPyMongo:
def __init__(self, db_name, user=None, password=None, host=None, port=None):
if host is None or host == "":
host = 'localhost'
if port is None or port == "":
port = 27017
try:
if user is None or password is None:
self.dbm = MongoClient(host, port)[db_name]
else:
self.dbm = MongoClient("mongodb://" + user + ":" + quote_plus(password) + "@" + host + ":" + port)[db_name]
except Exception as ex:
print(str(ex))
def find(self, collection, _filter):
if _filter is None:
_filter = {}
result = self.dbm[collection].find(_filter)
data = []
for i in result:
data.append(i)
return data
def find_one(self, collection, _filter):
data = self.dbm[collection].find_one(_filter)
return data
def save(self, collection, data, multiple=False):
if multiple is False:
return self.dbm[collection].insert_one(data)
else:
return self.dbm[collection].insert_many(data)
def update(self, collection, _filter, update, multiple=False, upsert=False):
if multiple is False:
return self.dbm[collection].update_one(_filter, update, upsert)
else:
return self.dbm[collection].update_many(_filter, update, upsert)
def delete(self, collection, _filter, multiple=False):
if multiple is False:
return self.dbm[collection].delete_one(_filter)
else:
return self.dbm[collection].delete_many(_filter)
def count(self, collection, _filter):
return self.dbm[collection].count(_filter)
def distinct(self, collection, key, _filter):
return self.dbm[collection].distinct(key, _filter)
def drop(self, collection):
self.dbm[collection].drop()
| apache-2.0 | Python |
4ef288afb497d20f31c1a8c68226b74a465f3d01 | Update piece.py | xpchess/xpchess | Pieces/piece.py | Pieces/piece.py | class piece(object):
def __init__(self,col):
if col in ["Black","White"]:
self.color = col
else:
raise TypeError
def __str__(self):
typ = str(type(self)).split(".")[1][:-2]
return "{} {}".format(self.color,typ)
def __repr__(self):
typ = str(type(self)).split(".")[1][:-2]
return "{} {}".format(self.color,typ)
def pohyb(self,c1,c2,field):
pass
pass
| class piece(object):
def __init__(self,col):
if col in ["Black","White"]:
self.color = col
else:
raise TypeError
def __str__(self):
typ = str(type(self)).split(".")[1][:-2]
return "{} {}".format(self.color,typ)
def __repr__(self):
typ = str(type(self)).split(".")[1][:-2]
return "{} {}".format(self.color,typ)
def pohyb(self,c1,c2):
pass
pass
| bsd-2-clause | Python |
35eca403a1ab9817b7e1538b84827c5e64ddab0a | Add suport for MERGE_MSG | aristidesfl/sublime-git-commit-message-auto-save | gitcommitautosave.py | gitcommitautosave.py | """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo', 'MERGE_MSG')
if path and any(path.endswith(name) for name in git_files):
return True
| """Git Commit Auto Save.
Sublime Text 3 package to auto save commit messages when the window is closed.
This allows the user to close the window without having to save before,
or having to deal with the "Save File" popup.
"""
import sublime_plugin
class GitCommitAutoSave(sublime_plugin.EventListener):
def on_load(self, view):
if is_git_file(view.file_name()):
view.set_scratch(True) # disable save file dialog on exit
def on_pre_close(self, view):
if is_git_file(view.file_name()):
view.run_command("save")
def is_git_file(path):
git_files = ('COMMIT_EDITMSG', 'git-rebase-todo')
if path and any(path.endswith(name) for name in git_files):
return True
| mit | Python |
726eb82758ba6ceec8e31611b2854e5f4c0cee72 | Mark RPI Power binary sensor as diagnostic (#76198) | w1ll1am23/home-assistant,mezz64/home-assistant,nkgilley/home-assistant,w1ll1am23/home-assistant,mezz64/home-assistant,nkgilley/home-assistant | homeassistant/components/rpi_power/binary_sensor.py | homeassistant/components/rpi_power/binary_sensor.py | """
A sensor platform which detects underruns and capped status from the official Raspberry Pi Kernel.
Minimal Kernel needed is 4.14+
"""
import logging
from rpi_bad_power import UnderVoltage, new_under_voltage
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
_LOGGER = logging.getLogger(__name__)
DESCRIPTION_NORMALIZED = "Voltage normalized. Everything is working as intended."
DESCRIPTION_UNDER_VOLTAGE = "Under-voltage was detected. Consider getting a uninterruptible power supply for your Raspberry Pi."
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up rpi_power binary sensor."""
    # new_under_voltage is run in the executor (presumably it probes the
    # kernel interface with blocking I/O -- confirm in rpi_bad_power).
    under_voltage = await hass.async_add_executor_job(new_under_voltage)
    # True requests an immediate first update of the new entity.
    async_add_entities([RaspberryChargerBinarySensor(under_voltage)], True)
class RaspberryChargerBinarySensor(BinarySensorEntity):
    """Binary sensor representing the rpi power status."""

    _attr_device_class = BinarySensorDeviceClass.PROBLEM
    _attr_entity_category = EntityCategory.DIAGNOSTIC
    _attr_icon = "mdi:raspberry-pi"
    _attr_name = "RPi Power status"
    _attr_unique_id = "rpi_power"  # only one sensor possible

    def __init__(self, under_voltage: UnderVoltage) -> None:
        """Initialize the binary sensor."""
        self._under_voltage = under_voltage

    def update(self) -> None:
        """Update the state."""
        value = self._under_voltage.get()
        # Log only on state transitions so repeated polls stay quiet.
        if self._attr_is_on != value:
            if value:
                _LOGGER.warning(DESCRIPTION_UNDER_VOLTAGE)
            else:
                _LOGGER.info(DESCRIPTION_NORMALIZED)
            self._attr_is_on = value
| """
A sensor platform which detects underruns and capped status from the official Raspberry Pi Kernel.
Minimal Kernel needed is 4.14+
"""
import logging
from rpi_bad_power import UnderVoltage, new_under_voltage
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
_LOGGER = logging.getLogger(__name__)
DESCRIPTION_NORMALIZED = "Voltage normalized. Everything is working as intended."
DESCRIPTION_UNDER_VOLTAGE = "Under-voltage was detected. Consider getting a uninterruptible power supply for your Raspberry Pi."
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up rpi_power binary sensor."""
under_voltage = await hass.async_add_executor_job(new_under_voltage)
async_add_entities([RaspberryChargerBinarySensor(under_voltage)], True)
class RaspberryChargerBinarySensor(BinarySensorEntity):
"""Binary sensor representing the rpi power status."""
_attr_device_class = BinarySensorDeviceClass.PROBLEM
_attr_icon = "mdi:raspberry-pi"
_attr_name = "RPi Power status"
_attr_unique_id = "rpi_power" # only one sensor possible
def __init__(self, under_voltage: UnderVoltage) -> None:
"""Initialize the binary sensor."""
self._under_voltage = under_voltage
def update(self) -> None:
"""Update the state."""
value = self._under_voltage.get()
if self._attr_is_on != value:
if value:
_LOGGER.warning(DESCRIPTION_UNDER_VOLTAGE)
else:
_LOGGER.info(DESCRIPTION_NORMALIZED)
self._attr_is_on = value
| apache-2.0 | Python |
f65cb4fa50c67ffdaf1b5ef26534bc03e8751dd0 | add tests for program categories | RouxRC/weboob,laurent-george/weboob,frankrousseau/weboob,sputnick-dev/weboob,nojhan/weboob-devel,laurent-george/weboob,sputnick-dev/weboob,RouxRC/weboob,RouxRC/weboob,willprice/weboob,nojhan/weboob-devel,willprice/weboob,frankrousseau/weboob,nojhan/weboob-devel,Konubinix/weboob,sputnick-dev/weboob,Konubinix/weboob,Konubinix/weboob,laurent-george/weboob,willprice/weboob,frankrousseau/weboob | modules/arte/test.py | modules/arte/test.py | # -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
from weboob.capabilities.video import BaseVideo
class ArteTest(BackendTest):
BACKEND = 'arte'
def test_search(self):
l = list(self.backend.search_videos('a'))
assert len(l)
v = l[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
def test_live(self):
l1 = list(self.backend.iter_resources([BaseVideo], [u'arte-live']))
assert len(l1)
l2 = list(self.backend.iter_resources([BaseVideo], l1[0].split_path))
assert len(l2)
v = l2[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
def test_latest(self):
l = list(self.backend.iter_resources([BaseVideo], [u'arte-latest']))
assert len(l)
v = l[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
def test_program(self):
l1 = list(self.backend.iter_resources([BaseVideo], [u'arte-program']))
assert len(l1)
# some categories may contain no available videos (during summer period for example)
for l in l1:
l2 = list(self.backend.iter_resources([BaseVideo], l.split_path))
if len(l2) == 0:
continue
break
assert len(l2)
v = l2[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
| # -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
from weboob.capabilities.video import BaseVideo
class ArteTest(BackendTest):
BACKEND = 'arte'
def test_search(self):
l = list(self.backend.search_videos('a'))
assert len(l)
v = l[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
def test_live(self):
l1 = list(self.backend.iter_resources([BaseVideo], [u'arte-live']))
assert len(l1)
l2 = list(self.backend.iter_resources([BaseVideo], l1[0].split_path))
assert len(l2)
v = l2[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
def test_latest(self):
l = list(self.backend.iter_resources([BaseVideo], [u'arte-latest']))
assert len(l)
v = l[0]
self.backend.fillobj(v, ('url',))
self.assertTrue(v.url, 'URL for video "%s" not found' % (v.id))
| agpl-3.0 | Python |
a0ec9e4faba5f5342a7b0d2653f7552e2d19a67d | change prod setting | sunForest/AviPost,sunForest/AviPost | avipost/avipost/settings/prod.py | avipost/avipost/settings/prod.py | from .base import *
INSTALLED_APPS += (
'corsheaders',
)
# need to be before django.middleware.common.CommonMiddleware
MIDDLEWARE_CLASSES = (
'corsheaders.middleware.CorsMiddleware',
) + MIDDLEWARE_CLASSES
CORS_ORIGIN_ALLOW_ALL = True
# TODO: set the database parameters
ALLOWED_HOSTS = ["52.16.214.13", "127.0.0.1", "localhost"]
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'postgres',
'USER': 'postcard_admin',
'PASSWORD': get_env_variable('DB_PASSWORD'),
'HOST': 'postcarddb.cqlnuay9niw1.eu-west-1.rds.amazonaws.com',
'PORT': '5432'
}
}
SECRET_KEY = get_env_variable('SECRET_KEY')
| from .base import *
INSTALLED_APPS += (
'corsheaders',
)
# need to be before django.middleware.common.CommonMiddleware
MIDDLEWARE_CLASSES = (
'corsheaders.middleware.CorsMiddleware',
) + MIDDLEWARE_CLASSES
CORS_ORIGIN_ALLOW_ALL = True
# TODO: set the database parameters
| apache-2.0 | Python |
28c39136aadd34253ff0c925504427a19bccfd49 | Update to version v0.0.4 | jeffknupp/domain-parser,jeffknupp/domain-parser | domain_parser/__init__.py | domain_parser/__init__.py | __version__ = '0.0.4'
| __version__ = '0.0.3.4'
| apache-2.0 | Python |
8beee9b0b61789d0b54f4b3cd83f2d8d450da77d | Disable inbox_benchmark.Inbox. | axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,ltilve/chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,
axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ltilve/chromium,ltilve/chromium | tools/perf/benchmarks/inbox_benchmark.py | tools/perf/benchmarks/inbox_benchmark.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# TODO: Benchmark is failing; see crbug.com/452257. Comment out instead of
# disabling because the failure happens before the Disabled check.
#from page_sets import inbox
#from telemetry import benchmark
#from telemetry.web_perf import timeline_based_measurement
#
#@benchmark.Disabled('android')
#class Inbox(benchmark.Benchmark):
# """Runs the timeline based measurement against inbox pageset."""
# test = timeline_based_measurement.TimelineBasedMeasurement
#
# def CreatePageSet(self, options):
# return inbox.InboxPageSet()
| # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from page_sets import inbox
from telemetry import benchmark
from telemetry.web_perf import timeline_based_measurement
@benchmark.Disabled('android')
class Inbox(benchmark.Benchmark):
"""Runs the timeline based measurement against inbox pageset."""
test = timeline_based_measurement.TimelineBasedMeasurement
def CreatePageSet(self, options):
return inbox.InboxPageSet()
| bsd-3-clause | Python |
d4eb6ddeb95a80b843c7dcbe9d92d483d13e8284 | Rename variable | pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality | tracker/app.py | tracker/app.py | """The app module, containing the app factory function."""
from flask import Flask, render_template
from flask_security import SQLAlchemyUserDatastore
from . import commands, core, survey, iati
from .security.models import User, Role
from .extensions import cache, db, debug_toolbar, migrate, webpack, security
from .database import BaseModel
def create_app(config_object='tracker.settings'):
"""An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split('.')[0])
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
register_shellcontext(app)
register_commands(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
cache.init_app(app)
db.init_app(app)
migrate.init_app(app, db)
BaseModel.set_session(db.session)
webpack.init_app(app)
debug_toolbar.init_app(app)
datastore = SQLAlchemyUserDatastore(db, User, Role)
security.init_app(app, datastore)
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(survey.views.blueprint)
app.register_blueprint(core.views.blueprint)
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
return {
'db': db,
'Organisation': core.models.Organisation,
}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.test)
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(core.commands.setup_cli)
app.cli.add_command(iati.commands.iati_cli)
| """The app module, containing the app factory function."""
from flask import Flask, render_template
from flask_security import SQLAlchemyUserDatastore
from . import commands, core, survey, iati
from .security.models import User, Role
from .extensions import cache, db, debug_toolbar, migrate, webpack, security
from .database import BaseModel
def create_app(config_object='tracker.settings'):
"""An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split('.')[0])
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
register_shellcontext(app)
register_commands(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
cache.init_app(app)
db.init_app(app)
migrate.init_app(app, db)
BaseModel.set_session(db.session)
webpack.init_app(app)
debug_toolbar.init_app(app)
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security.init_app(app, user_datastore)
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(survey.views.blueprint)
app.register_blueprint(core.views.blueprint)
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
# If a HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
return {
'db': db,
'Organisation': core.models.Organisation,
}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.test)
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(core.commands.setup_cli)
app.cli.add_command(iati.commands.iati_cli)
| agpl-3.0 | Python |
51c25fba0ecfa627ac91e6a9ab83c2bd44fab29c | remove debug | podhub-io/follower | podhub/follower/views.py | podhub/follower/views.py | from . import app
from feed import Feed
from flask import jsonify, request
@app.route('/')
def index():
return jsonify()
@app.route('/audio')
def feed():
url = request.args.get('feed_url')
index = request.args.get('index')
if not index:
index = -1
feed = Feed(url=url)
try:
entry = feed.entries[index]
except TypeError:
return jsonify(error_message='index must be an integer.'), 400
except IndexError:
return jsonify(error_message='episode {} not found'.format(index)), 400
return jsonify(feed_url=entry.audio)
| from . import app
from feed import Feed
from flask import jsonify, request
@app.route('/')
def index():
return jsonify()
@app.route('/audio')
def feed():
url = request.args.get('feed_url')
index = request.args.get('index')
if not index:
index = -1
feed = Feed(url=url)
entry = feed.entries[index]
"""
except TypeError:
return jsonify(error_message='index must be an integer.'), 400
except IndexError:
return jsonify(error_message='episode {} not found'.format(index)), 400
"""
return jsonify(feed_url=entry.audio)
| bsd-3-clause | Python |
f1b93779783f462fb270393e6614b4a00f9c91e8 | Refactor caching behaviour from api endpoints. | ecolell/pfamserver,ecolell/pfamserver,ecolell/pfamserver | backend/pfamserver/extensions.py | backend/pfamserver/extensions.py | """Extensions builders."""
from flask_caching import Cache
from flask_sqlalchemy import SQLAlchemy
from flask_wtf.csrf import CSRFProtect
from flask import request
cache = Cache()
csrf = CSRFProtect()
db = SQLAlchemy() # session_options={"autoflush": False})
def make_cache_key(*args, **kwargs):
path = request.path
args = str(list(request.args.items()))
return path + args
| """Extensions builders."""
from flask_caching import Cache
from flask_sqlalchemy import SQLAlchemy
from flask_wtf.csrf import CSRFProtect
cache = Cache()
csrf = CSRFProtect()
db = SQLAlchemy() # session_options={"autoflush": False})
| agpl-3.0 | Python |
13639181c2c63a800fdad754259b7282b0fbf63a | fix race condition in open-remote | nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js | test/sanity/open-remote/test.py | test/sanity/open-remote/test.py | import time
import os
import subprocess
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from nw_util import *
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common import utils
chrome_options = Options()
chrome_options.add_argument("nwapp=" + os.path.dirname(os.path.abspath(__file__)))
testdir = os.path.dirname(os.path.abspath(__file__))
os.chdir(testdir)
port = str(utils.free_port())
server = subprocess.Popen(['python', 'http-server.py', port])
html = open('index.html', 'w')
html.write('''
<script>
nw.Window.open('http://localhost:%s/remote.html', function(win) {
document.write('<h1 id="res">returned window is ' + typeof win + '</h1>');
win.y = 0;
});
</script>
''' % (port))
html.close()
driver = webdriver.Chrome(executable_path=os.environ['CHROMEDRIVER'], chrome_options=chrome_options)
time.sleep(1)
try:
wait_window_handles(driver, 2)
driver.switch_to_window(driver.window_handles[0])
print driver.current_url
time.sleep(1)
result = driver.find_element_by_id('res').get_attribute('innerHTML')
print result
assert("object" in result)
driver.switch_to_window(driver.window_handles[-1])
for id in ['res', 'res2', 'res3']:
result = driver.find_element_by_id(id).get_attribute('innerHTML')
print result
assert("DISABLED" in result)
finally:
server.terminate()
driver.quit()
| import time
import os
import subprocess
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from nw_util import *
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common import utils
chrome_options = Options()
chrome_options.add_argument("nwapp=" + os.path.dirname(os.path.abspath(__file__)))
testdir = os.path.dirname(os.path.abspath(__file__))
os.chdir(testdir)
port = str(utils.free_port())
server = subprocess.Popen(['python', 'http-server.py', port])
html = open('index.html', 'w')
html.write('''
<script>
nw.Window.open('http://localhost:%s/remote.html', function(win) {
document.write('<h1 id="res">returned window is ' + typeof win + '</h1>');
win.y = 0;
});
</script>
''' % (port))
html.close()
driver = webdriver.Chrome(executable_path=os.environ['CHROMEDRIVER'], chrome_options=chrome_options)
time.sleep(1)
try:
print driver.current_url
time.sleep(1)
result = driver.find_element_by_id('res').get_attribute('innerHTML')
print result
assert("object" in result)
wait_window_handles(driver, 2)
driver.switch_to_window(driver.window_handles[-1])
for id in ['res', 'res2', 'res3']:
result = driver.find_element_by_id(id).get_attribute('innerHTML')
print result
assert("DISABLED" in result)
finally:
server.terminate()
driver.quit()
| mit | Python |
6ef73bb54f0e5f403012d672a756591969567698 | Add symbol method to Terminal class | PatrikValkovic/grammpy | grammpy/Terminal.py | grammpy/Terminal.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
import copy
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
def symbol(self):
return copy.deepcopy(self.__symbol)
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
class Terminal:
def __init__(self, symbol, grammar):
self.__symbol = symbol
self.__grammar = grammar
def __hash__(self):
return hash((self.__symbol, id(self.__grammar)))
def __eq__(self, other):
return isinstance(other, Terminal) and hash(self) == hash(other)
| mit | Python |
dae5bb7174959aa50ee65b5992bae3d4fce8da18 | Use the KeyBindingManager. | bitmonk/pgcli,thedrow/pgcli,johshoff/pgcli,bitemyapp/pgcli,TamasNo1/pgcli,w4ngyi/pgcli,MattOates/pgcli,dbcli/vcli,bitemyapp/pgcli,zhiyuanshi/pgcli,j-bennet/pgcli,nosun/pgcli,janusnic/pgcli,j-bennet/pgcli,johshoff/pgcli,lk1ngaa7/pgcli,dbcli/pgcli,lk1ngaa7/pgcli,bitmonk/pgcli,darikg/pgcli,dbcli/pgcli,darikg/pgcli,suzukaze/pgcli,n-someya/pgcli,TamasNo1/pgcli,janusnic/pgcli,w4ngyi/pgcli,dbcli/vcli,suzukaze/pgcli,nosun/pgcli,n-someya/pgcli,d33tah/pgcli,thedrow/pgcli,d33tah/pgcli,koljonen/pgcli,yx91490/pgcli,MattOates/pgcli,joewalnes/pgcli,joewalnes/pgcli,yx91490/pgcli,koljonen/pgcli,zhiyuanshi/pgcli | pgcli/key_bindings.py | pgcli/key_bindings.py | import logging
from prompt_toolkit.keys import Keys
from prompt_toolkit.key_binding.manager import KeyBindingManager
_logger = logging.getLogger(__name__)
def pgcli_bindings():
"""
Custom key bindings for pgcli.
"""
key_binding_manager = KeyBindingManager()
@key_binding_manager.registry.add_binding(Keys.F2)
def _(event):
"""
Enable/Disable SmartCompletion Mode.
"""
_logger.debug('Detected F2 key.')
buf = event.cli.current_buffer
buf.completer.smart_completion = not buf.completer.smart_completion
@key_binding_manager.registry.add_binding(Keys.F3)
def _(event):
"""
Enable/Disable Multiline Mode.
"""
_logger.debug('Detected F3 key.')
buf = event.cli.current_buffer
buf.always_multiline = not buf.always_multiline
@key_binding_manager.registry.add_binding(Keys.ControlSpace)
def _(event):
"""
Force autocompletion at cursor.
"""
_logger.debug('Detected <C-Space> key.')
event.cli.current_buffer.complete_next()
return key_binding_manager.registry
| import logging
from prompt_toolkit import filters
from prompt_toolkit.keys import Keys
from prompt_toolkit.key_binding.registry import Registry
from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings
_logger = logging.getLogger(__name__)
def pgcli_bindings():
"""
Custom key bindings for pgcli.
"""
registry = Registry()
load_emacs_bindings(registry)
handle = registry.add_binding
@handle(Keys.F2)
def _(event):
"""
Enable/Disable SmartCompletion Mode.
"""
_logger.debug('Detected F2 key.')
buf = event.cli.current_buffer
buf.completer.smart_completion = not buf.completer.smart_completion
@handle(Keys.F3)
def _(event):
"""
Enable/Disable Multiline Mode.
"""
_logger.debug('Detected F3 key.')
buf = event.cli.current_buffer
buf.always_multiline = not buf.always_multiline
@handle(Keys.ControlSpace, filter=~filters.HasSelection())
def _(event):
"""
Force autocompletion at cursor.
"""
_logger.debug('Detected <C-Space> key.')
event.cli.current_buffer.complete_next()
return registry
| bsd-3-clause | Python |
8d1516535a499b65441ee39873f03e4d19a7426e | Bump version | virtuald/greenado,virtuald/greenado | greenado/version.py | greenado/version.py | __version__ = '0.2.1'
| __version__ = '0.2.0'
| apache-2.0 | Python |
864ad2dbb7e58b6aac7f9b02ba2babddeb7f8d53 | rename sandbox to server for run command | plone/plone.server,plone/plone.server | src/plone.server/setup.py | src/plone.server/setup.py | # -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name='plone.server',
version=open('VERSION').read().strip(),
long_description=(open('README.rst').read() + '\n' +
open('CHANGELOG.rst').read()),
classifiers=[
'Framework :: Plone :: 7.0',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
],
url='https://pypi.python.org/pypi/plone.server',
license='GPL version 3',
setup_requires=[
'pytest-runner',
],
zip_safe=True,
include_package_data=True,
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['plone'],
install_requires=[
'aiohttp',
'BTrees',
'cchardet',
'plone.dexterity',
'plone.jsonserializer',
'plone.registry',
'plone.supermodel',
'pyjwt',
'setuptools',
'transaction',
'ZODB',
'zope.component',
'zope.component',
'zope.configuration',
'zope.configuration',
'zope.dottedname',
'zope.event',
'zope.i18n',
'zope.i18nmessageid',
'zope.interface',
'zope.location',
'zope.schema',
'zope.security',
],
tests_require=[
'pytest',
],
entry_points={
'console_scripts': [
'server = plone.server.server:main',
]
}
)
| # -*- coding: utf-8 -*-
from setuptools import find_packages
from setuptools import setup
setup(
name='plone.server',
version=open('VERSION').read().strip(),
long_description=(open('README.rst').read() + '\n' +
open('CHANGELOG.rst').read()),
classifiers=[
'Framework :: Plone :: 7.0',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
],
url='https://pypi.python.org/pypi/plone.server',
license='GPL version 3',
setup_requires=[
'pytest-runner',
],
zip_safe=True,
include_package_data=True,
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['plone'],
install_requires=[
'aiohttp',
'BTrees',
'cchardet',
'plone.dexterity',
'plone.jsonserializer',
'plone.registry',
'plone.supermodel',
'pyjwt',
'setuptools',
'transaction',
'ZODB',
'zope.component',
'zope.component',
'zope.configuration',
'zope.configuration',
'zope.dottedname',
'zope.event',
'zope.i18n',
'zope.i18nmessageid',
'zope.interface',
'zope.location',
'zope.schema',
'zope.security',
],
tests_require=[
'pytest',
],
entry_points={
'console_scripts': [
'sandbox = plone.server.server:main',
]
}
)
| bsd-2-clause | Python |
ee9a30d2c6d6c7e7c9e7f073d1898ff016583142 | Update database name | fernando24164/flask_api,fernando24164/flask_api | config.py | config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'weather.sqlite')
config = {
'default': DevelopmentConfig
}
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = 'f63f65a3f7274455bfd49edf9c6b36bd'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir,
'data-dev.sqlite')
config = {
'default': DevelopmentConfig
}
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.