commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
dfa291d70cb86d1d8ab0b17cc3273fcc942a2751 | Add some resilience to create_dict.py, resisting crashing on ZeroDivisionExceptions | shellphish/driller | bin/create_dict.py | bin/create_dict.py | #!/usr/bin/env pypy
import angr
import string
import sys
'''
create AFL dictionary of string references found in the binary. should allow AFL to explore more paths
without having to request symbolic execution.
'''
def hexescape(s):
out = [ ]
acceptable = string.letters + string.digits + " ."
for c in s:
if c not in acceptable:
out.append("\\x%02x" % ord(c))
else:
out.append(c)
return ''.join(out)
def main(argc, argv):
if (argc < 3):
print "usage: %s <binary> <dictfile>" % argv[1]
sys.exit(1)
binary = argv[1]
dictfile = argv[2]
b = angr.Project(binary)
cfg = b.analyses.CFG(keep_input_state=True)
string_references = [ ]
for f in cfg.function_manager.functions.values():
try:
string_references.append(f.string_references())
except ZeroDivisionError:
pass
string_references = sum(string_references, [])
strings = [] if len(string_references) == 0 else zip(*string_references)[1]
dictfp = open(dictfile, "w")
for i, string in enumerate(strings):
s = hexescape(string)
dictfp.write("driller_%d=\"%s\"\n" % (i, s))
dictfp.close()
return 0
if __name__ == "__main__":
sys.exit(main(len(sys.argv), sys.argv))
| #!/usr/bin/env pypy
import angr
import string
import sys
'''
create AFL dictionary of string references found in the binary. should allow AFL to explore more paths
without having to request symbolic execution.
'''
def hexescape(s):
out = [ ]
acceptable = string.letters + string.digits + " ."
for c in s:
if c not in acceptable:
out.append("\\x%02x" % ord(c))
else:
out.append(c)
return ''.join(out)
def main(argc, argv):
if (argc < 3):
print "usage: %s <binary> <dictfile>"
sys.exit(1)
binary = argv[1]
dictfile = argv[2]
try:
b = angr.Project(binary)
cfg = b.analyses.CFG(keep_input_state=True)
string_references = sum([f.string_references() for f in cfg.function_manager.functions.values()], [])
except:
sys.exit(1)
strings = [] if len(string_references) == 0 else zip(*string_references)[1]
dictfp = open(dictfile, "w")
for i, string in enumerate(strings):
s = hexescape(string)
dictfp.write("driller_%d=\"%s\"\n" % (i, s))
dictfp.close()
return 0
if __name__ == "__main__":
sys.exit(main(len(sys.argv), sys.argv))
| bsd-2-clause | Python |
5ae9153a196a2d6a445364bdc40ea6e428bf35ff | Switch to matplotlib agg backend before importing pyplot | brendan-ward/rasterio,brendan-ward/rasterio,brendan-ward/rasterio | tests/__init__.py | tests/__init__.py | #
import matplotlib as mpl
mpl.use('agg') | #
| bsd-3-clause | Python |
3c28250fdda760f1997e6cf198657b3a7dc11bab | Remove #TODO for CLI test - done | PyThaiNLP/pythainlp | tests/__init__.py | tests/__init__.py | # -*- coding: utf-8 -*-
"""
Unit test.
Each file in tests/ is for each main package.
"""
import sys
import unittest
sys.path.append("../pythainlp")
loader = unittest.TestLoader()
testSuite = loader.discover("tests")
testRunner = unittest.TextTestRunner(verbosity=2)
testRunner.run(testSuite)
| # -*- coding: utf-8 -*-
"""
Unit test.
Each file in tests/ is for each main package.
#TODO Test for CLI
"""
import sys
import unittest
sys.path.append("../pythainlp")
loader = unittest.TestLoader()
testSuite = loader.discover("tests")
testRunner = unittest.TextTestRunner(verbosity=2)
testRunner.run(testSuite)
| apache-2.0 | Python |
056ca70c96390af73954b86c1143160a94f030a9 | Test fix | acrispin/python-react,abdelouahabb/python-react,arceduardvincent/python-react,abdelouahabb/python-react,acrispin/python-react,markfinger/python-react,arceduardvincent/python-react,markfinger/python-react | tests/__init__.py | tests/__init__.py | import os
import atexit
import subprocess
process = subprocess.Popen(
args=('node', os.path.join(os.path.dirname(__file__), 'test_server.js'),),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
# Ensure the process is killed on exit
atexit.register(lambda _process: _process.kill(), process)
output = process.stdout.readline().decode('utf-8')
if output.strip() == '':
output += process.stdout.readline().decode('utf-8')
if 'python-react test render server' not in output:
raise Exception('Unexpected output: "{}"'.format(output))
| import os
import atexit
import subprocess
process = subprocess.Popen(
args=('node', os.path.join(os.path.dirname(__file__), '..', 'example', 'server.js'),),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
# Ensure the process is killed on exit
atexit.register(lambda _process: _process.kill(), process)
output = process.stdout.readline().decode('utf-8')
if output.strip() == '':
output += process.stdout.readline().decode('utf-8')
if 'python-react test render server' not in output:
raise Exception('Unexpected output: "{}"'.format(output))
| mit | Python |
545dbf5f702ff6857d5c8ace98524a875559e408 | fix another import | biokit/biokit,biokit/biokit | biokit/__init__.py | biokit/__init__.py | """Main entry point to biokit
::
import biokit as bk
from bk import bioservices
from bk import sequence
from bioservices.apps import get_fasta
fasta = get_fasta("P43403")
seq = sequence.FASTA(fasta)
seq.plot()
"""
__version__ = "0.1"
import pkg_resources
try:
version = pkg_resources.require(biokit)[0].version
except:
version = __version__
# Creates the data directory if it does not exist
from easydev import CustomConfig
biokitPATH = CustomConfig("biokit").user_config_dir
from biokit import viz
from biokit import io
from biokit import services
from biokit.viz import *
from biokit.services import EUtils, KEGG, UniProt
from biokit import stats
from biokit.network import *
from biokit import sequence
from biokit.sequence import *
from biokit import goid
from biokit.goid import *
from biokit import taxonomy
from biokit.taxonomy import Taxonomy
| """Main entry point to biokit
::
import biokit as bk
from bk import bioservices
from bk import sequence
from bioservices.apps import get_fasta
fasta = get_fasta("P43403")
seq = sequence.FASTA(fasta)
seq.plot()
"""
__version__ = "0.1"
import pkg_resources
try:
version = pkg_resources.require(biokit)[0].version
except:
version = __version__
# Creates the data directory if it does not exist
from easydev import CustomConfig
biokitPATH = CustomConfig("biokit").user_config_dir
from biokit import viz
from biokit import io
from biokit import services
from biokit.viz import *
from biokit.services import EUtils, KEGG, UniProt
from biokit import stats
from biokit.network import *
from biokit import sequence
from biokit.sequence import *
from biokit import goid
from goid import *
from biokit import taxonomy
from taxonomy import Taxonomy
| bsd-2-clause | Python |
106b4166c9eb84ff068466444506a3bafa192a52 | Increase the maximum waylength | janLo/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool,eXma/meet-and-eat-distribution-tool,janLo/meet-and-eat-distribution-tool,janLo/meet-and-eat-distribution-tool | mue/integration_test.py | mue/integration_test.py | from random import choice
import json
import sys
import pymue
from collections import defaultdict
MAX_WAY = 9.3
def way_cost(way_length):
if way_length <= 1:
return way_length * 100
elif way_length < MAX_WAY:
return (way_length * 100) ** 2
return sys.float_info.max
print("read data....")
pref = ""
if len(sys.argv):
pref = "_%s" % sys.argv[1]
with open("teams%s.json" % pref, "r") as f:
team_data = json.load(f)
with open("distances%s.json" % pref, "r") as f:
distance_data = json.load(f)
print("map teams....")
team_map = dict()
team_map_reverse = dict()
for (idx, team) in enumerate(team_data):
team_map[team["id"]] = idx
team_map_reverse[idx] = team["id"]
cnt = len(team_data)
teams = sorted(team_map.values())
print("build distance matrix...")
distance_matrix = pymue.DistanceMatrix(cnt)
for distance_run in distance_data:
for src in distance_run:
for dst in distance_run[src]:
distance_matrix.set_cost(team_map[int(src)],
team_map[int(dst)],
way_cost(distance_run[src][dst]))
print("Minimum cost is : %02f" % distance_matrix.min_cost())
print("Minimum result is: %02f" % (distance_matrix.min_cost() * 2 * cnt))
calculation = pymue.Calculation(cnt, distance_matrix, way_cost(MAX_WAY - 0.1))
print("calculate best routes....")
def generate_plan(calculation):
plan = []
for round_num in range(3):
round_set = defaultdict(set)
for (team_id, station) in enumerate(calculation.round_stations(pymue.Round(round_num))):
round_set[station].add(team_id)
plan.append(round_set.values())
return plan
best_plan = None
def test():
pymue.calculate(calculation)
best_plan = generate_plan(calculation)
print("")
print("======best plan======")
print("1st round:", best_plan[0])
print("2nd round:", best_plan[1])
print("3rd round:", best_plan[2])
test()
print("")
print("teams:", cnt)
print("solutions that where calculated:", calculation.solutions())
| from random import choice
import json
import sys
import pymue
from collections import defaultdict
MAX_WAY = 7.8
def way_cost(way_length):
if way_length <= 1:
return way_length * 100
elif way_length < MAX_WAY:
return (way_length * 100) ** 2
return sys.float_info.max
print("read data....")
pref = ""
if len(sys.argv):
pref = "_%s" % sys.argv[1]
with open("teams%s.json" % pref, "r") as f:
team_data = json.load(f)
with open("distances%s.json" % pref, "r") as f:
distance_data = json.load(f)
print("map teams....")
team_map = dict()
team_map_reverse = dict()
for (idx, team) in enumerate(team_data):
team_map[team["id"]] = idx
team_map_reverse[idx] = team["id"]
cnt = len(team_data)
teams = sorted(team_map.values())
print("build distance matrix...")
distance_matrix = pymue.DistanceMatrix(cnt)
for distance_run in distance_data:
for src in distance_run:
for dst in distance_run[src]:
distance_matrix.set_cost(team_map[int(src)],
team_map[int(dst)],
way_cost(distance_run[src][dst]))
print("Minimum cost is : %02f" % distance_matrix.min_cost())
print("Minimum result is: %02f" % (distance_matrix.min_cost() * 2 * cnt))
calculation = pymue.Calculation(cnt, distance_matrix, way_cost(MAX_WAY - 0.1))
print("calculate best routes....")
def generate_plan(calculation):
plan = []
for round_num in range(3):
round_set = defaultdict(set)
for (team_id, station) in enumerate(calculation.round_stations(pymue.Round(round_num))):
round_set[station].add(team_id)
plan.append(round_set.values())
return plan
best_plan = None
def test():
pymue.calculate(calculation)
best_plan = generate_plan(calculation)
print("")
print("======best plan======")
print("1st round:", best_plan[0])
print("2nd round:", best_plan[1])
print("3rd round:", best_plan[2])
test()
print("")
print("teams:", cnt)
print("solutions that where calculated:", calculation.solutions())
| bsd-3-clause | Python |
700c64f1c238cdd80e46649ab8c989bf290cde68 | fix imports | RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu | rasa_nlu/evaluate.py | rasa_nlu/evaluate.py | import logging
from rasa_nlu.test import main
logger = logging.getLogger(__name__)
if __name__ == '__main__': # pragma: no cover
logger.warning("Calling `rasa_nlu.evaluate` is deprecated. "
"Please use `rasa_nlu.test` instead.")
main()
| import logging
import rasa_nlu.test as test
logger = logging.getLogger(__name__)
if __name__ == '__main__': # pragma: no cover
logger.warning("Calling `rasa_nlu.evaluate` is deprecated. "
"Please use `rasa_nlu.test` instead.")
test.main()
| apache-2.0 | Python |
6e921cd2f1f6954a51ba7861a3d58fe21bd91017 | Reduce penalties for high difficulty | vkramskikh/cgminer-pool-chooser | rating_calculator.py | rating_calculator.py | from math import exp
import logging
logger = logging.getLogger(__name__)
class RatingCalculator(object):
@staticmethod
def analyze_exchange_volume(currency):
# price of coins with low exchange volume is usually not stable, reduce rating
exchange_ratio = currency['exchange_ratio'] = currency['exchange_volume'] / currency['coins_per_day']
exchange_volume_change = (exp(-1000.0 / exchange_ratio) - 0.6) / 2
return exchange_volume_change
@staticmethod
def analyze_profit_growth(currency):
# price of coins with profit grow ratio > 1.5 is not stable, reduce rating
profit_growth = currency['profit_growth']
profit_growth_change = 0
if profit_growth > 1:
profit_growth_change = -exp(-1.5 / (profit_growth - 1))
return profit_growth_change
@staticmethod
def analyze_difficulty(currency):
# coins with high difficulty don't let me switch PPLNS pools frequently, slightly reduce rating
difficulty = currency['difficulty']
difficulty_change = -exp(-200.0 / difficulty) / 10
return difficulty_change
@classmethod
def rate_currency(cls, currency):
rating = currency['usd_per_day']
logger.debug('%s original rating is %f', currency['name'], rating)
for method in ('analyze_exchange_volume', 'analyze_profit_growth', 'analyze_difficulty'):
rating_change = getattr(cls, method)(currency)
rating *= (rating_change + 1)
logger.debug('%s rating changed by %s by %.2f%% to %f', currency['name'], method, rating_change * 100, rating)
return rating
| from math import exp
import logging
logger = logging.getLogger(__name__)
class RatingCalculator(object):
@staticmethod
def analyze_exchange_volume(currency):
# price of coins with low exchange volume is usually not stable, reduce rating
exchange_ratio = currency['exchange_ratio'] = currency['exchange_volume'] / currency['coins_per_day']
exchange_volume_change = (exp(-1000.0 / exchange_ratio) - 0.6) / 2
return exchange_volume_change
@staticmethod
def analyze_profit_growth(currency):
# price of coins with profit grow ratio > 1.5 is not stable, reduce rating
profit_growth = currency['profit_growth']
profit_growth_change = 0
if profit_growth > 1:
profit_growth_change = -exp(-1.5 / (profit_growth - 1))
return profit_growth_change
@staticmethod
def analyze_difficulty(currency):
# coins with high difficulty don't let me switch PPLNS pools frequently, slightly reduce rating
difficulty = currency['difficulty']
difficulty_change = -exp(-200.0 / difficulty) / 5
return difficulty_change
@classmethod
def rate_currency(cls, currency):
rating = currency['usd_per_day']
logger.debug('%s original rating is %f', currency['name'], rating)
for method in ('analyze_exchange_volume', 'analyze_profit_growth', 'analyze_difficulty'):
rating_change = getattr(cls, method)(currency)
rating *= (rating_change + 1)
logger.debug('%s rating changed by %s by %.2f%% to %f', currency['name'], method, rating_change * 100, rating)
return rating
| mit | Python |
28ccd5b5dec0ac6e8724af3c61d167771b4d9c77 | bump version 0.1.8 | sripathikrishnan/redis-rdb-tools | rdbtools/__init__.py | rdbtools/__init__.py | from rdbtools.parser import RdbCallback, RdbParser, DebugCallback
from rdbtools.callbacks import JSONCallback, DiffCallback, ProtocolCallback
from rdbtools.memprofiler import MemoryCallback, PrintAllKeys, StatsAggregator
__version__ = '0.1.8'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'RdbParser', 'RdbCallback', 'JSONCallback', 'DiffCallback', 'MemoryCallback', 'ProtocolCallback', 'PrintAllKeys']
| from rdbtools.parser import RdbCallback, RdbParser, DebugCallback
from rdbtools.callbacks import JSONCallback, DiffCallback, ProtocolCallback
from rdbtools.memprofiler import MemoryCallback, PrintAllKeys, StatsAggregator
__version__ = '0.1.7'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = [
'RdbParser', 'RdbCallback', 'JSONCallback', 'DiffCallback', 'MemoryCallback', 'ProtocolCallback', 'PrintAllKeys']
| mit | Python |
f5e2a65abbfb956ee6837763dc289d0f5bb68453 | Update prices.py | architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS | cea/optimization/prices.py | cea/optimization/prices.py | # -*- coding: utf-8 -*-
"""
This file imports the price details from the cost database as a class. This helps in preventing multiple importing
of the corresponding values in individual files.
"""
from __future__ import division
import numpy as np
__author__ = "Jimeno A. Fonseca"
__copyright__ = "Copyright 2019, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno A. Fonseca"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "thomas@arch.ethz.ch"
__status__ = "Production"
class Prices(object):
def __init__(self, supply_systems):
pricing = supply_systems.FEEDSTOCKS
self.NG_PRICE = np.tile(
pricing['NATURALGAS']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.BG_PRICE = np.tile(
pricing['BIOGAS']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.WB_PRICE = np.tile(
pricing['WETBIOMASS']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.DB_PRICE = np.tile(
pricing['DRYBIOMASS']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.SOLAR_PRICE = np.tile(
pricing['SOLAR']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.SOLAR_PRICE_EXPORT = np.tile(
pricing['SOLAR']['Opex_var_sell_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.ELEC_PRICE = np.tile(
pricing['GRID']['Opex_var_buy_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
self.ELEC_PRICE_EXPORT = np.tile(
pricing['GRID']['Opex_var_sell_USD2015perkWh'].values / 1000, 365) # in USD/Wh for every hour of a year
| # -*- coding: utf-8 -*-
"""
This file imports the price details from the cost database as a class. This helps in preventing multiple importing
of the corresponding values in individual files.
"""
from __future__ import division
__author__ = "Jimeno A. Fonseca"
__copyright__ = "Copyright 2019, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Jimeno A. Fonseca"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "thomas@arch.ethz.ch"
__status__ = "Production"
class Prices(object):
def __init__(self, supply_systems):
pricing = supply_systems.FEEDSTOCKS
self.NG_PRICE = list(
pricing['NATURALGAS']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.BG_PRICE = list(
pricing['BIOGAS']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.WB_PRICE = list(
pricing['WETBIOMASS']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.DB_PRICE = list(
pricing['DRYBIOMASS']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.SOLAR_PRICE = list(
pricing['SOLAR']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.SOLAR_PRICE_EXPORT = list(
pricing['SOLAR']['Opex_var_sell_USD2015perkWh']) * 365 # in USD/Wh for every hour of a year
self.ELEC_PRICE = list(
pricing['GRID']['Opex_var_buy_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
self.ELEC_PRICE_EXPORT = list(
pricing['GRID']['Opex_var_sell_USD2015perkWh'] / 1000) * 365 # in USD/Wh for every hour of a year
| mit | Python |
81f4f4b1318ff800e3febbc1bd7bbd9ff8e868b1 | Add some exception handling for dict | muddyfish/PYKE,muddyfish/PYKE | node/dictionary.py | node/dictionary.py | #!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
if word not in words:
rtn += "Word %s not in wordlist" % word
else:
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
| #!/usr/bin/env python
from nodes import Node
import json
class Dictionary(Node):
char = ".d"
args = 0
results = 1
def __init__(self, word_ids:Node.IntList):
if not hasattr(Dictionary, "word_list"):
Dictionary.word_list = init_words()
self.words = " ".join(Dictionary.word_list[i] for i in word_ids)
def func(self):
return self.words
def compress(inp):
words = init_words()
inp_words = [word.lower()for word in inp.split(" ")]
rtn = chr(len(inp_words))
for word in inp_words:
assert(word in words)
rtn += chr(words.index(word))
return rtn
def init_words(dict_file = "dictionary.json"):
words_f = open(dict_file)
words = json.load(words_f)
words_f.close()
return words
| mit | Python |
bb3605bd99892bed37ecb2b6371d2bc88d599e1a | Include "OpenStack" string in the user agent | alvarolopez/caso,IFCA/caso,IFCA/caso | caso/__init__.py | caso/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s (OpenStack)" % __version__
| # -*- coding: utf-8 -*-
# Copyright 2014 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
__version__ = pbr.version.VersionInfo(
'caso').version_string()
user_agent = "caso/%s" % __version__
| apache-2.0 | Python |
9bc822e4e602682ae4ee8cf782436898e54c1761 | fix typo | cloudfleet/marina-registry-web | marina_web.py | marina_web.py | from flask import Flask, jsonify, request
import settings, traceback, getopt, sys, os
import repositories
import github
app = Flask(__name__)
api_base = "/api/v1"
@app.route(api_base + '/repos/', methods=['GET'])
def list_repositories():
repository_list = repositories.load_repository_list()
return jsonify({"repositories": repository_list})
@app.route(api_base + '/repos/<namespace>/<repository>', methods=['GET'])
def get_repository(namespace, repository):
repository_info = repositories.load_repository_info('%s/%s' % (namespace, repository))
return jsonify(repository_info)
@app.route(api_base + '/repos/<namespace>/<repository>/builds/<build_id>/logs', methods=['GET'])
def get_build_logs(namespace, repository, build_id):
return jsonify(repositories.load_build_logs('%s/%s' % (namespace, repository), build_id))
@app.route(api_base + '/github/pushes/<organization>/', methods=['POST'])
def receive_github_webhook(organization):
print request.get_json()
github.handle_push(request.get_json())
return jsonify({"success": True})
if __name__ == '__main__':
print os.environ
app.run(host='0.0.0.0', debug=True)
| from flask import Flask, jsonify, request
import settings, traceback, getopt, sys, os
import repositories
import github
app = Flask(__name__)
api_base = "/api/v1"
@app.route(api_base + '/repos/', methods=['GET'])
def list_repositories():
repository_list = repositories.load_repository_list()
return jsonify({"repositories": repository_list})
@app.route(api_base + '/repos/<namespace>/<repository>', methods=['GET'])
def get_repository(namespace, repository):
repository_info = repositories.load_repository_info('%s/%s' % (namespace, repository))
return jsonify(repository_info)
@app.route(api_base + '/repos/<namespace>/<repository>/builds/<build_id>/logs', methods=['GET'])
def get_build_logs(namespace, repository, build_id):
return jsonify(repositories.load_build_logs('%s/%s' % (namespace, repository), build_id))
@app.route(api_base + '/github/pushes/<organization>/', methods=['POST'])
def receive_github_webhook(organization):
print request.get_json()
github.handle_push(request.get_json()
return jsonify({"success": True})
if __name__ == '__main__':
print os.environ
app.run(host='0.0.0.0', debug=True)
| agpl-3.0 | Python |
20e4ef8b4f717a4dae43c6eff16a88ec48ec0066 | Remove unused code | lamyj/redmill,lamyj/redmill,lamyj/redmill | src/redmill/models/item.py | src/redmill/models/item.py | # This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sqlalchemy
import sqlalchemy.orm
from . import Base
class Item(Base):
sub_types = []
__tablename__ = "item"
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column(sqlalchemy.Unicode, nullable=False)
type = sqlalchemy.Column(sqlalchemy.String)
parent_id = sqlalchemy.Column(
sqlalchemy.Integer, sqlalchemy.ForeignKey("item.id"))
Status = ("published", "archived")
status = sqlalchemy.Column(sqlalchemy.Enum(*Status), default="published")
created_at = sqlalchemy.Column(
sqlalchemy.DateTime, default=lambda: datetime.datetime.now())
modified_at = sqlalchemy.Column(
sqlalchemy.DateTime, nullable=True)
children = sqlalchemy.orm.relationship("Item", lazy="immediate")
__mapper_args__ = { "polymorphic_identity": "item", "polymorphic_on": type }
def __eq__(self, other):
return self.id == other.id
def _get_parent(self):
if self.parent_id is None:
parent = None
else:
parent = sqlalchemy.orm.object_session(self).query(
sqlalchemy.orm.with_polymorphic(Item, Item.sub_types))\
.filter_by(id=self.parent_id).one()
return parent
def _get_parents(self):
current = self
parents = []
while current.parent_id is not None:
parents.insert(0, current.parent)
current = current.parent
return parents
parent = property(_get_parent)
parents = property(_get_parents)
| # This file is part of Redmill.
#
# Redmill is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Redmill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Redmill. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sqlalchemy
import sqlalchemy.orm
from . import Base
class Status(sqlalchemy.types.TypeDecorator):
impl = sqlalchemy.types.String
def process_bind_param(self, value, dialect):
return json.dumps(value)
def process_result_value(self, value, dialect):
return json.loads(value)
class Item(Base):
sub_types = []
__tablename__ = "item"
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
name = sqlalchemy.Column(sqlalchemy.Unicode, nullable=False)
type = sqlalchemy.Column(sqlalchemy.String)
parent_id = sqlalchemy.Column(
sqlalchemy.Integer, sqlalchemy.ForeignKey("item.id"))
Status = ("published", "archived")
status = sqlalchemy.Column(sqlalchemy.Enum(*Status), default="published")
created_at = sqlalchemy.Column(
sqlalchemy.DateTime, default=lambda: datetime.datetime.now())
modified_at = sqlalchemy.Column(
sqlalchemy.DateTime, nullable=True)
children = sqlalchemy.orm.relationship("Item", lazy="immediate")
__mapper_args__ = { "polymorphic_identity": "item", "polymorphic_on": type }
def __eq__(self, other):
return self.id == other.id
def _get_parent(self):
if self.parent_id is None:
parent = None
else:
parent = sqlalchemy.orm.object_session(self).query(
sqlalchemy.orm.with_polymorphic(Item, Item.sub_types))\
.filter_by(id=self.parent_id).one()
return parent
def _get_parents(self):
current = self
parents = []
while current.parent_id is not None:
parents.insert(0, current.parent)
current = current.parent
return parents
parent = property(_get_parent)
parents = property(_get_parents)
| agpl-3.0 | Python |
fa48bf16d975b5149871901a567dd6ec5c1bc56f | Fix inspector.get_config | csomh/atomic-reactor-inspect-plugins | inspectors.py | inspectors.py | """
Copyright (c) 2017 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
from __future__ import print_function
def get_config(self, module):
self.log.info('get_config on {0}'.format(module.__name__))
self.log.info(module.get_config(self.workflow).cluster_configs)
self.log.info(module.get_config(self.workflow).conf)
| """
Copyright (c) 2017 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
from __future__ import print_function
def get_config(self, module):
self.log.info('get_config on {0}'.format(module.__name__))
self.log.info(module.get_config(self.workspace).cluster_configs)
self.log.info(module.get_config(self.workspace).conf)
| bsd-3-clause | Python |
45968d8075ac419f0651a41981e75f58d70411cf | add random_subset() | rlowrance/mlpack,rlowrance/mlpack | numpy_utilities.py | numpy_utilities.py | '''utility functions that take numpy array's as arguments'''
import numpy as np
import unittest
def random_subset(matrix, n):
'''pick a random subset without replacement of size n form the matrix'''
selected = np.random.choice(matrix.shape[0], size=n, replace=False)
return matrix[selected]
class TestRandomSubset(unittest.TestCase):
def test(self):
# ref: stackoverflow at numpty-get-random-set-of-rows-from-2d-array
a = np.random.randint(5, size=(100, 3))
s = random_subset(a, 10)
self.assertEqual(s.shape[0], 10)
self.assertEqual(s.shape[1], 3)
def almost_equal(a, b, tolerance, verbose=False):
return almostEqual(a, b, tolerance, verbose)
def almostEqual(a, b, tolerance, verbose=False):
'''Check if |a - b| < tolerance.'''
diff = np.linalg.norm(a - b, 2)
ok = diff < tolerance
if verbose and not ok:
print 'a', a
print 'b', b
print 'diff', diff
print 'tolerance', tolerance
return ok
class TestAlmostEqual(unittest.TestCase):
def test(self):
a = np.array([1, 1])
b = np.array([2, 2])
self.assertTrue(almostEqual(a, a, 1e-5))
self.assertTrue(almostEqual(a, b, 1.5))
self.assertFalse(almostEqual(a, b, .99))
def assertAlmostEqual(self, a, b):
'''
Assert that two numpy arrays have same shape and elements.
Args
self: unittest object
a : numpy array
b : numpy array
'''
self.assertEqual(a.ndim, b.ndim)
ndim = a.ndim
for d in range(ndim):
self.assertEqual(a.shape[d], b.shape[d])
a_flattened = a.flatten()
b_flattened = b.flatten()
for index in xrange(a_flattened.size):
self.assertAlmostEqual(a_flattened[index], b_flattened[index])
class TestAssertAlmostEqual(unittest.TestCase):
def setUp(self):
self.a1d = np.array([1, 2, 3])
self.b1d = np.array([4, 5, 6])
self.a2d = np.array([[1, 2, 3],
[4, 5, 6]])
self.b2d = np.array([[1, 2, 3],
[4, 5, 99]])
def test_assertAlmostEqual_equal(self):
assertAlmostEqual(self, self.a1d, self.a1d)
assertAlmostEqual(self, self.a2d, self.a2d)
if __name__ == '__main__':
unittest.main()
| import numpy as np
import unittest
def almost_equal(a, b, tolerance, verbose=False):
return almostEqual(a, b, tolerance, verbose)
def almostEqual(a, b, tolerance, verbose=False):
'''Check if |a - b| < tolerance.'''
diff = np.linalg.norm(a - b, 2)
ok = diff < tolerance
if verbose and not ok:
print 'a', a
print 'b', b
print 'diff', diff
print 'tolerance', tolerance
return ok
class TestAlmostEqual(unittest.TestCase):
def test(self):
a = np.array([1, 1])
b = np.array([2, 2])
self.assertTrue(almostEqual(a, a, 1e-5))
self.assertTrue(almostEqual(a, b, 1.5))
self.assertFalse(almostEqual(a, b, .99))
def assertAlmostEqual(self, a, b):
'''
Assert that two numpy arrays have same shape and elements.
Args
self: unittest object
a : numpy array
b : numpy array
'''
self.assertEqual(a.ndim, b.ndim)
ndim = a.ndim
for d in range(ndim):
self.assertEqual(a.shape[d], b.shape[d])
a_flattened = a.flatten()
b_flattened = b.flatten()
for index in xrange(a_flattened.size):
self.assertAlmostEqual(a_flattened[index], b_flattened[index])
class TestAssertAlmostEqual(unittest.TestCase):
def setUp(self):
self.a1d = np.array([1, 2, 3])
self.b1d = np.array([4, 5, 6])
self.a2d = np.array([[1, 2, 3],
[4, 5, 6]])
self.b2d = np.array([[1, 2, 3],
[4, 5, 99]])
def test_assertAlmostEqual_equal(self):
assertAlmostEqual(self, self.a1d, self.a1d)
assertAlmostEqual(self, self.a2d, self.a2d)
if __name__ == '__main__':
unittest.main()
| mit | Python |
b988a19583d7153f71a15d34f83f63ba7004fe68 | Improve my_menu.py. It's not looking terrible. | codypiersall/platformer | lib/my_menu.py | lib/my_menu.py | import pygame
pygame.init()
if not pygame.display.get_init():
pygame.display.init()
if not pygame.font.get_init():
pygame.font.init()
class Menu(object):
FONT = pygame.font.Font('../coders_crux.ttf', 32)
SPACE = 10
UP = pygame.K_UP
DOWN = pygame.K_DOWN
def __init__(self, screen, items, font=FONT):
self.screen = screen
self.items = items
self.selected = 0
self.surfaces = []
self.font = font
self.initial_repeat = pygame.key.get_repeat()
print(self.initial_repeat)
pygame.key.set_repeat(200, 70)
self.draw()
self.mainloop()
def draw(self):
self.surfaces.extend([self.font.render(str(i), 1, (255, 255, 255)) for i in self.items])
self.screen.fill((0, 0, 0))
self.screen.blit(self.surfaces[0], (25, 25))
self.screen.blit(self.surfaces[1], (25, 75))
def change_select(self, direction):
if direction == self.UP:
if self.selected == 0:
self.selected = len(self.items) - 1
else:
self.selected -= 1
elif direction == self.DOWN:
if self.selected == len(self.items) - 1:
self.selected = 0
else:
self.selected += 1
print(self.selected)
def seeya(self):
"""Clean up code when the menu is destroyed."""
if self.initial_repeat == (0, 0):
pygame.key.set_repeat()
else:
pygame.key.set_repeat(self.initial_repeat)
def mainloop(self):
pygame.display.update()
clock = pygame.time.Clock()
while True:
clock.tick(30)
for e in pygame.event.get():
if e.type == pygame.QUIT:
self.seeya()
return
if e.type == pygame.KEYDOWN:
if e.key == pygame.K_ESCAPE:
self.seeya()
return
elif e.key == self.UP or e.key == self.DOWN:
self.change_select(e.key)
self.draw()
if __name__ == '__main__':
screen = pygame.display.set_mode((640, 480))
menu = Menu(screen, 'this that theother'.split())
| import pygame
pygame.init()
if not pygame.display.get_init():
pygame.display.init()
if not pygame.font.get_init():
pygame.font.init()
class Menu(object):
FONT = pygame.font.Font('../coders_crux.ttf', 32)
def __init__(self, screen, items, font=FONT):
self.items = items
self.selected = 0
self.surfaces = []
self.font = font
self.draw(screen)
self.mainloop()
def draw(self, screen):
self.surfaces.append(self.font.render(self.items[0], 1, (255, 255, 255)))
screen.fill((0, 0, 0))
screen.blit(self.surfaces[0], (25, 25))
def mainloop(self):
pygame.display.update()
clock = pygame.time.Clock()
while True:
clock.tick(30)
for e in pygame.event.get():
if e.type == pygame.QUIT:
return
if e.type == pygame.KEYDOWN:
if e.key == pygame.K_ESCAPE:
return
if __name__ == '__main__':
screen = pygame.display.set_mode((640, 480))
menu = Menu(screen, 'this that theother'.split())
| bsd-3-clause | Python |
aba6fadf2ac4064a3ec7fdac05ea1b00a9591627 | add defensive unicode | cds-amal/addressparser,cds-amal/addressparser,cds-amal/addressparser,cds-amal/addressparser | nyctext/adparse.py | nyctext/adparse.py | """
Usage:
adparse --file=<infile> [--trace]
adparse [--trace] [--geo] <text>
Options:
-h --help Show this screen.
--version Show version.
--file=<infile> Input file.
--trace Print trace statement
--geo Return Geolocation attributes [default: False]
"""
from docopt import docopt
from os import environ
import codecs
from nyc_geoclient import Geoclient
from nycaddress import parse, parse_with_geo
def do_adhoc(text, g, trace=False, geo=False):
ads = []
if args['--geo']:
ads = parse_with_geo(text, g, trace)
else:
ads = parse(text, trace)
for ad in ads:
print ad
def do_file(fn, g, trace=False, geo=False):
total, parsed, failed = 0, 0, 0
ads = []
for line in codecs.open(fn, encoding='utf-8'):
line = line.encode('ascii', 'ignore').strip()
if line == '':
continue
total += 1
if geo:
ads = parse_with_geo(line, g, trace)
else:
ads = parse(line, trace)
if ads:
parsed += 1
print 'ok: [%s]' % line
else:
failed += 1
print 'Summary:\n\t%04d parsed\n\t%04d failed\n\t%04d Total' % (parsed, failed, total)
if __name__ == '__main__':
# https://urllib3.readthedocs.org/en/latest/security.html#pyopenssl
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
appid = environ['DOITT_CROL_APP_ID']
appkey = environ['DOITT_CROL_APP_KEY']
g = Geoclient(appid, appkey)
# import ipdb; ipdb.set_trace()
args = docopt(__doc__, version='0.1.1rc')
ads = []
if args['--file']:
ads = do_file(args['--file'], g,
trace=args['--trace'], geo=args['--geo'])
else:
do_adhoc(args['<text>'], g,
trace=args['--trace'], geo=args['--geo'])
| """
Usage:
adparse --file=<infile> [--trace]
adparse [--trace] [--geo] <text>
Options:
-h --help Show this screen.
--version Show version.
--file=<infile> Input file.
--trace Print trace statement
--geo Return Geolocation attributes [default: False]
"""
from docopt import docopt
from os import environ
import codecs
from nyc_geoclient import Geoclient
from nycaddress import parse, parse_with_geo
def do_adhoc(text, g, trace=False, geo=False):
ads = []
if args['--geo']:
ads = parse_with_geo(text, g, trace)
else:
ads = parse(text, trace)
for ad in ads:
print ad
def do_file(fn, g, trace=False, geo=False):
total, parsed, failed = 0, 0, 0
ads = []
for line in codecs.open(fn, encoding='utf-8'):
line = line.strip()
if line == '':
continue
total += 1
if geo:
ads = parse_with_geo(line, g, trace)
else:
ads = parse(line, trace)
if ads:
parsed += 1
print 'ok: [%s]' % line
else:
failed += 1
print 'Summary:\n\t%04d parsed\n\t%04d failed\n\t%04d Total' % (parsed, failed, total)
if __name__ == '__main__':
# https://urllib3.readthedocs.org/en/latest/security.html#pyopenssl
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
appid = environ['DOITT_CROL_APP_ID']
appkey = environ['DOITT_CROL_APP_KEY']
g = Geoclient(appid, appkey)
# import ipdb; ipdb.set_trace()
args = docopt(__doc__, version='0.1.1rc')
ads = []
if args['--file']:
ads = do_file(args['--file'], g,
trace=args['--trace'], geo=args['--geo'])
else:
do_adhoc(args['<text>'], g,
trace=args['--trace'], geo=args['--geo'])
| mit | Python |
707bbb0667edfc9df15863286a02f64adfb5544d | leverage os.path.relpath if available | Martix/Eonos,libo/openembedded,thebohemian/openembedded,JamesAng/goe,hulifox008/openembedded,sutajiokousagi/openembedded,JamesAng/goe,John-NY/overo-oe,xifengchuo/openembedded,scottellis/overo-oe,sampov2/audio-openembedded,buglabs/oe-buglabs,yyli/overo-oe,scottellis/overo-oe,SIFTeam/openembedded,thebohemian/openembedded,nx111/openembeded_openpli2.1_nx111,mrchapp/arago-oe-dev,SIFTeam/openembedded,anguslees/openembedded-android,xifengchuo/openembedded,dellysunnymtech/sakoman-oe,libo/openembedded,rascalmicro/openembedded-rascal,dellysunnymtech/sakoman-oe,buglabs/oe-buglabs,mrchapp/arago-oe-dev,openembedded/openembedded,anguslees/openembedded-android,rascalmicro/openembedded-rascal,SIFTeam/openembedded,mrchapp/arago-oe-dev,sentient-energy/emsw-oe-mirror,openembedded/openembedded,buglabs/oe-buglabs,sledz/oe,giobauermeister/openembedded,JamesAng/oe,rascalmicro/openembedded-rascal,sentient-energy/emsw-oe-mirror,hulifox008/openembedded,openembedded/openembedded,sentient-energy/emsw-oe-mirror,sentient-energy/emsw-oe-mirror,trini/openembedded,sledz/oe,bticino/openembedded,anguslees/openembedded-android,giobauermeister/openembedded,BlackPole/bp-openembedded,giobauermeister/openembedded,John-NY/overo-oe,xifengchuo/openembedded,Martix/Eonos,BlackPole/bp-openembedded,crystalfontz/openembedded,dave-billin/overo-ui-moos-auv,crystalfontz/openembedded,John-NY/overo-oe,rascalmicro/openembedded-rascal,openpli-arm/openembedded,yyli/overo-oe,yyli/overo-oe,rascalmicro/openembedded-rascal,bticino/openembedded,dellysunnymtech/sakoman-oe,trini/openembedded,yyli/overo-oe,yyli/overo-oe,buglabs/oe-buglabs,buglabs/oe-buglabs,JamesAng/oe,bticino/openembedded,SIFTeam/openembedded,openembedded/openembedded,openembedded/openembedded,anguslees/openembedded-android,sledz/oe,giobauermeister/openembedded,trini/openembedded,sampov2/audio-openembedded,sentient-energy/emsw-oe-mirror,scottellis/overo-oe,dave-billin/overo-ui-mo
os-auv,thebohemian/openembedded,bticino/openembedded,BlackPole/bp-openembedded,JamesAng/goe,buglabs/oe-buglabs,John-NY/overo-oe,yyli/overo-oe,nx111/openembeded_openpli2.1_nx111,bticino/openembedded,sutajiokousagi/openembedded,SIFTeam/openembedded,JamesAng/goe,dave-billin/overo-ui-moos-auv,crystalfontz/openembedded,mrchapp/arago-oe-dev,crystalfontz/openembedded,hulifox008/openembedded,scottellis/overo-oe,trini/openembedded,John-NY/overo-oe,crystalfontz/openembedded,trini/openembedded,JamesAng/oe,openembedded/openembedded,openembedded/openembedded,JamesAng/goe,xifengchuo/openembedded,nx111/openembeded_openpli2.1_nx111,sledz/oe,openembedded/openembedded,openpli-arm/openembedded,xifengchuo/openembedded,thebohemian/openembedded,rascalmicro/openembedded-rascal,yyli/overo-oe,Martix/Eonos,SIFTeam/openembedded,trini/openembedded,buglabs/oe-buglabs,giobauermeister/openembedded,hulifox008/openembedded,anguslees/openembedded-android,JamesAng/oe,Martix/Eonos,dellysunnymtech/sakoman-oe,anguslees/openembedded-android,rascalmicro/openembedded-rascal,openpli-arm/openembedded,sledz/oe,sledz/oe,sentient-energy/emsw-oe-mirror,crystalfontz/openembedded,BlackPole/bp-openembedded,dave-billin/overo-ui-moos-auv,JamesAng/goe,openpli-arm/openembedded,John-NY/overo-oe,BlackPole/bp-openembedded,BlackPole/bp-openembedded,giobauermeister/openembedded,hulifox008/openembedded,nx111/openembeded_openpli2.1_nx111,dave-billin/overo-ui-moos-auv,dave-billin/overo-ui-moos-auv,xifengchuo/openembedded,mrchapp/arago-oe-dev,dellysunnymtech/sakoman-oe,mrchapp/arago-oe-dev,sutajiokousagi/openembedded,sampov2/audio-openembedded,buglabs/oe-buglabs,scottellis/overo-oe,hulifox008/openembedded,openembedded/openembedded,openpli-arm/openembedded,thebohemian/openembedded,sledz/oe,dellysunnymtech/sakoman-oe,openpli-arm/openembedded,sutajiokousagi/openembedded,JamesAng/oe,SIFTeam/openembedded,Martix/Eonos,openpli-arm/openembedded,libo/openembedded,nx111/openembeded_openpli2.1_nx111,openembedded/openembedded,mrchapp/arago
-oe-dev,trini/openembedded,hulifox008/openembedded,xifengchuo/openembedded,xifengchuo/openembedded,crystalfontz/openembedded,Martix/Eonos,dellysunnymtech/sakoman-oe,JamesAng/oe,libo/openembedded,John-NY/overo-oe,bticino/openembedded,sampov2/audio-openembedded,sampov2/audio-openembedded,nx111/openembeded_openpli2.1_nx111,sentient-energy/emsw-oe-mirror,Martix/Eonos,scottellis/overo-oe,scottellis/overo-oe,thebohemian/openembedded,BlackPole/bp-openembedded,JamesAng/goe,giobauermeister/openembedded,openembedded/openembedded,anguslees/openembedded-android,yyli/overo-oe,sutajiokousagi/openembedded,sampov2/audio-openembedded,dellysunnymtech/sakoman-oe,dellysunnymtech/sakoman-oe,libo/openembedded,xifengchuo/openembedded,sutajiokousagi/openembedded,libo/openembedded,sutajiokousagi/openembedded,dave-billin/overo-ui-moos-auv,thebohemian/openembedded,nx111/openembeded_openpli2.1_nx111,JamesAng/oe,bticino/openembedded,giobauermeister/openembedded,giobauermeister/openembedded,rascalmicro/openembedded-rascal,libo/openembedded,nx111/openembeded_openpli2.1_nx111,sampov2/audio-openembedded | lib/oe/path.py | lib/oe/path.py | def join(*paths):
"""Like os.path.join but doesn't treat absolute RHS specially"""
import os.path
return os.path.normpath("/".join(paths))
def relative(src, dest):
""" Return a relative path from src to dest.
>>> relative("/usr/bin", "/tmp/foo/bar")
../../tmp/foo/bar
>>> relative("/usr/bin", "/usr/lib")
../lib
>>> relative("/tmp", "/tmp/foo/bar")
foo/bar
"""
import os.path
if hasattr(os.path, "relpath"):
return os.path.relpath(dest, src)
else:
destlist = os.path.normpath(dest).split(os.path.sep)
srclist = os.path.normpath(src).split(os.path.sep)
# Find common section of the path
common = os.path.commonprefix([destlist, srclist])
commonlen = len(common)
# Climb back to the point where they differentiate
relpath = [ pardir ] * (len(srclist) - commonlen)
if commonlen < len(destlist):
# Add remaining portion
relpath += destlist[commonlen:]
return sep.join(relpath)
def format_display(path, metadata):
""" Prepare a path for display to the user. """
rel = relative(metadata.getVar("TOPDIR", 1), path)
if len(rel) > len(path):
return path
else:
return rel
| def join(*paths):
"""Like os.path.join but doesn't treat absolute RHS specially"""
import os.path
return os.path.normpath("/".join(paths))
def relative(src, dest):
""" Return a relative path from src to dest.
>>> relative("/usr/bin", "/tmp/foo/bar")
../../tmp/foo/bar
>>> relative("/usr/bin", "/usr/lib")
../lib
>>> relative("/tmp", "/tmp/foo/bar")
foo/bar
"""
from os.path import sep, pardir, normpath, commonprefix
destlist = normpath(dest).split(sep)
srclist = normpath(src).split(sep)
# Find common section of the path
common = commonprefix([destlist, srclist])
commonlen = len(common)
# Climb back to the point where they differentiate
relpath = [ pardir ] * (len(srclist) - commonlen)
if commonlen < len(destlist):
# Add remaining portion
relpath += destlist[commonlen:]
return sep.join(relpath)
def format_display(path, metadata):
""" Prepare a path for display to the user. """
rel = relative(metadata.getVar("TOPDIR", 1), path)
if len(rel) > len(path):
return path
else:
return rel
| mit | Python |
ffa31e55c8444b5203a0308910cf315765add708 | add working methods, except remove | palindromed/data-structures | linked_list.py | linked_list.py | class LinkedList(object):
def __init__(self, iterable=None):
self.head_node = Node(None, None)
if iterable is not None:
for n in range(0, len(iterable)):
self.insert(iterable[n])
def insert(self, val):
"""Insert a new value at the head of the list."""
self.head_node = Node(val, self.head_node)
def pop(self):
"""Remove the first value of list and return it."""
try:
return_value = self.head_node.value
self.head_node = self.head_node.next
return return_value
except AttributeError:
return "The list is empty."
def size(self):
"""Return the length of the list."""
value = self.head_node
counter = -1
while value is not None:
value = value.next
counter += 1
return counter
def search(self, val):
"""Return the node containing val."""
value = self.head_node.value
node = self.head_node
try:
while val != value:
node = value.next
value = value.next.value
return node
except AttributeError:
return "That value is not in the list."
def remove(self, node):
"""Remove given node from list if it exists."""
this = self.head_node
if node != this:
self.head_node = this.next
try:
while node != this:
prev_node = this
this = this.next
except AttributeError:
return "That node does not exist in the list."
else:
next_node = this.next
prev_node.update_next(next_node)
def display(self):
"""Print list as a Python tuple literal."""
print("()")
pass
class Node(object):
def __init__(self, value, next):
self.value = value
self.next = next
def update_next(self, next):
self.next = next
| class LinkedList(object):
def __init__(self, iterable=None):
if iterable is None:
self.head_node = Node(None, None)
else:
pass
def insert(self, val):
"""Insert a new value at the head of the list."""
self.head_node = Node(val, self.head_node)
def pop(self):
"""Remove the first value of list and return it."""
pass
def size(self):
"""Return the length of the list."""
pass
def search(self, val):
"""Return the node containing val."""
pass
def remove(self, node):
"""Remove given node from list if it exists."""
pass
def display(self):
"""Print list as a Python tuple literal."""
pass
class Node(object):
def __init__(self, value, next):
self.value = value
self.next = next
| mit | Python |
1acf3ac3a90764899a616e44baf0fa5b7dac44c2 | Fix batch test | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/cli/batch.py | tests/integration/cli/batch.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <nicole@saltstack.com>`
'''
# Import Salt Libs
import integration
# Import Salt Testing Libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
class BatchTest(integration.ShellCase):
'''
Integration tests for the salt.cli.batch module
'''
def test_batch_run(self):
'''
Tests executing a simple batch command to help catch regressions
'''
ret = ['sub_minion Detected for this batch run',
'minion Detected for this batch run',
'',
"Executing run on ['sub_minion']",
'',
'sub_minion:',
' batch testing',
'',
"Executing run on ['minion']",
'',
'minion:',
' batch testing']
ret = sorted(ret)
cmd = sorted(self.run_salt('\'*\' test.echo \'batch testing\' -b 50%'))
self.assertListEqual(cmd, ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(BatchTest)
| # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <nicole@saltstack.com>`
'''
# Import Salt Libs
import integration
# Import Salt Testing Libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
class BatchTest(integration.ShellCase):
'''
Integration tests for the salt.cli.batch module
'''
def test_batch_run(self):
'''
Tests executing a simple batch command to help catch regressions
'''
ret = ['sub_minion Detected for this batch run',
'minion Detected for this batch run',
'',
"Executing run on ['sub_minion']",
'',
'sub_minion:',
' batch testing',
'',
"Executing run on ['minion']",
'',
'minion:',
' batch testing']
self.assertEqual(self.run_salt('\'*\' test.echo \'batch testing\' -b 50%'), ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(BatchTest)
| apache-2.0 | Python |
ebf5e05acfb7f1edce0c0987576ee712f3fdea54 | Fix tests to use pytest | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | test/scripts/test_sequana_coverage.py | test/scripts/test_sequana_coverage.py | from sequana.scripts import coverage
from sequana import sequana_data
import pytest
prog = "sequana_coverage"
@pytest.fixture
def coveragefix():
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def test_version():
try:
coverage.main([prog, '--version'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_help():
try:
coverage.main([prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(tmpdir):
import os
directory = tmpdir.mkdir("report")
name = directory.__str__()
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([prog, '-i', filename, "-o", "--output-directory", name])
assert os.path.exists(name + os.sep + "coverage_mapping.chrom1.html")
| from sequana.scripts import coverage
from nose.plugins.attrib import attr
from sequana import sequana_data
#@attr("skip")
class TestPipeline(object):
@classmethod
def setup_class(klass):
"""This method is run once for each class before any tests are run"""
klass.prog = "sequana_coverage"
klass.params = {'prog': klass.prog}
@classmethod
def teardown_class(klass):
"""This method is run once for each class _after_ all tests are run"""
import os
# local nosetests execution
try:os.remove('README')
except:pass
try:os.remove('quality.rules')
except:pass
try:os.remove('config.yaml')
except:pass
def _test_version(self):
coverage.main([self.prog, '--version'])
def test_help(self):
try:
coverage.main([self.prog, '--help'])
assert False
except SystemExit:
pass
else:
raise Exception
def test_input(self):
filename = sequana_data('virus.bed', 'data')
reference = sequana_data('tofill.fa', 'data')
coverage.main([self.prog, '-i', filename, "-o"]) # "-r", reference])
| bsd-3-clause | Python |
1984ed2b1bf91137280582558437d8a507d88b14 | Fix in TBTestSuite to call TBTestCase | S41nz/TBTAF,S41nz/TBTAF | tbtaf/executor/TBTestSuite.py | tbtaf/executor/TBTestSuite.py | from TBTAFTrace import *
from TBTestCase import *
class TBTestSuite:
''' getSuiteResult() --- addTestCase(TBTestCase) --- getTestCases():TBTestCase[] --- getSuiteTrace():TBTAFTrace[] '''
def getSuiteResult(self):
return "This is a result"
def addTestCase(self,TBTestCase):
return "Test case added"
def getTestCases(self):
result = [TBTestCase(), TBTestCase()]
return result
def getSuiteTrace(self):
'''Array of TBTAFTrace'''
result = [TBTAFTrace(), TBTAFTrace()]
return result
testCaseStub = TBTestCase()
test = TBTestSuite()
print test.getSuiteResult()
print test.addTestCase(testCaseStub)
print test.getTestCases()
print test.getSuiteTrace() | from TBTAFTrace import *
from TBTestCase import *
class TBTestSuite:
''' getSuiteResult() --- addTestCase(TBTestCase) --- getTestCases():TBTestCase[] --- getSuiteTrace():TBTAFTrace[] '''
def getSuiteResult(self):
return "This is a result"
def addTestCase(self,TBTestCase):
return "Test case added"
def getTestCases(self):
'''This should return a TBTestCase[], but I don't have the class yet'''
return "An array of TBTestCase"
def getSuiteTrace(self):
'''Array of TBTAFTrace'''
result = [TBTAFTrace(), TBTAFTrace()]
return result
testCaseStub = TBTestCase()
test = TBTestSuite()
print test.getSuiteResult()
print test.addTestCase(testCaseStub)
print test.getTestCases()
print test.getSuiteTrace() | apache-2.0 | Python |
26d1d6de4e75766fe1e543ee3af2d6016c087e5b | Update tests | guillermooo/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle | tests/test_dartlint.py | tests/test_dartlint.py | import sublime
import unittest
from Dart.lib.path import is_dart_script
from Dart.lib.path import is_view_dart_script
from Dart.lib.path import extension_equals
from Dart.lib.path import view_extension_equals
class Test_is_dart_script(unittest.TestCase):
def testSucceedsIfDartScript(self):
self.assertTrue(is_dart_script("/some/path/foo.dart"))
def testFailsIfNotDartScript(self):
self.assertFalse(is_dart_script("/some/path/foo.txt"))
def testFailsWithEmtpyPath(self):
self.assertFalse(is_dart_script(""))
class Test_is_view_dart_script(unittest.TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def testFailsIfFileNotOnDisk(self):
self.assertFalse(is_view_dart_script(self.view))
def tearDown(self):
self.view.close()
class Test_extension_equals(unittest.TestCase):
def testCanDetectSameExtension(self):
self.assertTrue(extension_equals("foo.dart", ".dart"))
def testCanDetectDifferentExtension(self):
self.assertFalse(extension_equals("foo.dart", ".txt"))
class Test_view_extension_equals(unittest.TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def testFailsIfFileNotOnDisk(self):
self.assertFalse(view_extension_equals(self.view, '.dart'))
def tearDown(self):
self.view.close()
| import unittest
import sublime
from Dart.dartlint import is_dart_script
from Dart.dartlint import is_view_dart_script
from Dart.dartlint import extension_equals
from Dart.dartlint import view_extension_equals
class Test_is_dart_script(unittest.TestCase):
def testSucceedsIfDartScript(self):
self.assertTrue(is_dart_script("/some/path/foo.dart"))
def testFailsIfNotDartScript(self):
self.assertFalse(is_dart_script("/some/path/foo.txt"))
def testFailsWithEmtpyPath(self):
self.assertFalse(is_dart_script(""))
class Test_is_view_dart_script(unittest.TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def testFailsIfFileNotOnDisk(self):
self.assertFalse(is_view_dart_script(self.view))
def tearDown(self):
self.view.close()
class Test_extension_equals(unittest.TestCase):
def testCanDetectSameExtension(self):
self.assertTrue(extension_equals("foo.dart", ".dart"))
def testCanDetectDifferentExtension(self):
self.assertFalse(extension_equals("foo.dart", ".txt"))
class Test_view_extension_equals(unittest.TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def testFailsIfFileNotOnDisk(self):
self.assertFalse(view_extension_equals(self.view, '.dart'))
def tearDown(self):
self.view.close()
| bsd-3-clause | Python |
2b5c186337bcb396f630c0b86938e43eb06d3e5b | Add test checking only for imports | dls-controls/i10switching,dls-controls/i10switching | tests/test_i10knobs.py | tests/test_i10knobs.py | from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_import(self):
pass
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
| from pkg_resources import require
require("cothread")
require("mock")
import unittest
import mock
import sys
# Mock out catools as it requires EPICS binaries at import
sys.modules['cothread.catools'] = mock.MagicMock()
import cothread
import sys
import os
from PyQt4 import QtGui
sys.path.append(os.path.join(os.path.dirname(__file__), "..", ".."))
import i10knobs
class I10KnobsTest(unittest.TestCase):
def test_init(self):
cothread.iqt()
window = QtGui.QMainWindow()
_ = i10knobs.KnobsUi(window)
| apache-2.0 | Python |
a55dd124d54955476411ee8ae830c9fd3c4f00dc | Test get_errors() method of LatexBuildError. | mbr/latex | tests/test_pdfbuild.py | tests/test_pdfbuild.py | from latex import build_pdf, LatexBuildError
from latex.errors import parse_log
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
def test_finds_errors_correctly():
broken_latex = r"""
\documentclass{article}
\begin{document}
All good
\undefinedcontrolsequencehere
\end{document}
"""
try:
build_pdf(broken_latex)
except LatexBuildError as e:
assert parse_log(e.log) == e.get_errors()
else:
assert False, 'no exception raised'
| from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
| bsd-3-clause | Python |
8535c59c26e2c5badfd3637d41901f1bc987e200 | Add a test for the __call__ method of the APIRequest class. | openspending/gobble | tests/test_requests.py | tests/test_requests.py | """Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
def test_call():
request = APIRequest('google.com')
assert request().status_code == 200
| """Test the api_requests module."""
from pytest import mark
from gobble.api_requests import APIRequest
SIMPLE = ('foo.bar', dict(), ['https://foo.bar'])
LOCAL = ('0.0.0.0', dict(port=5000, schema='http'), ['http://0.0.0.0:5000'])
LONG = (
'foo.bar',
dict(
path=['spam', 'eggs'],
query={'foo': 'bar', 'spam': 'eggs'}
),
[
'https://foo.bar/spam/eggs?spam=eggs&foo=bar',
'https://foo.bar/spam/eggs?foo=bar&spam=eggs'
]
)
TEST_CASES = [SIMPLE, LONG, LOCAL]
# noinspection PyShadowingNames
@mark.parametrize('host, parameters, urls', TEST_CASES)
def test_url(host, parameters, urls):
assert APIRequest(host, **parameters).url in urls
| mit | Python |
acc64adea162a6920143c0b2416e05095859814b | Disable Shell for subprocess.call. | inetprocess/docker-lamp,inetprocess/docker-lamp,edyan/stakkr,edyan/stakkr,inetprocess/docker-lamp,edyan/stakkr | lib/docker.py | lib/docker.py | from json import loads as json_loads
import subprocess
def get_vms():
cmd = ['python', 'bin/compose', 'ps', '-q']
vms_id = subprocess.check_output(cmd).splitlines()
vms_info = dict()
for vm_id in vms_id:
vm_id = vm_id.decode('utf-8', 'strict')
vms_info[vm_id] = extract_vm_info(vm_id)
return vms_info
def extract_vm_info(vm_id: str):
try:
result = subprocess.check_output(['docker', 'inspect', vm_id], stderr=subprocess.STDOUT)
data = json_loads(result.decode("utf-8", "strict").rstrip('\n'))
vm_info = {
'name': data[0]['Name'].lstrip('/'),
'compose_name': data[0]['Config']['Labels']['com.docker.compose.service'],
'ports': data[0]['Config']['ExposedPorts'].keys() if 'ExposedPorts' in data[0]['Config'] else [],
'image': data[0]['Config']['Image'],
'ip': data[0]['NetworkSettings']['IPAddress'],
'running': data[0]['State']['Running'],
}
return vm_info
except subprocess.CalledProcessError as e:
return None
def container_running(name: str):
cmd = ['docker', 'inspect', '-f', '{{.State.Running}}', name]
try:
result = subprocess.check_output(cmd, stderr=subprocess.STDOUT).splitlines()[0]
return False if result.decode("utf-8", "strict") == 'false' else True
except subprocess.CalledProcessError as e:
return False
| from json import loads as json_loads
import subprocess
def get_vms():
cmd = ['python', 'bin/compose', 'ps', '-q']
vms_id = subprocess.check_output(cmd, shell=True).splitlines()
vms_info = dict()
for vm_id in vms_id:
vm_id = vm_id.decode('utf-8', 'strict')
vms_info[vm_id] = extract_vm_info(vm_id)
return vms_info
def extract_vm_info(vm_id: str):
try:
result = subprocess.check_output(['docker', 'inspect', vm_id], stderr=subprocess.STDOUT)
data = json_loads(result.decode("utf-8", "strict").rstrip('\n'))
vm_info = {
'name': data[0]['Name'].lstrip('/'),
'compose_name': data[0]['Config']['Labels']['com.docker.compose.service'],
'ports': data[0]['Config']['ExposedPorts'].keys() if 'ExposedPorts' in data[0]['Config'] else [],
'image': data[0]['Config']['Image'],
'ip': data[0]['NetworkSettings']['IPAddress'],
'running': data[0]['State']['Running'],
}
return vm_info
except subprocess.CalledProcessError as e:
return None
def container_running(name: str):
    """Return True when docker reports the named container as running."""
    cmd = ['docker', 'inspect', '-f', '{{.State.Running}}', name]
    try:
        result = subprocess.check_output(cmd, stderr=subprocess.STDOUT).splitlines()[0]
        return False if result.decode("utf-8", "strict") == 'false' else True
    except subprocess.CalledProcessError as e:
        # Unknown container: treat as not running.
        return False
| apache-2.0 | Python |
443373703ae321c66076644c4c97abde3e693133 | Add fix for broken test post submission | ollien/Timpani,ollien/Timpani,ollien/Timpani | tests/tests/addpost.py | tests/tests/addpost.py | import sqlalchemy
import selenium
from selenium.webdriver.common import keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
from timpani import database
LOGIN_TITLE = "Login - Timpani"
ADD_POST_TITLE = "Add Post - Timpani"
POST_TITLE = "Test post, please ignore."
POST_BODY = "This is a test post. There is no reason you should be paying attention to it."
POST_TAGS = ["test", "post", "selenium"]
def test(driver, username, password):
    """End-to-end: log in, author a post through the editor UI, then verify
    the post and its tags were persisted to the database."""
    databaseConnection = database.DatabaseConnection()
    driver.get("http://127.0.0.1:8080/add_post")
    #The head will usually load before the body, meaning that the title will be present
    WebDriverWait(driver, 10).until(expected_conditions.title_contains("Timpani"))
    #Check that we were redirected to the login page, as we are not logged in.
    assert driver.title == LOGIN_TITLE, "Title is %s" % driver.title
    loginForm = driver.find_element_by_id("login-form")
    usernameField = driver.find_element_by_id("username-field")
    passwordField = driver.find_element_by_id("password-field")
    usernameField.send_keys(username)
    passwordField.send_keys(password)
    loginForm.submit()
    WebDriverWait(driver, 10).until_not(expected_conditions.title_is(LOGIN_TITLE))
    #We should have been redirected to the add_post page.
    assert driver.title == ADD_POST_TITLE, "Title is %s" % driver.title
    postForm = driver.find_element_by_id("post-form")
    titleInput = driver.find_element_by_id("title-input")
    # Keystrokes must target the contenteditable element inside #editor.
    editorField = driver.find_element_by_css_selector("#editor > .ql-editor")
    tagsInput = driver.find_element_by_id("tag-input-div")
    titleInput.click()
    titleInput.send_keys(POST_TITLE)
    editorField.click()
    actionChain = selenium.webdriver.ActionChains(driver)
    actionChain.send_keys(POST_BODY)
    actionChain.perform()
    tagsInput.click()
    actionChain = selenium.webdriver.ActionChains(driver)
    # Space terminates each tag in the tag-input widget.
    for tag in POST_TAGS:
        actionChain.send_keys(tag)
        actionChain.send_keys(keys.Keys.SPACE)
    actionChain.perform()
    postForm.submit()
    # The newest post (highest id) should be the one just submitted.
    post = databaseConnection.session.query(database.tables.Post).order_by(sqlalchemy.desc(database.tables.Post.id)).first()
    tags = databaseConnection.session.query(database.tables.Tag.name).filter(database.tables.Tag.post_id == post.id).all()
    tags = [tag[0] for tag in tags] #Resolve sqlalchemy tuples
    assert post != None
    assert post.title == POST_TITLE, "Title is %s" % post.title
    assert tags == POST_TAGS, "Tags are %s" % tags
| import sqlalchemy
import selenium
from selenium.webdriver.common import keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.by import By
from timpani import database
LOGIN_TITLE = "Login - Timpani"
ADD_POST_TITLE = "Add Post - Timpani"
POST_TITLE = "Test post, please ignore."
POST_BODY = "This is a test post. There is no reason you should be paying attention to it."
POST_TAGS = ["test", "post", "selenium"]
def test(driver, username, password):
    """End-to-end: log in, author a post through the editor UI, then verify
    the post and its tags were persisted to the database."""
    databaseConnection = database.DatabaseConnection()
    driver.get("http://127.0.0.1:8080/add_post")
    #The head will usually load before the body, meaning that the title will be present
    WebDriverWait(driver, 10).until(expected_conditions.title_contains("Timpani"))
    #Check that we were redirected to the login page, as we are not logged in.
    assert driver.title == LOGIN_TITLE, "Title is %s" % driver.title
    loginForm = driver.find_element_by_id("login-form")
    usernameField = driver.find_element_by_id("username-field")
    passwordField = driver.find_element_by_id("password-field")
    usernameField.send_keys(username)
    passwordField.send_keys(password)
    loginForm.submit()
    WebDriverWait(driver, 10).until_not(expected_conditions.title_is(LOGIN_TITLE))
    #We should have been redirected to the add_post page.
    assert driver.title == ADD_POST_TITLE, "Title is %s" % driver.title
    postForm = driver.find_element_by_id("post-form")
    titleInput = driver.find_element_by_id("title-input")
    # NOTE(review): targets the outer #editor element; the sibling copy sends
    # keys to "#editor > .ql-editor" instead -- confirm which one receives input.
    editorField = driver.find_element_by_id("editor")
    tagsInput = driver.find_element_by_id("tag-input-div")
    titleInput.click()
    titleInput.send_keys(POST_TITLE)
    editorField.click()
    actionChain = selenium.webdriver.ActionChains(driver)
    actionChain.send_keys(POST_BODY)
    actionChain.perform()
    tagsInput.click()
    actionChain = selenium.webdriver.ActionChains(driver)
    # Space terminates each tag in the tag-input widget.
    for tag in POST_TAGS:
        actionChain.send_keys(tag)
        actionChain.send_keys(keys.Keys.SPACE)
    actionChain.perform()
    postForm.submit()
    # The newest post (highest id) should be the one just submitted.
    post = databaseConnection.session.query(database.tables.Post).order_by(sqlalchemy.desc(database.tables.Post.id)).first()
    tags = databaseConnection.session.query(database.tables.Tag.name).filter(database.tables.Tag.post_id == post.id).all()
    tags = [tag[0] for tag in tags] #Resolve sqlalchemy tuples
    assert post != None
    assert post.title == POST_TITLE, "Title is %s" % post.title
    assert tags == POST_TAGS, "Tags are %s" % tags
| mit | Python |
b8ac8ad15bf0fa8687527b2a180355211c2955b3 | Update refraction_test_input.py | DQE-Polytech-University/Beamplex | tests/refraction_test_input.py | tests/refraction_test_input.py | import unittest
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
from src.refraction import *
class TestRefractionInput(unittest.TestCase):
    """Input-validation tests for RefractionCalc and its refraction methods."""

    def testInputCorrect(self):
        # A valid wavelength/concentration pair must construct without raising.
        try:
            self.calc = RefractionCalc(0.85, [0.1, 0.1, 0.1, 0.1, 0.1])
        except Exception as error:  # fixed: 'except E, v' is Python-2-only syntax
            self.fail(error)

    def testInitLambda(self):
        # Wavelength must be numeric and inside the supported band.
        self.assertRaises(TypeError, RefractionCalc, '0.85', [0.1, 0.1, 0.1, 0.1, 0.1])
        self.assertRaises(TypeError, RefractionCalc, None, [0.1, 0.1, 0.1, 0.1, 0.1])
        self.assertRaises(ValueError, RefractionCalc, 0.5, [0.1, 0.1, 0.1, 0.1, 0.1])
        self.assertRaises(ValueError, RefractionCalc, 2, [0.1, 0.1, 0.1, 0.1, 0.1])

    def testInitConcentration(self):
        # Concentrations must be a list of numbers, each within [0, 1].
        self.assertRaises(TypeError, RefractionCalc, 0.85, '[1]')
        self.assertRaises(TypeError, RefractionCalc, 0.85, None)
        self.assertRaises(TypeError, RefractionCalc, 0.85, [0.1, 0.2, '0.3', 0.1, 0.1])
        self.assertRaises(ValueError, RefractionCalc, 0.85, [-0.1, 0.2, 0.3, 0.1, 0.1])
        self.assertRaises(ValueError, RefractionCalc, 0.85, [0.1, 1.2, 0.3, 0.1, 0.1])

    def testAlGaAs(self):
        self.calc = RefractionCalc(0.85, [0.1, 0.1, 0.1, 0.1, 0.1])
        with self.assertRaises(TypeError):
            self.calc.refraction_AlGaAs('5')
        with self.assertRaises(TypeError):
            self.calc.refraction_AlGaAs(None)
        with self.assertRaises(ValueError):
            self.calc.refraction_AlGaAs(-1)
        with self.assertRaises(ValueError):
            self.calc.refraction_AlGaAs(2)

    def testInGaAs(self):
        self.calc = RefractionCalc(0.85, [0.1, 0.1, 0.1, 0.1, 0.1])
        # (stray debug print of self.calc.wavelength removed)
        with self.assertRaises(TypeError):
            self.calc.refraction_InGaAs('5')
        with self.assertRaises(TypeError):
            self.calc.refraction_InGaAs(None)
        with self.assertRaises(ValueError):
            self.calc.refraction_InGaAs(-1)
        with self.assertRaises(ValueError):
            self.calc.refraction_InGaAs(2)


if __name__ == '__main__':
    unittest.main()
| mit | Python | |
0bee13a823ec41fcb4a899bcd96cd05e511c8abc | Fix the data resizer test. | berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop | tests/test_ImageDataResizer.py | tests/test_ImageDataResizer.py | import unittest
from core.data.DataResizer import DataResizer
from vtk import vtkImageData
class DataResizerTest(unittest.TestCase):
    """Tests for DataResizer.ResizeData and DataResizer.calculateFactor."""
    def setUp(self):
        self.imageResizer = DataResizer()
    def tearDown(self):
        del self.imageResizer
    def testDataResizer(self):
        # Smoke test: fixture construction alone must succeed.
        self.assertTrue(True)
    def testDataResizerDimensions(self):
        dimensions = [512, 512, 196]
        # Factor scales the total voxel count: 0.125 of the volume
        # corresponds to halving each of the three dimensions (0.5**3).
        factor = 0.125
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, factor)
        newDimensions = resizedData.GetDimensions()
        self.assertEquals(newDimensions[0], 256)
        self.assertEquals(newDimensions[1], 256)
        self.assertEquals(newDimensions[2], 98)
    def testCalculateFactor(self):
        dimensions = [512, 512, 196]
        maxVoxels = 17500000
        factor = self.imageResizer.calculateFactor(dimensions, maxVoxels)
        # The computed factor must bring the voxel count under the cap.
        voxels = int(factor * dimensions[0] * dimensions[1] * dimensions[2])
        self.assertLessEqual(voxels, maxVoxels)
    def testDataResizerMaxVoxels(self):
        dimensions = [512, 512, 196]
        maxVoxels = 17500000
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, maximum=maxVoxels)
        newDimensions = resizedData.GetDimensions()
        numberOfVoxels = newDimensions[0] * newDimensions[1] * newDimensions[2]
        self.assertLessEqual(numberOfVoxels, maxVoxels)
    def testResizerShouldNotEnlarge(self):
        # Data already below the cap must pass through unchanged.
        dimensions = [12, 13, 14]
        maxVoxels = 17500000
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, maximum=maxVoxels)
        newDimensions = resizedData.GetDimensions()
        self.assertEquals(dimensions[0], newDimensions[0])
        self.assertEquals(dimensions[1], newDimensions[1])
        self.assertEquals(dimensions[2], newDimensions[2])
# Helper method
def createImageData(dimensions):
    """Allocate a single-component vtkImageData with the given dimensions."""
    imageData = vtkImageData()
    imageData.Initialize()
    imageData.SetDimensions(dimensions)
    imageData.SetNumberOfScalarComponents(1)
    imageData.AllocateScalars()
    imageData.Update()
    return imageData
| import unittest
from core.data.DataResizer import DataResizer
from vtk import vtkImageData
class DataResizerTest(unittest.TestCase):
    """Tests for DataResizer.ResizeData and DataResizer.calculateFactor."""
    def setUp(self):
        self.imageResizer = DataResizer()
    def tearDown(self):
        del self.imageResizer
    def testDataResizer(self):
        # Smoke test: fixture construction alone must succeed.
        self.assertTrue(True)
    def testDataResizerDimensions(self):
        dimensions = [512, 512, 196]
        # Bug fix: the factor scales the total voxel count, not each axis.
        # Expecting halved dimensions (256/256/98) requires a volume factor
        # of 0.5**3 = 0.125, not 0.5.
        factor = 0.125
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, factor)
        newDimensions = resizedData.GetDimensions()
        self.assertEquals(newDimensions[0], 256)
        self.assertEquals(newDimensions[1], 256)
        self.assertEquals(newDimensions[2], 98)
    def testCalculateFactor(self):
        dimensions = [512, 512, 196]
        maxVoxels = 17500000
        factor = self.imageResizer.calculateFactor(dimensions, maxVoxels)
        # The computed factor must bring the voxel count under the cap.
        voxels = int(factor * dimensions[0] * dimensions[1] * dimensions[2])
        self.assertLessEqual(voxels, maxVoxels)
    def testDataResizerMaxVoxels(self):
        dimensions = [512, 512, 196]
        maxVoxels = 17500000
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, maximum=maxVoxels)
        newDimensions = resizedData.GetDimensions()
        numberOfVoxels = newDimensions[0] * newDimensions[1] * newDimensions[2]
        self.assertLessEqual(numberOfVoxels, maxVoxels)
    def testResizerShouldNotEnlarge(self):
        # Data already below the cap must pass through unchanged.
        dimensions = [12, 13, 14]
        maxVoxels = 17500000
        imageData = createImageData(dimensions)
        resizedData = self.imageResizer.ResizeData(imageData, maximum=maxVoxels)
        newDimensions = resizedData.GetDimensions()
        self.assertEquals(dimensions[0], newDimensions[0])
        self.assertEquals(dimensions[1], newDimensions[1])
        self.assertEquals(dimensions[2], newDimensions[2])
# Helper method
def createImageData(dimensions):
    """Allocate a single-component vtkImageData with the given dimensions."""
    imageData = vtkImageData()
    imageData.Initialize()
    imageData.SetDimensions(dimensions)
    imageData.SetNumberOfScalarComponents(1)
    imageData.AllocateScalars()
    imageData.Update()
    return imageData
| mit | Python |
03a4f552088ee6a0ab44e199035bd8fc70fbb9d5 | fix broken test | Jeff-Wang93/vent,cglewis/vent,CyberReboot/vent,CyberReboot/vent,cglewis/vent,CyberReboot/vent,cglewis/vent,Jeff-Wang93/vent,Jeff-Wang93/vent | tests/test_api_menu_helpers.py | tests/test_api_menu_helpers.py | from vent.api.menu_helpers import MenuHelper
from vent.api.plugins import Plugin
def test_cores():
    """ Test the cores function """
    instance = MenuHelper()
    # Each lifecycle action returns a tuple whose first element flags success.
    for action in ('install', 'build', 'start', 'stop', 'clean'):
        cores = instance.cores(action)
        assert cores[0] == True
def test_repo_branches():
    """ Test the repo_branches function """
    helper = MenuHelper()
    result = helper.repo_branches('https://github.com/cyberreboot/vent')
    # (success, payload) tuple; element 0 flags success.
    assert isinstance(result, tuple)
    assert result[0] == True
def test_repo_commits():
    """ Test the repo_commits function """
    repo_url = 'https://github.com/cyberreboot/vent'
    # The repo must first be added (cloned) before commits can be listed.
    added = Plugin().add(repo_url, build=False)
    assert isinstance(added, tuple)
    assert added[0] == True
    result = MenuHelper().repo_commits(repo_url)
    assert isinstance(result, tuple)
    assert result[0] == True
def test_repo_tools():
    """ Test the repo_tools function """
    helper = MenuHelper()
    result = helper.repo_tools(
        'https://github.com/cyberreboot/vent', 'master', 'HEAD')
    # (success, payload) tuple; element 0 flags success.
    assert isinstance(result, tuple)
    assert result[0] == True
def test_tools_status():
    """ Test the tools_status function """
    helper = MenuHelper()
    # Both the core=True and core=False paths must return tuples.
    for core_flag in (True, False):
        assert isinstance(helper.tools_status(core_flag), tuple)
| from vent.api.menu_helpers import MenuHelper
def test_cores():
    """ Test the cores function """
    instance = MenuHelper()
    # Each lifecycle action returns a tuple whose first element flags success.
    cores = instance.cores('install')
    assert cores[0] == True
    cores = instance.cores('build')
    assert cores[0] == True
    cores = instance.cores('start')
    assert cores[0] == True
    cores = instance.cores('stop')
    assert cores[0] == True
    cores = instance.cores('clean')
    assert cores[0] == True
def test_repo_branches():
    """ Test the repo_branches function """
    instance = MenuHelper()
    status = instance.repo_branches('https://github.com/cyberreboot/vent')
    # (success, payload) tuple; element 0 flags success.
    assert isinstance(status, tuple)
    assert status[0] == True
def test_repo_commits():
    """ Test the repo_commits function """
    instance = MenuHelper()
    # NOTE(review): the sibling copy adds the repo via Plugin().add before
    # listing commits -- without that step this may depend on prior state.
    status = instance.repo_commits('https://github.com/cyberreboot/vent')
    assert isinstance(status, tuple)
    assert status[0] == True
def test_repo_tools():
    """ Test the repo_tools function """
    instance = MenuHelper()
    status = instance.repo_tools('https://github.com/cyberreboot/vent',
                                 'master', 'HEAD')
    # (success, payload) tuple; element 0 flags success.
    assert isinstance(status, tuple)
    assert status[0] == True
def test_tools_status():
    """ Test the tools_status function """
    instance = MenuHelper()
    # core=True restricts status to core tools; False covers plugins.
    core = instance.tools_status(True)
    assert isinstance(core, tuple)
    plugins = instance.tools_status(False)
    assert isinstance(plugins, tuple)
| apache-2.0 | Python |
c81655b2853ec4e072bff79e2600b4ec5d1063d5 | Fix test_observers_winapi.py::test___init__() on Windows | gorakhargosh/watchdog,gorakhargosh/watchdog | tests/test_observers_winapi.py | tests/test_observers_winapi.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from watchdog.utils import platform
if not platform.is_windows():
pytest.skip("Windows only.", allow_module_level=True)
import os.path
from time import sleep
from watchdog.events import (
DirCreatedEvent,
DirMovedEvent,
)
from watchdog.observers.api import ObservedWatch
from watchdog.observers.read_directory_changes import WindowsApiEmitter
from . import Empty, Queue
from .shell import (
mkdir,
mkdtemp,
mv
)
temp_dir = mkdtemp()
def p(*args):
    """
    Convenience helper: join the shared temporary directory path with the
    provided path components.
    """
    parts = (temp_dir,) + args
    return os.path.join(*parts)
@pytest.fixture
def event_queue():
    # Fresh queue per test so events cannot leak between tests.
    yield Queue()
@pytest.fixture
def emitter(event_queue):
    # Recursive watch on the shared temp dir; events land in event_queue.
    # The emitter is stopped unconditionally at teardown.
    watch = ObservedWatch(temp_dir, True)
    em = WindowsApiEmitter(event_queue, watch, timeout=0.2)
    yield em
    em.stop()
def test___init__(event_queue, emitter):
    """A started emitter reports the dir-created and dir-moved events."""
    SLEEP_TIME = 2
    emitter.start()
    sleep(SLEEP_TIME)
    mkdir(p('fromdir'))
    sleep(SLEEP_TIME)
    mv(p('fromdir'), p('todir'))
    sleep(SLEEP_TIME)
    emitter.stop()
    # What we need here for the tests to pass is a collection type
    # that is:
    # * unordered
    # * non-unique
    # A multiset! Python's collections.Counter class seems appropriate.
    expected = {
        DirCreatedEvent(p('fromdir')),
        DirMovedEvent(p('fromdir'), p('todir')),
    }
    got = set()
    # Drain everything the emitter queued.
    while True:
        try:
            event, _ = event_queue.get_nowait()
        except Empty:
            break
        else:
            got.add(event)
    assert expected == got
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from watchdog.utils import platform
if not platform.is_windows():
pytest.skip("Windows only.", allow_module_level=True)
import os.path
from time import sleep
from watchdog.events import (
DirCreatedEvent,
DirMovedEvent,
)
from watchdog.observers.api import ObservedWatch
from watchdog.observers.read_directory_changes import WindowsApiEmitter
from . import Empty, Queue
from .shell import (
mkdir,
mkdtemp,
mv
)
temp_dir = mkdtemp()
def p(*args):
    """
    Convenience function to join the temporary directory path
    with the provided arguments.
    """
    # All test paths live under the shared mkdtemp() root.
    return os.path.join(temp_dir, *args)
@pytest.fixture
def event_queue():
    # Fresh queue per test so events cannot leak between tests.
    yield Queue()
@pytest.fixture
def emitter(event_queue):
    # Recursive watch on the shared temp dir; events land in event_queue.
    # The emitter is stopped unconditionally at teardown.
    watch = ObservedWatch(temp_dir, True)
    em = WindowsApiEmitter(event_queue, watch, timeout=0.2)
    yield em
    em.stop()
def test___init__(event_queue, emitter):
    """A started emitter reports the dir-created and dir-moved events."""
    SLEEP_TIME = 2
    # Bug fix: the emitter must be started; otherwise no events are ever
    # collected and the expected/got comparison below can never pass.
    emitter.start()
    sleep(SLEEP_TIME)
    mkdir(p('fromdir'))
    sleep(SLEEP_TIME)
    mv(p('fromdir'), p('todir'))
    sleep(SLEEP_TIME)
    emitter.stop()
    # What we need here for the tests to pass is a collection type
    # that is:
    # * unordered
    # * non-unique
    # A multiset! Python's collections.Counter class seems appropriate.
    expected = {
        DirCreatedEvent(p('fromdir')),
        DirMovedEvent(p('fromdir'), p('todir')),
    }
    got = set()
    # Drain everything the emitter queued.
    while True:
        try:
            event, _ = event_queue.get_nowait()
        except Empty:
            break
        else:
            got.add(event)
    assert expected == got
| apache-2.0 | Python |
4c993698a334eac29bd8b64fd199a2caa869657b | Put the libc loading in the test. | IDSIA/sacred,IDSIA/sacred | tests/test_stdout_capturing.py | tests/test_stdout_capturing.py | #!/usr/bin/env python
# coding=utf-8
import os
import sys
import pytest
from sacred.stdout_capturing import get_stdcapturer
from sacred.optional import libc
def test_python_tee_output(capsys):
    """sys-level capture collects exactly what is printed inside the context."""
    expected_lines = {
        "captured stdout",
        "captured stderr"}
    capture_mode, capture_stdout = get_stdcapturer("sys")
    with capsys.disabled():
        print('before (stdout)')
        print('before (stderr)')
        with capture_stdout() as out:
            print("captured stdout")
            print("captured stderr")
            output = out.get()
        print('after (stdout)')
        print('after (stderr)')
    assert set(output.strip().split("\n")) == expected_lines
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason="does not run on windows")
def test_fd_tee_output(capsys):
    """fd-level capture also catches C-level and subprocess output."""
    # Get libc in a cross-platform way and use it to also flush the c stdio buffers
    # credit to J.F. Sebastians SO answer from here:
    # http://stackoverflow.com/a/22434262/1388435
    import ctypes
    from ctypes.util import find_library
    try:
        libc = ctypes.cdll.msvcrt  # Windows
    except OSError:
        libc = ctypes.cdll.LoadLibrary(find_library('c'))
    expected_lines = {
        "captured stdout",
        "captured stderr",
        "stdout from C",
        "and this is from echo"}
    capture_mode, capture_stdout = get_stdcapturer("fd")
    output = ""
    with capsys.disabled():
        print('before (stdout)')
        print('before (stderr)')
        with capture_stdout() as out:
            print("captured stdout")
            print("captured stderr", file=sys.stderr)
            output += out.get()
            libc.puts(b'stdout from C')
            libc.fflush(None)
            os.system('echo and this is from echo')
            output += out.get()
        output += out.get()
        print('after (stdout)')
        print('after (stderr)')
    assert set(output.strip().split("\n")) == expected_lines
| #!/usr/bin/env python
# coding=utf-8
import os
import sys
import pytest
from sacred.stdout_capturing import get_stdcapturer
from sacred.optional import libc
def test_python_tee_output(capsys):
    """sys-level capture collects exactly what is printed inside the context."""
    expected_lines = {
        "captured stdout",
        "captured stderr"}
    capture_mode, capture_stdout = get_stdcapturer("sys")
    with capsys.disabled():
        print('before (stdout)')
        print('before (stderr)')
        with capture_stdout() as out:
            print("captured stdout")
            print("captured stderr")
            output = out.get()
        print('after (stdout)')
        print('after (stderr)')
    assert set(output.strip().split("\n")) == expected_lines
@pytest.mark.skipif(sys.platform.startswith('win'),
                    reason="does not run on windows")
def test_fd_tee_output(capsys):
    """fd-level capture also catches C-level and subprocess output.

    NOTE(review): relies on the module-level `libc` from sacred.optional;
    the sibling copy loads libc inside the test via ctypes instead --
    confirm sacred.optional.libc is available on all supported platforms.
    """
    expected_lines = {
        "captured stdout",
        "captured stderr",
        "stdout from C",
        "and this is from echo"}
    capture_mode, capture_stdout = get_stdcapturer("fd")
    output = ""
    with capsys.disabled():
        print('before (stdout)')
        print('before (stderr)')
        with capture_stdout() as out:
            print("captured stdout")
            print("captured stderr", file=sys.stderr)
            output += out.get()
            libc.puts(b'stdout from C')
            libc.fflush(None)
            os.system('echo and this is from echo')
            output += out.get()
        output += out.get()
        print('after (stdout)')
        print('after (stderr)')
    assert set(output.strip().split("\n")) == expected_lines
| mit | Python |
b9af71959bca5ccd6536994d8f56715bc70b0a57 | Fix loading config | P1X-in/Tanks-of-Freedom-Server | tof_server/__init__.py | tof_server/__init__.py | """Module init."""
from flask import Flask
from flask.ext.mysqldb import MySQL
app = Flask(__name__)
mysql = MySQL()
# Load settings from config.py located relative to this package.
app.config.from_pyfile('config.py')
mysql.init_app(app)
from tof_server import views  # imported for its route-registration side effects
| """Module init."""
from flask import Flask
from flask.ext.mysqldb import MySQL
app = Flask(__name__)
mysql = MySQL()
# NOTE(review): from_object('config') imports a module named `config`;
# the sibling copy loads the file config.py via from_pyfile -- verify
# which matches this project's layout.
app.config.from_object('config')
mysql.init_app(app)
from tof_server import views  # imported for its route-registration side effects
| mit | Python |
ca7403462588f374cf1af39d537765c02fc7726c | Fix status codes of handled responses | h2020-endeavour/endeavour,h2020-endeavour/endeavour | mctrl/rest.py | mctrl/rest.py | from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
    """Flask app exposing /anomaly and /monitor POST endpoints that feed a monitor."""
    def __init__(self, monitor):
        self.app = Flask(__name__)
        self.app.monitor = monitor
        self.setup()
    def setup(self):
        @self.app.route('/anomaly', methods = ['POST'])
        def api_anomaly():
            # JSON body forwarded to the monitor's anomaly processor.
            data = request.json
            if request.headers['Content-Type'] == 'application/json':
                success = self.app.monitor.process_anomaly_data(data)
                return self.handle_response(success, data)
            else:
                return Response("Unsupported media type\n" + data, status=415)
        @self.app.route('/monitor', methods = ['POST'])
        def api_monitor():
            # JSON body forwarded to the monitor's flow processor.
            data = request.json
            if request.headers['Content-Type'] == 'application/json':
                success = self.app.monitor.process_monitor_flows(data)
                return self.handle_response(success, data)
            else:
                return Response("Unsupported media type\n" + data, status=415)
    def handle_response(self, success, data):
        """Echo the JSON payload with 200 on success, 400 otherwise."""
        json_data = json.dumps(data)
        if success:
            return Response("OK\n" + json_data, status=200)
        else:
            return Response("BAD REQUEST\n" + json_data, status=400)
| from flask import Flask, url_for, Response, json, request
class MonitorApp(object):
    """Flask app exposing /anomaly and /monitor POST endpoints that feed a monitor."""
    def __init__(self, monitor):
        self.app = Flask(__name__)
        self.app.monitor = monitor
        self.setup()
    def setup(self):
        @self.app.route('/anomaly', methods = ['POST'])
        def api_anomaly():
            data = request.json
            if request.headers['Content-Type'] == 'application/json':
                success = self.app.monitor.process_anomaly_data(data)
                # Bug fix: handle_response is a method (was called unqualified,
                # a NameError) and needs the payload to build the response.
                return self.handle_response(success, data)
            else:
                return Response("Unsupported media type\n" + data, status=415)
        @self.app.route('/monitor', methods = ['POST'])
        def api_monitor():
            data = request.json
            if request.headers['Content-Type'] == 'application/json':
                success = self.app.monitor.process_monitor_flows(data)
                return self.handle_response(success, data)
            else:
                return Response("Unsupported media type\n" + data, status=415)
    def handle_response(self, success, data):
        """Echo the JSON payload with 200 on success, 400 otherwise.

        Bug fix: the previous body referenced undefined names `data` and
        `status`; the payload is now passed in and status codes are explicit.
        """
        json_data = json.dumps(data)
        if success:
            return Response("OK\n" + json_data, status=200)
        else:
            return Response("BAD REQUEST\n" + json_data, status=400)
| apache-2.0 | Python |
2717d0eec58fcce31740aa7eed863d59ca53344c | Change error | MKFMIKU/RAISR,MKFMIKU/RAISR | code/hashTable.py | code/hashTable.py | # -*- coding: utf-8 -*-
import numpy as np
def hashTable(patch,Qangle,Qstrenth,Qcoherence):
    """Map a patch's gradient statistics to quantized (angle, strength,
    coherence) bucket indices.

    NOTE(review): the quantizer assumes angle in [0, pi) and
    strength/coherence in [0, 1]; the "-1" offset can yield index -1 for
    values in the lowest bin -- verify against the consumer of these indices.
    """
    [gx,gy] = np.gradient(patch)
    # 2x2 sum of gradient outer products (G is Nx2, x = G^T G).
    G = np.matrix((gx.ravel(),gy.ravel())).T
    x = G.T*G
    [eigenvalues,eigenvectors] = np.linalg.eig(x)
    #For angle
    angle = np.math.atan2(eigenvectors[0,1],eigenvectors[0,0])
    if angle<0:
        angle += np.pi
    #For strength (normalized by the eigenvalue sum, so it lies in [0, 1])
    strength = eigenvalues.max()/eigenvalues.sum()
    #For coherence
    lamda1 = np.math.sqrt(eigenvalues.max())
    lamda2 = np.math.sqrt(eigenvalues.min())
    coherence = np.abs((lamda1-lamda2)/(lamda1+lamda2))
    #Quantization
    angle = np.floor(angle/(np.pi/Qangle)-1)
    strength = np.floor(strength/(1/Qstrenth)-1)
    coherence = np.floor(coherence/(1/Qcoherence)-1)
    return angle,strength,coherence | # -*- coding: utf-8 -*-
import numpy as np
def hashTable(patch,Qangle,Qstrenth,Qcoherence):
[gx,gy] = np.gradient(patch)
G = np.matrix((gx.ravel(),gy.ravel())).T
x = G.T*G
[eigenvalues,eigenvectors] = np.linalg.eig(x)
#For angle
angle = np.math.atan2(eigenvectors[0,1],eigenvectors[0,0])
if angle<0:
angle += np.pi
#For strength
strength = eigenvalues.max()
#For coherence
lamda1 = np.math.sqrt(eigenvalues.max())
lamda2 = np.math.sqrt(eigenvalues.min())
coherence = np.abs((lamda1-lamda2)/(lamda1+lamda2))
#Quantization
angle = np.floor(angle/(np.pi/Qangle)-1)
strength = np.floor(strength/(1/Qstrenth)-1)
coherence = np.floor(coherence/(1/Qcoherence)-1)
return angle,strength,coherence | mit | Python |
0d7d5746ccff0e933a5bdfa22f3fd63914593ded | Use correct class to avoid side effects | mardiros/aioxmlrpc | tests/unittests/test_client.py | tests/unittests/test_client.py | import pytest
from httpx import Request, Response
from aioxmlrpc.client import Fault, ProtocolError, ServerProxy
RESPONSES = {
"http://localhost/test_xmlrpc_ok": {
"status": 200,
"body": """<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><int>1</int></value>
</param>
</params>
</methodResponse>""",
},
"http://localhost/test_xmlrpc_fault": {
"status": 200,
"body": """<?xml version="1.0"?>
<methodResponse>
<fault>
<value>
<struct>
<member>
<name>faultCode</name>
<value><int>4</int></value>
</member>
<member>
<name>faultString</name>
<value><string>You are not lucky</string></value>
</member>
</struct>
</value>
</fault>
</methodResponse>
""",
},
"http://localhost/test_http_500": {
"status": 500,
"body": """
I am really broken
""",
},
}
# class Response:
# def __init__(self, url):
# response = RESPONSES[url]
# self.status_code = response["status"]
# self.text = response["body"]
# self.headers = {}
class DummyAsyncClient:
    """Minimal stand-in for httpx.AsyncClient serving canned responses."""
    async def post(self, url, *args, **kwargs):
        canned = RESPONSES[url]
        req = Request("POST", url)
        return Response(
            status_code=canned["status"],
            headers={},
            text=canned["body"],
            request=req,
        )
@pytest.mark.asyncio
async def test_xmlrpc_ok():
    # A well-formed methodResponse resolves to its decoded value.
    client = ServerProxy("http://localhost/test_xmlrpc_ok", session=DummyAsyncClient())
    response = await client.name.space.proxfyiedcall()
    assert response == 1
@pytest.mark.asyncio
async def test_xmlrpc_fault():
    # An XML-RPC fault payload surfaces as an aioxmlrpc Fault.
    client = ServerProxy(
        "http://localhost/test_xmlrpc_fault", session=DummyAsyncClient()
    )
    with pytest.raises(Fault):
        await client.name.space.proxfyiedcall()
@pytest.mark.asyncio
async def test_http_500():
    # A non-XML HTTP 500 body raises ProtocolError.
    client = ServerProxy("http://localhost/test_http_500", session=DummyAsyncClient())
    with pytest.raises(ProtocolError):
        await client.name.space.proxfyiedcall()
@pytest.mark.asyncio
async def test_network_error():
    # Unresolvable host raises ProtocolError (uses a real network client).
    client = ServerProxy("http://nonexistent/nonexistent")
    with pytest.raises(ProtocolError):
        await client.name.space.proxfyiedcall()
| import httpx
import pytest
from aioxmlrpc.client import Fault, ProtocolError, ServerProxy
RESPONSES = {
"http://localhost/test_xmlrpc_ok": {
"status": 200,
"body": """<?xml version="1.0"?>
<methodResponse>
<params>
<param>
<value><int>1</int></value>
</param>
</params>
</methodResponse>""",
},
"http://localhost/test_xmlrpc_fault": {
"status": 200,
"body": """<?xml version="1.0"?>
<methodResponse>
<fault>
<value>
<struct>
<member>
<name>faultCode</name>
<value><int>4</int></value>
</member>
<member>
<name>faultString</name>
<value><string>You are not lucky</string></value>
</member>
</struct>
</value>
</fault>
</methodResponse>
""",
},
"http://localhost/test_http_500": {
"status": 500,
"body": """
I am really broken
""",
},
}
class Response:
    """Ad-hoc response stub built from the canned RESPONSES table.

    NOTE(review): this local class shadows any Response-like type from the
    imported httpx package and carries no `request` attribute; the sibling
    copy replaces it with a real httpx.Response to avoid such side effects.
    """
    def __init__(self, url):
        response = RESPONSES[url]
        self.status_code = response["status"]
        self.text = response["body"]
        self.headers = {}
class DummyAsyncClient:
    """Stub async HTTP client returning a canned Response for any POST."""
    async def post(self, url, *args, **kwargs):
        return Response(url)
@pytest.mark.asyncio
async def test_xmlrpc_ok():
    # A well-formed methodResponse resolves to its decoded value.
    client = ServerProxy("http://localhost/test_xmlrpc_ok", session=DummyAsyncClient())
    response = await client.name.space.proxfyiedcall()
    assert response == 1
@pytest.mark.asyncio
async def test_xmlrpc_fault():
    # An XML-RPC fault payload surfaces as an aioxmlrpc Fault.
    client = ServerProxy(
        "http://localhost/test_xmlrpc_fault", session=DummyAsyncClient()
    )
    with pytest.raises(Fault):
        await client.name.space.proxfyiedcall()
@pytest.mark.asyncio
async def test_http_500():
    # A non-XML HTTP 500 body raises ProtocolError.
    client = ServerProxy("http://localhost/test_http_500", session=DummyAsyncClient())
    with pytest.raises(ProtocolError):
        await client.name.space.proxfyiedcall()
@pytest.mark.asyncio
async def test_network_error():
    # Unresolvable host raises ProtocolError (uses a real network client).
    client = ServerProxy("http://nonexistent/nonexistent")
    with pytest.raises(ProtocolError):
        await client.name.space.proxfyiedcall()
| bsd-3-clause | Python |
87bf261345919e90cb88853165fb1556046c80ef | Fix typo in mock usage | hkariti/mopidy,bencevans/mopidy,diandiankan/mopidy,dbrgn/mopidy,kingosticks/mopidy,mopidy/mopidy,ali/mopidy,jmarsik/mopidy,quartz55/mopidy,mopidy/mopidy,vrs01/mopidy,diandiankan/mopidy,ali/mopidy,adamcik/mopidy,pacificIT/mopidy,tkem/mopidy,pacificIT/mopidy,dbrgn/mopidy,adamcik/mopidy,hkariti/mopidy,jmarsik/mopidy,vrs01/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bacontext/mopidy,jcass77/mopidy,pacificIT/mopidy,jodal/mopidy,ZenithDK/mopidy,swak/mopidy,kingosticks/mopidy,bacontext/mopidy,jcass77/mopidy,diandiankan/mopidy,quartz55/mopidy,swak/mopidy,kingosticks/mopidy,mopidy/mopidy,quartz55/mopidy,jodal/mopidy,dbrgn/mopidy,dbrgn/mopidy,mokieyue/mopidy,ali/mopidy,tkem/mopidy,swak/mopidy,jcass77/mopidy,SuperStarPL/mopidy,bacontext/mopidy,adamcik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,ZenithDK/mopidy,jmarsik/mopidy,bencevans/mopidy,SuperStarPL/mopidy,bencevans/mopidy,ZenithDK/mopidy,tkem/mopidy,bencevans/mopidy,ali/mopidy,tkem/mopidy,diandiankan/mopidy,jodal/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,vrs01/mopidy,hkariti/mopidy,swak/mopidy,bacontext/mopidy,mokieyue/mopidy,mokieyue/mopidy,quartz55/mopidy,hkariti/mopidy | tests/mpd/protocol/test_connection.py | tests/mpd/protocol/test_connection.py | from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
    """MPD connection-level commands: close, empty request, kill, ping."""
    def test_close_closes_the_client_connection(self):
        with patch.object(self.session, 'close') as close_mock:
            self.send_request('close')
        close_mock.assert_called_once_with()
        self.assertEqualResponse('OK')
    def test_empty_request(self):
        # Both an empty line and bare whitespace are rejected.
        self.send_request('')
        self.assertEqualResponse('ACK [5@0] {} No command given')
        self.send_request(' ')
        self.assertEqualResponse('ACK [5@0] {} No command given')
    def test_kill(self):
        # kill requires elevated permission and is refused.
        self.send_request('kill')
        self.assertEqualResponse(
            'ACK [4@0] {kill} you don\'t have permission for "kill"')
    def test_ping(self):
        self.send_request('ping')
        self.assertEqualResponse('OK')
| from __future__ import absolute_import, unicode_literals
from mock import patch
from tests.mpd import protocol
class ConnectionHandlerTest(protocol.BaseTestCase):
def test_close_closes_the_client_connection(self):
with patch.object(self.session, 'close') as close_mock:
self.send_request('close')
close_mock.assertEqualResponsecalled_once_with()
self.assertEqualResponse('OK')
def test_empty_request(self):
self.send_request('')
self.assertEqualResponse('ACK [5@0] {} No command given')
self.send_request(' ')
self.assertEqualResponse('ACK [5@0] {} No command given')
def test_kill(self):
self.send_request('kill')
self.assertEqualResponse(
'ACK [4@0] {kill} you don\'t have permission for "kill"')
def test_ping(self):
self.send_request('ping')
self.assertEqualResponse('OK')
| apache-2.0 | Python |
7c79ce6083f5ce93d9c64b458da243fe87a484f7 | Fix argument parsing default setup | abele/jinja-graph | src/jg/__main__.py | src/jg/__main__.py | import re
import sys
from itertools import chain
import argparse
import py
from graphviz import Digraph
EXTENDS_RE = re.compile(r'{%\s*extends\s*[\'"](.*)[\'"]\s*%}')
INCLUDE_RE = re.compile(r'{%\s*include\s*[\'"](.*)[\'"]\s*%}')
TEMPLATE_PATTERN = '*.html'
def main(argv=()):
"""
Args:
argv (list): List of arguments
Returns:
int: A return code
Does stuff.
"""
parser = argparse.ArgumentParser(
description=u'Generate Jinja template dependency graph')
parser.add_argument(
'i',
default='./',
help=u'Root path',
)
parser.add_argument(
'o',
default='./output.dot',
help=u'Output filename',
)
args = parser.parse_args(argv or sys.argv[1:])
root_path = args.i
output_filename = args.o
dot = generate_template_graph(root_path=root_path)
write(dot.source, output_filename)
return 0
def write(content, output_filename):
py.path.local(output_filename).write(content, ensure=True)
def generate_template_graph(root_path):
cwd = py.path.local(root_path)
node_name_and_template_path = ((_node_name(path, cwd), path)
for path in _template_path_seq(root_path))
dot = Digraph()
for node_name, template_path in node_name_and_template_path:
dot.node(node_name)
file_content = template_path.read()
derived_file_seq = chain(
EXTENDS_RE.findall(file_content),
INCLUDE_RE.findall(file_content)
)
for derived_file_path in derived_file_seq:
dot.edge(node_name, derived_file_path)
return dot
def _node_name(path, cwd):
return path.strpath.replace(cwd.strpath, '', 1).lstrip('/')
def _template_path_seq(root_path):
return py.path.local(root_path).visit(TEMPLATE_PATTERN)
if __name__ == "__main__":
sys.exit(main())
| import re
import sys
from itertools import chain
import argparse
import py
from graphviz import Digraph
EXTENDS_RE = re.compile(r'{%\s*extends\s*[\'"](.*)[\'"]\s*%}')
INCLUDE_RE = re.compile(r'{%\s*include\s*[\'"](.*)[\'"]\s*%}')
TEMPLATE_PATTERN = '*.html'
def main(argv=()):
"""
Args:
argv (list): List of arguments
Returns:
int: A return code
Does stuff.
"""
parser = argparse.ArgumentParser(
description=u'Generate Jinja template dependency graph')
parser.add_argument(
'i',
default='./',
help=u'Root path',
)
parser.add_argument(
'o',
default='./output.dot',
help=u'Output filename',
)
args = parser.parse_args(argv)
root_path = args.i
output_filename = args.o
dot = generate_template_graph(root_path=root_path)
write(dot.source, output_filename)
return 0
def write(content, output_filename):
py.path.local(output_filename).write(content, ensure=True)
def generate_template_graph(root_path):
cwd = py.path.local(root_path)
node_name_and_template_path = ((_node_name(path, cwd), path)
for path in _template_path_seq(root_path))
dot = Digraph()
for node_name, template_path in node_name_and_template_path:
dot.node(node_name)
file_content = template_path.read()
derived_file_seq = chain(
EXTENDS_RE.findall(file_content),
INCLUDE_RE.findall(file_content)
)
for derived_file_path in derived_file_seq:
dot.edge(node_name, derived_file_path)
return dot
def _node_name(path, cwd):
return path.strpath.replace(cwd.strpath, '', 1).lstrip('/')
def _template_path_seq(root_path):
return py.path.local(root_path).visit(TEMPLATE_PATTERN)
if __name__ == "__main__":
sys.exit(main())
| bsd-2-clause | Python |
ce6426376f1a0d4b463b3a4b83aa34157896397d | add meetings list to committee api v2 | daonb/Open-Knesset,otadmor/Open-Knesset,habeanf/Open-Knesset,Shrulik/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,MeirKriheli/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,DanaOshri/Open-Knesset,Shrulik/Open-Knesset,noamelf/Open-Knesset,OriHoch/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,otadmor/Open-Knesset,habeanf/Open-Knesset,OriHoch/Open-Knesset,jspan/Open-Knesset,OriHoch/Open-Knesset,noamelf/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,navotsil/Open-Knesset,jspan/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,ofri/Open-Knesset,daonb/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,MeirKriheli/Open-Knesset,ofri/Open-Knesset,daonb/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,otadmor/Open-Knesset,ofri/Open-Knesset,alonisser/Open-Knesset,otadmor/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,DanaOshri/Open-Knesset | committees/api.py | committees/api.py | '''
Api for the committees app
'''
from tastypie.api import Api
from tastypie.constants import ALL
from tastypie.bundle import Bundle
import tastypie.fields as fields
from apis.resources.base import BaseResource
from models import Committee, CommitteeMeeting, ProtocolPart
from mks.api import MemberResource
class CommitteeResource(BaseResource):
''' Committee API
'''
meetings = fields.ToManyField('committees.api.CommitteeMeetingResource', 'meetings')
class Meta:
queryset = Committee.objects.all()
allowed_methods = ['get']
include_absolute_url = True
class CommitteeMeetingResource(BaseResource):
''' Committee Meeting API
'''
committee = fields.ForeignKey(CommitteeResource, 'committee')
mks_attended = fields.ToManyField(MemberResource, 'mks_attended')
protocol = fields.ToManyField('committees.api.ProtocolPartResource',
'parts', full=True)
class Meta:
queryset = CommitteeMeeting.objects.all().select_related('committee',
'mks_attended',
).prefetch_related('parts')
allowed_methods = ['get']
include_absolute_url = True
list_fields = ['committee','mks_attended','date','topics']
excludes = ['protocol_text']
class ProtocolPartResource(BaseResource):
header = fields.CharField(attribute='header')
body = fields.CharField(attribute='body')
class Meta:
queryset = ProtocolPart.objects.all().order_by('order')
allowed_methods = ['get']
fields = list_fields = ['header','body']
include_resource_uri = False
| '''
Api for the committees app
'''
from tastypie.api import Api
from tastypie.constants import ALL
from tastypie.bundle import Bundle
import tastypie.fields as fields
from apis.resources.base import BaseResource
from models import Committee, CommitteeMeeting, ProtocolPart
from mks.api import MemberResource
class CommitteeResource(BaseResource):
''' Committee API
'''
class Meta:
queryset = Committee.objects.all()
allowed_methods = ['get']
include_absolute_url = True
class CommitteeMeetingResource(BaseResource):
''' Committee Meeting API
'''
committee = fields.ForeignKey(CommitteeResource, 'committee')
mks_attended = fields.ToManyField(MemberResource, 'mks_attended')
protocol = fields.ToManyField('committees.api.ProtocolPartResource',
'parts', full=True)
class Meta:
queryset = CommitteeMeeting.objects.all().select_related('committee',
'mks_attended',
).prefetch_related('parts')
allowed_methods = ['get']
include_absolute_url = True
list_fields = ['committee','mks_attended','date','topics']
excludes = ['protocol_text']
class ProtocolPartResource(BaseResource):
header = fields.CharField(attribute='header')
body = fields.CharField(attribute='body')
class Meta:
queryset = ProtocolPart.objects.all().order_by('order')
allowed_methods = ['get']
fields = list_fields = ['header','body']
include_resource_uri = False
| bsd-3-clause | Python |
9f5953577ea8c37674003d3d1bff8788b92379e4 | Integrate LLVM at llvm/llvm-project@8e5f3d04f269 | Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,sarvex/tensorflow,yongtang/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorfl
ow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,sarvex/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,karllessard/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,yongtang/tensorflow,sarvex/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,sarvex/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,sarvex/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to 
import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "8e5f3d04f269dbe791076e775f1d1a098cbada01"
LLVM_SHA256 = "51f4950108027260a6dfeac4781fdad85dfde1d8594f8d26faea504e923ebcf2"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "17800f900dca8243773dec5f90578cce03069b8f"
LLVM_SHA256 = "779e3e2a575e2630571f03518782009de9ed075c9b18ce3139715d34a328a66a"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
adcf84d66af103636e9c6715892652e8643796bc | Integrate LLVM at llvm/llvm-project@69a909b9fefe | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "69a909b9fefeec5d4ece6f3162f8332f125ea202"
LLVM_SHA256 = "0ffd99b2775505b9a9efc2efb9ad7251d4403844ca13777f8f8ef6caad460acb"
tfrt_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "6f85d9e104ca5f785718b25dccb5817b0c6c208d"
LLVM_SHA256 = "d92d32307cb050c2a2a1d4fa2d7ceddea26207ad39ab87f47e389fab830fa0c6"
tfrt_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
cbf7fb49dc8d0714e29fccb28bf2108ba45951aa | Integrate LLVM at llvm/llvm-project@09f43c107fc7 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "09f43c107fc7688639346d3beead72472cdadbdb"
LLVM_SHA256 = "03eaf8e80cdbd0e998c16ab5d5215e346ed7688777344940ad8c1492064674f4"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "505d57486e57eb61e29bed6517de5152d208fede"
LLVM_SHA256 = "d94ac445e026e6cc29387a8345eedc77c428b0c0ac81eb9b843c0295515df494"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
ae32af2969f7444b6cb6d4a4b4aea366409c507a | Integrate LLVM at llvm/llvm-project@5dbc7cf7cac4 | yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow
-experimental_link_static_libraries_once,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "5dbc7cf7cac4428e0876a94a4fca10fe60af7328"
LLVM_SHA256 = "389c14f6cc12103828f319df9161468d3403c3b70224e3a7a39a87bb99f1ff0c"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:toolchains.patch",
"//third_party/llvm:temporary.patch", # Cherry-picks and temporary reverts. Do not remove even if temporary.patch is empty.
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "42836e283fc58d5cebbcbb2e8eb7619d92fb9c2d"
LLVM_SHA256 = "c44be2c5da50431b370fe9dc1762399c27522ec2c5daeda5d618dddf502f4608"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved
"//third_party/llvm:build.patch",
"//third_party/llvm:toolchains.patch",
"//third_party/llvm:temporary.patch", # Cherry-picks and temporary reverts. Do not remove even if temporary.patch is empty.
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
69887d843fef19802765a3f484f95b88e51628a8 | Integrate LLVM at llvm/llvm-project@f962dafbbdf6 | google/tsl,google/tsl,google/tsl | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "f962dafbbdf61234bfa51bde95e2e5c52a02e9b9"
LLVM_SHA256 = "9ae9cae1c72a35630499345bffa72ab45e2fcec2442acba6cbf88f5dee575919"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:build.patch",
"//third_party/llvm:mathextras.patch",
"//third_party/llvm:toolchains.patch",
"//third_party/llvm:temporary.patch", # Cherry-picks and temporary reverts. Do not remove even if temporary.patch is empty.
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "a429773b3ec6978e2cfd27d2ff6a6585b6e88198"
LLVM_SHA256 = "3beb955cfc9b981a35e4bd712db0fe2d4bc8ff03d3c2acc36b30036d7c2bf429"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = [
"//third_party/llvm:build.patch",
"//third_party/llvm:mathextras.patch",
"//third_party/llvm:toolchains.patch",
"//third_party/llvm:temporary.patch", # Cherry-picks and temporary reverts. Do not remove even if temporary.patch is empty.
],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
6db82f5c41c88792a877865b8606f4490acd29e4 | Update get_absolute_url of Project model | bjoernricks/trex,bjoernricks/trex | trex/models/project.py | trex/models/project.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.db import models
class Project(models.Model):
name = models.CharField(max_length=50, unique=True)
description = models.TextField(blank=True, default="")
active = models.BooleanField(default=True)
created = models.DateTimeField(auto_now_add=True)
users = models.ManyToManyField(
settings.AUTH_USER_MODEL, through="ProjectUsers",
related_name="projects")
class Meta:
ordering = ("name", "active")
def get_absolute_url(self):
return reverse_lazy("project-detail", kwargs={"pk": self.id})
class Entry(models.Model):
project = models.ForeignKey(Project, related_name="entries")
date = models.DateField()
duration = models.PositiveIntegerField()
description = models.TextField()
created = models.DateTimeField(auto_now_add=True)
state = models.CharField(max_length="5", blank=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True)
tags = models.ManyToManyField("Tags", related_name="entries")
class Meta:
ordering = ("date", "created")
class Tags(models.Model):
project = models.ForeignKey(Project, related_name="tags")
name = models.CharField(max_length=255)
description = models.TextField(blank=True, default="")
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ("name",)
class ProjectUsers(models.Model):
project = models.ForeignKey(Project)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
user_abbr = models.CharField("User abbreviation for the project",
max_length=25)
class Meta:
unique_together = ("project", "user")
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.db import models
class Project(models.Model):
name = models.CharField(max_length=50, unique=True)
description = models.TextField(blank=True, default="")
active = models.BooleanField(default=True)
created = models.DateTimeField(auto_now_add=True)
users = models.ManyToManyField(
settings.AUTH_USER_MODEL, through="ProjectUsers",
related_name="projects")
class Meta:
ordering = ("name", "active")
def get_absolute_url(self):
return reverse_lazy("project-details", kwargs={"pk": self.id})
class Entry(models.Model):
project = models.ForeignKey(Project, related_name="entries")
date = models.DateField()
duration = models.PositiveIntegerField()
description = models.TextField()
created = models.DateTimeField(auto_now_add=True)
state = models.CharField(max_length="5", blank=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True)
tags = models.ManyToManyField("Tags", related_name="entries")
class Meta:
ordering = ("date", "created")
class Tags(models.Model):
project = models.ForeignKey(Project, related_name="tags")
name = models.CharField(max_length=255)
description = models.TextField(blank=True, default="")
created = models.DateTimeField(auto_now_add=True)
class Meta:
ordering = ("name",)
class ProjectUsers(models.Model):
project = models.ForeignKey(Project)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
user_abbr = models.CharField("User abbreviation for the project",
max_length=25)
class Meta:
unique_together = ("project", "user")
| mit | Python |
c8af134a0852eabb1b196a1087cb4f5c176a4818 | Modify module import syntax to avoid confusion with test method | andreas-h/pelican-plugins,talha131/pelican-plugins,talha131/pelican-plugins,rlaboiss/pelican-plugins,UHBiocomputation/pelican-plugins,andreas-h/pelican-plugins,xsteadfastx/pelican-plugins,mikitex70/pelican-plugins,mortada/pelican-plugins,talha131/pelican-plugins,ingwinlu/pelican-plugins,danmackinlay/pelican-plugins,ingwinlu/pelican-plugins,pxquim/pelican-plugins,rlaboiss/pelican-plugins,jantman/pelican-plugins,florianjacob/pelican-plugins,mortada/pelican-plugins,danmackinlay/pelican-plugins,M157q/pelican-plugins,andreas-h/pelican-plugins,mitchins/pelican-plugins,wilsonfreitas/pelican-plugins,pxquim/pelican-plugins,xsteadfastx/pelican-plugins,UHBiocomputation/pelican-plugins,xsteadfastx/pelican-plugins,danmackinlay/pelican-plugins,mortada/pelican-plugins,florianjacob/pelican-plugins,mortada/pelican-plugins,florianjacob/pelican-plugins,jantman/pelican-plugins,pxquim/pelican-plugins,andreas-h/pelican-plugins,M157q/pelican-plugins,MarkusH/pelican-plugins,rlaboiss/pelican-plugins,florianjacob/pelican-plugins,benjaminabel/pelican-plugins,mikitex70/pelican-plugins,wilsonfreitas/pelican-plugins,talha131/pelican-plugins,UHBiocomputation/pelican-plugins,jantman/pelican-plugins,howthebodyworks/pelican-plugins,talha131/pelican-plugins,jantman/pelican-plugins,howthebodyworks/pelican-plugins,MarkusH/pelican-plugins,UHBiocomputation/pelican-plugins,xsteadfastx/pelican-plugins,wilsonfreitas/pelican-plugins,mikitex70/pelican-plugins,pxquim/pelican-plugins,wilsonfreitas/pelican-plugins,mikitex70/pelican-plugins,mitchins/pelican-plugins,MarkusH/pelican-plugins,mortada/pelican-plugins,howthebodyworks/pelican-plugins,benjaminabel/pelican-plugins,benjaminabel/pelican-plugins,danmackinlay/pelican-plugins,benjaminabel/pelican-plugins,M157q/pelican-plugins,ingwinlu/pelican-plugins,MarkusH/pelican-plugins,ingwinlu/pelican-plugins,howthebodyworks/pelican-plugins,M157q/pelican-plugins,M
arkusH/pelican-plugins,mitchins/pelican-plugins,rlaboiss/pelican-plugins,mitchins/pelican-plugins | thumbnailer/test_thumbnails.py | thumbnailer/test_thumbnails.py | from thumbnailer import _resizer
from unittest import TestCase, main
import os
from PIL import Image
class ThumbnailerTests(TestCase):
def path(self, filename):
return os.path.join(self.img_path, filename)
def setUp(self):
self.img_path = os.path.join(os.path.dirname(__file__), "test_data")
self.img = Image.open(self.path("sample_image.jpg"))
def testSquare(self):
r = _resizer('square', '100', self.img_path)
output = r.resize(self.img)
self.assertEqual((100, 100), output.size)
def testExact(self):
r = _resizer('exact', '250x100', self.img_path)
output = r.resize(self.img)
self.assertEqual((250, 100), output.size)
def testWidth(self):
r = _resizer('aspect', '250x?', self.img_path)
output = r.resize(self.img)
self.assertEqual((250, 166), output.size)
def testHeight(self):
r = _resizer('aspect', '?x250', self.img_path)
output = r.resize(self.img)
self.assertEqual((375, 250), output.size)
class ThumbnailerFilenameTest(TestCase):
def path(self, *parts):
return os.path.join(self.img_path, *parts)
def setUp(self):
self.img_path = os.path.join(os.path.dirname(__file__), "test_data")
def testRoot(self):
"""Test a file that is in the root of img_path."""
r = _resizer('square', '100', self.img_path)
new_name = r.get_thumbnail_name(self.path('sample_image.jpg'))
self.assertEqual('sample_image_square.jpg', new_name)
def testRootWithSlash(self):
r = _resizer('square', '100', self.img_path + '/')
new_name = r.get_thumbnail_name(self.path('sample_image.jpg'))
self.assertEqual('sample_image_square.jpg', new_name)
def testSubdir(self):
"""Test a file that is in a sub-directory of img_path."""
r = _resizer('square', '100', self.img_path)
new_name = r.get_thumbnail_name(self.path('subdir', 'sample_image.jpg'))
self.assertEqual('subdir/sample_image_square.jpg', new_name)
if __name__=="__main__":
main()
| from thumbnailer import _resizer
from unittest import TestCase, main
import os.path as path
from PIL import Image
class ThumbnailerTests(TestCase):
def path(self, filename):
return path.join(self.img_path, filename)
def setUp(self):
self.img_path = path.join(path.dirname(__file__), "test_data")
self.img = Image.open(self.path("sample_image.jpg"))
def testSquare(self):
r = _resizer('square', '100', self.img_path)
output = r.resize(self.img)
self.assertEqual((100, 100), output.size)
def testExact(self):
r = _resizer('exact', '250x100', self.img_path)
output = r.resize(self.img)
self.assertEqual((250, 100), output.size)
def testWidth(self):
r = _resizer('aspect', '250x?', self.img_path)
output = r.resize(self.img)
self.assertEqual((250, 166), output.size)
def testHeight(self):
r = _resizer('aspect', '?x250', self.img_path)
output = r.resize(self.img)
self.assertEqual((375, 250), output.size)
class ThumbnailerFilenameTest(TestCase):
def path(self, *parts):
return path.join(self.img_path, *parts)
def setUp(self):
self.img_path = path.join(path.dirname(__file__), "test_data")
def testRoot(self):
"""Test a file that is in the root of img_path."""
r = _resizer('square', '100', self.img_path)
new_name = r.get_thumbnail_name(self.path('sample_image.jpg'))
self.assertEqual('sample_image_square.jpg', new_name)
def testRootWithSlash(self):
r = _resizer('square', '100', self.img_path + '/')
new_name = r.get_thumbnail_name(self.path('sample_image.jpg'))
self.assertEqual('sample_image_square.jpg', new_name)
def testSubdir(self):
"""Test a file that is in a sub-directory of img_path."""
r = _resizer('square', '100', self.img_path)
new_name = r.get_thumbnail_name(self.path('subdir', 'sample_image.jpg'))
self.assertEqual('subdir/sample_image_square.jpg', new_name)
if __name__=="__main__":
main()
| agpl-3.0 | Python |
538089f7055b90926adea159bdac03da1d5318d4 | Fix Zika build to include all JSONs | nextstrain/augur,nextstrain/augur,blab/nextstrain-augur,nextstrain/augur | builds/zika/zika.process.py | builds/zika/zika.process.py | from __future__ import print_function
import os, sys
# we assume (and assert) that this script is running from the virus directory, i.e. inside H7N9 or zika
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import base.process
from base.process import process
def collect_args():
"""Returns a Zika-specific argument parser.
"""
parser = base.process.collect_args()
parser.set_defaults(
json="prepared/zika.json"
)
return parser
config = {
"dir": "zika",
"in": "prepared/zika.json",
"newick_tree_options": {"nthreads": 4},
"clock_filter": {
"n_iqd": 4,
},
"geo_inference": ['country', 'region'], # what traits to perform this on
"geo_inference_options": {
"root_state": {
"region": "southeast_asia",
"country": "vietnam",
},
},
"auspice": { ## settings for auspice JSON export
"color_options": {
"country":{"key":"country", "legendTitle":"Country", "menuItem":"country", "type":"discrete"},
"region":{"key":"region", "legendTitle":"Region", "menuItem":"region", "type":"discrete"},
},
"controls": {'authors':['authors']},
"defaults": {'mapTriplicate': True}
},
"timetree_options": {
"Tc": 'opt',
"confidence":True
}
}
if __name__=="__main__":
parser = collect_args()
params = parser.parse_args()
if params.clean:
config["clean"] = True
if params.json:
config["in"] = params.json
config["newick_tree_options"]["raxml"] = not params.no_raxml
runner = process(config)
runner.align(fill_gaps=True)
runner.build_tree()
runner.timetree_setup_filter_run()
runner.run_geo_inference()
runner.save_as_nexus()
runner.auspice_export()
| from __future__ import print_function
import os, sys
# we assume (and assert) that this script is running from the virus directory, i.e. inside H7N9 or zika
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import base.process
from base.process import process
def collect_args():
"""Returns a Zika-specific argument parser.
"""
parser = base.process.collect_args()
parser.set_defaults(
json="prepared/zika.json"
)
return parser
config = {
"dir": "zika",
"in": "prepared/zika.json",
"newick_tree_options": {"nthreads": 4},
"clock_filter": {
"n_iqd": 4,
},
"geo_inference": ['country', 'region'], # what traits to perform this on
"geo_inference_options": {
"root_state": {
"region": "southeast_asia",
"country": "vietnam",
},
},
"auspice": { ## settings for auspice JSON export
"color_options": {
"country":{"key":"country", "legendTitle":"Country", "menuItem":"country", "type":"discrete"},
"region":{"key":"region", "legendTitle":"Region", "menuItem":"region", "type":"discrete"},
},
"controls": {'authors':['authors']},
"defaults": {'mapTriplicate': True},
"extra_jsons": []
},
"timetree_options": {
"Tc": 'opt',
"confidence":True
}
}
if __name__=="__main__":
parser = collect_args()
params = parser.parse_args()
if params.clean:
config["clean"] = True
if params.json:
config["in"] = params.json
config["newick_tree_options"]["raxml"] = not params.no_raxml
runner = process(config)
runner.align(fill_gaps=True)
runner.build_tree()
runner.timetree_setup_filter_run()
runner.run_geo_inference()
runner.save_as_nexus()
runner.auspice_export()
| agpl-3.0 | Python |
fda1cbb413f8647c2ab5be858ec0aaa32a0fbfac | add SEND_MAIL_USER setting | lxdiyun/mail_sender,lxdiyun/mail_sender,lxdiyun/mail_sender,lxdiyun/mail_sender | mail/models.py | mail/models.py | """mail models"""
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from django.db import models
from django.core.urlresolvers import reverse
from django.conf import settings
from jinja2 import Template
from tinymce.models import HTMLField
class Receiver(models.Model):
"""mail receiver"""
mail_address = models.CharField(max_length=128)
title = models.CharField(max_length=128)
company = models.CharField(max_length=256)
country = models.CharField(max_length=256, blank=True, null=True)
city = models.CharField(max_length=256, blank=True, null=True)
def __unicode__(self):
return self.company + ":" + self.title + "[" + self.mail_address +"]"
class Mail(models.Model):
"""mail content"""
subject = models.CharField(max_length=1024)
content = HTMLField()
receivers = models.ManyToManyField('Receiver', null=True, blank=True)
def __unicode__(self):
return self.subject
def get_absolute_url(self):
"""url of the mail"""
return reverse("mail_detail", kwargs={'pk': self.id})
def generate_mail_conent(mail, receiver):
template = Template(mail.content)
html = template.render(obj=receiver)
return html
def generate_mail(mail, receiver):
msg = MIMEMultipart('alternative')
msg['Subject'] = mail.subject
msg['From'] = settings.SEND_MAIL_USER
msg['To'] = receiver.mail_address
msg.attach(MIMEText(generate_mail_conent(mail, receiver), 'html', 'utf-8'))
return msg
| """mail models"""
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from django.db import models
from django.core.urlresolvers import reverse
from django.conf import settings
from jinja2 import Template
from tinymce.models import HTMLField
class Receiver(models.Model):
"""mail receiver"""
mail_address = models.CharField(max_length=128)
title = models.CharField(max_length=128)
company = models.CharField(max_length=256)
country = models.CharField(max_length=256, blank=True, null=True)
city = models.CharField(max_length=256, blank=True, null=True)
def __unicode__(self):
return self.company + ":" + self.title + "[" + self.mail_address +"]"
class Mail(models.Model):
"""mail content"""
subject = models.CharField(max_length=1024)
content = HTMLField()
receivers = models.ManyToManyField('Receiver', null=True, blank=True)
def __unicode__(self):
return self.subject
def get_absolute_url(self):
"""url of the mail"""
return reverse("mail_detail", kwargs={'pk': self.id})
def generate_mail_conent(mail, receiver):
template = Template(mail.content)
html = template.render(obj=receiver)
return html
def generate_mail(mail, receiver):
msg = MIMEMultipart('alternative')
msg['Subject'] = mail.subject
msg['From'] = settings.EMAIL_HOST_USER
msg['To'] = receiver.mail_address
msg.attach(MIMEText(generate_mail_conent(mail, receiver), 'html', 'utf-8'))
return msg
| bsd-3-clause | Python |
6873a477890c08f51b54b616555fe88bb3a9704e | Add the Plivo outgoing number | Razvy000/cabot_alert_plivo | cabot_alert_plivo/models.py | cabot_alert_plivo/models.py | from os import environ as env
from django.conf import settings
from django.template import Context, Template
from django.db import models
from cabot.cabotapp.alert import AlertPlugin, AlertPluginUserData
import requests
import logging
import plivo
# get the environment variables (see cabot/conf/development.env)
auth_id = env.get('PLIVO_AUTH_ID')
auth_token = env.get('PLIVO_AUTH_TOKEN')
# create a client
plivoClient = plivo.RestAPI(auth_id, auth_token)
# message template
plivo_template = "Service {{ service.name }} {% if service.overall_status == service.PASSING_STATUS %}is back to normal{% else %}reporting {{ service.overall_status }} status{% endif %}: {{ scheme }}://{{ host }}{% url 'service' pk=service.id %}. {% if service.overall_status != service.PASSING_STATUS %}Checks failing: {% for check in service.all_failing_checks %}{% if check.check_category == 'Jenkins check' %}{% if check.last_result.error %} {{ check.name }} ({{ check.last_result.error|safe }}) {{jenkins_api}}job/{{ check.name }}/{{ check.last_result.job_number }}/console{% else %} {{ check.name }} {{jenkins_api}}/job/{{ check.name }}/{{check.last_result.job_number}}/console {% endif %}{% else %} {{ check.name }} {% if check.last_result.error %} ({{ check.last_result.error|safe }}){% endif %}{% endif %}{% endfor %}{% endif %}{% if alert %}{% for alias in users %} @{{ alias }}{% endfor %}{% endif %}"
class PlivoAlert(AlertPlugin):
name = "Plivo SMS"
author = "Razvan Pistolea"
def send_alert(self, service, users, duty_officers):
# create message
c = Context({
'service': service,
'host': settings.WWW_HTTP_HOST,
'scheme': settings.WWW_SCHEME
})
t = Template(plivo_template)
msg = t.render(c)
# get the source number
src_number = env.get('PLIVO_OUTGOING_NUMBER')
# get users's plivo mobile numbers
plivo_numbers = [u.mobile_number for u in PlivoAlertUserData.objects.filter(user__user__in=users)]
# send SMS using Plivo Python API
try:
for plivo_number in plivo_numbers:
send_response = plivoClient.Message.send(
src=src_number,
dst=plivo_number,
text=msg,
url='http://localhost.com',
)
except Exception, exp:
logger.exception('Error invoking Plivo SMS: %s' % str(exp))
class PlivoAlertUserData(AlertPluginUserData):
name = "Pluvio Plugin"
mobile_number = models.CharField(max_length=20, blank=True, default='')
| from os import environ as env
from django.conf import settings
from django.template import Context, Template
from django.db import models
from cabot.cabotapp.alert import AlertPlugin, AlertPluginUserData
import requests
import logging
import plivo
# get the environment variables (see cabot/conf/development.env)
auth_id = env.get('PLIVO_AUTH_ID')
auth_token = env.get('PLIVO_AUTH_TOKEN')
# create a client
plivoClient = plivo.RestAPI(auth_id, auth_token)
# message template
plivo_template = "Service {{ service.name }} {% if service.overall_status == service.PASSING_STATUS %}is back to normal{% else %}reporting {{ service.overall_status }} status{% endif %}: {{ scheme }}://{{ host }}{% url 'service' pk=service.id %}. {% if service.overall_status != service.PASSING_STATUS %}Checks failing: {% for check in service.all_failing_checks %}{% if check.check_category == 'Jenkins check' %}{% if check.last_result.error %} {{ check.name }} ({{ check.last_result.error|safe }}) {{jenkins_api}}job/{{ check.name }}/{{ check.last_result.job_number }}/console{% else %} {{ check.name }} {{jenkins_api}}/job/{{ check.name }}/{{check.last_result.job_number}}/console {% endif %}{% else %} {{ check.name }} {% if check.last_result.error %} ({{ check.last_result.error|safe }}){% endif %}{% endif %}{% endfor %}{% endif %}{% if alert %}{% for alias in users %} @{{ alias }}{% endfor %}{% endif %}"
class PlivoAlert(AlertPlugin):
name = "Plivo SMS"
author = "Razvan Pistolea"
def send_alert(self, service, users, duty_officers):
# create message
c = Context({
'service': service,
'host': settings.WWW_HTTP_HOST,
'scheme': settings.WWW_SCHEME
})
t = Template(plivo_template)
msg = t.render(c)
# get users's plivo mobile numbers
plivo_numbers = [u.mobile_number for u in PlivoAlertUserData.objects.filter(user__user__in=users)]
# send SMS using Plivo Python API
try:
for plivo_number in plivo_numbers:
send_response = plivoClient.Message.send(
src='441233801333',
dst=plivo_number,
text=msg,
url='http://localhost.com',
)
except Exception, exp:
logger.exception('Error invoking Plivo SMS: %s' % str(exp))
class PlivoAlertUserData(AlertPluginUserData):
name = "Pluvio Plugin"
mobile_number = models.CharField(max_length=20, blank=True, default='')
| mit | Python |
b55c8a33887a9e2932113ed279b9292b7b547a5d | update : change cmmedia/view.py resvers images to images_url | quxiaolong1504/cloudmusic | cmmedia/views.py | cmmedia/views.py | # encoding=utf-8
from django.utils.translation import ugettext_lazy as _
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework import generics
from rest_framework.viewsets import ModelViewSet
from cmmedia.models import Image
from cmmedia.serializers import ImageSerializer
class ResourceURLView(APIView):
allowed_methods = ['GET']
def get(self,request,*args,**kwargs):
return Response(self.get_url_dispach())
def get_url_dispach(self,format=None):
return {
_(u"images_url").strip(): reverse('resource_image', request=self.request, format=format,),
}
class ImageView(generics.CreateAPIView):
"""
创建和获取Image资源
"""
queryset = Image.objects.all()
serializer_class = ImageSerializer
| # encoding=utf-8
from django.utils.translation import ugettext_lazy as _
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.views import APIView
from rest_framework import generics
from rest_framework.viewsets import ModelViewSet
from cmmedia.models import Image
from cmmedia.serializers import ImageSerializer
class ResourceURLView(APIView):
allowed_methods = ['GET']
def get(self,request,*args,**kwargs):
return Response(self.get_url_dispach())
def get_url_dispach(self,format=None):
return {
_(u"images").strip(): reverse('resource_image', request=self.request, format=format,),
}
class ImageView(generics.CreateAPIView):
"""
创建和获取Image资源
"""
queryset = Image.objects.all()
serializer_class = ImageSerializer
| mpl-2.0 | Python |
36110646c5c5abead8c2a3a7d98d36bb6d49db59 | Remove pointless models include | praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control | subsend/models.py | subsend/models.py | # Create your models here.
| from django.db import models
# Create your models here.
models
| bsd-3-clause | Python |
a4062588d1eae0883dd8d56f198e84a8875eabb5 | make self_test not import if pytest is not installed | dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy | sunpy/__init__.py | sunpy/__init__.py | """
SunPy
=====
An open-source Python library for Solar Physics data analysis.
Web Links
---------
Homepage: http://www.sunpy.org
Documentation: http://sunpy.readthedocs.org/en/latest/index.html
"""
from __future__ import absolute_import
import warnings
__version__ = '0.3.3'
try:
from sunpy.version import version as __version__
except ImportError:
warnings.warn('Missing version.py; you need to run setup.py', Warning)
from sunpy.util.config import load_config, print_config
from sunpy.util import system_info
try:
from sunpy.tests import main as self_test
except ImportError:
pass
# Sample data
from sunpy.data.sample import (AIA_171_IMAGE, RHESSI_IMAGE, EIT_195_IMAGE,
RHESSI_EVENT_LIST, CALLISTO_IMAGE)
# Load user configuration
config = load_config()
| """
SunPy
=====
An open-source Python library for Solar Physics data analysis.
Web Links
---------
Homepage: http://www.sunpy.org
Documentation: http://sunpy.readthedocs.org/en/latest/index.html
"""
from __future__ import absolute_import
import warnings
__version__ = '0.3.3'
try:
from sunpy.version import version as __version__
except ImportError:
warnings.warn('Missing version.py; you need to run setup.py', Warning)
from sunpy.util.config import load_config, print_config
from sunpy.util import system_info
from sunpy.tests import main as self_test
# Sample data
from sunpy.data.sample import (AIA_171_IMAGE, RHESSI_IMAGE, EIT_195_IMAGE,
RHESSI_EVENT_LIST, CALLISTO_IMAGE)
# Load user configuration
config = load_config()
| bsd-2-clause | Python |
336093ee08fdc46c3f95656ca44a43a0dd045c4d | update last_login when authing with token | jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle | bluebottle/auth/views.py | bluebottle/auth/views.py | from rest_framework.views import APIView
from rest_framework.authtoken.models import Token
from rest_framework.response import Response
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.authentication import get_authorization_header
from rest_framework import parsers, renderers
from rest_framework import status
from social.apps.django_app.utils import strategy
#from social_auth.decorators import
from datetime import datetime
class GetAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
model = Token
# Accept backend as a parameter and 'auth' for a login / pass
def post(self, request, backend):
serializer = self.serializer_class(data=request.DATA)
# Here we call PSA to authenticate like we would if we used PSA on server side.
jwt_token = register_by_access_token(request, backend)
# If user is active we get or create the REST token and send it back with user data
if jwt_token:
return Response({'token': jwt_token})
return Response({'error': "Ai caramba!"})
@strategy()
def register_by_access_token(request, backend):
backend = request.strategy.backend
access_token = request.DATA.get('accessToken', None)
if access_token:
user = backend.do_auth(access_token)
user.last_login = datetime.now()
user.save()
return user.get_jwt_token()
return None
| from rest_framework.views import APIView
from rest_framework.authtoken.models import Token
from rest_framework.response import Response
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.authentication import get_authorization_header
from rest_framework import parsers, renderers
from rest_framework import status
from social.apps.django_app.utils import strategy
#from social_auth.decorators import
class GetAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
model = Token
# Accept backend as a parameter and 'auth' for a login / pass
def post(self, request, backend):
serializer = self.serializer_class(data=request.DATA)
# Here we call PSA to authenticate like we would if we used PSA on server side.
jwt_token = register_by_access_token(request, backend)
# If user is active we get or create the REST token and send it back with user data
if jwt_token:
return Response({'token': jwt_token})
return Response({'error': "Ai caramba!"})
@strategy()
def register_by_access_token(request, backend):
backend = request.strategy.backend
access_token = request.DATA.get('accessToken', None)
if access_token:
user = backend.do_auth(access_token)
return user.get_jwt_token()
return None
| bsd-3-clause | Python |
2a8e45575657da57bd21c832ccdd221b5044fc40 | Update bottlespin.py | kallerdaller/Cogs-Yorkfield | bottlespin/bottlespin.py | bottlespin/bottlespin.py | import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True, alias=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
roles = [ctx.message.server.roles]
await self.bot.say(str(roles.name))
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
| import discord
from discord.ext import commands
from random import choice
class Bottlespin:
"""Spins a bottle and lands on a random user."""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, no_pm=True, alias=["bottlespin"])
async def spin(self, ctx, role):
"""Spin the bottle"""
roles = [str(ctx.message.server.roles)]
await self.bot.say(str(roles))
author = ctx.message.author
server = ctx.message.server
if len(server.members) < 2:
await self.bot.say("`Not enough people are around to spin the bottle`")
return
if role in roles:
roleexist = True
else:
await self.bot.say("`{} is not a exising role`".format(role))
return
if roleexist:
target = [m for m in server.members if m != author and role in [
s.name for s in m.roles] and str(m.status) == "online" or str(m.status) == "idle"]
else:
target = [m for m in server.members if m != author and str(
m.status) == "online" or str(m.status) == "idle"]
if not target:
if role:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at with the role {}`".format(role))
else:
await self.bot.say("`Sorry I couldnt find anyone to point the bottle at`")
return
else:
target = choice(list(target))
await self.bot.say("`{0.display_name}#{0.discriminator} spinned the bottle and it landed on {1.display_name}#{1.discriminator}`".format(author, target))
def setup(bot):
n = Bottlespin(bot)
bot.add_cog(n)
| mit | Python |
5cf4e1181dd3146b6070e144d720406bb7fb13d5 | Add qiutil to logging. | ohsu-qin/qiutil | test/helpers/logging.py | test/helpers/logging.py | """
This test logging module configures test case logging to print
debug messages to stdout.
"""
from qiutil.logging import (configure, logger)
configure('test', 'qiutil', level='DEBUG')
| """
This test logging module configures test case logging to print
debug messages to stdout.
"""
from qiutil.logging import (configure, logger)
configure('test', level='DEBUG')
| bsd-2-clause | Python |
67659e8da1a6479dac0eae2b0219962a4eb4eae9 | fix arg bug | heatery/strava-api-experiment,heatery/strava-api-experiment,anthonywu/strava-api-experiment,anthonywu/strava-api-experiment,heatery/strava-api-experiment,anthonywu/strava-api-experiment,heatery/strava-api-experiment,anthonywu/strava-api-experiment | src/strava_local_client.py | src/strava_local_client.py | #!/usr/bin/env python
"""
Strava Development Sandbox.
Get your *Client ID* and *Client Secret* from https://www.strava.com/settings/api
Usage:
strava_local_client.py get_write_token <client_id> <client_secret> [options]
Options:
-h --help Show this screen.
--port=<port> Local port for OAuth client [default: 8000].
"""
import stravalib
from flask import Flask, request
app = Flask(__name__)
API_CLIENT = stravalib.Client()
# set these in __main__
CLIENT_ID = None
CLIENT_SECRET = None
@app.route("/auth")
def auth_callback():
code = request.args.get('code')
access_token = API_CLIENT.exchange_code_for_token(
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET,
code=code
)
return access_token
if __name__ == '__main__':
import docopt
import subprocess
import sys
from blessings import Terminal
args = docopt.docopt(__doc__)
t = Terminal()
if args['get_write_token']:
CLIENT_ID, CLIENT_SECRET = args['<client_id>'], args['<client_secret>']
auth_url = API_CLIENT.authorization_url(
client_id=args['<client_id>'],
redirect_uri='http://127.0.0.1:{port}/auth'.format(port=args['--port']),
scope='write',
state='from_cli'
)
print(t.red('Go to {0} to authorize access: '.format(auth_url)))
if sys.platform == 'darwin':
print(t.green('On OS X - launching url at default browser'))
subprocess.call(['open', auth_url])
app.run(port=int(args['--port']))
| #!/usr/bin/env python
"""
Strava Development Sandbox.
Get your *Client ID* and *Client Secret* from https://www.strava.com/settings/api
Usage:
strava_local_client.py get_write_token <client_id> <client_secret> [options]
Options:
-h --help Show this screen.
--port=<port> Local port for OAuth client [default: 8000].
"""
import stravalib
from flask import Flask, request
app = Flask(__name__)
API_CLIENT = stravalib.Client()
# set these in __main__
CLIENT_ID = None
CLIENT_SECRET = None
@app.route("/auth")
def auth_callback():
code = request.args.get('code')
access_token = API_CLIENT.exchange_code_for_token(
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET,
code=code
)
return access_token
if __name__ == '__main__':
import docopt
import subprocess
import sys
from blessings import Terminal
args = docopt.docopt(__doc__)
CLIENT_ID, CLIENT_SECRET = args['<client_id>'], args['<client_secret>']
t = Terminal()
if args['get_write_token']:
auth_url = API_CLIENT.authorization_url(
client_id=1418,
redirect_uri='http://127.0.0.1:{port}/auth'.format(port=args['--port']),
scope='write',
state='from_cli'
)
print(t.red('Go to {0} to authorize access: '.format(auth_url)))
if sys.platform == 'darwin':
print(t.green('On OS X - launching url at default browser'))
subprocess.call(['open', auth_url])
app.run(port=int(args['--port']))
| mit | Python |
854eb3b6cf92dc7c54c931f9f128a4577bca5b2a | Bump version number | nabla-c0d3/sslyze | sslyze/__init__.py | sslyze/__init__.py |
__author__ = 'Alban Diquet'
__version__ = '2.0.0'
__email__ = 'nabla.c0d3@gmail.com'
PROJECT_URL = 'https://github.com/nabla-c0d3/sslyze'
|
__author__ = 'Alban Diquet'
__version__ = '1.4.3'
__email__ = 'nabla.c0d3@gmail.com'
PROJECT_URL = 'https://github.com/nabla-c0d3/sslyze'
| agpl-3.0 | Python |
c2ae2d3a35ee9dc6faf956c90604459fac96473c | Update st2client description and classifiers. | Plexxi/st2,lakshmi-kannan/st2,StackStorm/st2,nzlosh/st2,lakshmi-kannan/st2,nzlosh/st2,StackStorm/st2,peak6/st2,punalpatel/st2,peak6/st2,peak6/st2,punalpatel/st2,dennybaa/st2,pixelrebel/st2,Plexxi/st2,punalpatel/st2,lakshmi-kannan/st2,dennybaa/st2,armab/st2,Plexxi/st2,emedvedev/st2,emedvedev/st2,tonybaloney/st2,tonybaloney/st2,StackStorm/st2,emedvedev/st2,pixelrebel/st2,nzlosh/st2,nzlosh/st2,tonybaloney/st2,dennybaa/st2,armab/st2,pixelrebel/st2,StackStorm/st2,armab/st2,Plexxi/st2 | st2client/setup.py | st2client/setup.py | #!/usr/bin/env python2.7
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from setuptools import setup, find_packages
from dist_utils import fetch_requirements
from dist_utils import apply_vagrant_workaround
from st2client import __version__
ST2_COMPONENT = 'st2client'
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
REQUIREMENTS_FILE = os.path.join(BASE_DIR, 'requirements.txt')
install_reqs, dep_links = fetch_requirements(REQUIREMENTS_FILE)
apply_vagrant_workaround()
setup(
name=ST2_COMPONENT,
version=__version__,
description=('Python client library and CLI for the StackStorm (st2) event-driven '
'automation platform.'),
author='StackStorm',
author_email='info@stackstorm.com',
license='Apache License (2.0)',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Information Technology',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7'
],
install_requires=install_reqs,
dependency_links=dep_links,
test_suite=ST2_COMPONENT,
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['setuptools', 'tests']),
entry_points={
'console_scripts': [
'st2 = st2client.shell:main'
]
}
)
| #!/usr/bin/env python2.7
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
from setuptools import setup, find_packages
from dist_utils import fetch_requirements
from dist_utils import apply_vagrant_workaround
from st2client import __version__
ST2_COMPONENT = 'st2client'
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
REQUIREMENTS_FILE = os.path.join(BASE_DIR, 'requirements.txt')
install_reqs, dep_links = fetch_requirements(REQUIREMENTS_FILE)
apply_vagrant_workaround()
setup(
name=ST2_COMPONENT,
version=__version__,
description='CLI and python client library for the StackStorm (st2) automation platform.',
author='StackStorm',
author_email='info@stackstorm.com',
license='Apache License (2.0)',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7'
],
install_requires=install_reqs,
dependency_links=dep_links,
test_suite=ST2_COMPONENT,
zip_safe=False,
include_package_data=True,
packages=find_packages(exclude=['setuptools', 'tests']),
entry_points={
'console_scripts': [
'st2 = st2client.shell:main'
]
}
)
| apache-2.0 | Python |
e34bcec834bf4d84168d04a1ea0a98613ad0df4e | Update migration to fetch domains with applications using old location fixture | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/locations/management/commands/migrate_new_location_fixture.py | corehq/apps/locations/management/commands/migrate_new_location_fixture.py | import json
from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import SQLLocation
from corehq.apps.domain.models import Domain
from corehq.toggles import HIERARCHICAL_LOCATION_FIXTURE, NAMESPACE_DOMAIN
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Enable FF HIERARCHICAL_LOCATION_FIXTURE for
apps with locations and having commtrack:enabled in app files
The Feature Flag FLAT_LOCATION_FIXTURE should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = (
SQLLocation.objects.order_by('domain').distinct('domain')
.values_list('domain', flat=True)
)
domains_with_hierarchical_fixture = find_applications_with_hierarchical_fixture(
domains_having_locations
)
toggle = Toggle.get(HIERARCHICAL_LOCATION_FIXTURE.slug)
for domain in domains_with_hierarchical_fixture:
toggle.add(domain, True, NAMESPACE_DOMAIN)
def find_applications_with_hierarchical_fixture(domains):
search_string = 'commtrack:enabled'
domain_with_application = {}
for domain in domains:
domain_obj = Domain.get_by_name(domain)
for application in domain_obj.applications():
raw_doc = json.dumps(application.get_db().get(application.id))
if search_string in raw_doc:
search_string[domain] = application.id
continue
return domain_with_application
| from django.core.management.base import BaseCommand
from toggle.models import Toggle
from corehq.apps.locations.models import LocationFixtureConfiguration, SQLLocation
from corehq.toggles import FLAT_LOCATION_FIXTURE
class Command(BaseCommand):
help = """
To migrate to new flat fixture for locations. Update apps with locations and not having
FLAT_LOCATION_FIXTURE enabled to have LocationFixtureConfiguration with
sync_hierarchical_fixture True and sync_flat_fixture False to have old fixtures enabled.
The Feature Flag should be removed after this
"""
def handle(self, *args, **options):
domains_having_locations = set(SQLLocation.objects.values_list('domain', flat=True))
toggle = Toggle.get(FLAT_LOCATION_FIXTURE.slug)
enabled_users = toggle.enabled_users
enabled_domains = [user.split('domain:')[1] for user in enabled_users]
for domain_name in domains_having_locations:
if domain_name not in enabled_domains:
domain_config = LocationFixtureConfiguration.for_domain(domain_name)
# update configs that had not been changed which means both values are at default True
if domain_config.sync_hierarchical_fixture and domain_config.sync_flat_fixture:
# update them to use hierarchical fixture
domain_config.sync_flat_fixture = False
domain_config.sync_hierarchical_fixture = True
domain_config.save()
| bsd-3-clause | Python |
a17ed4f65b7fa5a035efb7c6ff19fcf477a65429 | Remove remaining django-mptt 0.7 compatibility code | edoburu/django-categories-i18n,edoburu/django-categories-i18n | categories_i18n/managers.py | categories_i18n/managers.py | """
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
| """
The manager classes.
"""
import django
from django.db.models.query import QuerySet
from mptt.managers import TreeManager
from mptt.querysets import TreeQuerySet
from parler.managers import TranslatableManager, TranslatableQuerySet
class CategoryQuerySet(TranslatableQuerySet, TreeQuerySet):
"""
The Queryset methods for the Category model.
"""
def as_manager(cls):
# Make sure the Django way of creating managers works.
manager = CategoryManager.from_queryset(cls)()
manager._built_with_as_manager = True
return manager
as_manager.queryset_only = True
as_manager = classmethod(as_manager)
class CategoryManager(TreeManager, TranslatableManager):
"""
Base manager class for the categories.
"""
_queryset_class = CategoryQuerySet
def get_queryset(self):
# Nasty: In some django-mptt 0.7 versions, TreeManager.get_querset() no longer calls super()
# Hence, redefine get_queryset() here to have the logic from django-parler and django-mptt.
return self._queryset_class(self.model, using=self._db).order_by(
self.tree_id_attr, self.left_attr
)
| apache-2.0 | Python |
07df11f81cf050d88218f5fb6348b1193bfb4e20 | use the right user to chown | ceph/ceph-installer,ceph/ceph-installer,ceph/mariner-installer,ceph/ceph-installer | ceph_installer/templates.py | ceph_installer/templates.py |
setup_script = """#!/bin/bash -x -e
if [[ $EUID -ne 0 ]]; then
echo "You must be a root user or execute this script with sudo" 2>&1
exit 1
fi
echo "--> creating new user with disabled password: ansible"
useradd -m ceph-installer
passwd -d ceph-installer
echo "--> adding provisioning key to the ansible authorized_keys"
curl -s -L -o ansible.pub {ssh_key_address}
mkdir -p /home/ansible/.ssh
cat ansible.pub >> /home/ansible/.ssh/authorized_keys
chown -R ceph-installer:ceph-installer /home/ansible/.ssh
echo "--> ensuring that ansible user will be able to sudo"
echo "ansible ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/ansible > /dev/null
echo "--> ensuring ansible user does not require a tty"
echo 'Defaults:ansible !requiretty' | sudo tee /etc/sudoers.d/ansible > /dev/null
"""
# Note that the agent_script can't run by itself, it needs to be concatenated
# along the regular setup script
agent_script = """
echo "--> installing and configuring agent"
curl -d '{{"hosts": ["{target_host}"]}}' -X POST {agent_endpoint}
"""
|
setup_script = """#!/bin/bash -x -e
if [[ $EUID -ne 0 ]]; then
echo "You must be a root user or execute this script with sudo" 2>&1
exit 1
fi
echo "--> creating new user with disabled password: ansible"
useradd -m ceph-installer
passwd -d ceph-installer
echo "--> adding provisioning key to the ansible authorized_keys"
curl -s -L -o ansible.pub {ssh_key_address}
mkdir -p /home/ansible/.ssh
cat ansible.pub >> /home/ansible/.ssh/authorized_keys
chown -R ansible:ansible /home/ansible/.ssh
echo "--> ensuring that ansible user will be able to sudo"
echo "ansible ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/ansible > /dev/null
echo "--> ensuring ansible user does not require a tty"
echo 'Defaults:ansible !requiretty' | sudo tee /etc/sudoers.d/ansible > /dev/null
"""
# Note that the agent_script can't run by itself, it needs to be concatenated
# along the regular setup script
agent_script = """
echo "--> installing and configuring agent"
curl -d '{{"hosts": ["{target_host}"]}}' -X POST {agent_endpoint}
"""
| mit | Python |
a8d6abd869fc30d395ffafec9eea566f58fb840c | add new code tables to admin console | bcgov/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells | app/backend/wells/admin.py | app/backend/wells/admin.py | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.contrib import admin
from wells.models import (
WaterQualityCharacteristic,
WaterQualityColour
)
# Register your models here.
admin.site.register(WaterQualityCharacteristic)
admin.site.register(WaterQualityColour)
| """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.contrib import admin
# Register your models here.
| apache-2.0 | Python |
b9b41ea38de0b54cf85829c10a764aaf70d3e4bf | remove a useless local variable | timmyomahony/django-charsleft-widget,timmyomahony/django-charsleft-widget | charsleft_widget/widgets.py | charsleft_widget/widgets.py | from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
try:
# py2.x
from django.utils.encoding import force_unicode as force_str
except ImportError:
# py3.x
from django.utils.encoding import force_text as force_str
try:
# Django >=1.7
from django.forms.utils import flatatt
except ImportError:
# Django <1.7
from django.forms.util import flatatt
class MediaMixin(object):
pass
class Media:
css = {'screen': ('charsleft-widget/css/charsleft.css',), }
js = ('charsleft-widget/js/charsleft.js',)
class CharsLeftInput(forms.TextInput):
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
final_attrs['value'] = force_str(self._format_value(value))
maxlength = final_attrs.get('maxlength', False)
if not maxlength:
return mark_safe(u'<input%s />' % flatatt(final_attrs))
current = force_str(int(maxlength) - len(value))
html = u"""
<span class="charsleft charsleft-input">
<input %(attrs)s />
<span>
<span class="count">%(current)s</span>
%(char_remain_str)s</span>
<span class="maxlength">%(maxlength)s</span>
</span>
""" % {
'attrs': flatatt(final_attrs),
'current': current,
'char_remain_str': _(u'characters remaining'),
'maxlength': int(maxlength),
}
return mark_safe(html)
| from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
try:
# py2.x
from django.utils.encoding import force_unicode as force_str
except ImportError:
# py3.x
from django.utils.encoding import force_text as force_str
try:
# Django >=1.7
from django.forms.utils import flatatt
except ImportError:
# Django <1.7
from django.forms.util import flatatt
class MediaMixin(object):
pass
class Media:
css = {'screen': ('charsleft-widget/css/charsleft.css',), }
js = ('charsleft-widget/js/charsleft.js',)
class CharsLeftInput(forms.TextInput):
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
final_attrs['value'] = force_str(self._format_value(value))
maxlength = final_attrs.get('maxlength', False)
if not maxlength:
return mark_safe(u'<input%s />' % flatatt(final_attrs))
current = force_str(int(maxlength) - len(value))
char_remain_str = _(u'characters remaining')
html = u"""
<span class="charsleft charsleft-input">
<input %(attrs)s />
<span>
<span class="count">%(current)s</span>
%(char_remain_str)s</span>
<span class="maxlength">%(maxlength)s</span>
</span>
""" % {
'attrs': flatatt(final_attrs),
'current': current,
'char_remain_str': char_remain_str,
'maxlength': int(maxlength),
}
return mark_safe(html)
| mit | Python |
41df2db2fcc3a03c00feaccbd40ccac9f7195ec9 | bump to 0.2.4: version was outdated | FundedByMe/django-mangopay | mangopay/__init__.py | mangopay/__init__.py | __version__ = (0, 2, 4)
| __version__ = (0, 2, 3)
| mit | Python |
2992eede5ba829bb03a85689246775e37f9f69dd | modify version | sosuke-k/cornel-movie-dialogs-corpus-storm,sosuke-k/cornel-movie-dialogs-corpus-storm | mdcorpus/__init__.py | mdcorpus/__init__.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = "0.0.1"
__all__ = ["mdcorpus", "parser"]
from storm.locals import *
from mdcorpus import *
# initializing the relationships
MovieTitlesMetadata.genres = ReferenceSet(MovieTitlesMetadata.id,
MovieGenreLine.movie_id,
MovieGenreLine.genre_id,
Genre.id)
Genre.movies = ReferenceSet(Genre.id,
MovieGenreLine.genre_id,
MovieGenreLine.movie_id,
MovieTitlesMetadata.id)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = "0.1.0"
__all__ = ["mdcorpus", "parser"]
from storm.locals import *
from mdcorpus import *
# initializing the relationships
MovieTitlesMetadata.genres = ReferenceSet(MovieTitlesMetadata.id,
MovieGenreLine.movie_id,
MovieGenreLine.genre_id,
Genre.id)
Genre.movies = ReferenceSet(Genre.id,
MovieGenreLine.genre_id,
MovieGenreLine.movie_id,
MovieTitlesMetadata.id)
| mit | Python |
e975b27484a3d1a6cc96df604f99a4a3efab92b8 | Add better title to job page. | kernelci/kernelci-frontend,kernelci/kernelci-frontend,kernelci/kernelci-frontend,kernelci/kernelci-frontend | app/dashboard/views/job.py | app/dashboard/views/job.py | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flask import (
current_app as app,
render_template,
request
)
from flask.views import View
from dashboard.utils.backend import get_search_parameters
class GeneralJobsView(View):
PAGE_TITLE = app.config.get("DEFAULT_PAGE_TITLE")
JOB_PAGES_TITLE = "%s — %s" % (PAGE_TITLE, "Job Reports")
class JobsAllView(GeneralJobsView):
def dispatch_request(self):
body_title = "Available Jobs"
search_filter, page_len = get_search_parameters(request)
return render_template(
"jobs-all.html",
body_title=body_title,
page_len=page_len,
page_title=self.JOB_PAGES_TITLE,
search_filter=search_filter
)
class JobsJobView(GeneralJobsView):
def dispatch_request(self, **kwargs):
job = kwargs["job"]
body_title = "Details for «%s»" % job
page_title = "%s — «%s» job" % (self.PAGE_TITLE, job)
search_filter, page_len = get_search_parameters(request)
return render_template(
"jobs-job.html",
body_title=body_title,
job=job,
page_len=page_len,
page_title=page_title,
search_filter=search_filter
)
| # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from flask import (
current_app as app,
render_template,
request
)
from flask.views import View
from dashboard.utils.backend import get_search_parameters
class GeneralJobsView(View):
PAGE_TITLE = app.config.get("DEFAULT_PAGE_TITLE")
JOB_PAGES_TITLE = "%s — %s" % (PAGE_TITLE, "Job Reports")
class JobsAllView(GeneralJobsView):
def dispatch_request(self):
body_title = "Available Jobs"
search_filter, page_len = get_search_parameters(request)
return render_template(
"jobs-all.html",
body_title=body_title,
page_len=page_len,
page_title=self.JOB_PAGES_TITLE,
search_filter=search_filter
)
class JobsJobView(GeneralJobsView):
def dispatch_request(self, **kwargs):
body_title = "Details for «%s»" % kwargs["job"]
search_filter, page_len = get_search_parameters(request)
return render_template(
"jobs-job.html",
body_title=body_title,
job=kwargs["job"],
page_len=page_len,
page_title=self.PAGE_TITLE,
search_filter=search_filter
)
| lgpl-2.1 | Python |
e1da4e0047449939086a7e36ac3c85bbcd62e713 | add __all__ | anselmobd/fo2,anselmobd/fo2,anselmobd/fo2,anselmobd/fo2 | src/utils/functions/sql.py | src/utils/functions/sql.py | from itertools import takewhile
from pprint import pprint
__all__ = [
'sql_formato_fo2',
]
def sql_formato_fo2(sql):
"""Recebe um SQL como executado no RDBMS
Retira a identação de todas as linhas não comentário e elimina
linhas vazias do início e do final.
Retorna SQL 'formatado'
"""
linhas = sql.split('\n')
min_spaces = 1000
def strip_min_spaces(linha):
nonlocal min_spaces
linha_strip = linha.strip()
if len(linha_strip) >= 2 and linha_strip[:2] != '--':
min_spaces = min(
min_spaces,
sum(1 for _ in takewhile(lambda c: c == ' ', linha)),
)
return linha_strip
linhas_strip = list(map(strip_min_spaces, linhas))
linhas_vazias_inicio = sum(1 for _ in takewhile(lambda l: len(l) == 0, linhas_strip))
linhas_vazias_final = sum(1 for _ in takewhile(lambda l: len(l) == 0, reversed(linhas_strip)))
del linhas[:linhas_vazias_inicio]
if linhas_vazias_final:
del linhas[-linhas_vazias_final:]
def put_min_spaces(linha):
nonlocal min_spaces
linha_strip = linha.strip()
if len(linha_strip) >= 2 and linha_strip[:2] != '--':
linha = linha[min_spaces:]
else:
pprint(linha)
if len(linha_strip) >= 2:
print('maior')
spaces = sum(1 for _ in takewhile(lambda c: c == ' ', linha))
print('spaces', spaces)
linha = ' '*max(0, spaces-min_spaces)+linha_strip
else:
print('menor')
linha = linha_strip
pprint(linha)
return linha
linhas = list(map(put_min_spaces, linhas))
return '\n'.join(linhas)
| from itertools import takewhile
from pprint import pprint
def sql_formato_fo2(sql):
"""Recebe um SQL como executado no RDBMS
Retira a identação de todas as linhas não comentário e elimina
linhas vazias do início e do final.
Retorna SQL 'formatado'
"""
linhas = sql.split('\n')
min_spaces = 1000
def strip_min_spaces(linha):
nonlocal min_spaces
linha_strip = linha.strip()
if len(linha_strip) >= 2 and linha_strip[:2] != '--':
min_spaces = min(
min_spaces,
sum(1 for _ in takewhile(lambda c: c == ' ', linha)),
)
return linha_strip
linhas_strip = list(map(strip_min_spaces, linhas))
linhas_vazias_inicio = sum(1 for _ in takewhile(lambda l: len(l) == 0, linhas_strip))
linhas_vazias_final = sum(1 for _ in takewhile(lambda l: len(l) == 0, reversed(linhas_strip)))
del linhas[:linhas_vazias_inicio]
if linhas_vazias_final:
del linhas[-linhas_vazias_final:]
def put_min_spaces(linha):
nonlocal min_spaces
linha_strip = linha.strip()
if len(linha_strip) >= 2 and linha_strip[:2] != '--':
linha = linha[min_spaces:]
else:
pprint(linha)
if len(linha_strip) >= 2:
print('maior')
spaces = sum(1 for _ in takewhile(lambda c: c == ' ', linha))
print('spaces', spaces)
linha = ' '*max(0, spaces-min_spaces)+linha_strip
else:
print('menor')
linha = linha_strip
pprint(linha)
return linha
linhas = list(map(put_min_spaces, linhas))
return '\n'.join(linhas)
| mit | Python |
16ecb86d28c3587fabf4268c27a6db255af6de7d | fix tests | fata1ex/django-statsy,fata1ex/django-statsy,zhebrak/django-statsy,zhebrak/django-statsy,fata1ex/django-statsy,zhebrak/django-statsy | statsy/__init__.py | statsy/__init__.py | # coding: utf-8
from django.utils.module_loading import autodiscover_modules
from statsy.log import logger
from statsy.sites import site
__all__ = [
'send', 'watch', 'get_send_params'
'objects', 'groups', 'events',
'site', 'autodiscover', 'logger'
]
def autodiscover():
autodiscover_modules('stats', register_to=site)
def init_signals():
import statsy.signals
def init_globals():
from statsy.core import Statsy
globals().update({
'objects': Statsy.objects,
'groups': Statsy.groups,
'events': Statsy.events,
'_statsy': Statsy(),
'Statsy': Statsy
})
globals().update({
'send': _statsy.send,
'watch': _statsy.watch,
'get_send_params': _statsy.get_send_params
})
default_app_config = 'statsy.apps.StatsyConfig'
| # coding: utf-8
from django.utils.module_loading import autodiscover_modules
from statsy.log import logger
from statsy.sites import site
__all__ = [
'send', 'watch', 'get_send_params'
'objects', 'groups', 'events',
'site', 'autodiscover', 'logger'
]
def autodiscover():
autodiscover_modules('stats', register_to=site)
def init_signals():
import statsy.signals
def init_globals():
from statsy.core import Statsy
globals().update({
'objects': Statsy.objects,
'groups': Statsy.groups,
'events': Statsy.events,
'_statsy': Statsy()
})
globals().update({
'send': _statsy.send,
'watch': _statsy.watch,
'get_send_params': _statsy.get_send_params
})
default_app_config = 'statsy.apps.StatsyConfig'
| mit | Python |
9617c68ee1beca10c5cc1ce820f50bcdf1355bc8 | Fix python test determinism without infomap library | mapequation/infomap,mapequation/infomap,mapequation/infomap,mapequation/infomap | test/testDeterminism.py | test/testDeterminism.py | import sys
import argparse
import subprocess
import re
def run(input, count, infomapArgs):
firstCodelength = 0.0
for i in range(1, count + 1):
print("\nStarting run {}...".format(i))
res = subprocess.run(['./Infomap', input, infomapArgs], stdout=subprocess.PIPE)
stdout = res.stdout.decode('utf-8')
match = re.search('Per level codelength total:.*\(sum: (\d+\.\d+)\)', stdout)
if not match:
sys.exit("No match for codelength from Infomap output '{}'".format(stdout))
codelength = float(match.group(1))
print("****************************************************************")
print("{}: Found codelength: {}".format(i, codelength))
if i == 1:
firstCodelength = codelength
elif codelength != firstCodelength:
print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
print("{} != {}".format(codelength, firstCodelength))
if abs(codelength - firstCodelength) > 1e-10:
print("Found non-deterministic behavior!")
return 1
print("Less than 1e-10, continue...")
print("****************************************************************")
print("Done! No non-deterministic behaviour found!")
return 0
def main(argv):
parser = argparse.ArgumentParser(description='Test determinism.')
# parser.add_argument('input', nargs='?', default='../ninetriangles.net', help='input network (default: ninetriangles.net)')
parser.add_argument('input', help='input network')
parser.add_argument('-c', '--count', type=int, default=50, help='max count of iterations (deafult: 40)')
mainArgv = argv
infomapArgv = []
if '--' in argv:
mainArgv = argv[:argv.index('--')]
infomapArgv = argv[argv.index('--')+1:]
args = parser.parse_args(mainArgv)
return run(args.input, args.count, infomapArgs = ' '.join(infomapArgv))
if __name__ == "__main__":
main(sys.argv[1:])
| from infomap import infomap
import sys
import argparse
import subprocess
import re
def run(input, count, infomapArgs):
firstCodelength = 0.0
for i in range(1, count + 1):
print("\nStarting run {}...".format(i))
res = subprocess.run(['./Infomap', input, infomapArgs], stdout=subprocess.PIPE)
stdout = res.stdout.decode('utf-8')
match = re.search('Per level codelength total:.*\(sum: (\d+\.\d+)\)', stdout)
if not match:
sys.exit("No match for codelength from Infomap output '{}'".format(stdout))
codelength = float(match.group(1))
print("****************************************************************")
print("{}: Found codelength: {}".format(i, codelength))
if i == 1:
firstCodelength = codelength
elif codelength != firstCodelength:
print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
print("{} != {}".format(codelength, firstCodelength))
if abs(codelength - firstCodelength) > 1e-10:
print("Found non-deterministic behavior!")
return 1
print("Less than 1e-10, continue...")
print("****************************************************************")
print("Done! No non-deterministic behaviour found!")
return 0
def main(argv):
parser = argparse.ArgumentParser(description='Test determinism.')
# parser.add_argument('input', nargs='?', default='../ninetriangles.net', help='input network (default: ninetriangles.net)')
parser.add_argument('input', help='input network')
parser.add_argument('-c', '--count', type=int, default=50, help='max count of iterations (deafult: 40)')
mainArgv = argv
infomapArgv = []
if '--' in argv:
mainArgv = argv[:argv.index('--')]
infomapArgv = argv[argv.index('--')+1:]
args = parser.parse_args(mainArgv)
return run(args.input, args.count, infomapArgs = ' '.join(infomapArgv))
if __name__ == "__main__":
main(sys.argv[1:])
| agpl-3.0 | Python |
e775613d43dac702565cf266d9995c9cd706d7c8 | Add documentation for the CPSR command | 0xddaa/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,cebrusfs/217gdb,pwndbg/pwndbg,chubbymaggie/pwndbg,chubbymaggie/pwndbg,disconnect3d/pwndbg,zachriggle/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,cebrusfs/217gdb,0xddaa/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg | pwndbg/commands/cpsr.py | pwndbg/commands/cpsr.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
'Print out the ARM CPSR register'
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import gdb
import pwndbg.arch
import pwndbg.color
import pwndbg.commands
import pwndbg.regs
@pwndbg.commands.Command
@pwndbg.commands.OnlyWhenRunning
def cpsr():
if pwndbg.arch.current != 'arm':
print("This is only available on ARM")
return
cpsr = pwndbg.regs.cpsr
N = cpsr & (1<<31)
Z = cpsr & (1<<30)
C = cpsr & (1<<29)
V = cpsr & (1<<28)
T = cpsr & (1<<5)
bold = pwndbg.color.bold
result = [
bold('N') if N else 'n',
bold('Z') if Z else 'z',
bold('C') if C else 'c',
bold('V') if V else 'v',
bold('T') if T else 't'
]
print('cpsr %#x [ %s ]' % (cpsr, ' '.join(result)))
| mit | Python |
c174b06ec79070f5e3f7820f59a1133ddf13d1dd | update version prior to tag | timahutchinson/desispec,timahutchinson/desispec,desihub/desispec,gdhungana/desispec,desihub/desispec,gdhungana/desispec | py/desispec/_version.py | py/desispec/_version.py | __version__ = '0.7.0'
| __version__ = '0.5.0.dev858'
| bsd-3-clause | Python |
58e91fc46e75f084ad05459590fcf34b03ab0f36 | improve modularization | marcorosa/wos-cli | src/search.py | src/search.py | import re
import suds
import texttable as tt
import xml.etree.ElementTree as ET
from .config import user_id, password
from datetime import date
from operator import itemgetter
from six import print_
from wos import WosClient
def _draw_table(data):
# Generate table
tab = tt.Texttable()
tab.set_cols_align(['l', 'l', 'l'])
tab.add_rows(data, header=False)
tab.header(['Year', 'Title', 'ID WOS'])
tab.set_cols_width([5, 55, 20]) # Use fixed terminal dimension (80 char)
s = tab.draw()
print_(s)
def query(client, author, years, results, affiliation=None):
# Build query
if affiliation:
query = 'AU=%s AND AD=%s' % (author, affiliation)
else:
query = 'AU=%s' % author
# Build timespan
current_year = date.today().year
date_start = '{}-01-01'.format(current_year - years)
date_stop = '{}-01-01'.format(current_year + 1)
sq = client.search(query,
count=results,
offset=1,
timeSpan={'begin': date_start, 'end': date_stop})
# Format xml
my_xml = re.sub(' xmlns="[^"]+"', '', sq.records, count=1).encode('utf-8')
tree = ET.fromstring(my_xml)
n = len(list(tree))
if n > 0:
print_('Found %s papers' % n)
else:
print_('No papers found for %s in the last %s years' % (author, years))
exit(0)
return tree
def tree_extractor(tree):
# Get results
results = []
for t in tree:
idwos = t.find('UID').text
year = t.find('.//pub_info').attrib.get('pubyear', '?')
paper = t.find('.//title[@type="item"]').text
results.append([year, paper, idwos])
results = sorted(results, key=itemgetter(0), reverse=True)
return results
def search(author, years, results, affiliation=None):
try:
client = WosClient(user_id, password)
client.connect()
except suds.WebFault as e:
print_('Username and/or password not valid, or requests limit exceeded')
print_(e)
exit(1)
# Build the tree
tree = query(client, author, years, results, affiliation)
# Extract information from the tree
results = tree_extractor(tree)
# Draw the table
_draw_table(results)
return results
| import re
import suds
import texttable as tt
import xml.etree.ElementTree as ET
from .config import user_id, password
from datetime import date
from operator import itemgetter
from six import print_
from wos import WosClient
def _draw_table(data):
# Generate table
tab = tt.Texttable()
tab.set_cols_align(['l', 'l', 'l'])
tab.add_rows(data, header=False)
tab.header(['Year', 'Title', 'ID WOS'])
tab.set_cols_width([5, 55, 20]) # Use fixed terminal dimension (80 char)
s = tab.draw()
print_(s)
def search(author, years, results, affiliation=None):
try:
client = WosClient(user_id, password)
client.connect()
except suds.WebFault as e:
print_('Username and/or password not valid, or requests limit exceeded')
print_(e)
exit(1)
# Build query
if affiliation:
query = 'AU=%s AND AD=%s' % (author, affiliation)
else:
query = 'AU=%s' % author
# Build timespan
current_year = date.today().year
date_start = '{}-01-01'.format(current_year - years)
date_stop = '{}-01-01'.format(current_year + 1)
sq = client.search(query,
count=results,
offset=1,
timeSpan={'begin': date_start, 'end': date_stop})
# Format xml
my_xml = re.sub(' xmlns="[^"]+"', '', sq.records, count=1).encode('utf-8')
tree = ET.fromstring(my_xml)
n = len(list(tree))
if n > 0:
print_('Found %s papers' % n)
else:
print_('No papers found for %s in the last %s years' % (author, years))
exit(0)
# Get results
res = []
for t in tree:
idwos = t.find('UID').text
year = t.find('.//pub_info').attrib.get('pubyear', '?')
paper = t.find('.//title[@type="item"]').text
res.append([year, paper, idwos])
res = sorted(res, key=itemgetter(0), reverse=True)
_draw_table(res)
return res
| mit | Python |
3eb3c5ffc0e346103d60c121598e170c3a816818 | Add command handler to flask route | hackerspace-ntnu/coap-iot,hackerspace-ntnu/coap-iot | src/server.py | src/server.py | import logging
import asyncio
import flask
import threading
import aiocoap.resource as resource
import aiocoap
app = flask.Flask(__name__,static_folder="../static",static_url_path="/static",template_folder="../templates")
@app.route("/")
def index():
return flask.render_template("index.html", name="index")
@app.route("/<int:id>/<command>")
def parseCommand(id, command):
pass
class BlockResource(resource.Resource):
"""
We will test some of aiocoap here.
Enjoy.
"""
def __init__(self):
super(BlockResource, self).__init__()
self.content = ("test-content: yes please").encode("ascii")
@asyncio.coroutine
def render_get(self, req):
resp = aiocoap.Message(code=aiocoap.CONTENT,payload=self.content)
return resp
def render_put(self,req):
print("Got payload: %s" % req.payload)
self.content = req.payload
"""
Echo back messages
"""
return aiocoap.Message(code=aiocoap.CHANGED,payload=req.payload)
def main():
root = resource.Site()
root.add_resource(('block',),BlockResource())
websrv = threading.Thread(target=(lambda: app.run(debug=True, port=25565, use_reloader=False)), name="Flask-server")
websrv.start()
asyncio.async(aiocoap.Context.create_server_context(root))
asyncio.get_event_loop().run_forever()
logging.basicConfig(level=logging.INFO)
logging.getLogger("coap-server").setLevel(logging.DEBUG)
if __name__ == "__main__":
main()
| import logging
import asyncio
import flask
import threading
import aiocoap.resource as resource
import aiocoap
app = flask.Flask(__name__,static_folder="../static",static_url_path="/static",template_folder="../templates")
@app.route("/")
def hello():
return flask.render_template("index.html", name="index")
class BlockResource(resource.Resource):
"""
We will test some of aiocoap here.
Enjoy.
"""
def __init__(self):
super(BlockResource, self).__init__()
self.content = ("test-content: yes please").encode("ascii")
@asyncio.coroutine
def render_get(self, req):
resp = aiocoap.Message(code=aiocoap.CONTENT,payload=self.content)
return resp
def render_put(self,req):
print("Got payload: %s" % req.payload)
self.content = req.payload
"""
Echo back messages
"""
return aiocoap.Message(code=aiocoap.CHANGED,payload=req.payload)
def main():
root = resource.Site()
root.add_resource(('block',),BlockResource())
websrv = threading.Thread(target=(lambda: app.run(debug=True, port=25565, use_reloader=False)), name="Flask-server")
websrv.start()
asyncio.async(aiocoap.Context.create_server_context(root))
asyncio.get_event_loop().run_forever()
logging.basicConfig(level=logging.INFO)
logging.getLogger("coap-server").setLevel(logging.DEBUG)
if __name__ == "__main__":
main()
| mit | Python |
8c0d6a6dc75b0f1ec7f39ceb97e8b1d76f4246b8 | Bump version for impending release | bitprophet/releases | releases/_version.py | releases/_version.py | __version_info__ = (1, 2, 0)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (1, 1, 0)
__version__ = '.'.join(map(str, __version_info__))
| bsd-2-clause | Python |
91eca37144d0c378761e47c143e66a79af37c226 | Fix IntegrityError and DoesNotExist 500s | vault/bugit,vault/bugit,vault/bugit | repo_manage/forms.py | repo_manage/forms.py |
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def clean(self):
cleaned_data = super(CollaborationForm, self).clean()
self.instance.full_clean()
return cleaned_data
def clean_user(self):
username = self.cleaned_data['user']
user = None
try:
user = User.objects.get(username=username)
self.instance.user = user
except User.DoesNotExist:
raise forms.ValidationError("User %(username_s does not exist",
params={'username':username})
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
|
from django import forms
from django.forms import ModelForm
from django.forms.models import inlineformset_factory
from common.models import Repository, Collaboration, User
slug_errors = {
'invalid' : "Use only letters, numbers, underscores, and hyphens",
}
class NewRepositoryForm(forms.Form):
repo_name = forms.SlugField(max_length=100, error_messages=slug_errors)
class RepositoryForm(ModelForm):
class Meta:
model = Repository
fields = ['description', 'long_description', 'is_public']
class CollaborationForm(ModelForm):
user = forms.CharField()
class Meta:
model = Collaboration
exclude = ('repository', 'user')
def __init__(self, **kwargs):
super(CollaborationForm, self).__init__(**kwargs)
if 'instance' in kwargs:
print kwargs['instance']
self.fields['user'] = forms.CharField(initial=kwargs['instance'].user.username)
def save(self, **kwargs):
username = self.cleaned_data['user']
user = User.objects.get(username=username)
self.instance.user = user
return super(CollaborationForm, self).save(**kwargs)
CollaborationFormSet = inlineformset_factory(Repository, Repository.collaborators.through, form=CollaborationForm)
| mit | Python |
9963da42589fbc8f3c886c8482c45e514c731285 | Fix a URL bug. | sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp | tcamp/reg/urls.py | tcamp/reg/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
from reg.views import *
from reg.badges import *
urlpatterns = patterns('',
url(r'^$', register),
url(r'^override/$', register_override),
url(r'^price/$', price_check),
url(r'^save/$', save),
url(r'^thanks/$', TemplateView.as_view(template_name="reg/thanks.html")),
url(r'^whos-going/$', whos_going),
url(r'^reports/overview/$', stats),
url(r'^reports/volunteers.csv$', volunteer_export),
url(r'^reports/attendees.csv$', attendee_export),
url(r'^badges\.(?P<format>json|csv|zip)$', attendees),
url(r'^badges/(?P<barcode>[A-Za-z0-9]{21,22})\.json$', attendee),
url(r'^badges/qrcode/(?P<barcode>[A-Za-z0-9]+)\.(?P<format>svg|png)$', qrcode_image),
url(r'^badges/types.json', ticket_types),
url(r'^badges/new$', new_sale),
url(r'^badges/cf_config.json', cardflight_config),
)
admin.autodiscover() | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
from reg.views import *
from reg.badges import *
urlpatterns = patterns('',
url(r'^$', register),
url(r'^override/$', register_override),
url(r'^price/$', price_check),
url(r'^save/$', save),
url(r'^thanks/$', TemplateView.as_view(template_name="reg/thanks.html")),
url(r'^whos-going/$', whos_going),
url(r'^reports/overview/$', stats),
url(r'^reports/volunteers.csv$', volunteer_export),
url(r'^reports/attendees.csv$', attendee_export),
url(r'^badges\.(?P<format>json|csv|zip)$', attendees),
url(r'^badges/(?P<barcode>[A-Za-z0-9]{22})\.json$', attendee),
url(r'^badges/qrcode/(?P<barcode>[A-Za-z0-9]+)\.(?P<format>svg|png)$', qrcode_image),
url(r'^badges/types.json', ticket_types),
url(r'^badges/new$', new_sale),
url(r'^badges/cf_config.json', cardflight_config),
)
admin.autodiscover() | bsd-3-clause | Python |
fe6c8937a1c26acc736f218b73b3eb2968e8b6c7 | Increment version | sendpulse/sendpulse-rest-api-python | pysendpulse/__init__.py | pysendpulse/__init__.py | __author__ = 'Maksym Ustymenko'
__author_email__ = 'tech@sendpulse.com'
__copyright__ = 'Copyright 2017, SendPulse'
__credits__ = ['Maksym Ustymenko', ]
__version__ = '0.0.8'
| __author__ = 'Maksym Ustymenko'
__author_email__ = 'tech@sendpulse.com'
__copyright__ = 'Copyright 2017, SendPulse'
__credits__ = ['Maksym Ustymenko', ]
__version__ = '0.0.7'
| apache-2.0 | Python |
419bb26ddc3017fb87cd9ce1853bc4f64f052394 | Use exception which prints the same regardless of config. | chrisdearman/micropython,oopy/micropython,praemdonck/micropython,torwag/micropython,alex-robbins/micropython,tobbad/micropython,lowRISC/micropython,praemdonck/micropython,praemdonck/micropython,mhoffma/micropython,cwyark/micropython,TDAbboud/micropython,puuu/micropython,AriZuu/micropython,pozetroninc/micropython,jmarcelino/pycom-micropython,mhoffma/micropython,tobbad/micropython,hiway/micropython,Peetz0r/micropython-esp32,ryannathans/micropython,ganshun666/micropython,pozetroninc/micropython,dxxb/micropython,PappaPeppar/micropython,alex-robbins/micropython,AriZuu/micropython,pfalcon/micropython,Timmenem/micropython,alex-march/micropython,mpalomer/micropython,PappaPeppar/micropython,chrisdearman/micropython,alex-robbins/micropython,toolmacher/micropython,redbear/micropython,tralamazza/micropython,lowRISC/micropython,praemdonck/micropython,HenrikSolver/micropython,adafruit/micropython,selste/micropython,torwag/micropython,drrk/micropython,chrisdearman/micropython,deshipu/micropython,misterdanb/micropython,infinnovation/micropython,adafruit/micropython,henriknelson/micropython,cwyark/micropython,matthewelse/micropython,tuc-osg/micropython,micropython/micropython-esp32,dmazzella/micropython,puuu/micropython,tuc-osg/micropython,bvernoux/micropython,trezor/micropython,swegener/micropython,infinnovation/micropython,micropython/micropython-esp32,blazewicz/micropython,oopy/micropython,AriZuu/micropython,dxxb/micropython,SHA2017-badge/micropython-esp32,adafruit/micropython,torwag/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,oopy/micropython,toolmacher/micropython,deshipu/micropython,tralamazza/micropython,alex-robbins/micropython,bvernoux/micropython,MrSurly/micropython-esp32,infinnovation/micropython,mpalomer/micropython,mhoffma/micropython,drrk/micropython,turbinenreiter/micropython,alex-march/micropython,AriZuu/micropython,trezor/micropython,redbear/micropyt
hon,TDAbboud/micropython,pramasoul/micropython,mpalomer/micropython,jmarcelino/pycom-micropython,Peetz0r/micropython-esp32,pfalcon/micropython,Timmenem/micropython,AriZuu/micropython,ganshun666/micropython,dxxb/micropython,MrSurly/micropython,trezor/micropython,pfalcon/micropython,jmarcelino/pycom-micropython,redbear/micropython,HenrikSolver/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,selste/micropython,blazewicz/micropython,praemdonck/micropython,matthewelse/micropython,turbinenreiter/micropython,MrSurly/micropython,dxxb/micropython,oopy/micropython,tobbad/micropython,puuu/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,toolmacher/micropython,dinau/micropython,lowRISC/micropython,misterdanb/micropython,bvernoux/micropython,torwag/micropython,dinau/micropython,hosaka/micropython,turbinenreiter/micropython,tuc-osg/micropython,henriknelson/micropython,kerneltask/micropython,adafruit/micropython,alex-march/micropython,ryannathans/micropython,dinau/micropython,swegener/micropython,dinau/micropython,bvernoux/micropython,ganshun666/micropython,torwag/micropython,ganshun666/micropython,blazewicz/micropython,bvernoux/micropython,cwyark/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,HenrikSolver/micropython,kerneltask/micropython,ryannathans/micropython,matthewelse/micropython,redbear/micropython,cwyark/micropython,emfcamp/micropython,emfcamp/micropython,swegener/micropython,henriknelson/micropython,mhoffma/micropython,mhoffma/micropython,redbear/micropython,henriknelson/micropython,dinau/micropython,micropython/micropython-esp32,hiway/micropython,Peetz0r/micropython-esp32,swegener/micropython,chrisdearman/micropython,MrSurly/micropython,infinnovation/micropython,MrSurly/micropython-esp32,PappaPeppar/micropython,ryannathans/micropython,deshipu/micropython,tobbad/micropython,selste/micropython,ryannathans/micropython,matthewelse/micropython,alex-robbins/micropython,MrSurly/micropython,misterdanb/micropython,turbinenrei
ter/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,pramasoul/micropython,dxxb/micropython,HenrikSolver/micropython,infinnovation/micropython,TDAbboud/micropython,deshipu/micropython,adafruit/micropython,pfalcon/micropython,Timmenem/micropython,adafruit/circuitpython,misterdanb/micropython,tuc-osg/micropython,mpalomer/micropython,pozetroninc/micropython,hiway/micropython,trezor/micropython,puuu/micropython,lowRISC/micropython,trezor/micropython,hosaka/micropython,mpalomer/micropython,micropython/micropython-esp32,toolmacher/micropython,emfcamp/micropython,matthewelse/micropython,swegener/micropython,TDAbboud/micropython,turbinenreiter/micropython,adafruit/circuitpython,hosaka/micropython,hiway/micropython,blazewicz/micropython,adafruit/circuitpython,drrk/micropython,Timmenem/micropython,tralamazza/micropython,Timmenem/micropython,MrSurly/micropython-esp32,oopy/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,pozetroninc/micropython,alex-march/micropython,tobbad/micropython,pramasoul/micropython,PappaPeppar/micropython,toolmacher/micropython,tuc-osg/micropython,drrk/micropython,emfcamp/micropython,pfalcon/micropython,micropython/micropython-esp32,hosaka/micropython,dmazzella/micropython,matthewelse/micropython,jmarcelino/pycom-micropython,drrk/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,pramasoul/micropython,dmazzella/micropython,MrSurly/micropython,henriknelson/micropython,selste/micropython,HenrikSolver/micropython,puuu/micropython,tralamazza/micropython,lowRISC/micropython,selste/micropython,emfcamp/micropython,dmazzella/micropython,PappaPeppar/micropython,pozetroninc/micropython,misterdanb/micropython,cwyark/micropython,ganshun666/micropython,jmarcelino/pycom-micropython,hiway/micropython,SHA2017-badge/micropython-esp32,kerneltask/micropython,pramasoul/micropython,kerneltask/micropython,alex-march/micropython,hosaka/micropython | tests/misc/print_exception.py | tests/misc/print_exception.py | 
import _io as io # uPy does not have io module builtin
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
1/0
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
2/0
try:
f()
except Exception as e:
print('caught')
print_exc(e)
| import _io as io # uPy does not have io module builtin
import sys
if hasattr(sys, 'print_exception'):
print_exception = sys.print_exception
else:
import traceback
print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f)
def print_exc(e):
buf = io.StringIO()
print_exception(e, buf)
s = buf.getvalue()
for l in s.split("\n"):
# uPy on pyboard prints <stdin> as file, so remove filename.
if l.startswith(" File "):
l = l.split('"')
print(l[0], l[2])
# uPy and CPy tracebacks differ in that CPy prints a source line for
# each traceback entry. In this case, we know that offending line
# has 4-space indent, so filter it out.
elif not l.startswith(" "):
print(l)
# basic exception message
try:
XXX
except Exception as e:
print('caught')
print_exc(e)
# exception message with more than 1 source-code line
def f():
g()
def g():
YYY
try:
f()
except Exception as e:
print('caught')
print_exc(e)
| mit | Python |
8144f9c6b8812e33a6a6bd6777502fd95db1c4ea | Implement ``TEST_DISABLED_APPS`` settings, update ``INSTALLED_APPS`` list. | playpauseandstop/tddspry,playpauseandstop/tddspry | testproject/settings.py | testproject/settings.py | import os
import sys
# Calculate current directory path and add it to ``sys.path``
DIRNAME = os.path.abspath(os.path.dirname(__file__))
sys.path.append(DIRNAME)
# Debug settings
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Authentication settings
AUTH_PROFILE_MODULE = 'testapp.UserProfile'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
# Database settings
# Please, set proper database settings in ``settings_local.py`` file
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(DIRNAME, 'testproject.db'),
},
}
# Date and time settings
TIME_ZONE = 'Europe/Kiev'
# Installed applications
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.flatpages',
'django.contrib.sessions',
'django.contrib.sites',
'registration',
'testproject.disabled.attr',
'testproject.disabled.setting',
'testproject.testapp',
)
# Fixture directories
FIXTURE_DIRS = (
os.path.join(DIRNAME, 'fixtures'),
)
# Media files settings
MEDIA_ROOT = os.path.join(DIRNAME, 'static')
MEDIA_URL = '/static/'
# Middleware settings
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.locale.LocaleMiddleware',
)
# Session settings
SESSION_COOKIE_NAME = 'testproject_sid'
# Template settings
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
)
TEMPLATE_DIRS = (
os.path.join(DIRNAME, 'templates'),
)
# Test settings
TDDSPRY_TEST_CASE = 'django.test.TestCase'
TEST_DISABLED_APPS = ('testproject.disabled.setting', )
# Other **Django** settings
ROOT_URLCONF = 'testproject.urls'
SECRET_KEY = 'set proper value in ``settings_local.py`` file'
SITE_ID = 1
# ``django-registration`` settings
ACCOUNT_ACTIVATION_DAYS = 30
# Try to loading settings from ``settings_local.py`` file
try:
from settings_local import *
except ImportError, e:
sys.stderr.write('settings_local.py not found. Using default settings\n')
sys.stderr.write('%s: %s\n\n' % (e.__class__.__name__, e))
| import os
import sys
# Calculate current directory path and add it to ``sys.path``
DIRNAME = os.path.abspath(os.path.dirname(__file__))
sys.path.append(DIRNAME)
# Debug settings
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Test settings
TDDSPRY_TEST_CASE = 'django.test.TestCase'
# Authentication settings
AUTH_PROFILE_MODULE = 'testapp.UserProfile'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
# Database settings
# Please, set proper database settings in ``settings_local.py`` file
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(DIRNAME, 'testproject.db'),
},
}
# Date and time settings
TIME_ZONE = 'Europe/Kiev'
# Installed applications
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.flatpages',
'django.contrib.sessions',
'django.contrib.sites',
'registration',
'testproject.testapp',
)
# Fixture directories
FIXTURE_DIRS = (
os.path.join(DIRNAME, 'fixtures'),
)
# Media files settings
MEDIA_ROOT = os.path.join(DIRNAME, 'static')
MEDIA_URL = '/static/'
# Middleware settings
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.locale.LocaleMiddleware',
)
# Session settings
SESSION_COOKIE_NAME = 'testproject_sid'
# Template settings
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
)
TEMPLATE_DIRS = (
os.path.join(DIRNAME, 'templates'),
)
# Other **Django** settings
ROOT_URLCONF = 'testproject.urls'
SECRET_KEY = 'set proper value in ``settings_local.py`` file'
SITE_ID = 1
# ``django-registration`` settings
ACCOUNT_ACTIVATION_DAYS = 30
# Try to loading settings from ``settings_local.py`` file
try:
from settings_local import *
except ImportError, e:
sys.stderr.write('settings_local.py not found. Using default settings\n')
sys.stderr.write('%s: %s\n\n' % (e.__class__.__name__, e))
| bsd-3-clause | Python |
26fc49f1a29d60bf175094be5bb77abff56844ba | test that crosses are vibrant red | IanDCarroll/xox | tests/test_announcer_chair.py | tests/test_announcer_chair.py | import unittest
from source.announcer_chair import *
class AnnouncerTestCase(unittest.TestCase):
def setUp(self):
self.announcer = Announcer()
self.start = "Welcome to XOX, a Noughts and Crosses Game"
self.select = "Type 1 to go first, or 2 to go second."
self.tie = "The game is a draw."
self.computer = "The Computer wins the game."
self.human = "You Win!"
self.question = "Which square do you choose?"
self.bad_move = "Sorry, that's not a legal move. Try again."
self.nought = " O \033[0m"
self.cross = " X \033[0m"
self.mock_board = [1,10,1, 0,10,0, 1,0,10]
self.rendered_board = '''
\033[91m X \033[0m| O \033[0m|\033[91m X \033[0m
---+---+---
\033[30m 4 \033[0m| O \033[0m|\033[30m 6 \033[0m
---+---+---
\033[91m X \033[0m|\033[30m 8 \033[0m| O \033[0m
'''
def test_display_start_of_game(self):
self.assertEqual(self.announcer.start, self.start)
def test_display_select_go_first_or_not(self):
self.assertEqual(self.announcer.select, self.select)
def test_display_game_over_tie(self):
self.assertEqual(self.announcer.tie, self.tie)
def test_display_game_over_computer_wins(self):
self.assertEqual(self.announcer.computer, self.computer)
def test_display_game_over_human_wins(self):
self.assertEqual(self.announcer.human, self.human)
def test_display_question_the_human(self):
self.assertEqual(self.announcer.question, self.question)
def test_display_human_made_illegal_move(self):
self.assertEqual(self.announcer.bad_move, self.bad_move)
def test_display_nought(self):
self.assertEqual(self.announcer.nought, self.nought)
def test_display_cross(self):
self.assertEqual(self.announcer.cross, self.cross)
def test_announcer_has_get_board_size(self):
self.assertEqual(hasattr(self.announcer, 'get_board_size'), True)
def test_display_current_board(self):
test = self.announcer.render_board(self.mock_board)
self.assertEqual(test, self.rendered_board)
if __name__ == '__main__':
unittest.main()
| import unittest
from source.announcer_chair import *
class AnnouncerTestCase(unittest.TestCase):
def setUp(self):
self.announcer = Announcer()
self.start = "Welcome to XOX, a Noughts and Crosses Game"
self.select = "Type 1 to go first, or 2 to go second."
self.tie = "The game is a draw."
self.computer = "The Computer wins the game."
self.human = "You Win!"
self.question = "Which square do you choose?"
self.bad_move = "Sorry, that's not a legal move. Try again."
self.nought = " O \033[0m"
self.cross = " X \033[0m"
self.mock_board = [1,10,1, 0,10,0, 1,0,10]
self.rendered_board = '''
X \033[0m| O \033[0m| X \033[0m
---+---+---
\033[30m 4 \033[0m| O \033[0m|\033[30m 6 \033[0m
---+---+---
X \033[0m|\033[30m 8 \033[0m| O \033[0m
'''
def test_display_start_of_game(self):
self.assertEqual(self.announcer.start, self.start)
def test_display_select_go_first_or_not(self):
self.assertEqual(self.announcer.select, self.select)
def test_display_game_over_tie(self):
self.assertEqual(self.announcer.tie, self.tie)
def test_display_game_over_computer_wins(self):
self.assertEqual(self.announcer.computer, self.computer)
def test_display_game_over_human_wins(self):
self.assertEqual(self.announcer.human, self.human)
def test_display_question_the_human(self):
self.assertEqual(self.announcer.question, self.question)
def test_display_human_made_illegal_move(self):
self.assertEqual(self.announcer.bad_move, self.bad_move)
def test_display_nought(self):
self.assertEqual(self.announcer.nought, self.nought)
def test_display_cross(self):
self.assertEqual(self.announcer.cross, self.cross)
def test_announcer_has_get_board_size(self):
self.assertEqual(hasattr(self.announcer, 'get_board_size'), True)
def test_display_current_board(self):
test = self.announcer.render_board(self.mock_board)
self.assertEqual(test, self.rendered_board)
if __name__ == '__main__':
unittest.main()
| mit | Python |
3e4600a42c8f3153840c875ac17709f8d3d58e6f | save received values to csv | Syralist/LaundryMeasure,Syralist/LaundryMeasure,Syralist/LaundryMeasure | python/recvBroadcast.py | python/recvBroadcast.py | import socket
import csv
import time
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
UDP_PORT = 42042
sock.bind(('', UDP_PORT))
timestamp = str(int(time.time()))
with open(timestamp+'.csv', 'wb') as csvfile:
spamwriter = csv.writer(csvfile)
while True:
try:
data = sock.recv(2048)
value = float(data)/1000.0
spamwriter.writerow([int(time.time()), value])
print value
except KeyboardInterrupt:
print "Interupted by user"
break
| import socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
UDP_PORT = 42042
# using the '' address works
sock.bind(('', UDP_PORT))
# using the address of eth1 doesn't
#sock.bind(('192.168.2.123', UDP_PORT))
# and neither does using the local loopback
#sock.bind(('127.0.0.1', UDP_PORT))
recording = False
while True:
try:
data = sock.recv(2048)
value = float(data)/1000.0
print value
except KeyboardInterrupt:
print "Interupted by user"
break
| mit | Python |
f6ddd5c4d79ada59d9db4b467849d9b52c5fef75 | Add GraphFields to package import. | cmshobe/landlab,cmshobe/landlab,cmshobe/landlab,RondaStrauch/landlab,amandersillinois/landlab,RondaStrauch/landlab,landlab/landlab,Carralex/landlab,RondaStrauch/landlab,landlab/landlab,amandersillinois/landlab,csherwood-usgs/landlab,Carralex/landlab,Carralex/landlab,csherwood-usgs/landlab,landlab/landlab | landlab/field/__init__.py | landlab/field/__init__.py | from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
from .graph_field import GraphFields
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError', 'GraphFields', ]
| from landlab.field.scalar_data_fields import ScalarDataFields, FieldError
from landlab.field.grouped import ModelDataFields, GroupError, GroupSizeError
from landlab.field.field_mixin import ModelDataFieldsMixIn
__all__ = ['ScalarDataFields', 'ModelDataFields', 'ModelDataFieldsMixIn',
'FieldError', 'GroupError', 'GroupSizeError']
| mit | Python |
28303a94401ed8eec3658dd3a1b1b09d8ba700f7 | complete test coverage for demo_signal | rgommers/pywt,grlee77/pywt,PyWavelets/pywt,PyWavelets/pywt,rgommers/pywt,rgommers/pywt,grlee77/pywt,rgommers/pywt | pywt/tests/test_data.py | pywt/tests/test_data.py | import os
import numpy as np
from numpy.testing import (assert_allclose, assert_raises, assert_,
run_module_suite)
import pywt.data
data_dir = os.path.join(os.path.dirname(__file__), 'data')
wavelab_data_file = os.path.join(data_dir, 'wavelab_test_signals.npz')
wavelab_result_dict = np.load(wavelab_data_file)
def test_data_aero():
aero = pywt.data.aero()
ref = np.array([[178, 178, 179],
[170, 173, 171],
[185, 174, 171]])
assert_allclose(aero[:3, :3], ref)
def test_data_ascent():
ascent = pywt.data.ascent()
ref = np.array([[83, 83, 83],
[82, 82, 83],
[80, 81, 83]])
assert_allclose(ascent[:3, :3], ref)
def test_data_camera():
ascent = pywt.data.camera()
ref = np.array([[156, 157, 160],
[156, 157, 159],
[158, 157, 156]])
assert_allclose(ascent[:3, :3], ref)
def test_data_ecg():
ecg = pywt.data.ecg()
ref = np.array([-86, -87, -87])
assert_allclose(ecg[:3], ref)
def test_wavelab_signals():
"""Comparison with results generated using WaveLab"""
rtol = atol = 1e-12
for key, val in wavelab_result_dict.items():
key = key.replace('_', '-')
if key in ['gabor', 'sineoneoverx']:
# these functions do not allow a size to be provided
assert_allclose(val, pywt.data.demo_signal(key),
rtol=rtol, atol=atol)
assert_raises(ValueError, pywt.data.demo_signal, key, val.size)
else:
assert_allclose(val, pywt.data.demo_signal(key, val.size),
rtol=rtol, atol=atol)
# these functions require a size to be provided
assert_raises(ValueError, pywt.data.demo_signal, key)
# can get a list of the available signals
available_signals = pywt.data.demo_signal('list')
assert_('Doppler' in available_signals)
# ValueError on unrecognized signal type
assert_raises(ValueError, pywt.data.demo_signal, 'unknown_signal')
# ValueError on invalid length
assert_raises(ValueError, pywt.data.demo_signal, 'Doppler', 0)
if __name__ == '__main__':
run_module_suite()
| import os
import numpy as np
from numpy.testing import assert_allclose, assert_raises, run_module_suite
import pywt.data
data_dir = os.path.join(os.path.dirname(__file__), 'data')
wavelab_data_file = os.path.join(data_dir, 'wavelab_test_signals2.npz')
wavelab_result_dict = np.load(wavelab_data_file)
def test_data_aero():
aero = pywt.data.aero()
ref = np.array([[178, 178, 179],
[170, 173, 171],
[185, 174, 171]])
assert_allclose(aero[:3, :3], ref)
def test_data_ascent():
ascent = pywt.data.ascent()
ref = np.array([[83, 83, 83],
[82, 82, 83],
[80, 81, 83]])
assert_allclose(ascent[:3, :3], ref)
def test_data_camera():
ascent = pywt.data.camera()
ref = np.array([[156, 157, 160],
[156, 157, 159],
[158, 157, 156]])
assert_allclose(ascent[:3, :3], ref)
def test_data_ecg():
ecg = pywt.data.ecg()
ref = np.array([-86, -87, -87])
assert_allclose(ecg[:3], ref)
def test_wavelab_signals():
"""Comparison with results generated using WaveLab"""
rtol = atol = 1e-12
for key, val in wavelab_result_dict.items():
key = key.replace('_', '-')
if key in ['gabor', 'sineoneoverx']:
# these functions do not allow a size to be provided
assert_allclose(val, pywt.data.demo_signal(key),
rtol=rtol, atol=atol)
assert_raises(ValueError, pywt.data.demo_signal, key, val.size)
else:
assert_allclose(val, pywt.data.demo_signal(key, val.size),
rtol=rtol, atol=atol)
# these functions require a size to be provided
assert_raises(ValueError, pywt.data.demo_signal, key)
if __name__ == '__main__':
run_module_suite()
| mit | Python |
4be36c723788bed029c0ed2c0818672f14e1d801 | Use branches | e-koch/FilFinder,dcolombo/FilFinder,keflavich/fil_finder | examples/paper_figures/ks_plots.py | examples/paper_figures/ks_plots.py | # Licensed under an MIT open source license - see LICENSE
'''
KS p-values for different properties.
'''
import numpy as np
from pandas import read_csv
import matplotlib.pyplot as p
import numpy as np
import seaborn as sn
sn.set_context('talk')
sn.set_style('ticks')
sn.mpl.rc("figure", figsize=(7, 9))
# Widths
widths = read_csv("width_ks_table_pvals.csv")
widths.index = widths["Unnamed: 0"]
del widths["Unnamed: 0"]
widths_arr = np.asarray(widths)
widths_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
widths_arr = -np.log10(widths_arr)
# p.figure(figsize=(12, 10))
p.subplot(211)
# p.xlabel("Widths")
p.imshow(widths_arr, origin='lower', cmap='binary', interpolation='nearest')
p.xticks(np.arange(0, 14), [], rotation=90)
p.yticks(np.arange(0, 14), widths.columns)
p.figtext(0.05, 0.95, "a)", fontsize=20)
cb = p.colorbar()
cb.set_label(r'$-\log_{10}$ p-value')
cb.solids.set_edgecolor("face")
p.tight_layout()
p.show()
# Curvature
curve = read_csv("curvature_branches_ks_table_pvals.csv")
curve.index = curve["Unnamed: 0"]
del curve["Unnamed: 0"]
curve_arr = np.asarray(curve)
curve_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
curve_arr = -np.log10(curve_arr)
# p.figure(figsize=(12, 10))
p.subplot(212)
# p.xlabel("Curvature")
p.imshow(curve_arr, interpolation='nearest', origin='lower', cmap='binary')
p.xticks(np.arange(0, 14), curve.columns, rotation=90)
p.yticks(np.arange(0, 14), curve.columns)
p.figtext(0.05, 0.55, "b)", fontsize=20)
cb = p.colorbar()
cb.set_label(r'$-\log_{10}$ p-value')
cb.solids.set_edgecolor("face")
p.tight_layout()
p.show()
| # Licensed under an MIT open source license - see LICENSE
'''
KS p-values for different properties.
'''
import numpy as np
from pandas import read_csv
import matplotlib.pyplot as p
import numpy as np
import seaborn as sn
sn.set_context('talk')
sn.set_style('ticks')
sn.mpl.rc("figure", figsize=(7, 9))
# Widths
widths = read_csv("width_ks_table_pvals.csv")
widths.index = widths["Unnamed: 0"]
del widths["Unnamed: 0"]
widths_arr = np.asarray(widths)
widths_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
widths_arr = -np.log10(widths_arr)
# p.figure(figsize=(12, 10))
p.subplot(211)
# p.xlabel("Widths")
p.imshow(widths_arr, origin='lower', cmap='binary', interpolation='nearest')
p.xticks(np.arange(0, 14), [], rotation=90)
p.yticks(np.arange(0, 14), widths.columns)
p.figtext(0.05, 0.95, "a)", fontsize=20)
cb = p.colorbar()
cb.set_label(r'$-\log_{10}$ p-value')
cb.solids.set_edgecolor("face")
# p.tight_layout()
# p.show()
# Curvature
curve = read_csv("curvature_ks_table_pvals.csv")
curve.index = curve["Unnamed: 0"]
del curve["Unnamed: 0"]
curve_arr = np.asarray(curve)
curve_arr[np.arange(0, 14), np.arange(0, 14)] = 1.0
curve_arr = -np.log10(curve_arr)
# p.figure(figsize=(12, 10))
p.subplot(212)
# p.xlabel("Curvature")
p.imshow(curve_arr, interpolation='nearest', origin='lower', cmap='binary')
p.xticks(np.arange(0, 14), curve.columns, rotation=90)
p.yticks(np.arange(0, 14), curve.columns)
p.figtext(0.05, 0.55, "b)", fontsize=20)
cb = p.colorbar()
cb.set_label(r'$-\log_{10}$ p-value')
cb.solids.set_edgecolor("face")
p.tight_layout()
p.show()
| mit | Python |
41d93b76fc2dd3602b8e7e90959685fad5797a65 | remove pdb ref | closeair/sundowners-fc,closeair/sundowners-fc | commons/tests.py | commons/tests.py | from django.test import TestCase
from django.urls import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from django.contrib.auth.models import User
from .forms import DocumentForm
from .models import Document
import os
class CommonsUploadFormTests(TestCase):
def test_init_invalid_without_entry(self):
form = DocumentForm({})
self.assertFalse(form.is_valid())
def test_init_valid_with_entry(self):
data = {
'name': 'Robert Clements',
'email': 'roger@ace.org',
'phone': '212-123-1232',
'birth_date': '1942-09-01',
'address': '234 Omar Road',
'city': 'Brooklyn',
'state_abbreviation': 'NY',
'zipcode': '11209',
'united_states_citizen': True,
'faa_certificate_number': '1234567F',
'total_flight_hours': '200',
'bfr_expiration': '2017-09-01',
'medical_expiration': '2016t-09-01',
'reference_name': 'brez',
'reference_relation': 'Friend',
'reference_phone': '2122231232',
}
fixture_file = open('%s/club/fixtures/illegitimi.pdf' % os.getcwd(), 'rb')
upload_file = {'drivers_license_report': SimpleUploadedFile(fixture_file.name, fixture_file.read())}
form = DocumentForm(data, upload_file)
self.assertFalse(form.is_valid())
class CommonsUploadedViewTests(TestCase):
def test_uploaded_view(self):
document = Document(name='example', uploaded_file="example.jpg")
response = self.client.get(reverse('uploaded'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "successfully uploaded")
| from django.test import TestCase
from django.urls import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from django.contrib.auth.models import User
from .forms import DocumentForm
from .models import Document
import os
class CommonsUploadFormTests(TestCase):
def test_init_invalid_without_entry(self):
form = DocumentForm({})
self.assertFalse(form.is_valid())
def test_init_valid_with_entry(self):
data = {
'name': 'Robert Clements',
'email': 'roger@ace.org',
'phone': '212-123-1232',
'birth_date': '1942-09-01',
'address': '234 Omar Road',
'city': 'Brooklyn',
'state_abbreviation': 'NY',
'zipcode': '11209',
'united_states_citizen': True,
'faa_certificate_number': '1234567F',
'total_flight_hours': '200',
'bfr_expiration': '2017-09-01',
'medical_expiration': '2016t-09-01',
'reference_name': 'brez',
'reference_relation': 'Friend',
'reference_phone': '2122231232',
}
fixture_file = open('%s/club/fixtures/illegitimi.pdf' % os.getcwd(), 'rb')
upload_file = {'drivers_license_report': SimpleUploadedFile(fixture_file.name, fixture_file.read())}
form = DocumentForm(data, upload_file)
self.assertFalse(form.is_valid())
class CommonsUploadedViewTests(TestCase):
def test_uploaded_view(self):
document = Document(name='example', uploaded_file="example.jpg")
response = self.client.get(reverse('uploaded'))
#import pdb; pdb.set_trace()
self.assertEqual(response.status_code, 200)
self.assertContains(response, "successfully uploaded")
| mit | Python |
f9004902b0edcfef6cbea0cca3c7ed680ad4f873 | Make icon path more portable for image test | kfdm/gntp | test/test_mini.py | test/test_mini.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test most basic GNTP functions using our mini growl function
"""
import unittest
import logging
import os
logging.basicConfig(level=logging.WARNING)
import gntp.config
import gntp.notifier
APPLICATION_NAME = "GNTP unittest"
ICON_URL = "https://www.google.com/intl/en_com/images/srpr/logo3w.png"
ICON_FILE = os.path.join(os.path.dirname(__file__), "growl-icon.png")
CALLBACK_URL = "http://github.com"
class TestHash(unittest.TestCase):
def test_mini(self):
gntp.notifier.mini('Testing gntp.notifier.mini',
applicationName=APPLICATION_NAME
)
def test_config(self):
gntp.config.mini('Testing gntp.config.mini',
applicationName=APPLICATION_NAME
)
def test_url_icon(self):
gntp.config.mini('Testing URL icon',
applicationName=APPLICATION_NAME,
applicationIcon=ICON_URL
)
def test_file_icon(self):
gntp.notifier.mini('Testing URL icon',
applicationName=APPLICATION_NAME,
applicationIcon=open(ICON_FILE).read()
)
def test_sticky(self):
gntp.config.mini('Testing sticky',
applicationName=APPLICATION_NAME,
sticky=True
)
def test_callback(self):
gntp.config.mini('Testing Callback',
applicationName=APPLICATION_NAME,
callback=CALLBACK_URL
)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test most basic GNTP functions using our mini growl function
"""
import unittest
import logging
logging.basicConfig(level=logging.WARNING)
import gntp.config
import gntp.notifier
APPLICATION_NAME = "GNTP unittest"
ICON_URL = "https://www.google.com/intl/en_com/images/srpr/logo3w.png"
ICON_FILE = "test/growl-icon.png"
CALLBACK_URL = "http://github.com"
class TestHash(unittest.TestCase):
def test_mini(self):
gntp.notifier.mini('Testing gntp.notifier.mini',
applicationName=APPLICATION_NAME
)
def test_config(self):
gntp.config.mini('Testing gntp.config.mini',
applicationName=APPLICATION_NAME
)
def test_url_icon(self):
gntp.config.mini('Testing URL icon',
applicationName=APPLICATION_NAME,
applicationIcon=ICON_URL
)
def test_file_icon(self):
gntp.notifier.mini('Testing URL icon',
applicationName=APPLICATION_NAME,
applicationIcon=open(ICON_FILE).read()
)
def test_sticky(self):
gntp.config.mini('Testing sticky',
applicationName=APPLICATION_NAME,
sticky=True
)
def test_callback(self):
gntp.config.mini('Testing Callback',
applicationName=APPLICATION_NAME,
callback=CALLBACK_URL
)
if __name__ == '__main__':
unittest.main()
| mit | Python |
fce8e150b3842449fe25a34eb6afc0b9d871c4fa | Fix Django Debug toolbar | kdeloach/nyc-trees,maurizi/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,RickMohr/nyc-trees,RickMohr/nyc-trees,maurizi/nyc-trees,RickMohr/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,RickMohr/nyc-trees,azavea/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees | src/nyc_trees/nyc_trees/settings/development.py | src/nyc_trees/nyc_trees/settings/development.py | """Development settings and globals."""
from base import * # NOQA
# DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
# END DEBUG CONFIGURATION
# EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# END EMAIL CONFIGURATION
# CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# END CACHE CONFIGURATION
# TOOLBAR CONFIGURATION
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2')
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
# END TOOLBAR CONFIGURATION
| """Development settings and globals."""
from base import * # NOQA
# DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
# END DEBUG CONFIGURATION
# EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# END EMAIL CONFIGURATION
# CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# END CACHE CONFIGURATION
# TOOLBAR CONFIGURATION
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('10.0.2.2')
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# See https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
# END TOOLBAR CONFIGURATION
| agpl-3.0 | Python |
4b0841e0a1b654546925500a4823148513d5644b | Hide SV at startup | EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes | lg_sv/scripts/launcher.py | lg_sv/scripts/launcher.py | #! /usr/bin/env python
import rospy
from lg_common import ManagedWindow, ManagedBrowser, ManagedAdhocBrowser
from lg_common.msg import ApplicationState
from lg_common.helpers import add_url_params
DEFAULT_URL = 'http://localhost:8008/lg_sv/webapps/client/index.html'
#FOV for zoom level 3
DEFAULT_FOV = 28.125
def main():
rospy.init_node('panoviewer_browser', anonymous=True)
geometry = ManagedWindow.get_viewport_geometry()
server_type = rospy.get_param('~server_type', 'streetview')
url = str(rospy.get_param('~url', DEFAULT_URL))
field_of_view = float(rospy.get_param('~fov', DEFAULT_FOV))
pitch_offset = float(rospy.get_param('~pitch_offset', 0))
show_links = str(rospy.get_param('~show_links', False)).lower()
yaw_offset = float(rospy.get_param('~yaw_offset', 0))
leader = str(rospy.get_param('~leader', 'false'))
tilt = str(rospy.get_param('~tilt', 'false'))
# put parameters into one big url
url = add_url_params(url,
fov=field_of_view,
pitchOffset=pitch_offset,
showLinks=show_links,
leader=leader,
yawOffset=yaw_offset,
tilt=tilt)
# create the managed browser
slug = server_type + str(field_of_view) + str(yaw_offset) + str(pitch_offset)
managed_browser = ManagedAdhocBrowser(url=url, geometry=geometry, slug=slug)
managed_browser.update_url(url)
# set to visible
state = ApplicationState.HIDDEN
managed_browser.set_state(state)
# listen to state messages
rospy.Subscriber('/%s/state' % server_type, ApplicationState, managed_browser.handle_state_msg)
rospy.spin()
if __name__ == '__main__':
main()
| #! /usr/bin/env python
import rospy
from lg_common import ManagedWindow, ManagedBrowser, ManagedAdhocBrowser
from lg_common.msg import ApplicationState
from lg_common.helpers import add_url_params
DEFAULT_URL = 'http://localhost:8008/lg_sv/webapps/client/index.html'
#FOV for zoom level 3
DEFAULT_FOV = 28.125
def main():
rospy.init_node('panoviewer_browser', anonymous=True)
geometry = ManagedWindow.get_viewport_geometry()
server_type = rospy.get_param('~server_type', 'streetview')
url = str(rospy.get_param('~url', DEFAULT_URL))
field_of_view = float(rospy.get_param('~fov', DEFAULT_FOV))
pitch_offset = float(rospy.get_param('~pitch_offset', 0))
show_links = str(rospy.get_param('~show_links', False)).lower()
yaw_offset = float(rospy.get_param('~yaw_offset', 0))
leader = str(rospy.get_param('~leader', 'false'))
tilt = str(rospy.get_param('~tilt', 'false'))
# put parameters into one big url
url = add_url_params(url,
fov=field_of_view,
pitchOffset=pitch_offset,
showLinks=show_links,
leader=leader,
yawOffset=yaw_offset,
tilt=tilt)
# create the managed browser
slug = server_type + fov + yaw_offset + pitch_offset
managed_browser = ManagedAdhocBrowser(url=url, geometry=geometry, slug=slug)
managed_browser.update_url(url)
# set to visible
state = ApplicationState.VISIBLE
managed_browser.set_state(state)
# listen to state messages
rospy.Subscriber('/%s/state' % server_type, ApplicationState, managed_browser.handle_state_msg)
rospy.spin()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
e6bfb9d9783f7aedc0f1819e1ced0306984e0e0a | Improve normalization method | trein/quora-classifier | ml_helpers.py | ml_helpers.py | import numpy as np
import re
def extract_train():
return extract('dataset/train.txt')
def extract_test():
return extract('dataset/test.txt')
def extract(file):
input_file = open(file)
traindata = input_file.readlines()
features = []
targets = []
for line in traindata:
formatted_line = line.strip("\n")
target_i = formatted_line.split(" ")[1]
feature_i = re.sub(r"(\d+):", "", formatted_line).split(" ")[2:]
targets.append(target_i)
features.append(feature_i)
matrix_features = np.array(features).astype(np.float)
normal_matrix_features = normalize_features(matrix_features)
vector_targets = np.array(targets).astype(np.int)
# print normal_matrix_features
# print "Max", max_features
return (normal_matrix_features, vector_targets)
def normalize_features(matrix_features):
max_features = matrix_features.max(axis = 0)
max_features = (max_features + (max_features == 0))
return matrix_features / max_features
def accuracy(targets_hat, targets):
return (1.0 * (targets_hat == targets)).sum(0) / targets.shape | import numpy as np
import re
def extract_train():
return extract('dataset/train.txt')
def extract_test():
return extract('dataset/test.txt')
def extract(file):
input_file = open(file)
traindata = input_file.readlines()
features = []
targets = []
for line in traindata:
formatted_line = line.replace("\n", "")
target_i = formatted_line.split(" ")[1]
feature_i = re.sub(r"(\d+):", "", formatted_line).split(" ")[2:]
targets.append(target_i)
features.append(feature_i)
matrix_features = np.array(features).astype(np.float)
max_features = matrix_features.max(axis = 0)
max_features = (max_features + (max_features == 0))
normal_matrix_features = matrix_features / max_features
vector_targets = np.array(targets).astype(np.int)
# print normal_matrix_features
# print "Max", max_features
return (normal_matrix_features, vector_targets)
def accuracy(targets_hat, targets):
return (1.0 * (targets_hat == targets)).sum(0) / targets.shape | mit | Python |
9fae5b50815b0ea597ee6d13181f96284d07d1ab | Simplify import_optionset | rdmorganiser/rdmo,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug | rdmo/options/imports.py | rdmo/options/imports.py | import logging
from rdmo.conditions.models import Condition
from rdmo.core.imports import (fetch_parents, get_foreign_field,
get_m2m_instances, set_common_fields,
set_lang_field, validate_instance)
from .models import Option, OptionSet
logger = logging.getLogger(__name__)
def import_optionset(element, save=False):
try:
optionset = OptionSet.objects.get(uri=element.get('uri'))
except OptionSet.DoesNotExist:
optionset = OptionSet()
set_common_fields(optionset, element)
optionset.order = element.get('order')
optionset.provider_key = element.get('provider_key') or ''
conditions = get_m2m_instances(optionset, element.get('conditions'), Condition)
if save and validate_instance(optionset):
if optionset.id:
logger.info('OptionSet created with uri %s.', element.get('uri'))
else:
logger.info('OptionSet %s updated.', element.get('uri'))
optionset.save()
optionset.conditions.set(conditions)
optionset.imported = True
return optionset
def import_option(element, parent_uri=False, save=False):
if parent_uri is False:
parent_uri = element.get('optionset')
try:
option = Option.objects.get(uri=element.get('uri'), optionset__uri=parent_uri)
except Option.DoesNotExist:
option = Option()
set_common_fields(option, element)
option.parent_uri = parent_uri
option.optionset = get_foreign_field(option, parent_uri, OptionSet)
option.order = element.get('order')
option.additional_input = element.get('additional_input')
set_lang_field(option, 'text', element)
if save and validate_instance(option):
if option.id:
logger.info('Option created with uri %s.', element.get('uri'))
else:
logger.info('Option %s updated.', element.get('uri'))
option.save()
option.imported = True
return option
def fetch_option_parents(instances):
return fetch_parents(OptionSet, instances)
| import logging
from rdmo.conditions.models import Condition
from rdmo.core.imports import (fetch_parents, get_foreign_field,
get_m2m_instances, set_common_fields,
set_lang_field, validate_instance)
from .models import Option, OptionSet
logger = logging.getLogger(__name__)
def import_optionset(element, save=False):
try:
optionset = OptionSet.objects.get(uri=element.get('uri'))
except OptionSet.DoesNotExist:
optionset = OptionSet()
set_common_fields(optionset, element)
optionset.order = element.get('order')
optionset.provider_key = element.get('provider_key', '')
if optionset.provider_key is None:
optionset.provider_key = ''
conditions = get_m2m_instances(optionset, element.get('conditions'), Condition)
if save and validate_instance(optionset):
if optionset.id:
logger.info('OptionSet created with uri %s.', element.get('uri'))
else:
logger.info('OptionSet %s updated.', element.get('uri'))
optionset.save()
optionset.conditions.set(conditions)
optionset.imported = True
return optionset
def import_option(element, parent_uri=False, save=False):
if parent_uri is False:
parent_uri = element.get('optionset')
try:
option = Option.objects.get(uri=element.get('uri'), optionset__uri=parent_uri)
except Option.DoesNotExist:
option = Option()
set_common_fields(option, element)
option.parent_uri = parent_uri
option.optionset = get_foreign_field(option, parent_uri, OptionSet)
option.order = element.get('order')
option.additional_input = element.get('additional_input')
set_lang_field(option, 'text', element)
if save and validate_instance(option):
if option.id:
logger.info('Option created with uri %s.', element.get('uri'))
else:
logger.info('Option %s updated.', element.get('uri'))
option.save()
option.imported = True
return option
def fetch_option_parents(instances):
return fetch_parents(OptionSet, instances)
| apache-2.0 | Python |
e360b4e2a19a526e1541a7833648619bb5fac8e2 | Fix read of wrong dictionnary | Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse | stock_orderpoint_move_link/models/procurement_rule.py | stock_orderpoint_move_link/models/procurement_rule.py | # Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in values['orderpoint_ids']]
return vals
| # Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import models
class ProcurementRule(models.Model):
_inherit = 'procurement.rule'
def _get_stock_move_values(self, product_id, product_qty, product_uom,
location_id, name, origin, values, group_id):
vals = super(ProcurementRule, self)._get_stock_move_values(
product_id, product_qty, product_uom,
location_id, name, origin, values, group_id)
if 'orderpoint_id' in values:
vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
elif 'orderpoint_ids' in values:
vals['orderpoint_ids'] = [(4, o.id)
for o in vals['orderpoint_ids']]
return vals
| agpl-3.0 | Python |
9d284a64e3ece19ae3d52ea419a537a8b6f9c1ab | add missing quotation | e-koch/mpld3,void32/mpld3,jakevdp/mpld3,keflavich/mpld3,mpld3/mpld3,jakirkham/mpld3,linearregression/mpld3,giserh/mpld3,void32/mpld3,kdheepak89/mpld3,etgalloway/mpld3,huongttlan/mpld3,aflaxman/mpld3,fdeheeger/mpld3,jayhetee/mpld3,Jiangshangmin/mpld3,e-koch/mpld3,jayhetee/mpld3,mlovci/mpld3,aflaxman/mpld3,jrkerns/mpld3,litaotao/mpld3,danielballan/mpld3,etgalloway/mpld3,giserh/mpld3,keflavich/mpld3,huongttlan/mpld3,jakirkham/mpld3,CrazyGuo/mpld3,jrkerns/mpld3,kdheepak89/mpld3,Jiangshangmin/mpld3,mlovci/mpld3,mpld3/mpld3,ahnitz/mpld3,ahnitz/mpld3,linearregression/mpld3,fdeheeger/mpld3,litaotao/mpld3,jakevdp/mpld3,CrazyGuo/mpld3,danielballan/mpld3 | mpld3/urls.py | mpld3/urls.py | import os
from . import __path__
import warnings
#warnings.warn("using temporary MPLD3_URL: switch to ghpages ASAP!")
__all__ = ["D3_URL", "MPLD3_URL", "D3_LOCAL", "MPLD3_LOCAL"]
D3_URL = "http://d3js.org/d3.v3.min.js"
MPLD3_URL = "http://mpld3.github.io/js/mpld3.v0.1.js"
D3_LOCAL = os.path.join(__path__[0], "js", "d3.v3.min.js")
MPLD3_LOCAL = os.path.join(__path__[0], "js", "mpld3.v0.1.js")
| import os
from . import __path__
import warnings
#warnings.warn("using temporary MPLD3_URL: switch to ghpages ASAP!")
__all__ = ["D3_URL", "MPLD3_URL", "D3_LOCAL", "MPLD3_LOCAL"]
D3_URL = "http://d3js.org/d3.v3.min.js"
MPLD3_URL = "http://mpld3.github.io/js/mpld3.v0.1.js
D3_LOCAL = os.path.join(__path__[0], "js", "d3.v3.min.js")
MPLD3_LOCAL = os.path.join(__path__[0], "js", "mpld3.v0.1.js")
| bsd-3-clause | Python |
509fb695072cc9a2e602ab80d44e615aecd7b1b0 | Fix a bug that broke Python 2.5 support. | ryanpetrello/cleaver | cleaver/backend/db/model.py | cleaver/backend/db/model.py | import sqlalchemy as sa
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
ModelBase = declarative_base()
class Experiment(ModelBase):
__tablename__ = 'cleaver_experiment'
name = sa.Column(sa.UnicodeText, primary_key=True)
started_on = sa.Column(sa.DateTime, index=True)
variants = relationship(
"Variant",
backref="experiment",
order_by='Variant.order'
)
participants = relationship(
"Participant",
backref="experiment"
)
events = relationship(
"TrackedEvent",
backref="experiment"
)
class Variant(ModelBase):
__tablename__ = 'cleaver_variant'
name = sa.Column(sa.UnicodeText, primary_key=True)
order = sa.Column(sa.Integer)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__)
)
class Participant(ModelBase):
__tablename__ = 'cleaver_participant'
identity = sa.Column(sa.UnicodeText, primary_key=True)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__),
primary_key=True
)
variant = sa.Column(sa.UnicodeText)
class TrackedEvent(ModelBase):
__tablename__ = 'cleaver_event'
TYPES = (
'PARTICIPANT',
'CONVERSION'
)
type = sa.Column(
sa.Enum(*TYPES, **{'native_enum': False}),
primary_key=True
)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__),
primary_key=True
)
variant_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Variant.__tablename__),
primary_key=True
)
total = sa.Column(sa.Integer, default=0)
class VerifiedHuman(ModelBase):
__tablename__ = 'cleaver_human'
identity = sa.Column(sa.UnicodeText, primary_key=True)
| import sqlalchemy as sa
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
ModelBase = declarative_base()
class Experiment(ModelBase):
__tablename__ = 'cleaver_experiment'
name = sa.Column(sa.UnicodeText, primary_key=True)
started_on = sa.Column(sa.DateTime, index=True)
variants = relationship(
"Variant",
backref="experiment",
order_by='Variant.order'
)
participants = relationship(
"Participant",
backref="experiment"
)
events = relationship(
"TrackedEvent",
backref="experiment"
)
class Variant(ModelBase):
__tablename__ = 'cleaver_variant'
name = sa.Column(sa.UnicodeText, primary_key=True)
order = sa.Column(sa.Integer)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__)
)
class Participant(ModelBase):
__tablename__ = 'cleaver_participant'
identity = sa.Column(sa.UnicodeText, primary_key=True)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__),
primary_key=True
)
variant = sa.Column(sa.UnicodeText)
class TrackedEvent(ModelBase):
__tablename__ = 'cleaver_event'
TYPES = (
'PARTICIPANT',
'CONVERSION'
)
type = sa.Column(sa.Enum(*TYPES, native_enum=False), primary_key=True)
experiment_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Experiment.__tablename__),
primary_key=True
)
variant_name = sa.Column(
sa.UnicodeText,
sa.ForeignKey('%s.name' % Variant.__tablename__),
primary_key=True
)
total = sa.Column(sa.Integer, default=0)
class VerifiedHuman(ModelBase):
__tablename__ = 'cleaver_human'
identity = sa.Column(sa.UnicodeText, primary_key=True)
| bsd-3-clause | Python |
ece87040e75bc5add21d2904444ef8e5edb6761e | Use ranged comparison | jaraco/jaraco.functools | test_functools.py | test_functools.py | import itertools
import time
import copy
import sys
import pytest
from jaraco.functools import Throttler, method_cache
class TestThrottler(object):
def test_function_throttled(self):
"""
Ensure the throttler actually throttles calls.
"""
# set up a function to be called
counter = itertools.count()
# set up a version of `next` that is only called 30 times per second
limited_next = Throttler(next, 30)
# for one second, call next as fast as possible
deadline = time.time() + 1
while time.time() < deadline:
limited_next(counter)
# ensure the counter was advanced about 30 times
assert 28 <= next(counter) <= 32
# ensure that another burst of calls after some idle period will also
# get throttled
time.sleep(1)
deadline = time.time() + 1
counter = itertools.count()
while time.time() < deadline:
limited_next(counter)
assert 28 <= next(counter) <= 32
def test_reconstruct_unwraps(self):
"""
The throttler should be re-usable - if one wants to throttle a
function that's aready throttled, the original function should be
used.
"""
wrapped = Throttler(next, 30)
wrapped_again = Throttler(wrapped, 60)
assert wrapped_again.func is next
assert wrapped_again.max_rate == 60
def test_throttled_method(self):
class ThrottledMethodClass(object):
@Throttler
def echo(self, arg):
return arg
tmc = ThrottledMethodClass()
assert tmc.echo('foo') == 'foo'
class TestMethodCache:
bad_vers = '(3, 5, 0) <= sys.version_info < (3, 5, 1)'
@pytest.mark.skipif(bad_vers, reason="https://bugs.python.org/issue25447")
def test_deepcopy(self):
"""
A deepcopy of an object with a method cache should still
succeed.
"""
class ClassUnderTest:
calls = 0
@method_cache
def method(self, value):
self.calls += 1
return value
ob = ClassUnderTest()
copy.deepcopy(ob)
ob.method(1)
copy.deepcopy(ob)
| import itertools
import time
import copy
import sys
import pytest
from jaraco.functools import Throttler, method_cache
class TestThrottler(object):
def test_function_throttled(self):
"""
Ensure the throttler actually throttles calls.
"""
# set up a function to be called
counter = itertools.count()
# set up a version of `next` that is only called 30 times per second
limited_next = Throttler(next, 30)
# for one second, call next as fast as possible
deadline = time.time() + 1
while time.time() < deadline:
limited_next(counter)
# ensure the counter was advanced about 30 times
assert 28 <= next(counter) <= 32
# ensure that another burst of calls after some idle period will also
# get throttled
time.sleep(1)
deadline = time.time() + 1
counter = itertools.count()
while time.time() < deadline:
limited_next(counter)
assert 28 <= next(counter) <= 32
def test_reconstruct_unwraps(self):
"""
The throttler should be re-usable - if one wants to throttle a
function that's aready throttled, the original function should be
used.
"""
wrapped = Throttler(next, 30)
wrapped_again = Throttler(wrapped, 60)
assert wrapped_again.func is next
assert wrapped_again.max_rate == 60
def test_throttled_method(self):
class ThrottledMethodClass(object):
@Throttler
def echo(self, arg):
return arg
tmc = ThrottledMethodClass()
assert tmc.echo('foo') == 'foo'
class TestMethodCache:
@pytest.mark.skipif('sys.version_info == (3, 5, 0)',
reason="https://bugs.python.org/issue25447")
def test_deepcopy(self):
"""
A deepcopy of an object with a method cache should still
succeed.
"""
class ClassUnderTest:
calls = 0
@method_cache
def method(self, value):
self.calls += 1
return value
ob = ClassUnderTest()
copy.deepcopy(ob)
ob.method(1)
copy.deepcopy(ob)
| mit | Python |
f549d13773eb6c45791af7b734be098b2f90a71e | Remove mention of old test_compare.py | pwyf/foxpath-tools | tests/__init__.py | tests/__init__.py | from .test_list import *
from .test_simple import *
| from .test_compare import *
from .test_list import *
from .test_simple import *
| mit | Python |
376fb5860c573416ac71a0dfe5437011858398b6 | Update __init__.py | luigi-riefolo/network_crawler,luigi-riefolo/network_crawler | tests/__init__.py | tests/__init__.py | """Unit tests init."""
import json
import os
import time
import unittest
try:
from selenium import webdriver
from selenium.common.exceptions import WebDriverException
except ImportError as imp_err:
raise ImportError('Failed to import \'selenium\':\n' + str(imp_err))
from network_crawler.api.operator_web_site import OperatorWebSite
__all__ = ['json', 'os', 'time', 'unittest',
'webdriver', 'WebDriverException', 'OperatorWebSite', ]
| """Unit tests init."""
import json
import os
import time
import unittest
try:
from selenium import webdriver
from selenium.common.exceptions import WebDriverException
except ImportError as imp_err:
raise ImportError('Failed to import \'selenium\':\n' + imp_err)
from network_crawler.api.operator_web_site import OperatorWebSite
__all__ = ['json', 'os', 'time', 'unittest',
'webdriver', 'WebDriverException', 'OperatorWebSite', ]
| mit | Python |
5c77d1203e7f6f985cb7a470324dafba648bbdcd | remove imports | justinwp/croplands,justinwp/croplands | tests/__init__.py | tests/__init__.py | # import unittest
#
#
# from test_auth_views import TestAuthViews
# from test_email import TestEmail
# from test_api import TestApi
# from test_utils_s3 import TestUtilsS3
# from test_db_models import TestDatabase
# from test_countries import TestCountries
# from test_tasks import TestTasks
#
# if __name__ == '__main__':
# unittest.main()
| import unittest
from test_auth_views import TestAuthViews
from test_email import TestEmail
from test_api import TestApi
from test_utils_s3 import TestUtilsS3
from test_db_models import TestDatabase
from test_countries import TestCountries
from test_tasks import TestTasks
if __name__ == '__main__':
unittest.main()
| mit | Python |
ac746d0afcd0a3a3267ee701dc868d584e3f5d94 | Remove unused imports | avanov/Rhetoric,avanov/Rhetoric | tests/__init__.py | tests/__init__.py | import unittest
import os
class BaseTestCase(unittest.TestCase):
def setUp(self):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.testapp.testapp.settings")
from django.test.client import Client
from tests.testapp.testapp.wsgi import application
import rhetoric
DEFAULT_HEADERS = {
'X-API-VERSION': '1.0'
}
self.client = Client(**DEFAULT_HEADERS)
self.rhetoric = rhetoric
self.wsgi_app = application
| import unittest
import os
import re
class BaseTestCase(unittest.TestCase):
def setUp(self):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.testapp.testapp.settings")
from django.test.client import Client
from tests.testapp.testapp.wsgi import application
import rhetoric
DEFAULT_HEADERS = {
'X-API-VERSION': '1.0'
}
self.client = Client(**DEFAULT_HEADERS)
self.rhetoric = rhetoric
self.wsgi_app = application
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.