commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
934bd6c894dc461282d5ff2f450672eb76476698 | Fix typo. | opencivicdata/opencivicdata.org,opencivicdata/opencivicdata.org,opencivicdata/opencivicdata.org | upload/backend/importer.py | upload/backend/importer.py | from pupa.scrape import (Jurisdiction, Person, Organization, Membership, Post)
from pupa.importers import (OrganizationImporter, PersonImporter, PostImporter,
MembershipImporter)
from django.db import transaction
def do_import(stream, stransaction):
stream = list(stream)
jurisdiction_id = stransaction.jurisdiction.id
org_importer = OrganizationImporter(jurisdiction_id)
person_importer = PersonImporter(jurisdiction_id)
post_importer = PostImporter(jurisdiction_id, org_importer)
membership_importer = MembershipImporter(
jurisdiction_id,
person_importer,
org_importer,
post_importer
)
report = {}
def tfilter(otype, stream):
for el in filter(lambda x: isinstance(x, otype), stream):
yield el.as_dict()
with transaction.atomic():
report.update(org_importer.import_data(tfilter(Organization, stream)))
report.update(person_importer.import_data(tfilter(Person, stream)))
report.update(post_importer.import_data(tfilter(Post, stream)))
report.update(membership_importer.import_data(
tfilter(Membership, stream)))
return report
| from pupa.scrape import (Jurisdiction, Person, Organization, Membership, Post)
from pupa.importers import (OrganizationImporter, PersonImporter, PostImporter,
MembershipImporter)
from django.db import transaction
def do_import(stream, transaction):
stream = list(stream)
jurisdiction_id = transaction.jurisdiction.id
org_importer = OrganizationImporter(jurisdiction_id)
person_importer = PersonImporter(jurisdiction_id)
post_importer = PostImporter(jurisdiction_id, org_importer)
membership_importer = MembershipImporter(
jurisdiction_id,
person_importer,
org_importer,
post_importer
)
report = {}
# This basically relates to Pupa's pupa.clu.commands.update:113
# (From there - wrap this in a transaction.)
def tfilter(otype, stream):
for el in filter(lambda x: isinstance(x, otype), stream):
yield el.as_dict()
with transaction.atomic():
report.update(org_importer.import_data(tfilter(Organization, stream)))
report.update(person_importer.import_data(tfilter(Person, stream)))
report.update(post_importer.import_data(tfilter(Post, stream)))
report.update(membership_importer.import_data(
tfilter(Membership, stream)))
return report
| bsd-3-clause | Python |
bfd4bf6118d69a70926982c602d411dc6f9ab1d4 | Check return value of bdist_egg command | pypa/setuptools,pypa/setuptools,pypa/setuptools | tests/test_python33_bdist_egg.py | tests/test_python33_bdist_egg.py | import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
self.assertTrue(_python_cmd("setup.py", "bdist_egg"))
if __name__ == '__main__':
unittest.main()
| import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from distribute_setup import (use_setuptools, _build_egg, _python_cmd,
_do_download, _install, DEFAULT_URL,
DEFAULT_VERSION)
import distribute_setup
class TestPython33BdistEgg(unittest.TestCase):
def test_build_egg(self):
os.chdir(os.path.join(CURDIR, 'python3.3_bdist_egg_test'))
_python_cmd("setup.py", "bdist_egg")
if __name__ == '__main__':
unittest.main()
| mit | Python |
825502e2fab40a930bfa6ba678c6a9bbcd939c7f | rename local variable for attribute names | geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend | yamltomd.py | yamltomd.py | from jinja2 import Template
import yaml
def yaml_to_md(table):
out.write('## ' + table + '\n\n')
data = yaml.load(inp)
attribute_names = data['layers'][table]['attributes'].keys()
dicts = data['layers'][table]['attributes']
dicts2 = dicts['type']['values']
key2 = dicts2.keys()
with open('templates/layer_attributes.md.jinja2') as f:
t2 = Template(f.read())
out.write(t2.render(mylist1=attribute_names, a1=len(attribute_names), space=' ', dicts=dicts))
if 'correlated_attributes' in dicts2[key2[0]].keys():
with open('templates/attribute_values_with_aggtype.md.jinja2') as f:
type_table = f.read()
else:
with open('templates/attribute_values.md.jinja2') as f:
type_table = f.read()
t3 = Template(type_table)
out.write(t3.render(typelist=key2, a=len(key2), dicts=dicts2))
out.write('\n\n')
f = open("osmaxx_schema.yaml", 'r')
inp = f.read()
out = open("documentation.md", 'w')
yaml_to_md('adminarea_a')
yaml_to_md('building_a')
yaml_to_md('geoname_p')
yaml_to_md('landuse_a')
yaml_to_md('military_p')
yaml_to_md('misc_l')
yaml_to_md('natural_a')
yaml_to_md('nonop_l')
yaml_to_md('poi_p')
yaml_to_md('pow_p')
yaml_to_md('railway_bridge_l')
yaml_to_md('road_ground_l')
yaml_to_md('route_l')
yaml_to_md('traffic_a')
yaml_to_md('traffic_p')
yaml_to_md('transport_a')
yaml_to_md('utility_a')
yaml_to_md('utility_p')
yaml_to_md('utility_l')
yaml_to_md('water_a')
yaml_to_md('water_p')
yaml_to_md('water_l')
| from jinja2 import Template
import yaml
def yaml_to_md(table):
out.write('## ' + table + '\n\n')
data = yaml.load(inp)
key1 = data['layers'][table]['attributes'].keys()
dicts = data['layers'][table]['attributes']
dicts2 = dicts['type']['values']
key2 = dicts2.keys()
with open('templates/layer_attributes.md.jinja2') as f:
t2 = Template(f.read())
out.write(t2.render(mylist1=key1, a1=len(key1), space=' ', dicts=dicts))
if 'correlated_attributes' in dicts2[key2[0]].keys():
with open('templates/attribute_values_with_aggtype.md.jinja2') as f:
type_table = f.read()
else:
with open('templates/attribute_values.md.jinja2') as f:
type_table = f.read()
t3 = Template(type_table)
out.write(t3.render(typelist=key2, a=len(key2), dicts=dicts2))
out.write('\n\n')
f = open("osmaxx_schema.yaml", 'r')
inp = f.read()
out = open("documentation.md", 'w')
yaml_to_md('adminarea_a')
yaml_to_md('building_a')
yaml_to_md('geoname_p')
yaml_to_md('landuse_a')
yaml_to_md('military_p')
yaml_to_md('misc_l')
yaml_to_md('natural_a')
yaml_to_md('nonop_l')
yaml_to_md('poi_p')
yaml_to_md('pow_p')
yaml_to_md('railway_bridge_l')
yaml_to_md('road_ground_l')
yaml_to_md('route_l')
yaml_to_md('traffic_a')
yaml_to_md('traffic_p')
yaml_to_md('transport_a')
yaml_to_md('utility_a')
yaml_to_md('utility_p')
yaml_to_md('utility_l')
yaml_to_md('water_a')
yaml_to_md('water_p')
yaml_to_md('water_l')
| mit | Python |
9855e7b0acf70266a3280e0c699961a587d62d57 | Add string representation for ExperimentAction | bburan/psiexperiment | psi/controller/experiment_action.py | psi/controller/experiment_action.py | import logging
log = logging.getLogger(__name__)
from functools import partial
from atom.api import Unicode, Int, Dict, Bool, Typed, Callable, List
from enaml.core.api import Declarative, d_
from psi.util import get_dependencies
class ExperimentState(Declarative):
'''
Allows for indication of a state (e.g., `experiment_active`, `iti_active`).
Automatically contributes the start/end events associataed with the state
(e.g., `experiment_start`, `experiment_end`).
'''
name = d_(Unicode())
events = ['prepare', 'start', 'end']
def _generate_events(self):
events = []
for name in self.events:
event_name = '{}_{}'.format(self.name, name)
event = ExperimentEvent(name=event_name, associated_state=self)
events.append(event)
return events
class ExperimentEvent(Declarative):
name = d_(Unicode())
associated_state = Typed(ExperimentState)
missing_event_mesg = '''
Missing event "{key}".
Perhaps an input, output or device is missing from the IO configuration?
'''
def simple_match(key, context):
try:
return context[key]
except Exception as e:
new_exc = KeyError(missing_event_mesg.format(key=key))
raise new_exc from e
class ExperimentActionBase(Declarative):
# Name of event that triggers command
event = d_(Unicode())
dependencies = List()
match = Callable()
# Defines order of invocation. Less than 100 invokes before default. Higher
# than 100 invokes after default. Note that if concurrent is True, then
# order of execution is not guaranteed.
weight = d_(Int(50))
# Arguments to pass to command by keyword
kwargs = d_(Dict())
def _default_dependencies(self):
return get_dependencies(self.event)
def _default_match(self):
code = compile(self.event, 'dynamic', 'eval')
if len(self.dependencies) == 1:
return partial(simple_match, self.dependencies[0])
else:
return partial(eval, code)
def __str__(self):
return f'{self.event} (weight={self.weight}; kwargs={self.kwargs})'
class ExperimentAction(ExperimentActionBase):
# Command to invoke
command = d_(Unicode())
def invoke(self, core, kwargs):
kwargs = kwargs.copy()
kwargs.update(self.kwargs)
core.invoke_command(action.command, parameters=kwargs)
class ExperimentCallback(ExperimentActionBase):
callback = d_(Callable())
| import logging
log = logging.getLogger(__name__)
from functools import partial
from atom.api import Unicode, Int, Dict, Bool, Typed, Callable, List
from enaml.core.api import Declarative, d_
from psi.util import get_dependencies
class ExperimentState(Declarative):
'''
Allows for indication of a state (e.g., `experiment_active`, `iti_active`).
Automatically contributes the start/end events associataed with the state
(e.g., `experiment_start`, `experiment_end`).
'''
name = d_(Unicode())
events = ['prepare', 'start', 'end']
def _generate_events(self):
events = []
for name in self.events:
event_name = '{}_{}'.format(self.name, name)
event = ExperimentEvent(name=event_name, associated_state=self)
events.append(event)
return events
class ExperimentEvent(Declarative):
name = d_(Unicode())
associated_state = Typed(ExperimentState)
missing_event_mesg = '''
Missing event "{key}".
Perhaps an input, output or device is missing from the IO configuration?
'''
def simple_match(key, context):
try:
return context[key]
except Exception as e:
new_exc = KeyError(missing_event_mesg.format(key=key))
raise new_exc from e
class ExperimentActionBase(Declarative):
# Name of event that triggers command
event = d_(Unicode())
dependencies = List()
match = Callable()
# Defines order of invocation. Less than 100 invokes before default. Higher
# than 100 invokes after default. Note that if concurrent is True, then
# order of execution is not guaranteed.
weight = d_(Int(50))
# Arguments to pass to command by keyword
kwargs = d_(Dict())
def _default_dependencies(self):
return get_dependencies(self.event)
def _default_match(self):
code = compile(self.event, 'dynamic', 'eval')
if len(self.dependencies) == 1:
return partial(simple_match, self.dependencies[0])
else:
return partial(eval, code)
class ExperimentAction(ExperimentActionBase):
# Command to invoke
command = d_(Unicode())
def invoke(self, core, kwargs):
kwargs = kwargs.copy()
kwargs.update(self.kwargs)
core.invoke_command(action.command, parameters=kwargs)
class ExperimentCallback(ExperimentActionBase):
callback = d_(Callable())
| mit | Python |
4241e8f5ca6114e6bcc5b9fda2e79cfa83ba6f3b | Test for ScheduledAnalysis get_sample | mass-project/mass_api_client,mass-project/mass_api_client | tests/test_scheduled_analysis.py | tests/test_scheduled_analysis.py | import json
from mass_api_client import ConnectionManager
from mass_api_client.resources import ScheduledAnalysis
from mass_api_client.resources import FileSample
from tests.serialization_test_case import SerializationTestCase
from tests.httmock_test_case import HTTMockTestCase
from httmock import HTTMock, urlmatch
class ScheduledAnalysisTestCase(SerializationTestCase, HTTMockTestCase):
def test_is_data_correct_after_serialization(self):
with open('tests/data/scheduled_analysis.json') as data_file:
data = json.load(data_file)
self.assertEqualAfterSerialization(ScheduledAnalysis(), data)
def test_get_sample(self):
with open('tests/data/scheduled_analysis.json') as data_file:
@urlmatch()
def mass_mock(url, req):
return open('tests/data/file_sample.json').read()
with HTTMock(mass_mock):
scheduled_analysis = ScheduledAnalysis._create_instance_from_data(json.load(data_file))
sample = scheduled_analysis.get_sample()
self.assertEqual(sample.file_size, 924449)
| import json
from mass_api_client.resources import ScheduledAnalysis
from tests.serialization_test_case import SerializationTestCase
class ScheduledAnalysisTestCase(SerializationTestCase):
def test_is_data_correct_after_serialization(self):
with open('tests/data/scheduled_analysis.json') as data_file:
data = json.load(data_file)
self.assertEqualAfterSerialization(ScheduledAnalysis(), data)
| mit | Python |
dba40486c0f8e0ff22f2b3491a179ef7d6d664b7 | Check for owner in the loader module | MoroseMagician/discolyte | acolyte/modules/loader.py | acolyte/modules/loader.py | from discord.ext import commands
from discord.ext.commands.errors import ExtensionAlreadyLoaded
from discord.ext.commands.errors import ExtensionNotFound
from discord.ext.commands.errors import ExtensionNotLoaded
class Loader(commands.Cog):
""" Module loader """
def __init__(self, bot):
self.bot = bot
@commands.is_owner()
@commands.command(hidden=True)
async def load(self, ctx, *, extension_name):
""" Load a module """
try:
self.bot.load_extension(f"acolyte.modules.{extension_name}")
self.bot.extensions.add(f"acolyte.modules.{extension_name}")
await ctx.send(f"Module {extension_name} loaded!!")
except (AttributeError, ImportError):
await ctx.send("Failed to load extension ;_;")
except ExtensionNotFound:
await ctx.send("No module found by that name! I looked everywhere!!")
except ExtensionAlreadyLoaded:
await ctx.send("WOA!! This module is already loaded!! :tada: :tada:")
@commands.is_owner()
@commands.command(hidden=True)
async def unload(self, ctx, *, extension_name):
""" Unload a module """
if extension_name == "loader":
await ctx.send("hey don't")
return
try:
self.bot.unload_extension(f"acolyte.modules.{extension_name}")
self.bot.extensions.remove(f"acolyte.modules.{extension_name}")
await ctx.send(f"Module {extension_name} unloaded!! Bye!!")
except ExtensionNotLoaded:
await ctx.send(f"Nope! This module isn't loaded!")
@commands.is_owner()
@commands.command(hidden=True)
async def reload(self, ctx):
""" Reload all loaded modules """
for ext in self.bot.extensions:
self.bot.reload_extension(ext)
await ctx.send(f"Reloaded {len(self.bot.extensions)} modules!!")
@commands.is_owner()
@commands.command(hidden=True)
async def list(self, ctx):
""" List all loaded modules """
await ctx.send(f"Loaded modules: {self.bot.extensions}")
def setup(bot):
bot.add_cog(Loader(bot))
| from discord.ext import commands
from discord.ext.commands.errors import ExtensionAlreadyLoaded
from discord.ext.commands.errors import ExtensionNotFound
from discord.ext.commands.errors import ExtensionNotLoaded
class Loader(commands.Cog):
""" Module loader """
def __init__(self, bot):
self.bot = bot
@commands.command(hidden=True)
async def load(self, ctx, *, extension_name):
""" Load a module """
try:
self.bot.load_extension(f"acolyte.modules.{extension_name}")
self.bot.extensions.add(f"acolyte.modules.{extension_name}")
await ctx.send(f"Module {extension_name} loaded!!")
except (AttributeError, ImportError):
await ctx.send("Failed to load extension ;_;")
except ExtensionNotFound:
await ctx.send("No module found by that name! I looked everywhere!!")
except ExtensionAlreadyLoaded:
await ctx.send("WOA!! This module is already loaded!! :tada: :tada:")
@commands.command(hidden=True)
async def unload(self, ctx, *, extension_name):
""" Unload a module """
if extension_name == "loader":
await ctx.send("hey don't")
return
try:
self.bot.unload_extension(f"acolyte.modules.{extension_name}")
self.bot.extensions.remove(f"acolyte.modules.{extension_name}")
await ctx.send(f"Module {extension_name} unloaded!! Bye!!")
except ExtensionNotLoaded:
await ctx.send(f"Nope! This module isn't loaded!")
@commands.command(hidden=True)
async def reload(self, ctx):
""" Reload all loaded modules """
for ext in self.bot.extensions:
self.bot.reload_extension(ext)
await ctx.send(f"Reloaded {len(self.bot.extensions)} modules!!")
@commands.command(hidden=True)
async def list(self, ctx):
""" List all loaded modules """
await ctx.send(f"Loaded modules: {self.bot.extensions}")
def setup(bot):
bot.add_cog(Loader(bot))
| mit | Python |
e342a9eef2f4d1bef28f9284feecd18c6c9b941e | fix to m2m | joeyuan19/flaming-bear,joeyuan19/flaming-bear,joeyuan19/flaming-bear,joeyuan19/flaming-bear | PersonalSite/analytics/models.py | PersonalSite/analytics/models.py | from django.db import models
# Create your models here.
class Visit(models.Model):
url = models.CharField(max_length=256)
date = models.DateTimeField(editable=False)
visitor = models.ForeignKey('Visitor')
# Possibly extend this class in the future to make a tree
# that gives something like "/ -> /projects -> /resume -> /derp -> / ->
# projects/asciiart"
def get_visitor(self):
return "IP: " + self.visitor.ip
def __save__(self,*args,**kwargs):
if not self.id:
self.date = datetime.datetime.today()
return super(Visitor,self).save(*args,**kwargs)
class Visitor(models.Model):
ip = models.CharField(max_length=64)
user_agent = models.CharField(max_length=512)
visits = models.ManyToManyField(Visit,related_name='url_visit')
first_visit = models.DateTimeField(editable=False)
last_visit = models.DateTimeField(editable=False)
def __save__(self,*args,**kwargs):
if not self.id:
self.first_visit = datetime.datetime.today()
self.last_visit = datetime.datetime.today()
return super(Visitor,self).save(*args,**kwargs)
def visit_count(self):
return len(self.visits.all())
def list_visits(self):
li = ""
for visit in self.visits.all():
li += visit.url + " on " + date + "\n"
return li[:-1]
| from django.db import models
# Create your models here.
class Visit(models.Model):
url = models.CharField(max_length=256)
date = models.DateTimeField(editable=False)
visitor = models.ForeignKey('Visitor',related_name)
# Possibly extend this class in the future to make a tree
# that gives something like "/ -> /projects -> /resume -> /derp -> / ->
# projects/asciiart"
def get_visitor(self):
return "IP: " + self.visitor.ip
def __save__(self,*args,**kwargs):
if not self.id:
self.date = datetime.datetime.today()
return super(Visitor,self).save(*args,**kwargs)
class Visitor(models.Model):
ip = models.CharField(max_length=64)
user_agent = models.CharField(max_length=512)
visits = models.ManyToManyField(Visit,related_name='url_visit')
first_visit = models.DateTimeField(editable=False)
last_visit = models.DateTimeField(editable=False)
def __save__(self,*args,**kwargs):
if not self.id:
self.first_visit = datetime.datetime.today()
self.last_visit = datetime.datetime.today()
return super(Visitor,self).save(*args,**kwargs)
def visit_count(self):
return len(self.visits.all())
def list_visits(self):
li = ""
for visit in self.visits.all():
li += visit.url + " on " + date + "\n"
return li[:-1]
| apache-2.0 | Python |
5224d55d4b65ae7142a51bf2ce58221cd69102f6 | fix race condition in issue6171-sdk-start-crash | nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js,nwjs/nw.js | test/sanity/issue6171-sdk-start-crash/test.py | test/sanity/issue6171-sdk-start-crash/test.py | import time
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from nw_util import *
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
chrome_options = Options()
chrome_options.add_argument("nwapp=" + os.path.dirname(os.path.abspath(__file__)))
driver = webdriver.Chrome(executable_path=os.environ['CHROMEDRIVER'], chrome_options=chrome_options)
driver.implicitly_wait(2)
try:
print driver.current_url
print 'waiting for crash'
result = wait_for_element_id(driver, 'versions')
#print result
assert('NWjs version' in result)
print 'There is no crash'
finally:
driver.quit()
| import time
import os
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
chrome_options = Options()
chrome_options.add_argument("nwapp=" + os.path.dirname(os.path.abspath(__file__)))
driver = webdriver.Chrome(executable_path=os.environ['CHROMEDRIVER'], chrome_options=chrome_options)
driver.implicitly_wait(2)
try:
print driver.current_url
print 'waiting for crash'
time.sleep(5)
result = driver.find_element_by_id('versions').get_attribute('innerHTML')
#print result
assert('NWjs version' in result)
print 'There is no crash'
finally:
driver.quit()
| mit | Python |
d0bf235af3742a17c722488fe3679d5b73a0d945 | Fix gemm calls in Softmax | spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | thinc/neural/_classes/softmax.py | thinc/neural/_classes/softmax.py | from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
from ...check import has_shape
from ... import check
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
name = 'softmax'
@check.arg(1, has_shape(('nB', 'nI')))
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
output__BO = self.ops.softmax(output__BO, inplace=False)
return output__BO
@check.arg(1, has_shape(('nB', 'nI')))
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
@check.arg(0, has_shape(('nB', 'nO')))
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True)
self.d_b += grad__BO.sum(axis=0)
grad__BI = self.ops.gemm(grad__BO, self.W)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return grad__BI
return output__BO, finish_update
| from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
from ...check import has_shape
from ... import check
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
name = 'softmax'
@check.arg(1, has_shape(('nB', 'nI')))
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
output__BO = self.ops.softmax(output__BO, inplace=False)
return output__BO
@check.arg(1, has_shape(('nB', 'nI')))
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
@check.arg(0, has_shape(('nB', 'nO')))
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.batch_outer(grad__BO, input__BI)
self.d_b += grad__BO.sum(axis=0)
grad__BI = self.ops.dot(grad__BO, self.W)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return grad__BI
return output__BO, finish_update
| mit | Python |
13723e6238f3026b9f37909c3e2a0e1668c8a1b9 | Update quiz-eliecer.py | eliecer11/Uip-prog3 | laboratorios/quiz1/quiz-eliecer.py | laboratorios/quiz1/quiz-eliecer.py | #calcular el area y perimetro de un rectangulo
#base=5
#altura=7 convertir area y perimetro en metros y pulgadas
base=5
altura=7
perimetro=2*5+2*7
print ("mi perimetro es" + str(perimetro))
area=5*7
print ("mi area es" + str (area))
metrop=perimetro/100
print ("mi perimetro en metro es" + str(metrop))
pulgadap=perimetro/2.54
print ("mi perimetro en pulgada es" + str(pulgadap))
metroa=area/100
print ("mi area en metro es" + str(metroa))
pulgadaa=area/2.54
print ("mi area en pulgada es" + str(pulgadaa))
|
base=5
altura=7
perimetro=2*5+2*7
print ("mi perimetro es" + str(perimetro))
area=5*7
print ("mi area es" + str (area))
metrop=perimetro/100
print ("mi perimetro en metro es" + str(metrop))
pulgadap=perimetro/2.54
print ("mi perimetro en pulgada es" + str(pulgadap))
metroa=area/100
print ("mi area en metro es" + str(metroa))
pulgadaa=area/2.54
print ("mi area en pulgada es" + str(pulgadaa))
| mit | Python |
93304e3a498e24f16894a8e3f75da61bb1383a45 | check if document associated to a file | bricaud/wevia,bricaud/wevia,bricaud/wevia | advancedSettings/views.py | advancedSettings/views.py | from django.shortcuts import render
import os
#from django.conf import settings
from classif.models import Cluster
from fileupload.models import Document
from graphdesign.models import GraphNode
def index(request):
output = ''
if(request.GET.get('check_db')):
output = run_check_db()
if(request.GET.get('clean_db')):
output = run_clean_db()
if(request.GET.get('erase_db')):
output = run_erase_db()
return render(request,'advancedSettings/advanced_settings.html',
{'console_message' :output})
def run_check_db():
try:
clusters = Cluster.objects.all()
except:
print("Database corrupted (can't access 'Cluster' object). Please re-install.")
return "Database corrupted (can't access 'Cluster' object). Please re-install."
nb_clusters = len(clusters)
try:
expressions = GraphNode.objects.all()
except:
print("Database corrupted (can't access 'GraphNode' object). Please re-install.")
return "Database corrupted (can't access 'GrpahNode' object). Please re-install."
nb_expressions = len(expressions)
# Delete only documents that are not associated to a file:
try:
document_set = Document.objects.all()
except:
print("Database corrupted (can't access 'Document' object). Please re-install.")
return "Database corrupted (can't access 'Document' object). Please re-install."
nb_documents = len(document_set)
doc_no_file = 0
docname_no_file = []
for doc in document_set:
if doc.file == '' or not os.path.isfile(doc.file.path):
doc_no_file+=1
docname_no_file.append(doc.name)
return ("""{} documents, {} words and expressions, {} clusters in database."""
.format(nb_documents,nb_expressions,nb_clusters)+
' {} document(s) not associated to a file {}.'.format(doc_no_file,docname_no_file))
def run_clean_db():
Cluster.objects.all().delete()
GraphNode.objects.all().delete()
# Delete only documents that are not associeted to a file:
document_set = Document.objects.all()
for doc in document_set:
if doc.file == '' or not os.path.isfile(doc.file.path):
doc.delete()
return 'Database cleaned.'
def run_erase_db():
Cluster.objects.all().delete()
Document.objects.all().delete()
GraphNode.objects.all().delete()
return 'Database erased.' | from django.shortcuts import render
import os
#from django.conf import settings
from classif.models import Cluster
from fileupload.models import Document
from graphdesign.models import GraphNode
def index(request):
output = ''
if(request.GET.get('check_db')):
output = run_check_db()
if(request.GET.get('clean_db')):
output = run_clean_db()
if(request.GET.get('erase_db')):
output = run_erase_db()
return render(request,'advancedSettings/advanced_settings.html',
{'console_message' :output})
def run_check_db():
try:
clusters = Cluster.objects.all()
except:
print("Database corrupted (can't access 'Cluster' object). Please re-install.")
return "Database corrupted (can't access 'Cluster' object). Please re-install."
nb_clusters = len(clusters)
try:
expressions = GraphNode.objects.all()
except:
print("Database corrupted (can't access 'GraphNode' object). Please re-install.")
return "Database corrupted (can't access 'GrpahNode' object). Please re-install."
nb_expressions = len(expressions)
# Delete only documents that are not associated to a file:
try:
document_set = Document.objects.all()
except:
print("Database corrupted (can't access 'Document' object). Please re-install.")
return "Database corrupted (can't access 'Document' object). Please re-install."
nb_documents = len(document_set)
doc_no_file = 0
for doc in document_set:
if not os.path.isfile(doc.file.path):
doc_no_file+=1
return ('{} documents, {} words and expressions, {} clusters in database.'.format(nb_documents,nb_expressions,nb_clusters)+
' {} document(s) not associated to a file.'.format(doc_no_file))
def run_clean_db():
Cluster.objects.all().delete()
GraphNode.objects.all().delete()
# Delete only documents that are not associeted to a file:
document_set = Document.objects.all()
for doc in document_set:
if not os.path.isfile(doc.file.path):
doc.delete()
return 'Database cleaned.'
def run_erase_db():
Cluster.objects.all().delete()
Document.objects.all().delete()
GraphNode.objects.all().delete()
return 'Database erased.' | agpl-3.0 | Python |
7cb1199d0904e3799a3695bdef465bc8021a2ff4 | Fix pt_br | lk-geimfari/mimesis,lk-geimfari/church,lk-geimfari/elizabeth,lk-geimfari/mimesis | tests/test_builtins/pt_br/test_brazil_spec.py | tests/test_builtins/pt_br/test_brazil_spec.py | import re
import pytest
from mimesis.builtins import BrazilSpecProvider
@pytest.fixture
def pt_br():
return BrazilSpecProvider()
def test_cpf(pt_br):
# test if the cpf has 14 digits with the mask
cpf_with_mask = pt_br.cpf()
assert len(cpf_with_mask) == 14
# test the mask
non_numeric_digits = re.sub(r"\d", "", cpf_with_mask)
assert "..-" == non_numeric_digits == non_numeric_digits
assert len(re.sub(r"\D", "", cpf_with_mask)) == 11
# test for the cpf without mask
cpf_without_mask = pt_br.cpf(False)
assert len(cpf_without_mask) == 11
non_numeric_digits = re.sub(r"\d", "", cpf_without_mask)
assert "" == non_numeric_digits
def test_cnpj(pt_br):
# test if the cnpj has 18 digits with the mask
cnpj_with_mask = pt_br.cnpj()
assert len(cnpj_with_mask) == 18
# test the mask
non_numeric_digits = re.sub(r"\d", "", cnpj_with_mask)
assert "../-" == non_numeric_digits == non_numeric_digits
assert len(re.sub(r"\D", "", cnpj_with_mask)) == 14
# test for the cnpj without mask
cnpj_without_mask = pt_br.cnpj(False)
assert len(cnpj_without_mask) == 14
non_numeric_digits = re.sub(r"\d", "", cnpj_without_mask)
assert "" == non_numeric_digits
| import re
import pytest
from mimesis.builtins import BrazilSpecProvider
@pytest.fixture
def pt_br():
return BrazilSpecProvider()
def test_cpf(pt_br):
# test if the cpf has 14 digits with the mask
cpf_with_mask = pt_br.cpf()
assert len(cpf_with_mask) == 14
# test the mask
non_numeric_digits = re.sub("\d", "", cpf_with_mask)
assert "..-" == non_numeric_digits == non_numeric_digits
assert len(re.sub("\D", "", cpf_with_mask)) == 11
# test for the cpf without mask
cpf_without_mask = pt_br.cpf(False)
assert len(cpf_without_mask) == 11
non_numeric_digits = re.sub("\d", "", cpf_without_mask)
assert "" == non_numeric_digits
def test_cnpj(pt_br):
# test if the cnpj has 18 digits with the mask
cnpj_with_mask = pt_br.cnpj()
assert len(cnpj_with_mask) == 18
# test the mask
non_numeric_digits = re.sub("\d", "", cnpj_with_mask)
assert "../-" == non_numeric_digits == non_numeric_digits
assert len(re.sub("\D", "", cnpj_with_mask)) == 14
# test for the cnpj without mask
cnpj_without_mask = pt_br.cnpj(False)
assert len(cnpj_without_mask) == 14
non_numeric_digits = re.sub("\d", "", cnpj_without_mask)
assert "" == non_numeric_digits
| mit | Python |
b9d8434d310ceb0654c8d69a0c54309bb33ccfe2 | Fix docstring typo. | tensorflow/agents,tensorflow/agents | tf_agents/environments/atari_wrappers_test.py | tf_agents/environments/atari_wrappers_test.py | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for environments.atari_wrappers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from absl.testing.absltest import mock
from tf_agents.environments import atari_wrappers
from tf_agents.environments import time_step as ts
class AtariTimeLimitTest(absltest.TestCase):
  """Tests for atari_wrappers.AtariTimeLimit step-count limiting."""
  def test_game_over_after_limit(self):
    """After max_steps transitions the wrapper must report game over."""
    max_steps = 5
    base_env = mock.MagicMock()
    wrapped_env = atari_wrappers.AtariTimeLimit(base_env, max_steps)
    # The underlying (mocked) game never ends on its own, so any terminal
    # state must come from the time limit.
    base_env.gym.game_over = False
    base_env.reset.return_value = ts.restart(1)
    base_env.step.return_value = ts.transition(2, 0)
    action = 1
    self.assertFalse(wrapped_env.game_over)
    # The first max_steps steps are ordinary, non-terminal transitions.
    for _ in range(max_steps):
      time_step = wrapped_env.step(action)
      self.assertFalse(time_step.is_last())
      self.assertFalse(wrapped_env.game_over)
    # One step past the limit yields a terminal time step.
    time_step = wrapped_env.step(action)
    self.assertTrue(time_step.is_last())
    self.assertTrue(wrapped_env.game_over)
  def test_resets_after_limit(self):
    """Stepping again after hitting the limit must reset the base env."""
    max_steps = 5
    base_env = mock.MagicMock()
    wrapped_env = atari_wrappers.AtariTimeLimit(base_env, max_steps)
    base_env.gym.game_over = False
    base_env.reset.return_value = ts.restart(1)
    base_env.step.return_value = ts.transition(2, 0)
    action = 1
    # Drive the wrapper past its step limit.
    for _ in range(max_steps + 1):
      wrapped_env.step(action)
    self.assertTrue(wrapped_env.game_over)
    # NOTE(review): exactly one reset has happened by this point --
    # presumably triggered on the first step; confirm against AtariTimeLimit.
    self.assertEqual(1, base_env.reset.call_count)
    # The next step after game over must reset the base environment again.
    wrapped_env.step(action)
    self.assertFalse(wrapped_env.game_over)
    self.assertEqual(2, base_env.reset.call_count)
if __name__ == '__main__':
absltest.main()
| # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for environments.gym_wrapper."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from absl.testing.absltest import mock
from tf_agents.environments import atari_wrappers
from tf_agents.environments import time_step as ts
class AtariTimeLimitTest(absltest.TestCase):
def test_game_over_after_limit(self):
max_steps = 5
base_env = mock.MagicMock()
wrapped_env = atari_wrappers.AtariTimeLimit(base_env, max_steps)
base_env.gym.game_over = False
base_env.reset.return_value = ts.restart(1)
base_env.step.return_value = ts.transition(2, 0)
action = 1
self.assertFalse(wrapped_env.game_over)
for _ in range(max_steps):
time_step = wrapped_env.step(action)
self.assertFalse(time_step.is_last())
self.assertFalse(wrapped_env.game_over)
time_step = wrapped_env.step(action)
self.assertTrue(time_step.is_last())
self.assertTrue(wrapped_env.game_over)
def test_resets_after_limit(self):
max_steps = 5
base_env = mock.MagicMock()
wrapped_env = atari_wrappers.AtariTimeLimit(base_env, max_steps)
base_env.gym.game_over = False
base_env.reset.return_value = ts.restart(1)
base_env.step.return_value = ts.transition(2, 0)
action = 1
for _ in range(max_steps + 1):
wrapped_env.step(action)
self.assertTrue(wrapped_env.game_over)
self.assertEqual(1, base_env.reset.call_count)
wrapped_env.step(action)
self.assertFalse(wrapped_env.game_over)
self.assertEqual(2, base_env.reset.call_count)
if __name__ == '__main__':
absltest.main()
| apache-2.0 | Python |
f8bf6f54c8700d2031c6bd833ad57ccb090730ad | fix pylint:no-self-use of .backend.cbor.Parser._load | ssato/python-anyconfig,ssato/python-anyconfig | anyconfig/backend/cbor.py | anyconfig/backend/cbor.py | #
# Copyright (C) 2017 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
r"""CBOR backend:
- Format to support: CBOR, http://cbor.io, https://tools.ietf.org/html/rfc7049
- Requirements: cbor, https://pypi.python.org/pypi/cbor
- Development Status :: 4 - Beta
- Limitations: None obvious
- Special options:
- All options of cbor.load{s,} and cbor.dump{s,} should work.
- See also: https://github.com/brianolson/cbor_py/blob/master/cbor/cbor.py
Changelog:
.. versionadded:: 0.8.3
"""
from __future__ import absolute_import
import cbor
import anyconfig.backend.base
import anyconfig.compat
from anyconfig.backend.pickle import load_with_fn
class Parser(anyconfig.backend.base.FromStreamLoader,
             anyconfig.backend.base.ToStreamDumper):
    """
    Parser for CBOR files.
    """
    _type = "cbor"               # backend identifier used by anyconfig
    _extensions = ["cbor"]       # file extensions this backend claims
    _load_opts = []              # no extra keyword options on load
    _dump_opts = ["sort_keys"]   # options forwarded to cbor.dump[s]
    _open_flags = ('rb', 'wb')   # CBOR is binary; open files in binary mode
    # Dumping delegates straight to the cbor module; loading goes through
    # the shared load_with_fn helper so both string and stream loads share
    # one code path.
    dump_to_string = anyconfig.backend.base.to_method(cbor.dumps)
    dump_to_stream = anyconfig.backend.base.to_method(cbor.dump)
    _load = anyconfig.backend.base.to_method(load_with_fn)
    def load_from_string(self, content, to_container, **opts):
        """
        Load CBOR config from given string `content`.
        :param content: CBOR config content
        :param to_container: callable to make a container object
        :param opts: keyword options passed to `cbor.loads`
        :return: Dict-like object holding configuration
        """
        return self._load(cbor.loads, content, to_container, **opts)
    def load_from_stream(self, stream, to_container, **opts):
        """
        Load CBOR config from given stream `stream`.
        :param stream: Stream will provide CBOR config content string
        :param to_container: callable to make a container object
        :param opts: keyword options passed to `cbor.load`
        :return: Dict-like object holding configuration
        """
        return self._load(cbor.load, stream, to_container, **opts)
# vim:sw=4:ts=4:et:
| #
# Copyright (C) 2017 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
r"""CBOR backend:
- Format to support: CBOR, http://cbor.io, https://tools.ietf.org/html/rfc7049
- Requirements: cbor, https://pypi.python.org/pypi/cbor
- Development Status :: 4 - Beta
- Limitations: None obvious
- Special options:
- All options of cbor.load{s,} and cbor.dump{s,} should work.
- See also: https://github.com/brianolson/cbor_py/blob/master/cbor/cbor.py
Changelog:
.. versionadded:: 0.8.3
"""
from __future__ import absolute_import
import cbor
import anyconfig.backend.base
import anyconfig.compat
class Parser(anyconfig.backend.base.FromStreamLoader,
anyconfig.backend.base.ToStreamDumper):
"""
Parser for CBOR files.
"""
_type = "cbor"
_extensions = ["cbor"]
_load_opts = []
_dump_opts = ["sort_keys"]
_open_flags = ('rb', 'wb')
dump_to_string = anyconfig.backend.base.to_method(cbor.dumps)
dump_to_stream = anyconfig.backend.base.to_method(cbor.dump)
def _load(self, load_fn, content_or_strm, to_container, **opts):
"""
Load CBOR config from given string or stream `content_or_strm`.
:param content_or_strm: CBOR config content or stream will provide it
:param to_container: callble to make a container object
:param opts: keyword options passed to `cbor.load[s]`
:return: Dict-like object holding configuration
"""
return to_container(load_fn(content_or_strm, **opts))
def load_from_string(self, content, to_container, **opts):
"""
Load CBOR config from given string `content`.
:param content: CBOR config content
:param to_container: callble to make a container object
:param opts: keyword options passed to `cbor.loads`
:return: Dict-like object holding configuration
"""
return self._load(cbor.loads, content, to_container, **opts)
def load_from_stream(self, stream, to_container, **opts):
"""
Load CBOR config from given stream `stream`.
:param stream: Stream will provide CBOR config content string
:param to_container: callble to make a container object
:param opts: keyword options passed to `cbor.load`
:return: Dict-like object holding configuration
"""
return self._load(cbor.load, stream, to_container, **opts)
# vim:sw=4:ts=4:et:
| mit | Python |
2d3d1d63f616fe0ccafad8a04f412f93e86ee72c | Expand geo-alchemy example. | mikelambert/flask-admin,jamesbeebop/flask-admin,flabe81/flask-admin,lifei/flask-admin,iurisilvio/flask-admin,quokkaproject/flask-admin,lifei/flask-admin,LennartP/flask-admin,Kha/flask-admin,jschneier/flask-admin,plaes/flask-admin,plaes/flask-admin,jschneier/flask-admin,torotil/flask-admin,jschneier/flask-admin,phantomxc/flask-admin,closeio/flask-admin,flask-admin/flask-admin,jmagnusson/flask-admin,ArtemSerga/flask-admin,HermasT/flask-admin,radioprotector/flask-admin,Junnplus/flask-admin,marrybird/flask-admin,quokkaproject/flask-admin,janusnic/flask-admin,late-warrior/flask-admin,chase-seibert/flask-admin,CoolCloud/flask-admin,litnimax/flask-admin,ondoheer/flask-admin,betterlife/flask-admin,mikelambert/flask-admin,mikelambert/flask-admin,ondoheer/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,ArtemSerga/flask-admin,janusnic/flask-admin,ArtemSerga/flask-admin,plaes/flask-admin,AlmogCohen/flask-admin,mikelambert/flask-admin,jamesbeebop/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,jamesbeebop/flask-admin,betterlife/flask-admin,toddetzel/flask-admin,jschneier/flask-admin,marrybird/flask-admin,rochacbruno/flask-admin,LennartP/flask-admin,chase-seibert/flask-admin,janusnic/flask-admin,likaiguo/flask-admin,quokkaproject/flask-admin,NickWoodhams/flask-admin,ondoheer/flask-admin,dxmo/flask-admin,lifei/flask-admin,flabe81/flask-admin,toddetzel/flask-admin,HermasT/flask-admin,torotil/flask-admin,dxmo/flask-admin,HermasT/flask-admin,flask-admin/flask-admin,chase-seibert/flask-admin,NickWoodhams/flask-admin,phantomxc/flask-admin,chase-seibert/flask-admin,flabe81/flask-admin,iurisilvio/flask-admin,jmagnusson/flask-admin,toddetzel/flask-admin,likaiguo/flask-admin,lifei/flask-admin,marrybird/flask-admin,Kha/flask-admin,jamesbeebop/flask-admin,betterlife/flask-admin,phantomxc/flask-admin,plaes/flask-admin,LennartP/flask-admin,NickWoodhams/flask-admin,closeio/flask-admin,iurisilvio/flask-ad
min,closeio/flask-admin,flask-admin/flask-admin,betterlife/flask-admin,marrybird/flask-admin,toddetzel/flask-admin,radioprotector/flask-admin,CoolCloud/flask-admin,iurisilvio/flask-admin,janusnic/flask-admin,flabe81/flask-admin,jmagnusson/flask-admin,litnimax/flask-admin,AlmogCohen/flask-admin,late-warrior/flask-admin,AlmogCohen/flask-admin,torotil/flask-admin,NickWoodhams/flask-admin,likaiguo/flask-admin,CoolCloud/flask-admin,dxmo/flask-admin,late-warrior/flask-admin,AlmogCohen/flask-admin,radioprotector/flask-admin,phantomxc/flask-admin,likaiguo/flask-admin,litnimax/flask-admin,CoolCloud/flask-admin,Junnplus/flask-admin,ondoheer/flask-admin,Junnplus/flask-admin,ArtemSerga/flask-admin,Kha/flask-admin,late-warrior/flask-admin,dxmo/flask-admin,radioprotector/flask-admin,LennartP/flask-admin,rochacbruno/flask-admin,Junnplus/flask-admin,Kha/flask-admin,flask-admin/flask-admin,quokkaproject/flask-admin,torotil/flask-admin,litnimax/flask-admin,HermasT/flask-admin | examples/geo-alchemy/app.py | examples/geo-alchemy/app.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from geoalchemy2.types import Geometry
from flask_admin.contrib.geoa import ModelView
# Create application
app = Flask(__name__)
# Create dummy secrey key so we can use sessions
app.config['SECRET_KEY'] = '123456790'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql+psycopg2://flask_admin_geo:flask_admin_geo@localhost/flask_admin_geo'
app.config['SQLALCHEMY_ECHO'] = True
db = SQLAlchemy(app)
app.config['MAPBOX_MAP_ID'] = '...'
app.config['MAPBOX_ACCESS_TOKEN'] = '...'
class Point(db.Model):
    """Named POINT geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("POINT"))
class MultiPoint(db.Model):
    """Named MULTIPOINT geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTIPOINT"))  # holds a MULTIPOINT despite the name
class Polygon(db.Model):
    """Named POLYGON geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("POLYGON"))  # holds a POLYGON despite the name
class MultiPolygon(db.Model):
    """Named MULTIPOLYGON geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTIPOLYGON"))  # holds a MULTIPOLYGON despite the name
class LineString(db.Model):
    """Named LINESTRING geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("LINESTRING"))  # holds a LINESTRING despite the name
class MultiLineString(db.Model):
    """Named MULTILINESTRING geometry stored via GeoAlchemy2."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    point = db.Column(Geometry("MULTILINESTRING"))  # holds a MULTILINESTRING despite the name
# Flask views
@app.route('/')
def index():
    """Landing page with a link into the Flask-Admin UI."""
    return '<a href="/admin/">Click me to get to Admin!</a>'
# Create admin
admin = admin.Admin(app, name='Example: GeoAlchemy')
# Add views
admin.add_view(ModelView(Point, db.session, category='Points'))
admin.add_view(ModelView(MultiPoint, db.session, category='Points'))
admin.add_view(ModelView(Polygon, db.session, category='Polygons'))
admin.add_view(ModelView(MultiPolygon, db.session, category='Polygons'))
admin.add_view(ModelView(LineString, db.session, category='Lines'))
admin.add_view(ModelView(MultiLineString, db.session, category='Lines'))
if __name__ == '__main__':
db.create_all()
# Start app
app.run(debug=True)
| from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import flask_admin as admin
from geoalchemy2.types import Geometry
from flask_admin.contrib.geoa import ModelView
# Create application
app = Flask(__name__)
# Create dummy secrey key so we can use sessions
app.config['SECRET_KEY'] = '123456790'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql+psycopg2://flask_admin_geo:flask_admin_geo@localhost/flask_admin_geo'
app.config['SQLALCHEMY_ECHO'] = True
db = SQLAlchemy(app)
app.config['MAPBOX_MAP_ID'] = "..."
app.config['MAPBOX_ACCESS_TOKEN'] = "..."
class Location(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
point = db.Column(Geometry("POINT"))
# Flask views
@app.route('/')
def index():
return '<a href="/admin/">Click me to get to Admin!</a>'
# Create admin
admin = admin.Admin(app, name='Example: GeoAlchemy')
# Add views
admin.add_view(ModelView(Location, db.session))
if __name__ == '__main__':
db.create_all()
# Start app
app.run(debug=True)
| bsd-3-clause | Python |
0d4093c768ee107eb962f7bb3142048493ff7f5e | Update processMIMTokens.py to remove abbreviations. | Eyra-is/Eyra,Eyra-is/Eyra,Eyra-is/Eyra,Eyra-is/Eyra,Eyra-is/Eyra,Eyra-is/Eyra | Backend/scripts/processMIMTokens.py | Backend/scripts/processMIMTokens.py | import sys
import os
import re
wordsToRemove = [
'hv',
'hæstv'
]
def process(sortedSentDir, lowerWCBound, upperWCBound, dest):
    """Collect MIM sentences with lowerWCBound..upperWCBound words into dest.

    Reads sorted/<n>/sentences.words for each n in the inclusive range
    (missing word-count folders are silently skipped) and appends the
    extracted sentences to the single output file `dest`.
    """
    with open(dest, 'w', encoding='utf8') as f:
        for i in range(int(lowerWCBound), int(upperWCBound)+1):
            pathToWords = os.path.join(sortedSentDir, str(i), 'sentences.words')
            if (os.path.exists(pathToWords)):
                with open(pathToWords, 'r', encoding='utf8') as tmp:
                    f.write(extractSentences(tmp.read().splitlines()))
def extractSentences(data):
    """
    Expects data as a list of the lines in the MIM sentences.words file
    (without trailing newlines). Strips the leading sentence tag from each
    line, drops lines containing digits, removes punctuation-bearing and
    banned tokens, and skips sentences containing all-caps abbreviations.
    Returns the surviving sentences joined one per line.
    """
    # Drop the initial sentence tag (the first space-separated token).
    stripped = [' '.join(entry.split(' ')[1:]) for entry in data]
    kept = []
    for line in filter(filterOutNumbers, stripped):
        tokens = [tok for tok in line.split(' ')
                  if filterOutPuncuation(tok) and tok not in wordsToRemove]
        if tokens and not containsCapsAbbrev(tokens):
            kept.append(' '.join(tokens))
    return ''.join(sentence + '\n' for sentence in kept)
def filterOutPuncuation(x):
    """Return True if x consists only of word characters and whitespace.

    Used as a filter predicate to drop tokens containing punctuation.
    (The name keeps the original spelling to preserve the module interface.)
    """
    # Raw string avoids the invalid "\w"-style escape warning; returning the
    # comparison directly replaces the if/else boilerplate. An empty string
    # does not match (the pattern requires one or more characters).
    return re.match(r'^[\w\s]+$', x, re.UNICODE) is not None
def filterOutNumbers(x):
    """Return True if x contains no decimal digits (keep digit-free lines)."""
    # Raw string avoids the invalid "\d" escape warning; the direct
    # comparison replaces the inverted if/else boilerplate.
    return re.search(r'\d', x, re.UNICODE) is None
def containsCapsAbbrev(x):
    '''
    Test if list x contains a capitalized abbreviation, e.g. OECD, DNA, RNA,
    which punctuation filtering would not remove: any single word with two
    or more uppercase characters counts.
    '''
    return any(
        sum(1 for ch in word if ch.isupper()) >= 2
        for word in x
    )
def run():
    """CLI entry point: print usage unless exactly four arguments are given,
    otherwise dispatch to process()."""
    # argv: script sortedSentDir lowerWCBound upperWCBound dest
    if len(sys.argv) < 5:
        print(
            'Usage: python %s sortedSentDir lowerWCBound upperWCBound dest\n\
            Description: Processes sorted MIM tokens, using the sorted/X/sentences.words files.\n' % sys.argv[0]
            +
            'sortedSentDir is the directory containing the folders 1,2,3,4,5,... e.g. /yourpath/MIM_prompts/sent_free/sentences/sorted\n\
            lower and upper WCBound are the number of words per sentence you want. E.g. 5 and 10 would give you all sentences with\
            between 5 and 10 words inclusive.\n\
            dest is the destination file with the output tokens, one on each line.'
        )
        return
    else:
        process(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
if __name__ == '__main__':
run() | import sys
import os
import re
def process(sortedSentDir, lowerWCBound, upperWCBound, dest):
with open(dest, 'w', encoding='utf8') as f:
for i in range(int(lowerWCBound), int(upperWCBound)+1):
pathToWords = os.path.join(sortedSentDir, str(i), 'sentences.words')
if (os.path.exists(pathToWords)):
with open(pathToWords, 'r', encoding='utf8') as tmp:
f.write(extractSentences(tmp.read().splitlines()))
def extractSentences(data):
"""
Expects data on format as a list of all lines in sentences.words in the MIM prompts without \n.
Extracts them by returning token1\ntoken2\ntoken3 etc.. with one sentence each line.
"""
out = ''
# remove initial sentence tag
data = [' '.join(y.split(' ')[1:]) for y in data]
data = filter(filterOutNumbers, data)
#data = list(data)
for line in data:
words = line.split(' ')
words = filter(filterOutPuncuation, words)
words = list(words)
if len(words) >= 1:
out += ' '.join(words) + '\n'
return out
def filterOutPuncuation(x):
if re.match(r'^[\w\s]+$', x, re.UNICODE) is not None:
return True
return False
def filterOutNumbers(x):
if re.search(r'\d', x, re.UNICODE) is not None:
return False
return True
def run():
if len(sys.argv) < 5:
print(
'Usage: python %s sortedSentDir lowerWCBound upperWCBound dest\n\
Description: Processes sorted MIM tokens, using the sorted/X/sentences.words files.\n' % sys.argv[0]
+
'sortedSentDir is the directory containing the folders 1,2,3,4,5,... e.g. /yourpath/MIM_prompts/sent_free/sentences/sorted\n\
lower and upper WCBound are the number of words per sentence you want. E.g. 5 and 10 would give you all sentences with\
between 5 and 10 words inclusive.\n\
dest is the destination file with the output tokens, one on each line.'
)
return
else:
process(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
if __name__ == '__main__':
run() | apache-2.0 | Python |
0dece1167815327a272b590a7cec9a99464bf5ed | Bump version | encode/uvicorn,encode/uvicorn | uvicorn/__init__.py | uvicorn/__init__.py | from uvicorn.run import run
__version__ = '0.1.0'
__all__ = ['run']
| from uvicorn.run import run
__version__ = '0.0.15'
__all__ = ['run']
| bsd-3-clause | Python |
215cbbb4f9d9127d8ce2e60c9e49cb597e9d1633 | add BANNED_LINKS | topher200/hearthstone_reddit_card_bot | generate_card_csv.py | generate_card_csv.py | """Fetches latest card names and links from hearthpwn.
Outputs to cards.csv.
"""
import bs4
import collections
import csv
import httplib2
import logging
import util
# These cards cause too many false positives
BANNED_CARD_LIST = [
"Bananas",
"Blizzard",
"Boar",
"Silence",
"Charge",
"Chicken",
"Claw",
"DEBUG",
"Defender",
"Dream",
"Dispel",
"Execute",
"Flare",
"Frog",
"Gnoll",
"Hyena",
"Imp",
"Misdirection",
"Rampage",
"Rooted",
"Sheep",
"Slam",
"Swipe",
"The Coin",
"Windfury",
]
BANNED_LINKS = [
# There's a Lorewalker Cho NPC as well as minion. We want the minion
"http://www.hearthpwn.com/cards/655-lorewalker-cho",
]
def get_cards_from_page(url):
    """Scrape one hearthpwn listing page into an OrderedDict of
    {card name: absolute card URL}.

    Only anchors carrying both "href" and "data-id" attributes are treated
    as card links; banned links are skipped.
    """
    logging.info("getting cards from {}".format(url))
    card_dict = collections.OrderedDict()
    http = httplib2.Http()
    _, response = http.request(url)
    for link in bs4.BeautifulSoup(response, parse_only=bs4.SoupStrainer('a')):
        if link.has_attr("href") and link.has_attr("data-id"):
            # The internal site link doesn't include the root url - we add it
            full_link = "http://www.hearthpwn.com{}".format(link['href'])
            if full_link in BANNED_LINKS:
                continue
            card_dict[link.text] = full_link
    return card_dict
def main():
    """Fetch hearthpwn card listings, drop banned cards, write cards.csv."""
    util.setup_logging(verbose=False)
    logging.info("Starting generate_card_csv.py")
    # Grab the cards from each page. We're purposely grabbing from more pages
    # than exist. We're using a dict, so grabbing from pages more than once
    # won't dupe cards.
    card_dict = collections.OrderedDict()
    for page_num in range(0, 10):
        url = "http://www.hearthpwn.com/cards?display=1&page={}".format(page_num)
        card_dict.update(get_cards_from_page(url))
    logging.debug("card dict: {}".format(card_dict))
    logging.debug("removing banned cards")
    for card in BANNED_CARD_LIST:
        # pop with a default so a banned card that was not scraped (e.g.
        # the site changed its listings) doesn't crash the run with KeyError.
        card_dict.pop(card, None)
    logging.info("writing cards to file")
    with open("cards.csv", 'w') as csv_file:
        csv_writer = csv.writer(csv_file)
        for card in card_dict:
            csv_writer.writerow([card, card_dict[card]])
if __name__ == "__main__":
main()
| """Fetches latest card names and links from hearthpwn.
Outputs to cards.csv.
"""
import bs4
import collections
import csv
import httplib2
import logging
import util
BANNED_CARD_LIST = [
"Bananas",
"Blizzard",
"Boar",
"Silence",
"Charge",
"Chicken",
"Claw",
"DEBUG",
"Defender",
"Dream",
"Dispel",
"Execute",
"Flare",
"Frog",
"Gnoll",
"Hyena",
"Imp",
"Misdirection",
"Rampage",
"Rooted",
"Sheep",
"Slam",
"Swipe",
"The Coin",
"Windfury",
]
def get_cards_from_page(url):
logging.info("getting cards from {}".format(url))
card_dict = collections.OrderedDict()
http = httplib2.Http()
_, response = http.request(url)
for link in bs4.BeautifulSoup(response, parse_only=bs4.SoupStrainer('a')):
if link.has_attr("href") and link.has_attr("data-id"):
# The interal site link doesn't include the root url - we add it
card_dict[link.text] = "http://hearthpwn.com{}".format(link['href'])
return card_dict
def main():
util.setup_logging(verbose=False)
logging.info("Starting generate_card_csv.py")
# Grab the cards from each page. We're purposely grabbing from more pages
# than exist. We're using a dict, so grabbing from pages more than once
# won't dupe cards.
card_dict = collections.OrderedDict()
for page_num in range(0, 10):
url = "http://www.hearthpwn.com/cards?display=1&page={}".format(page_num)
card_dict.update(get_cards_from_page(url))
logging.debug("card dict: {}".format(card_dict))
logging.debug("removing banned cards")
for card in BANNED_CARD_LIST:
card_dict.pop(card)
logging.info("writing cards to file")
with open("cards.csv", 'w') as csv_file:
csv_writer = csv.writer(csv_file)
for card in card_dict:
csv_writer.writerow([card, card_dict[card]])
if __name__ == "__main__":
main()
| mit | Python |
d13dca630b2d34a7a0c41b8c3ef5dd79c75ec6cb | Update proxy examples | mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "134.209.128.61:3128", # (Example) - set your own proxy here
"example2": "165.227.83.185:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
"""
PROXY_LIST = {
"example1": "212.87.220.2:3128", # (Example) - set your own proxy here
"example2": "51.75.147.44:3128", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| mit | Python |
9448c082b158dcab960d33982e8189f2d2da4729 | Fix flake8 | gitpython-developers/gitpython,gitpython-developers/GitPython,gitpython-developers/GitPython,gitpython-developers/gitpython | git/compat/typing.py | git/compat/typing.py | # -*- coding: utf-8 -*-
# config.py
# Copyright (C) 2021 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import sys
if sys.version_info[:2] >= (3, 8):
from typing import Final, Literal # noqa: F401
else:
from typing_extensions import Final, Literal # noqa: F401
| # -*- coding: utf-8 -*-
# config.py
# Copyright (C) 2021 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import sys
if sys.version_info[:2] >= (3, 8):
from typing import Final, Literal
else:
from typing_extensions import Final, Literal
| bsd-3-clause | Python |
99cf264a53025f8c80688f4ce89410a678eee876 | update import statements | MrYsLab/PyMata | examples/pymata_i2c_read.py | examples/pymata_i2c_read.py | __author__ = 'Copyright (c) 2013 Alan Yorinks All rights reserved.'
"""
Copyright (c) 2013 Alan Yorinks All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This file demonstrates using PyMata to read temperature values from a SparkFun Digital Temperature Sensor
Breakout for the TMP102 device - SparkFun part #SEN-11931
The code is based on a bildr article: http://bildr.org/2011/01/tmp102-arduino/
"""
# import the API class
import time
from pymata import PyMata
# The PyMata constructor will print status to the console and will return
# when PyMata is ready to accept commands or will exit if unsuccessful
# The PyMata constructor will print status to the console and will return
# when PyMata is ready to accept commands or will exit if unsuccessful
firmata = PyMata("/dev/ttyACM0")
# Configure the I2C pins 4 and 5 in the analog pin space -- this mapping is
# for the UNO (presumably A4=SDA / A5=SCL; confirm against the PyMata docs).
firmata.i2c_config(0, firmata.ANALOG, 4, 5)
# read i2c device at address 0x48, with no register specified. Expect 2 bytes
# to be returned and the operation is a single shot read
firmata.i2c_read(0x48, 0, 2, firmata.I2C_READ)
# give the serial interface time to send a read, for the device to execute
# the read and to get things back across the interface
time.sleep(3)
# retrieve the data sent from device
data = firmata.i2c_get_read_data(0x48)
# Combine the two data bytes (data[1] high, data[2] low) into the 12-bit
# TMP102 reading; each LSB is 0.0625 degrees C.
TemperatureSum = (data[1] << 8 | data[2]) >> 4
celsius = TemperatureSum * 0.0625
print celsius  # NOTE: Python 2 print statement; this example predates Python 3
fahrenheit = (1.8 * celsius) + 32
print fahrenheit
# Release the serial port.
firmata.close()
| __author__ = 'Copyright (c) 2013 Alan Yorinks All rights reserved.'
"""
Copyright (c) 2013 Alan Yorinks All rights reserved.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 3 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This file demonstrates using PyMata to read temperature values from a SparkFun Digital Temperature Sensor
Breakout for the TMP102 device - SparkFun part #SEN-11931
The code is based on a bildr article: http://bildr.org/2011/01/tmp102-arduino/
"""
# import the API class
import time
from PyMata.pymata import PyMata
# The PyMata constructor will print status to the console and will return
# when PyMata is ready to accept commands or will exit if unsuccessful
firmata = PyMata("/dev/ttyACM0")
#configure the I2C pins. This code is for the UNO
firmata.i2c_config(0, firmata.ANALOG, 4, 5)
# read i2c device at address 0x48, with no register specified. Expect 2 bytes to be returned
# and the operation is a single shot read
firmata.i2c_read(0x48, 0, 2, firmata.I2C_READ)
# give the serial interface time to send a read, for the device to execute the read
# and to get things back across the interface
time.sleep(3)
# retrieve the data sent from device
data = firmata.i2c_get_read_data(0x48)
# do some calculations on the raw data returned
TemperatureSum = (data[1] << 8 | data[2]) >> 4
celsius = TemperatureSum * 0.0625
print celsius
fahrenheit = (1.8 * celsius) + 32
print fahrenheit
firmata.close()
| agpl-3.0 | Python |
007a4a1ba529e2913357387c1f42b6028860fe98 | Clean up coverage_coverage.py | 7WebPages/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,blueyed/coveragepy,hugovk/coveragepy,blueyed/coveragepy,nedbat/coveragepy,blueyed/coveragepy,nedbat/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,nedbat/coveragepy | test/coverage_coverage.py | test/coverage_coverage.py | """Coverage-test Coverage.py itself."""
import os, shutil, sys
import nose
HTML_DIR = "htmlcov"
def run_tests_with_coverage():
    """Run the test suite with coverage measuring itself.

    The coverage data file gets a suffix encoding the Python version and
    tracer (from COVERAGE_TEST_TRACER) so parallel runs don't collide.
    """
    import coverage
    tracer = os.environ.get('COVERAGE_TEST_TRACER', 'c')
    version = "%s%s" % sys.version_info[:2]
    suffix = ".%s_%s" % (version, tracer)
    cov = coverage.coverage(branch=True, data_suffix=suffix)
    # Cheap trick: the coverage code itself is excluded from measurement, but
    # if we clobber the cover_prefix in the coverage object, we can defeat the
    # self-detection.
    cov.cover_prefix = "Please measure coverage.py!"
    cov.erase()
    cov.start()
    # Re-import coverage to get it coverage tested! I don't understand all the
    # mechanics here, but if I don't carry over the imported modules (in
    # covmods), then things go haywire (os == None, eventually).
    covmods = {}
    covdir = os.path.split(coverage.__file__)[0]
    # We have to make a list since we'll be deleting in the loop.
    modules = list(sys.modules.items())
    for name, mod in modules:
        if name.startswith('coverage'):
            if hasattr(mod, '__file__') and mod.__file__.startswith(covdir):
                covmods[name] = mod
                del sys.modules[name]
    import coverage  # don't warn about re-import: pylint: disable-msg=W0404
    sys.modules.update(covmods)
    # Run nosetests, with the arguments from our command line.
    print(":: Running nosetests %s" % " ".join(sys.argv[1:]))
    nose.run()
    cov.stop()
    print(":: Saving .coverage%s" % suffix)
    cov.save()
def report_on_combined_files():
    """Merge every .coverage data file and render an HTML report."""
    # Start from a clean output directory.
    if os.path.exists(HTML_DIR):
        shutil.rmtree(HTML_DIR)

    print(":: Writing HTML report to %s/index.html" % HTML_DIR)
    import coverage
    cov = coverage.coverage()
    cov.combine()
    cov.save()

    # Replace the default exclusion regexes with our own set, in order.
    cov.clear_exclude()
    exclusion_patterns = (
        "#pragma: no cover",
        "def __repr__",
        "if __name__ == .__main__.:",
        "raise AssertionError",
    )
    for pattern in exclusion_patterns:
        cov.exclude(pattern)

    cov.html_report(
        directory=HTML_DIR, ignore_errors=True, omit_prefixes=["mock"]
    )
# Dispatch on the first command-line argument (default: empty string).
cmd = sys.argv[1] if len(sys.argv) > 1 else ''

if cmd == 'run':
    # nose.run reads sys.argv directly, so drop our own command argument
    # first, leaving sys.argv holding only nose's arguments.
    del sys.argv[1]
    run_tests_with_coverage()
elif cmd == 'report':
    report_on_combined_files()
else:
    print("Need 'run' or 'report'")
| """Coverage-test Coverage.py itself."""
import os, shutil, sys
import nose
HTML_DIR = "htmlcov"
def run_tests_with_coverage():
import coverage
tracer = os.environ.get('COVERAGE_TEST_TRACER', 'c')
version = "%s%s" % sys.version_info[:2]
suffix = ".%s_%s" % (version, tracer)
cov = coverage.coverage(branch=True, data_suffix=suffix)
# Cheap trick: the coverage code itself is excluded from measurement, but
# if we clobber the cover_prefix in the coverage object, we can defeat the
# self-detection.
cov.cover_prefix = "Please measure coverage.py!"
cov.erase()
cov.start()
# Re-import coverage to get it coverage tested! I don't understand all the
# mechanics here, but if I don't carry over the imported modules (in
# covmods), then things go haywire (os == None, eventually).
covmods = {}
covdir = os.path.split(coverage.__file__)[0]
# We have to make a list since we'll be deleting in the loop.
modules = list(sys.modules.items())
for name, mod in modules:
if name.startswith('coverage'):
if hasattr(mod, '__file__') and mod.__file__.startswith(covdir):
covmods[name] = mod
del sys.modules[name]
import coverage # don't warn about re-import: pylint: disable-msg=W0404
sys.modules.update(covmods)
# Run nosetests, with the arguments from our command line.
print(":: Running nosetests %s" % " ".join(sys.argv[1:]))
nose.run()
cov.stop()
print(":: Saving .coverage%s" % suffix)
cov.save()
def report_on_combined_files():
if os.path.exists(HTML_DIR):
shutil.rmtree(HTML_DIR)
print(":: Writing HTML report to %s/index.html" % HTML_DIR)
import coverage
cov = coverage.coverage()
cov.combine()
cov.save()
cov.clear_exclude()
cov.exclude("#pragma: no cover")
cov.exclude("def __repr__")
cov.exclude("if __name__ == .__main__.:")
cov.exclude("raise AssertionError")
cov.html_report(directory=HTML_DIR, ignore_errors=True, omit_prefixes=["mock"])
try:
cmd = sys.argv[1]
except IndexError:
cmd = ''
if cmd == 'run':
# Ugly hack: nose.run reads sys.argv directly, so here I delete my command
# argument so that sys.argv is left as just nose arguments.
del sys.argv[1]
run_tests_with_coverage()
elif cmd == 'report':
report_on_combined_files()
else:
print("Need 'run' or 'report'")
| apache-2.0 | Python |
6881525aaada9b8a6c895be4dbc40b2fddc6eec6 | Update booted.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | apps/led_berepi/booted.py | apps/led_berepi/booted.py | ## This code for HW init
## It will show LED Blue ON, 30secs after booting
## We can easily check the booting has problem
thispath='...BerePi/trunk/apps/led_berepi'
import sys
from ledinit import *
debug_print = 1
def BootLed():
    """Blink the blue LED once: one second on, then one second off."""
    for switch in (ledb_on, ledb_off):
        switch()
        time.sleep(1)
if __name__== "__main__" :
if debug_print is 1: print "(%s/booted.py) >> Starting " %thispath
while True:
BootLed()
if debug_print is 1: print "(%s/booted.py) >> end of a loop" %thispath
GPIO.cleanup()
| ## This code for HW init
## It will show LED Blue ON, 30secs after booting
## We can easily check the booting has problem
thispath='...BerePi/trunk/apps/led_berepi'
import sys
from ledinit import *
debug_print = 1
def BootLed():
    """Blink the red LED once: one second on, then one second off."""
    ledr_on()
    time.sleep(1)
    ledr_off()
    time.sleep(1)
if __name__== "__main__" :
    # NOTE(review): 'is 1' is an identity test that only works because
    # CPython caches small integers; '== 1' is the correct comparison.
    if debug_print is 1: print "(%s/booted.py) >> Starting " %thispath
    while True:
        BootLed()
        if debug_print is 1: print "(%s/booted.py) >> end of a loop" %thispath
    # NOTE(review): unreachable -- 'while True' never exits, so the GPIO
    # pins are never released; wrapping the loop in try/finally would fix it.
    GPIO.cleanup()
| bsd-2-clause | Python |
246971d8dd7d6c5fdc480c55e4e79ffd7a840b9b | Add a render method to view that should be reimplemented | onitake/Uranium,onitake/Uranium | Cura/View/View.py | Cura/View/View.py | #Abstract for all views
class View(object):
    """Abstract base class for all views; concrete views override render()."""
    def __init__(self):
        # Renderer backing this view; None until assigned -- presumably set
        # by the owning application (TODO confirm where it is wired up).
        self._renderer = None
    def render(self, glcontext):
        """Draw the view.  The base implementation does nothing; subclasses
        should reimplement.  *glcontext* is presumably an OpenGL context --
        confirm against callers."""
        pass
| #Abstract for all views
class View(object):
    """Abstract base class for all views."""
    def __init__(self):
        # Renderer backing this view; starts unset.
        self._renderer = None
169746eea4f8b520af9b7388ae62cdf903dd288b | remove redundant lambda usage | picsadotcom/maguire | backend/maguire/celery.py | backend/maguire/celery.py | from __future__ import absolute_import
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'maguire.settings')

# Single module-level Celery application, imported by workers and tasks.
app = Celery('maguire')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
# With no argument, task modules are discovered from the configured Django
# app registry -- no need to pass INSTALLED_APPS explicitly.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    """Print this task's own request; handy for smoke-testing that workers
    receive and execute tasks (bind=True exposes the request via self)."""
    print('Request: {0!r}'.format(self.request))
| from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'maguire.settings')
app = Celery('maguire')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| bsd-3-clause | Python |
920209c1bb25ca59161414b285551a08da44a7b2 | Add sponsors to admin | patrick91/pycon,patrick91/pycon | backend/sponsors/admin.py | backend/sponsors/admin.py | from django.contrib import admin
from .models import Sponsor, SponsorLevel
@admin.register(SponsorLevel)
class SponsorLevelAdmin(admin.ModelAdmin):
    """Admin for sponsor levels; the changelist shows name and conference."""
    list_display = ("name", "conference")
@admin.register(Sponsor)
class SponsorAdmin(admin.ModelAdmin):
pass
| # from django.contrib import admin
# from .models import Post
# @admin.register(Post)
# class PostAdmin(admin.ModelAdmin):
# list_display = ("title", "published", "author")
| mit | Python |
ca674b743b6d48593f45d999335ae893cf2a90d6 | Add github and facebook oauth credentials. | klen/Flask-Foundation,klen/fquest,klen/tweetchi | base/config/production.py | base/config/production.py | " Production settings must be here. "
from .core import *
from os import path as op
# NOTE(review): SECRET_KEY is a predictable placeholder and the OAuth
# secrets below look like real credentials committed to source control --
# they should be rotated and loaded from the environment, not hard-coded.
SECRET_KEY = 'SecretKeyForSessionSigning'

ADMINS = frozenset([MAIL_USERNAME])

# flask.ext.collect
# -----------------
COLLECT_STATIC_ROOT = op.join(op.dirname(ROOTDIR), 'static')

# auth.oauth
# ----------
OAUTH_TWITTER = dict(
    consumer_key='750sRyKzvdGPJjPd96yfgw',
    consumer_secret='UGcyjDCUOb1q44w1nUk8FA7aXxvwwj1BCbiFvYYI',
)

OAUTH_FACEBOOK = dict(
    consumer_key='413457268707622',
    consumer_secret='48e9be9f4e8abccd3fb916a3f646dd3f',
)

OAUTH_GITHUB = dict(
    consumer_key='8bdb217c5df1c20fe632',
    consumer_secret='a3aa972b2e66e3fac488b4544d55eda2aa2768b6',
)

# dealer
# Version backends are tried in this order; 'null' is the final fallback.
DEALER_PARAMS = dict(
    backends=('git', 'mercurial', 'simple', 'null')
)

# pymode:lint_ignore=W0614,W404
| " Production settings must be here. "
from .core import *
from os import path as op
SECRET_KEY = 'SecretKeyForSessionSigning'
ADMINS = frozenset([MAIL_USERNAME])
# flask.ext.collect
# -----------------
COLLECT_STATIC_ROOT = op.join(op.dirname(ROOTDIR), 'static')
# auth.oauth
# ----------
OAUTH_TWITTER = dict(
# flask-base-template app
consumer_key='ydcXz2pWyePfc3MX3nxJw',
consumer_secret='Pt1t2PjzKu8vsX5ixbFKu5gNEAekYrbpJrlsQMIwquc'
)
# dealer
DEALER_PARAMS = dict(
backends=('git', 'mercurial', 'simple', 'null')
)
# pymode:lint_ignore=W0614,W404
| bsd-3-clause | Python |
436495835604d58b5c188bb0151e88d11e8e5de2 | fix httprpc_soap_basic.py | arskom/spyne,martijnvermaat/rpclib,martijnvermaat/rpclib,arskom/spyne,arskom/spyne,martijnvermaat/rpclib | src/rpclib/test/interop/server/httprpc_soap_basic.py | src/rpclib/test/interop/server/httprpc_soap_basic.py | #!/usr/bin/env python
#
# rpclib - Copyright (C) Rpclib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('rpclib.protocol.soap')
logger.setLevel(logging.DEBUG)
from rpclib.application import Application
from rpclib.test.interop.server._service import services
from rpclib.protocol.soap import Soap11
from rpclib.protocol.http import HttpRpc
from rpclib.interface.wsdl import Wsdl11
httprpc_soap_application = Application(services, Wsdl11, HttpRpc, Soap11,
tns='rpclib.test.interop.server.httprpc.soap')
from rpclib.server.wsgi import WsgiApplication
if __name__ == '__main__':
    try:
        # wsgiref ships with Python >= 2.5; on older versions these imports
        # raise ImportError, handled below.
        from wsgiref.simple_server import make_server
        from wsgiref.validate import validator

        # Wrap the rpclib application in wsgiref's WSGI-compliance validator.
        wsgi_application = WsgiApplication(httprpc_soap_application)

        server = make_server('0.0.0.0', 9756, validator(wsgi_application))

        logger.info('Starting interop server at %s:%s.' % ('0.0.0.0', 9756))
        logger.info('WSDL is at: /?wsdl')
        server.serve_forever()

    except ImportError:
        # NOTE(review): this also swallows ImportErrors raised from inside
        # rpclib itself, not only the wsgiref import it was written for.
        print "Error: example server code requires Python >= 2.5"
| #!/usr/bin/env python
#
# rpclib - Copyright (C) Rpclib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('rpclib.protocol.soap')
logger.setLevel(logging.DEBUG)
from rpclib.application import Application
from rpclib.test.interop.server._service import services
from rpclib.protocol.soap import Soap11
from rpclib.protocol.http import HttpRpc
from rpclib.interface.wsdl import Wsdl11
httprpc_soap_application = Application(services, Wsdl11, HttpRpc, Soap11,
tns='rpclib.test.interop.server.httprpc.soap')
from rpclib.server import wsgi
if __name__ == '__main__':
try:
from wsgiref.simple_server import make_server
from wsgiref.validate import validator
wsgi_application = wsgi.Application(httprpc_soap_application)
server = make_server('0.0.0.0', 9756, validator(wsgi_application))
logger.info('Starting interop server at %s:%s.' % ('0.0.0.0', 9756))
logger.info('WSDL is at: /?wsdl')
server.serve_forever()
except ImportError:
print "Error: example server code requires Python >= 2.5"
| lgpl-2.1 | Python |
70332af1d948c882372c91264d776d73b0bcfb29 | Bump version | thombashi/sqliteschema | sqliteschema/__version__.py | sqliteschema/__version__.py | # encoding: utf-8
from datetime import datetime
# Distribution metadata constants; the copyright year range is computed at
# import time from the current date.
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.15.4"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.15.3"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
c7b6e04c9cc2c218ef816619320d1dc17420ae4c | Bump version | thombashi/sqliteschema | sqliteschema/__version__.py | sqliteschema/__version__.py | __author__ = "Tsuyoshi Hombashi"
# Distribution metadata constants for the sqliteschema package.
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
__version__ = "1.2.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| __author__ = "Tsuyoshi Hombashi"
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
__version__ = "1.2.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
931e88af93a0ad9f234be0db0056610037f7a289 | improve print msg | tanghaibao/goatools,tanghaibao/goatools | tests/test_nb_relationships_change_dcnt_values.py | tests/test_nb_relationships_change_dcnt_values.py | #!/usr/bin/env python
"""Test notebook code"""
from goatools.base import get_godag
from goatools.gosubdag.gosubdag import GoSubDag
from goatools.gosubdag.plot.gosubdag_plot import GoSubDagPlot
def test_nb():
    """Test notebook code"""
    godag = get_godag("go-basic.obo", optional_attrs={'relationship'})
    # Leaf terms: GO IDs that have no children in the DAG.
    go_leafs = set(o.item_id for o in godag.values() if not o.children)
    virion = 'GO:0019012'
    # Compare virion's record when traversing is_a only (r0), all
    # relationships (r1), and part_of only; the dcnt field is expected to
    # differ between these traversals -- presumably a descendant count,
    # TODO confirm against GoSubDag docs.
    gosubdag_r0 = GoSubDag(go_leafs, godag)
    nt_virion = gosubdag_r0.go2nt[virion]
    print(nt_virion)
    print('r0 THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
    gosubdag_r1 = GoSubDag(go_leafs, godag, relationships=True)
    nt_virion = gosubdag_r1.go2nt[virion]
    print(nt_virion)
    print('r1 THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
    gosubdag_partof = GoSubDag(go_leafs, godag, relationships={'part_of'})
    nt_virion = gosubdag_partof.go2nt[virion]
    print(nt_virion)
    print('THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
    virion_descendants = gosubdag_partof.rcntobj.go2descendants[virion]
    print('{N} descendants of virion were found'.format(N=len(virion_descendants)))
    # Limit plot of descendants to get a smaller plot
    virion_capsid_fiber = {'GO:0098033', 'GO:0098032'}
    gosubdag_partof.prt_goids(virion_capsid_fiber,
                              '{NS} {GO} dcnt({dcnt}) D-{depth:02} {GO_name}')
    # Limit plot size by choosing just two virion descendants
    # Get a subset containing only a couple virion descendants and their ancestors
    pltdag = GoSubDag(virion_capsid_fiber, godag, relationships={'part_of'})
    pltobj = GoSubDagPlot(pltdag)
    pltobj.plt_dag('virion_capsid_fiber.png')
if __name__ == '__main__':
test_nb()
| #!/usr/bin/env python
"""Test notebook code"""
from goatools.base import get_godag
from goatools.gosubdag.gosubdag import GoSubDag
from goatools.gosubdag.plot.gosubdag_plot import GoSubDagPlot
def test_nb():
"""Test notebook code"""
godag = get_godag("go-basic.obo", optional_attrs={'relationship'})
go_leafs = set(o.item_id for o in godag.values() if not o.children)
virion = 'GO:0019012'
gosubdag_r0 = GoSubDag(go_leafs, godag)
nt_virion = gosubdag_r0.go2nt[virion]
print(nt_virion)
print('THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
gosubdag_r1 = GoSubDag(go_leafs, godag, relationships=True)
nt_virion = gosubdag_r1.go2nt[virion]
print(nt_virion)
print('THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
gosubdag_partof = GoSubDag(go_leafs, godag, relationships={'part_of'})
nt_virion = gosubdag_partof.go2nt[virion]
print(nt_virion)
print('THE VALUE OF dcnt IS: {dcnt}'.format(dcnt=nt_virion.dcnt))
virion_descendants = gosubdag_partof.rcntobj.go2descendants[virion]
print('{N} descendants of virion were found'.format(N=len(virion_descendants)))
# Limit plot of descendants to get a smaller plot
virion_capsid_fiber = {'GO:0098033', 'GO:0098032'}
gosubdag_partof.prt_goids(virion_capsid_fiber,
'{NS} {GO} dcnt({dcnt}) D-{depth:02} {GO_name}')
# Limit plot size by choosing just two virion descendants
# Get a subset containing only a couple virion descendants and their ancestors
pltdag = GoSubDag(virion_capsid_fiber, godag, relationships={'part_of'})
pltobj = GoSubDagPlot(pltdag)
pltobj.plt_dag('virion_capsid_fiber.png')
if __name__ == '__main__':
test_nb()
| bsd-2-clause | Python |
b24de4ee1e641194b378b99462ec1db1d5c4ad2c | Remove unnecessary TODO. | praekelt/airtime-service | airtime_service/service.py | airtime_service/service.py | from twisted.application import strports
from twisted.internet import reactor
from twisted.python import usage
from twisted.web import server
from .api import AirtimeServiceApp
DEFAULT_PORT = '8080'
class Options(usage.Options):
    """Command line args when run as a twistd plugin"""
    # [long name, short name, default, help] rows consumed by twisted.usage.
    optParameters = [["port", "p", DEFAULT_PORT,
                      "Port number for airtime-service to listen on"],
                     ["database-connection-string", "d", None,
                      "Database connection string"]]

    def postOptions(self):
        """Validate parsed options: the database connection string has no
        sensible default, so reject a missing value up front."""
        if self['database-connection-string'] is None:
            raise usage.UsageError(
                "--database-connection-string parameter is mandatory.")
def makeService(options):
    """Build the twistd service serving the airtime-service HTTP API.

    :param options: parsed ``Options`` instance; reads the 'port' strports
        description and 'database-connection-string' values.
    """
    app = AirtimeServiceApp(
        options['database-connection-string'], reactor=reactor)
    site = server.Site(app.app.resource())
    return strports.service(options['port'], site)
| from twisted.application import strports
from twisted.internet import reactor
from twisted.python import usage
from twisted.web import server
from .api import AirtimeServiceApp
DEFAULT_PORT = '8080'
class Options(usage.Options):
"""Command line args when run as a twistd plugin"""
# TODO other args
optParameters = [["port", "p", DEFAULT_PORT,
"Port number for airtime-service to listen on"],
["database-connection-string", "d", None,
"Database connection string"]]
def postOptions(self):
if self['database-connection-string'] is None:
raise usage.UsageError(
"--database-connection-string parameter is mandatory.")
def makeService(options):
app = AirtimeServiceApp(
options['database-connection-string'], reactor=reactor)
site = server.Site(app.app.resource())
return strports.service(options['port'], site)
| bsd-3-clause | Python |
53dd5ef261d0b7e2e406d0a4a97c4b5d5b691b70 | use mock in testing output function | OrlandoSoto/retirement,OrlandoSoto/retirement,marteki/retirement,OrlandoSoto/retirement,marteki/retirement,niqjohnson/retirement,mistergone/retirement,marteki/retirement,mistergone/retirement,mistergone/retirement,niqjohnson/retirement,niqjohnson/retirement,marteki/retirement | retirement_api/tests/test_models.py | retirement_api/tests/test_models.py | import os
import sys
from retirement_api.models import AgeChoice, Question, Step, Page, Tooltip
import mock
from django.test import TestCase
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(BASE_DIR)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
class ViewModels(TestCase):
testcase = AgeChoice.objects.get(age=61)
testquestion = Question.objects.all()[0]
teststep = Step.objects.all()[0]
testpage = Page.objects.all()[0]
testtip = Tooltip.objects.all()[0]
def test_get_subhed(self):
tc = AgeChoice.objects.get(age=61)
self.assertTrue("You've chosen age 61" in tc.get_subhed())
def test_question_slug(self):
question_slugger = Question(title='test q')
question_slugger.save()
self.assertTrue(question_slugger.slug == "test_q")
question_slugger.delete()
def test_question_translist(self):
tlist = self.testquestion.translist()
self.assertTrue(type(tlist) == list)
for term in ['question', 'answer_yes_a', 'answer_no_b', 'answer_unsure_a_subhed']:
self.assertTrue(term in tlist)
def test_quesiton_dump(self):
dumplist = self.testquestion.dump_translation_text()
self.assertTrue(type(dumplist) == list)
# outfile = "/tmp/%s.po" % self.testquestion.slug
# self.testquestion.dump_translation_text(output=True)
# self.assertTrue(os.path.isfile(outfile))
def test_question_dump_mock_output(self):
open_name = '%s.open' % __name__
with mock.patch(open_name, create=True) as mock_open:
mock_open.return_value = mock.MagicMock(spec=file)
self.testquestion.dump_translation_text(output=True)
file_handle = mock_open.return_value.__enter__.return_value
file_handle.write.assert_call_count==5
def test_agechoice_translist(self):
tlist = self.testcase.translist()
self.assertTrue(type(tlist) == list)
def test_step_translist(self):
tlist = self.teststep.translist()
self.assertTrue(type(tlist) == list)
def test_page_translist(self):
tlist = self.testpage.translist()
self.assertTrue(type(tlist) == list)
def test_tooltip_translist(self):
tlist = self.testtip.translist()
self.assertTrue(type(tlist) == list)
| import os
import sys
from retirement_api.models import AgeChoice, Question, Step, Page, Tooltip
import mock
from django.test import TestCase
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(BASE_DIR)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
class ViewModels(TestCase):
testcase = AgeChoice.objects.get(age=61)
testquestion = Question.objects.all()[0]
teststep = Step.objects.all()[0]
testpage = Page.objects.all()[0]
testtip = Tooltip.objects.all()[0]
def test_get_subhed(self):
tc = AgeChoice.objects.get(age=61)
self.assertTrue("You've chosen age 61" in tc.get_subhed())
def test_question_slug(self):
question_slugger = Question(title='test q')
question_slugger.save()
self.assertTrue(question_slugger.slug == "test_q")
question_slugger.delete()
def test_question_translist(self):
tlist = self.testquestion.translist()
self.assertTrue(type(tlist) == list)
for term in ['question', 'answer_yes_a', 'answer_no_b', 'answer_unsure_a_subhed']:
self.assertTrue(term in tlist)
def test_quesiton_dump(self):
dumplist = self.testquestion.dump_translation_text()
self.assertTrue(type(dumplist) == list)
outfile = "/tmp/%s.po" % self.testquestion.slug
self.testquestion.dump_translation_text(output=True)
self.assertTrue(os.path.isfile(outfile))
def test_agechoice_translist(self):
tlist = self.testcase.translist()
self.assertTrue(type(tlist) == list)
def test_step_translist(self):
tlist = self.teststep.translist()
self.assertTrue(type(tlist) == list)
def test_page_translist(self):
tlist = self.testpage.translist()
self.assertTrue(type(tlist) == list)
def test_tooltip_translist(self):
tlist = self.testtip.translist()
self.assertTrue(type(tlist) == list)
| cc0-1.0 | Python |
a1dce3d7ab710106bf3bfaaa01536efebb17c44e | Make sqlite wait longer for locked files | zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto | server/src/voodoo/dbutil.py | server/src/voodoo/dbutil.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
import os
def generate_getconn(engine, user, password, host, dbname):
kwargs = {}
if engine == 'mysql':
# If using mysql, choose among MySQLdb or pymysql,
# Trying to load MySQLdb, and if it fails, trying
# to load and register pymysql
try:
import MySQLdb
assert MySQLdb is not None # It can never be: just avoid pyflakes warnings
except ImportError:
import pymysql_sa
pymysql_sa.make_default_mysql_dialect()
# In the case of MySQL, we need to activate this flag
kwargs['client_flag'] = 2
elif engine == 'sqlite':
# By default, sqlite uses a timeout of 5 seconds. Given the
# concurrency levels that WebLab-Deusto might achieve with
# multiple users in a queue, this might not be enough. We
# increase it to a minute and a half to avoid problems with
# multiple concurrent users
kwargs['timeout'] = 90
# Then load the sqlalchemy dialect. In order to do the
# equivalent to:
#
# from sqlalchemy.dialects.mysql import base
# dbi = base.dialect.dbapi()
#
# We import the module itself (sqlalchemy.dialects.mysql)
import sqlalchemy.dialects as dialects
__import__('sqlalchemy.dialects.%s' % engine)
# And once imported, we take the base.dialect.dbapi
dbi = getattr(dialects, engine).base.dialect.dbapi()
if engine == 'sqlite':
def getconn_sqlite():
return dbi.connect(database = get_sqlite_dbname(dbname), **kwargs)
getconn = getconn_sqlite
else:
def getconn_else():
return dbi.connect(user = user, passwd = password, host = host, db = dbname, **kwargs)
getconn = getconn_else
return getconn
def get_sqlite_dbname(dbname):
upper_dir = os.sep.join(('..', 'db', '%s.db' % dbname))
if os.path.exists(upper_dir):
return upper_dir
upper_upper_dir = os.sep.join(('..', upper_dir))
if os.path.exists(upper_upper_dir):
return upper_upper_dir
raise Exception("Could not find %s. Did you run deploy.py?" % dbname)
def get_table_kwargs():
return {'mysql_engine' : 'InnoDB'}
| #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
import os
def generate_getconn(engine, user, password, host, dbname):
kwargs = {}
if engine == 'mysql':
# If using mysql, choose among MySQLdb or pymysql,
# Trying to load MySQLdb, and if it fails, trying
# to load and register pymysql
try:
import MySQLdb
assert MySQLdb is not None # It can never be: just avoid pyflakes warnings
except ImportError:
import pymysql_sa
pymysql_sa.make_default_mysql_dialect()
# In the case of MySQL, we need to activate this flag
kwargs['client_flag'] = 2
# Then load the sqlalchemy dialect. In order to do the
# equivalent to:
#
# from sqlalchemy.dialects.mysql import base
# dbi = base.dialect.dbapi()
#
# We import the module itself (sqlalchemy.dialects.mysql)
import sqlalchemy.dialects as dialects
__import__('sqlalchemy.dialects.%s' % engine)
# And once imported, we take the base.dialect.dbapi
dbi = getattr(dialects, engine).base.dialect.dbapi()
if engine == 'sqlite':
def getconn_sqlite():
return dbi.connect(database = get_sqlite_dbname(dbname), **kwargs)
getconn = getconn_sqlite
else:
def getconn_else():
return dbi.connect(user = user, passwd = password, host = host, db = dbname, **kwargs)
getconn = getconn_else
return getconn
def get_sqlite_dbname(dbname):
upper_dir = os.sep.join(('..', 'db', '%s.db' % dbname))
if os.path.exists(upper_dir):
return upper_dir
upper_upper_dir = os.sep.join(('..', upper_dir))
if os.path.exists(upper_upper_dir):
return upper_upper_dir
raise Exception("Could not find %s. Did you run deploy.py?" % dbname)
def get_table_kwargs():
return {'mysql_engine' : 'InnoDB'}
| bsd-2-clause | Python |
5ffc9dfe861db660018dec42b5145c7ed026a3d0 | add find_git_project_root | Woile/commitizen,Woile/commitizen | commitizen/git.py | commitizen/git.py | import os
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Optional, List
from commitizen import cmd
def tag(tag: str):
    """Create the given git tag and return the command result."""
    return cmd.run(f"git tag {tag}")
def commit(message: str, args=""):
    """Create a git commit with *message*, passing *args* through to git.

    The message is written to a temporary file and passed with ``-F`` so
    multi-line and non-ASCII messages survive shell quoting.
    """
    f = NamedTemporaryFile("wb", delete=False)
    f.write(message.encode("utf-8"))
    f.close()
    try:
        c = cmd.run(f"git commit {args} -F {f.name}")
    finally:
        # delete=False means nothing else cleans this up; remove the temp
        # file even when running git raises (previously it leaked).
        os.unlink(f.name)
    return c
def get_commits(start: str, end: str = "HEAD", from_beginning: bool = False) -> List[str]:
    """Return commit subject and body lines for a revision range.

    :param start: older revision bounding the range; ignored when
        *from_beginning* is true.
    :param end: newer revision, defaults to HEAD.
    :param from_beginning: list every commit reachable from *end* instead
        of only the ``start...end`` range.
    """
    # Pick the revision selector first so git log runs exactly once
    # (previously the ranged invocation ran and was thrown away whenever
    # from_beginning was true).
    if from_beginning:
        revisions = end
    else:
        revisions = f"{start}...{end}"
    c = cmd.run(f"git log --pretty=format:%s%n%b {revisions}")
    if not c.out:
        return []
    return c.out.split("\n")
def tag_exist(tag: str) -> bool:
    """Return True if *tag* is present in the local repository."""
    c = cmd.run(f"git tag --list {tag}")
    # NOTE(review): substring test -- a tag "v1" would also be "found"
    # inside an output line like "v1.0"; an exact line match is stricter.
    return tag in c.out
def is_staging_clean() -> bool:
    """Return True when neither the working tree nor the index has changes."""
    unstaged = cmd.run("git diff --no-ext-diff --name-only")
    staged = cmd.run("git diff --no-ext-diff --cached --name-only")
    return not unstaged.out and not staged.out
def get_latest_tag() -> Optional[str]:
    """Return the most recent reachable tag, or None when git reports an
    error (e.g. the repository has no tags yet)."""
    c = cmd.run("git describe --abbrev=0 --tags")
    if c.err:
        return None
    # Output carries a trailing newline; strip it before returning.
    return c.out.strip()
def get_all_tags() -> List[str]:
    """Return every local tag name; an empty list on error or no tags.

    (Annotation tightened from Optional[List[str]]: both branches return a
    list, never None.)
    """
    c = cmd.run("git tag --list")
    if c.err:
        return []
    return [tag.strip() for tag in c.out.split("\n") if tag.strip()]
def find_git_project_root() -> Path:
    """Return the path of the repository's top-level working directory."""
    c = cmd.run("git rev-parse --show-toplevel")
    # cmd.run output is not trimmed (the sibling functions all strip c.out),
    # so strip the trailing newline before building the Path; otherwise the
    # path ends with an embedded '\n' and never matches the real directory.
    return Path(c.out.strip())
| import os
from tempfile import NamedTemporaryFile
from typing import Optional, List
from commitizen import cmd
def tag(tag: str):
c = cmd.run(f"git tag {tag}")
return c
def commit(message: str, args=""):
f = NamedTemporaryFile("wb", delete=False)
f.write(message.encode("utf-8"))
f.close()
c = cmd.run(f"git commit {args} -F {f.name}")
os.unlink(f.name)
return c
def get_commits(start: str, end: str = "HEAD", from_beginning: bool = False) -> list:
c = cmd.run(f"git log --pretty=format:%s%n%b {start}...{end}")
if from_beginning:
c = cmd.run(f"git log --pretty=format:%s%n%b {end}")
if not c.out:
return []
return c.out.split("\n")
def tag_exist(tag: str) -> bool:
c = cmd.run(f"git tag --list {tag}")
return tag in c.out
def is_staging_clean() -> bool:
"""Check if staing is clean"""
c = cmd.run("git diff --no-ext-diff --name-only")
c_cached = cmd.run("git diff --no-ext-diff --cached --name-only")
return not (bool(c.out) or bool(c_cached.out))
def get_latest_tag() -> Optional[str]:
c = cmd.run("git describe --abbrev=0 --tags")
if c.err:
return None
return c.out.strip()
def get_all_tags() -> Optional[List[str]]:
c = cmd.run("git tag --list")
if c.err:
return []
return [tag.strip() for tag in c.out.split("\n") if tag.strip()]
| mit | Python |
ccead03e366a2e50c235ee68b999d6fd83354be3 | Reduce log level | rhettg/BlueOx,rhettg/Ziggy | ziggy/__init__.py | ziggy/__init__.py | # -*- coding: utf-8 -*-
"""
Ziggy
~~~~~~~~
:copyright: (c) 2012 by Rhett Garber
:license: ISC, see LICENSE for more details.
"""
__title__ = 'ziggy'
__version__ = '0.0.1'
__build__ = 0
__author__ = 'Rhett Garber'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Rhett Garber'
import logging
from . import utils
from . import network
from .context import Context, set, append, add
from . import context as _context_mod
from .errors import Error
from .timer import timeit
log = logging.getLogger(__name__)
def configure(host, port, recorder=None):
    """Initialize ziggy.

    This instructs the ziggy system where to send its logging data. If ziggy
    is not configured, log data will be silently dropped.

    We support logging through the network (to a ziggyd instance at the
    configured host and port) or to the specified recorder function.

    Args:
        host: hostname of the ziggyd collector (ignored when *recorder* given).
        port: port of the ziggyd collector (ignored when *recorder* given).
        recorder: optional callable that receives each log record directly.
    """
    # This module imports its context module as `_context_mod`; the previous
    # code assigned through an unbound name `context`, raising NameError on
    # every call.  The `global _record_function` statement was dead code —
    # that name is never assigned in this function — and has been dropped.
    if recorder:
        _context_mod._recorder_function = recorder
    elif host and port:
        network.init(host, port)
        _context_mod._recorder_function = network.send
    else:
        log.info("Empty ziggy configuration")
        _context_mod._recorder_function = None
| # -*- coding: utf-8 -*-
"""
Ziggy
~~~~~~~~
:copyright: (c) 2012 by Rhett Garber
:license: ISC, see LICENSE for more details.
"""
__title__ = 'ziggy'
__version__ = '0.0.1'
__build__ = 0
__author__ = 'Rhett Garber'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Rhett Garber'
import logging
from . import utils
from . import network
from .context import Context, set, append, add
from . import context as _context_mod
from .errors import Error
from .timer import timeit
log = logging.getLogger(__name__)
def configure(host, port, recorder=None):
    """Initialize ziggy.

    This instructs the ziggy system where to send its logging data. If ziggy
    is not configured, log data will be silently dropped.

    We support logging through the network (to a ziggyd instance at the
    configured host and port) or to the specified recorder function.

    Args:
        host: hostname of the ziggyd collector (ignored when *recorder* given).
        port: port of the ziggyd collector (ignored when *recorder* given).
        recorder: optional callable that receives each log record directly.
    """
    # This module imports its context module as `_context_mod`; the previous
    # code assigned through an unbound name `context`, raising NameError on
    # every call.  The `global _record_function` statement was dead code —
    # that name is never assigned in this function — and has been dropped.
    if recorder:
        _context_mod._recorder_function = recorder
    elif host and port:
        network.init(host, port)
        _context_mod._recorder_function = network.send
    else:
        log.warning("Empty ziggy configuration")
        _context_mod._recorder_function = None
| isc | Python |
5b4a64baf0261d32bffb4471cd2f51a14a38f075 | Fix a bug | feng-zhe/ZheQuant-brain-python | zq_calc/mv_avg.py | zq_calc/mv_avg.py | from zq_db.mongodb import get_recent_stock_data
from zq_gen.str import cmd_str2dic
def mv_avg(cmd_str):
    '''Calculate the top stocks ranked by moving average.

    Args:
        cmd_str: The command string
                 -d: days range used when calculating moving average
                 -n: number of the stocks returned
                 e.g: "-d 20 -n 5"
    Returns:
        A list of dicts representing the top n stocks ranked by moving average
    Raises:
        N/A
    '''
    opts = cmd_str2dic(cmd_str)
    days = int(opts['-d'])
    num = int(opts['-n'])
    scored = []
    # Each entry of the fetched data is a list of docs for one stock,
    # newest document first.
    for docs in get_recent_stock_data(days):
        current_price = docs[0]['close_price']
        # The moving average includes the current price.
        total = 0
        for doc in docs:
            total += doc['close_price']
        average = total / len(docs)
        # Rank by (average - current price), descending.
        scored.append({'code': docs[0]['code'], 'diff': average - current_price})
    scored.sort(key=lambda entry: entry['diff'], reverse=True)
    return scored[:num]
| from zq_db.mongodb import get_recent_stock_data
from zq_gen.str import cmd_str2dic
def mv_avg(cmd_str):
    '''Calculate the top stocks ranked by moving average.

    Args:
        cmd_str: The command string
                 -d: days range used when calculating moving average
                 -n: number of the stocks returned
                 e.g: "-d 20 -n 5"
    Returns:
        A list of dicts representing the top n stocks ranked by moving average
    Raises:
        N/A
    '''
    cmd_dict = cmd_str2dic(cmd_str)
    days = int(cmd_dict['-d'])
    num = int(cmd_dict['-n'])
    rst = []
    # the data is a list of lists each of which represents docs of one stock
    data = get_recent_stock_data(days)
    for docs in data:
        price_now = docs[0]['close_price']
        code = docs[0]['code']
        docs_len = len(docs)
        # Sum the historical prices starting at index 1 (docs[0] is the
        # current price).  The previous `while i < docs_len:` loop never
        # incremented `i`, which made it loop forever; it also shadowed the
        # builtin `sum`.
        total = 0
        for i in range(1, docs_len):
            total += docs[i]['close_price']
        avg = total / docs_len
        diff = avg - price_now
        rst.append({'code': code, 'diff': diff})
    rst = sorted(rst, key=lambda k: k['diff'], reverse=True)
    return rst[0:num]
| apache-2.0 | Python |
eab182efe540f8ace0d2558b8df2ebff1fd0d81a | disable scheduler when download files | revir/Chrombot-server | DownloadThread.py | DownloadThread.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import threading, urllib, os
from datas import gData
from SUtils import logger
class DownloadThread(threading.Thread):
    """Thread to download files

    Worker that serially downloads items queued on gData.downloadQueue.
    Each queue item is a dict with 'url', 'savedir', 'savename' and an
    optional 'namespace' (the client endpoint to notify).
    """
    def __init__(self):
        # Fixed thread name used for the threading.Thread constructor.
        self.__name = 'DownloadThread'
        threading.Thread.__init__(self, name=self.__name)
        # Client endpoint of the request currently being processed, if any.
        self.__namespace = None
    def emitToClient(self, api, *args):
        # Forward an event to the requesting client; no-op when no client
        # namespace is attached.
        if self.__namespace:
            self.__namespace.emit(api, *args)
    def run(self):
        # Main loop: block on the shared queue and process one item at a time.
        while True:
            data = gData.downloadQueue.get()
            self.__namespace = data.get('namespace')
            self.__beforeDownload()
            self.download(data)
            self.__afterDownload()
            gData.downloadQueue.task_done()
    def __beforeDownload(self):
        # Hook invoked just before a download starts (currently logs only).
        logger.info('download begin...')
    def __afterDownload(self):
        # Hook invoked after a download attempt finishes (currently logs only).
        logger.info('download finished!')
    def download(self, data):
        # Progress callback kept for reference but intentionally NOT passed to
        # urlretrieve below — progress reporting is disabled.
        def scheduler(itemCount, itemSize, fileSize):
            percent = 100.0 * itemCount * itemSize / fileSize
            if percent > 100:
                percent = 100
            logger.debug('percent: %.2f%%' % percent)
        # Fall back to the home directory when the expanded savedir is falsy.
        # NOTE(review): os.path.expanduser(None) raises before the `or` can
        # apply — this assumes 'savedir' is always a string; confirm callers.
        savedir = os.path.expanduser(data.get('savedir')) or os.path.expanduser('~')
        if not os.path.exists(savedir):
            os.makedirs(savedir)
        savename = data.get('savename')
        if data.get('url') and savename:
            st = os.path.join(savedir, savename)
            logger.info('download, url: '+data['url']+' saveat: '+st)
            # Python 2 API; the Python 3 equivalent is urllib.request.urlretrieve.
            urllib.urlretrieve(data['url'], st)
            if(os.path.exists(st)):
                return st
        # False when url/savename was missing or the file never appeared on disk.
        return False
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import threading, urllib, os
from datas import gData
from SUtils import logger
class DownloadThread(threading.Thread):
    """Thread to download files

    Worker that serially downloads items queued on gData.downloadQueue.
    Each queue item is a dict with 'url', 'savedir', 'savename' and an
    optional 'namespace' (the client endpoint to notify).
    """
    def __init__(self):
        # Fixed thread name used for the threading.Thread constructor.
        self.__name = 'DownloadThread'
        threading.Thread.__init__(self, name=self.__name)
        # Client endpoint of the request currently being processed, if any.
        self.__namespace = None
    def emitToClient(self, api, *args):
        # Forward an event to the requesting client; no-op when no client
        # namespace is attached.
        if self.__namespace:
            self.__namespace.emit(api, *args)
    def run(self):
        # Main loop: block on the shared queue and process one item at a time.
        while True:
            data = gData.downloadQueue.get()
            self.__namespace = data.get('namespace')
            self.__beforeDownload()
            self.download(data)
            self.__afterDownload()
            gData.downloadQueue.task_done()
    def __beforeDownload(self):
        # Hook invoked just before a download starts (currently logs only).
        logger.info('download begin...')
    def __afterDownload(self):
        # Hook invoked after a download attempt finishes (currently logs only).
        logger.info('download finished!')
    def download(self, data):
        # Progress callback passed to urlretrieve; logs completion percentage.
        def scheduler(itemCount, itemSize, fileSize):
            percent = 100.0 * itemCount * itemSize / fileSize
            if percent > 100:
                percent = 100
            logger.debug('percent: %.2f%%' % percent)
        # BUG FIX: the previous fallback repeated the identical expression
        # (`expanduser(x) or expanduser(x)`), so it could never rescue a
        # missing 'savedir' and expanduser(None) would raise.  Fall back to
        # the user's home directory instead.
        savedir = os.path.expanduser(data.get('savedir') or '~')
        if not os.path.exists(savedir):
            os.makedirs(savedir)
        savename = data.get('savename')
        if data.get('url') and savename:
            st = os.path.join(savedir, savename)
            # Python 2 API; the Python 3 equivalent is urllib.request.urlretrieve.
            urllib.urlretrieve(data['url'], st, scheduler)
            if(os.path.exists(st)):
                return st
        # False when url/savename was missing or the file never appeared on disk.
        return False
| apache-2.0 | Python |
3b5eb8dc66872e9217e52b72ef9bb39aadfd0197 | Correct name of test for element#getElementRect | gotcha/selenium,MCGallaspy/selenium,MCGallaspy/selenium,oddui/selenium,MeetMe/selenium,SevInf/IEDriver,lilredindy/selenium,MeetMe/selenium,jabbrwcky/selenium,houchj/selenium,aluedeke/chromedriver,AutomatedTester/selenium,rrussell39/selenium,Jarob22/selenium,sebady/selenium,tbeadle/selenium,pulkitsinghal/selenium,amikey/selenium,soundcloud/selenium,davehunt/selenium,sag-enorman/selenium,bayandin/selenium,thanhpete/selenium,tkurnosova/selenium,valfirst/selenium,MCGallaspy/selenium,rovner/selenium,asolntsev/selenium,zenefits/selenium,gregerrag/selenium,jabbrwcky/selenium,juangj/selenium,wambat/selenium,DrMarcII/selenium,kalyanjvn1/selenium,Tom-Trumper/selenium,jsakamoto/selenium,dandv/selenium,alb-i986/selenium,gotcha/selenium,aluedeke/chromedriver,krosenvold/selenium,petruc/selenium,SevInf/IEDriver,mojwang/selenium,SeleniumHQ/selenium,markodolancic/selenium,chrisblock/selenium,blackboarddd/selenium,dbo/selenium,orange-tv-blagnac/selenium,tarlabs/selenium,arunsingh/selenium,alb-i986/selenium,alb-i986/selenium,krmahadevan/selenium,HtmlUnit/selenium,denis-vilyuzhanin/selenium-fastview,juangj/selenium,TikhomirovSergey/selenium,Dude-X/selenium,Tom-Trumper/selenium,carsonmcdonald/selenium,p0deje/selenium,sri85/selenium,slongwang/selenium,GorK-ChO/selenium,Sravyaksr/selenium,eric-stanley/selenium,vveliev/selenium,p0deje/selenium,5hawnknight/selenium,Tom-Trumper/selenium,skurochkin/selenium,anshumanchatterji/selenium,pulkitsinghal/selenium,joshbruning/selenium,rovner/selenium,sankha93/selenium,rrussell39/selenium,sevaseva/selenium,quoideneuf/selenium,dcjohnson1989/selenium,sankha93/selenium,sevaseva/selenium,carlosroh/selenium,dibagga/selenium,Appdynamics/selenium,houchj/selenium,skurochkin/selenium,TheBlackTuxCorp/selenium,sevaseva/selenium,meksh/selenium,amikey/selenium,o-schneider/selenium,houchj/selenium,bayandin/selenium,lrowe/selenium,orange-tv-blagnac/selenium,yukaReal/selenium
,asolntsev/selenium,Sravyaksr/selenium,titusfortner/selenium,5hawnknight/selenium,isaksky/selenium,bartolkaruza/selenium,joshmgrant/selenium,tarlabs/selenium,actmd/selenium,isaksky/selenium,Herst/selenium,xsyntrex/selenium,wambat/selenium,JosephCastro/selenium,lilredindy/selenium,sri85/selenium,customcommander/selenium,Appdynamics/selenium,jsakamoto/selenium,yukaReal/selenium,dandv/selenium,Tom-Trumper/selenium,alb-i986/selenium,joshuaduffy/selenium,twalpole/selenium,carsonmcdonald/selenium,s2oBCN/selenium,BlackSmith/selenium,bmannix/selenium,dbo/selenium,clavery/selenium,tkurnosova/selenium,sankha93/selenium,temyers/selenium,gregerrag/selenium,dkentw/selenium,wambat/selenium,carsonmcdonald/selenium,5hawnknight/selenium,dbo/selenium,rrussell39/selenium,actmd/selenium,twalpole/selenium,JosephCastro/selenium,aluedeke/chromedriver,bmannix/selenium,mach6/selenium,twalpole/selenium,customcommander/selenium,bayandin/selenium,actmd/selenium,gregerrag/selenium,alexec/selenium,bartolkaruza/selenium,HtmlUnit/selenium,minhthuanit/selenium,doungni/selenium,xmhubj/selenium,Jarob22/selenium,temyers/selenium,lmtierney/selenium,tkurnosova/selenium,mach6/selenium,o-schneider/selenium,gabrielsimas/selenium,pulkitsinghal/selenium,slongwang/selenium,MeetMe/selenium,s2oBCN/selenium,krmahadevan/selenium,doungni/selenium,temyers/selenium,kalyanjvn1/selenium,knorrium/selenium,gabrielsimas/selenium,isaksky/selenium,isaksky/selenium,lmtierney/selenium,kalyanjvn1/selenium,tarlabs/selenium,blueyed/selenium,yukaReal/selenium,gotcha/selenium,lukeis/selenium,meksh/selenium,MCGallaspy/selenium,joshbruning/selenium,petruc/selenium,asolntsev/selenium,sebady/selenium,asashour/selenium,slongwang/selenium,sag-enorman/selenium,joshmgrant/selenium,valfirst/selenium,chrsmithdemos/selenium,mach6/selenium,blackboarddd/selenium,lrowe/selenium,dkentw/selenium,chrisblock/selenium,amikey/selenium,amar-sharma/selenium,titusfortner/selenium,gemini-testing/selenium,twalpole/selenium,temyers/selenium,soundcloud/sel
enium,krmahadevan/selenium,misttechnologies/selenium,carsonmcdonald/selenium,anshumanchatterji/selenium,clavery/selenium,rplevka/selenium,sebady/selenium,clavery/selenium,jsakamoto/selenium,gurayinan/selenium,alexec/selenium,blueyed/selenium,lukeis/selenium,customcommander/selenium,thanhpete/selenium,soundcloud/selenium,s2oBCN/selenium,gabrielsimas/selenium,orange-tv-blagnac/selenium,gorlemik/selenium,sri85/selenium,minhthuanit/selenium,s2oBCN/selenium,minhthuanit/selenium,krosenvold/selenium,manuelpirez/selenium,TheBlackTuxCorp/selenium,petruc/selenium,carlosroh/selenium,oddui/selenium,Dude-X/selenium,Jarob22/selenium,denis-vilyuzhanin/selenium-fastview,mojwang/selenium,gabrielsimas/selenium,rrussell39/selenium,SeleniumHQ/selenium,sag-enorman/selenium,SouWilliams/selenium,dibagga/selenium,JosephCastro/selenium,dimacus/selenium,sankha93/selenium,Dude-X/selenium,kalyanjvn1/selenium,joshuaduffy/selenium,blackboarddd/selenium,tkurnosova/selenium,lmtierney/selenium,chrsmithdemos/selenium,gotcha/selenium,carsonmcdonald/selenium,manuelpirez/selenium,wambat/selenium,gurayinan/selenium,manuelpirez/selenium,markodolancic/selenium,sankha93/selenium,pulkitsinghal/selenium,davehunt/selenium,gorlemik/selenium,titusfortner/selenium,xsyntrex/selenium,sri85/selenium,meksh/selenium,blueyed/selenium,skurochkin/selenium,Dude-X/selenium,zenefits/selenium,thanhpete/selenium,juangj/selenium,chrsmithdemos/selenium,orange-tv-blagnac/selenium,Tom-Trumper/selenium,lukeis/selenium,slongwang/selenium,tbeadle/selenium,arunsingh/selenium,Herst/selenium,RamaraoDonta/ramarao-clone,yukaReal/selenium,anshumanchatterji/selenium,xmhubj/selenium,uchida/selenium,carlosroh/selenium,tkurnosova/selenium,slongwang/selenium,tbeadle/selenium,Ardesco/selenium,s2oBCN/selenium,Dude-X/selenium,dkentw/selenium,aluedeke/chromedriver,gurayinan/selenium,thanhpete/selenium,GorK-ChO/selenium,lilredindy/selenium,eric-stanley/selenium,markodolancic/selenium,sebady/selenium,rplevka/selenium,JosephCastro/selenium,dkentw/se
lenium,carlosroh/selenium,s2oBCN/selenium,thanhpete/selenium,telefonicaid/selenium,o-schneider/selenium,manuelpirez/selenium,jabbrwcky/selenium,krosenvold/selenium,Appdynamics/selenium,isaksky/selenium,juangj/selenium,wambat/selenium,blackboarddd/selenium,sevaseva/selenium,manuelpirez/selenium,meksh/selenium,blackboarddd/selenium,SeleniumHQ/selenium,minhthuanit/selenium,asashour/selenium,manuelpirez/selenium,krosenvold/selenium,eric-stanley/selenium,thanhpete/selenium,eric-stanley/selenium,zenefits/selenium,rplevka/selenium,rrussell39/selenium,jerome-jacob/selenium,pulkitsinghal/selenium,telefonicaid/selenium,mestihudson/selenium,Dude-X/selenium,bartolkaruza/selenium,quoideneuf/selenium,lmtierney/selenium,gotcha/selenium,bartolkaruza/selenium,DrMarcII/selenium,Jarob22/selenium,krosenvold/selenium,orange-tv-blagnac/selenium,isaksky/selenium,i17c/selenium,HtmlUnit/selenium,mach6/selenium,Jarob22/selenium,quoideneuf/selenium,AutomatedTester/selenium,lukeis/selenium,s2oBCN/selenium,krmahadevan/selenium,GorK-ChO/selenium,gorlemik/selenium,joshmgrant/selenium,bartolkaruza/selenium,misttechnologies/selenium,dcjohnson1989/selenium,o-schneider/selenium,amikey/selenium,chrsmithdemos/selenium,knorrium/selenium,5hawnknight/selenium,xmhubj/selenium,orange-tv-blagnac/selenium,mestihudson/selenium,SouWilliams/selenium,GorK-ChO/selenium,Jarob22/selenium,gemini-testing/selenium,customcommander/selenium,Appdynamics/selenium,customcommander/selenium,rplevka/selenium,gurayinan/selenium,xmhubj/selenium,quoideneuf/selenium,TikhomirovSergey/selenium,meksh/selenium,actmd/selenium,titusfortner/selenium,mojwang/selenium,xmhubj/selenium,asolntsev/selenium,jerome-jacob/selenium,Ardesco/selenium,joshmgrant/selenium,dibagga/selenium,sebady/selenium,asashour/selenium,skurochkin/selenium,krmahadevan/selenium,uchida/selenium,gabrielsimas/selenium,dbo/selenium,markodolancic/selenium,dkentw/selenium,SeleniumHQ/selenium,Dude-X/selenium,dimacus/selenium,DrMarcII/selenium,twalpole/selenium,vveliev/selen
ium,denis-vilyuzhanin/selenium-fastview,AutomatedTester/selenium,Sravyaksr/selenium,AutomatedTester/selenium,krosenvold/selenium,minhthuanit/selenium,TheBlackTuxCorp/selenium,bmannix/selenium,dbo/selenium,lrowe/selenium,Herst/selenium,joshbruning/selenium,mojwang/selenium,temyers/selenium,SeleniumHQ/selenium,Appdynamics/selenium,zenefits/selenium,SouWilliams/selenium,anshumanchatterji/selenium,arunsingh/selenium,misttechnologies/selenium,Ardesco/selenium,HtmlUnit/selenium,telefonicaid/selenium,soundcloud/selenium,clavery/selenium,lmtierney/selenium,mach6/selenium,blueyed/selenium,asolntsev/selenium,gabrielsimas/selenium,carlosroh/selenium,rrussell39/selenium,bayandin/selenium,Tom-Trumper/selenium,chrsmithdemos/selenium,bmannix/selenium,manuelpirez/selenium,SeleniumHQ/selenium,gorlemik/selenium,yukaReal/selenium,bmannix/selenium,skurochkin/selenium,xmhubj/selenium,Dude-X/selenium,RamaraoDonta/ramarao-clone,tbeadle/selenium,asashour/selenium,krmahadevan/selenium,rrussell39/selenium,alexec/selenium,GorK-ChO/selenium,p0deje/selenium,sag-enorman/selenium,lmtierney/selenium,telefonicaid/selenium,dandv/selenium,anshumanchatterji/selenium,AutomatedTester/selenium,markodolancic/selenium,twalpole/selenium,jabbrwcky/selenium,rovner/selenium,gabrielsimas/selenium,carlosroh/selenium,dibagga/selenium,bmannix/selenium,valfirst/selenium,telefonicaid/selenium,o-schneider/selenium,uchida/selenium,lrowe/selenium,dkentw/selenium,houchj/selenium,kalyanjvn1/selenium,minhthuanit/selenium,Appdynamics/selenium,arunsingh/selenium,sri85/selenium,sag-enorman/selenium,doungni/selenium,lmtierney/selenium,pulkitsinghal/selenium,zenefits/selenium,amar-sharma/selenium,RamaraoDonta/ramarao-clone,knorrium/selenium,arunsingh/selenium,sebady/selenium,Dude-X/selenium,SevInf/IEDriver,dcjohnson1989/selenium,aluedeke/chromedriver,JosephCastro/selenium,AutomatedTester/selenium,titusfortner/selenium,valfirst/selenium,gotcha/selenium,lukeis/selenium,gurayinan/selenium,bartolkaruza/selenium,MCGallaspy/selenium
,joshmgrant/selenium,alexec/selenium,gorlemik/selenium,temyers/selenium,stupidnetizen/selenium,gemini-testing/selenium,amar-sharma/selenium,gurayinan/selenium,chrisblock/selenium,asashour/selenium,houchj/selenium,manuelpirez/selenium,houchj/selenium,uchida/selenium,dandv/selenium,denis-vilyuzhanin/selenium-fastview,rplevka/selenium,asashour/selenium,joshmgrant/selenium,petruc/selenium,DrMarcII/selenium,oddui/selenium,chrsmithdemos/selenium,alb-i986/selenium,anshumanchatterji/selenium,stupidnetizen/selenium,kalyanjvn1/selenium,joshuaduffy/selenium,mestihudson/selenium,sri85/selenium,bmannix/selenium,lilredindy/selenium,Ardesco/selenium,oddui/selenium,sevaseva/selenium,DrMarcII/selenium,jsakamoto/selenium,alexec/selenium,gorlemik/selenium,aluedeke/chromedriver,amar-sharma/selenium,5hawnknight/selenium,i17c/selenium,mach6/selenium,oddui/selenium,krmahadevan/selenium,dcjohnson1989/selenium,misttechnologies/selenium,rplevka/selenium,dandv/selenium,soundcloud/selenium,aluedeke/chromedriver,carsonmcdonald/selenium,lilredindy/selenium,TheBlackTuxCorp/selenium,stupidnetizen/selenium,Jarob22/selenium,sevaseva/selenium,amar-sharma/selenium,bartolkaruza/selenium,knorrium/selenium,amar-sharma/selenium,slongwang/selenium,gemini-testing/selenium,customcommander/selenium,minhthuanit/selenium,gotcha/selenium,dcjohnson1989/selenium,lilredindy/selenium,SeleniumHQ/selenium,telefonicaid/selenium,TikhomirovSergey/selenium,vveliev/selenium,juangj/selenium,dibagga/selenium,tbeadle/selenium,petruc/selenium,gemini-testing/selenium,Herst/selenium,wambat/selenium,petruc/selenium,TheBlackTuxCorp/selenium,tkurnosova/selenium,misttechnologies/selenium,rplevka/selenium,MeetMe/selenium,SevInf/IEDriver,jerome-jacob/selenium,lmtierney/selenium,clavery/selenium,titusfortner/selenium,lrowe/selenium,Sravyaksr/selenium,carsonmcdonald/selenium,joshuaduffy/selenium,joshmgrant/selenium,amikey/selenium,carsonmcdonald/selenium,RamaraoDonta/ramarao-clone,oddui/selenium,Appdynamics/selenium,aluedeke/chromedrive
r,markodolancic/selenium,amikey/selenium,o-schneider/selenium,lrowe/selenium,rrussell39/selenium,blueyed/selenium,blackboarddd/selenium,TikhomirovSergey/selenium,joshuaduffy/selenium,p0deje/selenium,Appdynamics/selenium,juangj/selenium,uchida/selenium,Ardesco/selenium,GorK-ChO/selenium,clavery/selenium,bmannix/selenium,chrisblock/selenium,bayandin/selenium,sri85/selenium,carlosroh/selenium,skurochkin/selenium,joshmgrant/selenium,sri85/selenium,yukaReal/selenium,mojwang/selenium,dimacus/selenium,dkentw/selenium,gorlemik/selenium,gemini-testing/selenium,soundcloud/selenium,davehunt/selenium,kalyanjvn1/selenium,arunsingh/selenium,Tom-Trumper/selenium,temyers/selenium,MCGallaspy/selenium,jerome-jacob/selenium,joshbruning/selenium,gemini-testing/selenium,sebady/selenium,mestihudson/selenium,jabbrwcky/selenium,slongwang/selenium,knorrium/selenium,misttechnologies/selenium,telefonicaid/selenium,Herst/selenium,lukeis/selenium,gorlemik/selenium,rovner/selenium,bartolkaruza/selenium,o-schneider/selenium,stupidnetizen/selenium,rovner/selenium,BlackSmith/selenium,xsyntrex/selenium,doungni/selenium,MCGallaspy/selenium,zenefits/selenium,davehunt/selenium,tarlabs/selenium,i17c/selenium,GorK-ChO/selenium,doungni/selenium,HtmlUnit/selenium,gotcha/selenium,i17c/selenium,twalpole/selenium,twalpole/selenium,lmtierney/selenium,markodolancic/selenium,Tom-Trumper/selenium,DrMarcII/selenium,lukeis/selenium,amikey/selenium,SouWilliams/selenium,chrsmithdemos/selenium,gotcha/selenium,yukaReal/selenium,xsyntrex/selenium,quoideneuf/selenium,xsyntrex/selenium,lrowe/selenium,alexec/selenium,amikey/selenium,zenefits/selenium,AutomatedTester/selenium,oddui/selenium,gregerrag/selenium,JosephCastro/selenium,tarlabs/selenium,dandv/selenium,Herst/selenium,mach6/selenium,tbeadle/selenium,pulkitsinghal/selenium,customcommander/selenium,eric-stanley/selenium,carsonmcdonald/selenium,BlackSmith/selenium,Jarob22/selenium,krmahadevan/selenium,AutomatedTester/selenium,blueyed/selenium,SouWilliams/selenium,gorl
emik/selenium,bartolkaruza/selenium,chrsmithdemos/selenium,xmhubj/selenium,wambat/selenium,orange-tv-blagnac/selenium,bayandin/selenium,clavery/selenium,tkurnosova/selenium,Ardesco/selenium,DrMarcII/selenium,jabbrwcky/selenium,Herst/selenium,SeleniumHQ/selenium,doungni/selenium,gregerrag/selenium,jerome-jacob/selenium,actmd/selenium,tkurnosova/selenium,MeetMe/selenium,RamaraoDonta/ramarao-clone,mojwang/selenium,juangj/selenium,uchida/selenium,SevInf/IEDriver,chrsmithdemos/selenium,HtmlUnit/selenium,5hawnknight/selenium,valfirst/selenium,sri85/selenium,lilredindy/selenium,chrisblock/selenium,houchj/selenium,dimacus/selenium,i17c/selenium,MeetMe/selenium,AutomatedTester/selenium,yukaReal/selenium,anshumanchatterji/selenium,temyers/selenium,jerome-jacob/selenium,BlackSmith/selenium,Sravyaksr/selenium,krmahadevan/selenium,sag-enorman/selenium,BlackSmith/selenium,dimacus/selenium,asashour/selenium,TikhomirovSergey/selenium,knorrium/selenium,eric-stanley/selenium,sankha93/selenium,o-schneider/selenium,rovner/selenium,i17c/selenium,oddui/selenium,DrMarcII/selenium,asolntsev/selenium,thanhpete/selenium,uchida/selenium,valfirst/selenium,markodolancic/selenium,sag-enorman/selenium,Herst/selenium,JosephCastro/selenium,blackboarddd/selenium,gurayinan/selenium,slongwang/selenium,mestihudson/selenium,jerome-jacob/selenium,arunsingh/selenium,meksh/selenium,valfirst/selenium,thanhpete/selenium,RamaraoDonta/ramarao-clone,jabbrwcky/selenium,telefonicaid/selenium,SeleniumHQ/selenium,lilredindy/selenium,dibagga/selenium,jerome-jacob/selenium,Sravyaksr/selenium,TikhomirovSergey/selenium,p0deje/selenium,sankha93/selenium,manuelpirez/selenium,BlackSmith/selenium,krosenvold/selenium,TikhomirovSergey/selenium,SevInf/IEDriver,i17c/selenium,customcommander/selenium,stupidnetizen/selenium,joshbruning/selenium,lrowe/selenium,SouWilliams/selenium,bayandin/selenium,titusfortner/selenium,alexec/selenium,zenefits/selenium,valfirst/selenium,i17c/selenium,Jarob22/selenium,HtmlUnit/selenium,actmd/sele
nium,oddui/selenium,dimacus/selenium,dbo/selenium,slongwang/selenium,vveliev/selenium,dkentw/selenium,Appdynamics/selenium,dimacus/selenium,denis-vilyuzhanin/selenium-fastview,p0deje/selenium,gregerrag/selenium,lrowe/selenium,isaksky/selenium,jerome-jacob/selenium,quoideneuf/selenium,rovner/selenium,zenefits/selenium,joshuaduffy/selenium,mestihudson/selenium,doungni/selenium,actmd/selenium,pulkitsinghal/selenium,tkurnosova/selenium,actmd/selenium,alb-i986/selenium,HtmlUnit/selenium,meksh/selenium,xsyntrex/selenium,joshmgrant/selenium,sankha93/selenium,jsakamoto/selenium,eric-stanley/selenium,alb-i986/selenium,stupidnetizen/selenium,o-schneider/selenium,joshmgrant/selenium,orange-tv-blagnac/selenium,tbeadle/selenium,GorK-ChO/selenium,eric-stanley/selenium,gabrielsimas/selenium,tarlabs/selenium,xsyntrex/selenium,sag-enorman/selenium,dbo/selenium,doungni/selenium,houchj/selenium,TikhomirovSergey/selenium,bayandin/selenium,jsakamoto/selenium,minhthuanit/selenium,asashour/selenium,p0deje/selenium,skurochkin/selenium,Sravyaksr/selenium,gabrielsimas/selenium,mojwang/selenium,HtmlUnit/selenium,SevInf/IEDriver,gregerrag/selenium,tbeadle/selenium,Herst/selenium,TheBlackTuxCorp/selenium,gurayinan/selenium,titusfortner/selenium,davehunt/selenium,valfirst/selenium,gregerrag/selenium,carlosroh/selenium,telefonicaid/selenium,titusfortner/selenium,mach6/selenium,orange-tv-blagnac/selenium,mestihudson/selenium,TheBlackTuxCorp/selenium,valfirst/selenium,misttechnologies/selenium,p0deje/selenium,sebady/selenium,BlackSmith/selenium,mojwang/selenium,mestihudson/selenium,bayandin/selenium,DrMarcII/selenium,customcommander/selenium,aluedeke/chromedriver,5hawnknight/selenium,juangj/selenium,blueyed/selenium,davehunt/selenium,blueyed/selenium,actmd/selenium,knorrium/selenium,denis-vilyuzhanin/selenium-fastview,rrussell39/selenium,isaksky/selenium,soundcloud/selenium,chrisblock/selenium,JosephCastro/selenium,Ardesco/selenium,knorrium/selenium,petruc/selenium,chrisblock/selenium,SevInf/IEDriv
er,dandv/selenium,joshuaduffy/selenium,anshumanchatterji/selenium,valfirst/selenium,tbeadle/selenium,stupidnetizen/selenium,TheBlackTuxCorp/selenium,kalyanjvn1/selenium,dibagga/selenium,soundcloud/selenium,yukaReal/selenium,RamaraoDonta/ramarao-clone,chrisblock/selenium,clavery/selenium,sag-enorman/selenium,s2oBCN/selenium,arunsingh/selenium,soundcloud/selenium,wambat/selenium,asolntsev/selenium,eric-stanley/selenium,dcjohnson1989/selenium,amar-sharma/selenium,lukeis/selenium,5hawnknight/selenium,dcjohnson1989/selenium,doungni/selenium,quoideneuf/selenium,stupidnetizen/selenium,dbo/selenium,dcjohnson1989/selenium,SouWilliams/selenium,knorrium/selenium,vveliev/selenium,SevInf/IEDriver,quoideneuf/selenium,xmhubj/selenium,titusfortner/selenium,clavery/selenium,carlosroh/selenium,SeleniumHQ/selenium,mojwang/selenium,denis-vilyuzhanin/selenium-fastview,blackboarddd/selenium,joshbruning/selenium,dibagga/selenium,sevaseva/selenium,misttechnologies/selenium,arunsingh/selenium,xsyntrex/selenium,jsakamoto/selenium,quoideneuf/selenium,amar-sharma/selenium,dcjohnson1989/selenium,RamaraoDonta/ramarao-clone,denis-vilyuzhanin/selenium-fastview,blackboarddd/selenium,alb-i986/selenium,davehunt/selenium,lukeis/selenium,TheBlackTuxCorp/selenium,mestihudson/selenium,p0deje/selenium,mach6/selenium,alb-i986/selenium,HtmlUnit/selenium,joshuaduffy/selenium,alexec/selenium,lilredindy/selenium,JosephCastro/selenium,joshmgrant/selenium,dkentw/selenium,BlackSmith/selenium,dbo/selenium,isaksky/selenium,amikey/selenium,MeetMe/selenium,jabbrwcky/selenium,misttechnologies/selenium,MeetMe/selenium,s2oBCN/selenium,sevaseva/selenium,joshbruning/selenium,pulkitsinghal/selenium,wambat/selenium,MCGallaspy/selenium,vveliev/selenium,kalyanjvn1/selenium,dandv/selenium,amar-sharma/selenium,asashour/selenium,asolntsev/selenium,dimacus/selenium,SeleniumHQ/selenium,vveliev/selenium,asolntsev/selenium,temyers/selenium,i17c/selenium,titusfortner/selenium,thanhpete/selenium,houchj/selenium,rplevka/selenium,krosen
vold/selenium,xsyntrex/selenium,SouWilliams/selenium,MeetMe/selenium,meksh/selenium,markodolancic/selenium,petruc/selenium,dandv/selenium,chrisblock/selenium,gurayinan/selenium,alexec/selenium,uchida/selenium,Sravyaksr/selenium,denis-vilyuzhanin/selenium-fastview,krosenvold/selenium,BlackSmith/selenium,bmannix/selenium,anshumanchatterji/selenium,joshbruning/selenium,jabbrwcky/selenium,Sravyaksr/selenium,meksh/selenium,Tom-Trumper/selenium,twalpole/selenium,minhthuanit/selenium,petruc/selenium,sankha93/selenium,MCGallaspy/selenium,joshuaduffy/selenium,sevaseva/selenium,jsakamoto/selenium,skurochkin/selenium,skurochkin/selenium,TikhomirovSergey/selenium,stupidnetizen/selenium,dimacus/selenium,Ardesco/selenium,joshbruning/selenium,vveliev/selenium,5hawnknight/selenium,blueyed/selenium,davehunt/selenium,tarlabs/selenium,SouWilliams/selenium,xmhubj/selenium,RamaraoDonta/ramarao-clone,rovner/selenium,vveliev/selenium,rovner/selenium,tarlabs/selenium,tarlabs/selenium,dibagga/selenium,GorK-ChO/selenium,uchida/selenium,gemini-testing/selenium,gregerrag/selenium,davehunt/selenium,juangj/selenium,jsakamoto/selenium,Ardesco/selenium,gemini-testing/selenium,sebady/selenium,rplevka/selenium | py/test/selenium/webdriver/common/rendered_webelement_tests.py | py/test/selenium/webdriver/common/rendered_webelement_tests.py | # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class RenderedWebElementTests(unittest.TestCase):
    """Checks for rendered-state queries: CSS values, element size and rect."""

    @pytest.mark.ignore_chrome
    def testShouldPickUpStyleOfAnElement(self):
        """Explicitly styled elements report their own background colour."""
        self._loadPage("javascriptPage")
        self.assertEqual("rgba(0, 128, 0, 1)",
                         self._background_color("green-parent"))
        self.assertEqual("rgba(255, 0, 0, 1)",
                         self._background_color("red-item"))

    @pytest.mark.ignore_chrome
    def testShouldAllowInheritedStylesToBeUsed(self):
        """Elements without their own background report the inherited value."""
        self._loadPage("javascriptPage")
        self.assertEqual("transparent", self._background_color("green-item"))

    def testShouldCorrectlyIdentifyThatAnElementHasWidth(self):
        """A rendered link has non-zero width and height."""
        self._loadPage("xhtmlTest")
        dims = self.driver.find_element(by=By.ID, value="linkId").size
        self.assertTrue(dims["width"] > 0, "Width expected to be greater than 0")
        self.assertTrue(dims["height"] > 0, "Height expected to be greater than 0")

    def testShouldBeAbleToDetermineTheRectOfAnElement(self):
        """element.rect exposes a positive position and non-zero dimensions."""
        self._loadPage("xhtmlTest")
        rect = self.driver.find_element(By.ID, "username").rect
        self.assertTrue(rect["x"] > 0, "Element should not be in the top left")
        self.assertTrue(rect["y"] > 0, "Element should not be in the top left")
        self.assertTrue(rect["width"] > 0, "Width expected to be greater than 0")
        self.assertTrue(rect["height"] > 0, "Height expected to be greater than 0")

    def _background_color(self, element_id):
        """Return the computed background-color of the element *element_id*."""
        target = self.driver.find_element(by=By.ID, value=element_id)
        return target.value_of_css_property("background-color")

    def _pageURL(self, name):
        """Map a page name onto its URL on the test webserver."""
        return self.webserver.where_is(name + '.html')

    def _loadSimplePage(self):
        """Convenience loader for the simple test page."""
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        """Navigate the driver to the named test page."""
        self.driver.get(self._pageURL(name))
| # Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pytest
from selenium.webdriver.common.by import By
class RenderedWebElementTests(unittest.TestCase):
    """Tests around querying rendered properties of page elements."""

    @pytest.mark.ignore_chrome
    def testShouldPickUpStyleOfAnElement(self):
        """Explicitly styled elements report their own background colour."""
        self._loadPage("javascriptPage")
        parent = self.driver.find_element(by=By.ID, value="green-parent")
        self.assertEqual("rgba(0, 128, 0, 1)",
                         parent.value_of_css_property("background-color"))
        item = self.driver.find_element(by=By.ID, value="red-item")
        self.assertEqual("rgba(255, 0, 0, 1)",
                         item.value_of_css_property("background-color"))

    @pytest.mark.ignore_chrome
    def testShouldAllowInheritedStylesToBeUsed(self):
        """Elements without their own background report the inherited value."""
        self._loadPage("javascriptPage")
        item = self.driver.find_element(by=By.ID, value="green-item")
        self.assertEqual("transparent",
                         item.value_of_css_property("background-color"))

    def testShouldCorrectlyIdentifyThatAnElementHasWidth(self):
        """A rendered link has non-zero width and height."""
        self._loadPage("xhtmlTest")
        link = self.driver.find_element(by=By.ID, value="linkId")
        dims = link.size
        self.assertTrue(dims["width"] > 0, "Width expected to be greater than 0")
        self.assertTrue(dims["height"] > 0, "Height expected to be greater than 0")

    def testShouldBeAbleToDetermineTheLocationOfAnElement(self):
        """element.rect exposes a positive position and non-zero dimensions."""
        self._loadPage("xhtmlTest")
        field = self.driver.find_element(By.ID, "username")
        rect = field.rect
        self.assertTrue(rect["x"] > 0, "Element should not be in the top left")
        self.assertTrue(rect["y"] > 0, "Element should not be in the top left")
        self.assertTrue(rect["width"] > 0, "Width expected to be greater than 0")
        self.assertTrue(rect["height"] > 0, "Height expected to be greater than 0")

    def _pageURL(self, name):
        """Map a page name onto its URL on the test webserver."""
        return self.webserver.where_is(name + '.html')

    def _loadSimplePage(self):
        """Convenience loader for the simple test page."""
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        """Navigate the driver to the named test page."""
        self.driver.get(self._pageURL(name))
| apache-2.0 | Python |
33c3f5a39821b28c70a08ebe2742375364e70dc1 | fix small bug with timezones | hackupc/backend,hackupc/backend,hackupc/backend,hackupc/backend | applications/management/commands/expire_applications.py | applications/management/commands/expire_applications.py | from datetime import timedelta
from django.core import mail
from django.core.management.base import BaseCommand
from django.utils import timezone
from applications import models, emails
class Command(BaseCommand):
    """Management command making two passes over invited applications:

    * invites untouched for 4 days get a "last reminder" email;
    * invites untouched 1 day after that reminder are expired.
    """
    help = 'Checks invites that have expired and sends reminders 24 before'
    def handle(self, *args, **options):
        # Pass 1: invites older than four days get a final reminder email.
        fourdaysago = timezone.now() - timedelta(days=4)
        self.stdout.write('Checking reminders...')
        reminders = models.Application.objects.filter(
            status_update_date__lte=fourdaysago, status=models.APP_INVITED)
        self.stdout.write('Checking reminders...%s found' % reminders.count())
        self.stdout.write('Sending reminders...')
        msgs = []
        for app in reminders:
            # presumably flips the status to APP_LAST_REMIDER -- TODO confirm
            app.last_reminder()
            msgs.append(emails.create_lastreminder_email(app))
        # Send all emails over a single connection rather than one per message.
        connection = mail.get_connection()
        connection.send_messages(msgs)
        self.stdout.write(self.style.SUCCESS(
            'Sending reminders... Successfully sent %s reminders' % len(msgs)))
        # Pass 2: applications still untouched a day after the reminder expire.
        onedayago = timezone.now() - timedelta(days=1)
        self.stdout.write('Checking expired...')
        expired = models.Application.objects.filter(
            status_update_date__lte=onedayago, status=models.APP_LAST_REMIDER)
        self.stdout.write('Checking expired...%s found' % expired.count())
        self.stdout.write('Setting expired...')
        count = len([app.expire() for app in expired])
        self.stdout.write(self.style.SUCCESS(
            'Setting expired... Successfully expired %s applications' % count))
| from datetime import timedelta
from django.core import mail
from django.core.management.base import BaseCommand
from django.utils.datetime_safe import datetime
from applications import models, emails
class Command(BaseCommand):
help = 'Checks invites that have expired and sends reminders 24 before'
def handle(self, *args, **options):
fourdaysago = datetime.today() - timedelta(days=4)
self.stdout.write('Checking reminders...')
reminders = models.Application.objects.filter(
status_update_date__lte=fourdaysago, status=models.APP_INVITED)
self.stdout.write('Checking reminders...%s found' % reminders.count())
self.stdout.write('Sending reminders...')
msgs = []
for app in reminders:
app.last_reminder()
msgs.append(emails.create_lastreminder_email(app))
connection = mail.get_connection()
connection.send_messages(msgs)
self.stdout.write(self.style.SUCCESS(
'Sending reminders... Successfully sent %s reminders' % len(msgs)))
onedayago = datetime.today() - timedelta(days=1)
self.stdout.write('Checking expired...')
expired = models.Application.objects.filter(
status_update_date__lte=onedayago, status=models.APP_LAST_REMIDER)
self.stdout.write('Checking expired...%s found' % expired.count())
self.stdout.write('Setting expired...')
count = len([app.expire() for app in expired])
self.stdout.write(self.style.SUCCESS(
'Setting expired... Successfully expired %s applications' % count))
| mit | Python |
4f5f073e6aa693117142bc8fff4cd8a71f2da601 | document ipv6 behavior | MalloZup/sumaform,moio/sumaform,MalloZup/sumaform,MalloZup/sumaform,moio/sumaform,moio/sumaform,MalloZup/sumaform | salt/default/set_ip_in_etc_hosts.py | salt/default/set_ip_in_etc_hosts.py | #!/usr/bin/python
import errno
import os
import re
import socket
import subprocess
import sys
if len(sys.argv) != 3:
print("Usage: set_ip_in_etc_hosts.py <HOSTNAME> <DOMAIN>")
sys.exit(1)
_, hostname, domain = sys.argv
fqdn = hostname + "." + domain
def guess_address(fqdn, hostname, socket_type, invalid_prefixes, default):
    """Return the first resolved address for fqdn/hostname, or *default*.

    Resolution is attempted for the FQDN first and then the bare hostname;
    lookup failures are silently ignored.  Addresses whose textual form
    matches the *invalid_prefixes* regex (case-insensitive) are discarded,
    letting callers filter out loopback / link-local results.

    NOTE(review): depends on Python 2 semantics -- ``filter`` must return a
    list for the truthiness test and indexing below to work.
    """
    infos = []
    try:
        infos += socket.getaddrinfo(fqdn, None, socket_type)
    except socket.error:
        pass
    try:
        infos += socket.getaddrinfo(hostname, None, socket_type)
    except socket.error:
        pass
    # getaddrinfo entries are (family, type, proto, canonname, sockaddr);
    # sockaddr[0] is the textual address for both IPv4 and IPv6.
    addresses = [info[4][0] for info in infos]
    valid_addresses = filter(lambda s: not re.match(invalid_prefixes, s, re.I), addresses)
    if valid_addresses:
        return valid_addresses[0]
    else:
        return default
def update_hosts_file(fqdn, hostname, repl):
    """Replace (or append) the ``<addr> <fqdn> <hostname>`` lines in /etc/hosts.

    Any existing run of lines ending in ``<fqdn> <hostname>`` is substituted
    with *repl*; if no such run exists, *repl* is appended instead.  Passing
    an empty *repl* therefore removes the current entries.
    """
    with open("/etc/hosts", "r+") as f:
        hosts = f.read()
        # Match one or more consecutive lines that end with "<fqdn> <hostname>",
        # together with the surrounding newlines.
        pattern = re.compile("\\n+(.*{0} {1}\\n+)+".format(re.escape(fqdn), re.escape(hostname)), flags=re.M)
        new_hosts, n = pattern.subn(repl, hosts)
        if n == 0:
            # No existing entry found: append the replacement text instead.
            new_hosts = hosts + repl
        # Rewrite the file in place.
        f.seek(0)
        f.truncate()
        f.write(new_hosts)
update_hosts_file(fqdn, hostname, "")
ipv4 = guess_address(fqdn, hostname, socket.AF_INET, "127\\.0\\.", "127.0.1.1")
# we explicitly exclude link-local addresses as we currently can't get the interface names
ipv6 = guess_address(fqdn, hostname, socket.AF_INET6, "(::1$)|(fe[89ab][0-f]:)", "# ipv6 address not found for names:")
repl = "\n\n{0} {1} {2}\n{3} {4} {5}\n".format(ipv4, fqdn, hostname, ipv6, fqdn, hostname)
update_hosts_file(fqdn, hostname, repl)
print("/etc/hosts updated.")
| #!/usr/bin/python
import errno
import os
import re
import socket
import subprocess
import sys
if len(sys.argv) != 3:
print("Usage: set_ip_in_etc_hosts.py <HOSTNAME> <DOMAIN>")
sys.exit(1)
_, hostname, domain = sys.argv
fqdn = hostname + "." + domain
def guess_address(fqdn, hostname, socket_type, invalid_prefixes, default):
infos = []
try:
infos += socket.getaddrinfo(fqdn, None, socket_type)
except socket.error:
pass
try:
infos += socket.getaddrinfo(hostname, None, socket_type)
except socket.error:
pass
addresses = [info[4][0] for info in infos]
valid_addresses = filter(lambda s: not re.match(invalid_prefixes, s, re.I), addresses)
if valid_addresses:
return valid_addresses[0]
else:
return default
def update_hosts_file(fqdn, hostname, repl):
with open("/etc/hosts", "r+") as f:
hosts = f.read()
pattern = re.compile("\\n+(.*{0} {1}\\n+)+".format(re.escape(fqdn), re.escape(hostname)), flags=re.M)
new_hosts, n = pattern.subn(repl, hosts)
if n == 0:
new_hosts = hosts + repl
f.seek(0)
f.truncate()
f.write(new_hosts)
update_hosts_file(fqdn, hostname, "")
ipv4 = guess_address(fqdn, hostname, socket.AF_INET, "127\\.0\\.", "127.0.1.1")
ipv6 = guess_address(fqdn, hostname, socket.AF_INET6, "(::1$)|(fe[89ab][0-f]:)", "# ipv6 address not found for names:")
repl = "\n\n{0} {1} {2}\n{3} {4} {5}\n".format(ipv4, fqdn, hostname, ipv6, fqdn, hostname)
update_hosts_file(fqdn, hostname, repl)
print("/etc/hosts updated.")
| bsd-3-clause | Python |
2a073d94fcc9c6c9519e74a0c125d4fbb920885b | Add GNG module. | makism/dyfunconn | dyfunconn/cluster/__init__.py | dyfunconn/cluster/__init__.py | # -*- coding: utf-8 -*-
"""
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .ng import NeuralGas
from .mng import MergeNeuralGas
from .rng import RelationalNeuralGas
from .gng import GrowingNeuralGas
from .som import SOM
from .umatrix import umatrix
__all__ = [
"NeuralGas",
"MergeNeuralGas",
"RelationalNeuralGas",
"GrowingNeuralGas",
"SOM",
"umatrix",
]
| # -*- coding: utf-8 -*-
"""
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .ng import NeuralGas
from .mng import MergeNeuralGas
from .rng import RelationalNeuralGas
from .som import SOM
from .umatrix import umatrix
__all__ = ['NeuralGas',
'MergeNeuralGas',
'RelationalNeuralGas',
'SOM',
'umatrix'
]
| bsd-3-clause | Python |
29db07c8892fc066917654aa3553140d92d8449b | remove unused imports | cuckoobox/cuckoo,cuckoobox/cuckoo,cuckoobox/cuckoo,cuckoobox/cuckoo,rodionovd/cuckoo-osx-analyzer,cuckoobox/cuckoo,rodionovd/cuckoo-osx-analyzer,rodionovd/cuckoo-osx-analyzer | analyzer/darwin/lib/core/filetimes.py | analyzer/darwin/lib/core/filetimes.py | # Copyright (c) 2009, David Buxton <david@gasmark6.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tools to convert between Python datetime instances and Microsoft times.
"""
from calendar import timegm
# http://support.microsoft.com/kb/167296
# How To Convert a UNIX time_t to a Win32 FILETIME or SYSTEMTIME
EPOCH_AS_FILETIME = 116444736000000000 # January 1, 1970 as MS file time
HUNDREDS_OF_NANOSECONDS = 10000000
def dt_to_filetime(dt, delta_from_utc):
    """Convert a naive local ``datetime`` to Microsoft FILETIME format.

    FILETIME counts 100-nanosecond intervals since January 1, 1601 (UTC).
    *delta_from_utc* is the ``timedelta`` to add to *dt* to obtain UTC
    (use ``timedelta(0)`` when *dt* is already UTC).

    Examples (``datetime``/``timedelta`` must be imported by the caller --
    this module no longer imports them)::

        >>> from datetime import datetime, timedelta
        >>> dt_to_filetime(datetime(1970, 1, 1), timedelta(0))
        116444736000000000
        >>> dt_to_filetime(datetime(2009, 7, 25, 23, 0), timedelta(0))
        128930364000000000
        >>> dt_to_filetime(datetime(2009, 7, 25, 23, 0, 0, 100), timedelta(0))
        128930364000001000
    """
    # Shift to UTC first; datetimes are immutable, so += just rebinds the local.
    dt += delta_from_utc
    # Whole seconds since the 1601 epoch, scaled to 100ns ticks...
    ft = EPOCH_AS_FILETIME + (timegm(dt.timetuple()) * HUNDREDS_OF_NANOSECONDS)
    # ...plus the sub-second part (1 microsecond == 10 ticks).
    return ft + (dt.microsecond * 10)
| # Copyright (c) 2009, David Buxton <david@gasmark6.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tools to convert between Python datetime instances and Microsoft times.
"""
from datetime import datetime, timedelta, tzinfo
from calendar import timegm
# http://support.microsoft.com/kb/167296
# How To Convert a UNIX time_t to a Win32 FILETIME or SYSTEMTIME
EPOCH_AS_FILETIME = 116444736000000000  # January 1, 1970 as MS file time
HUNDREDS_OF_NANOSECONDS = 10000000


def dt_to_filetime(dt, delta_from_utc):
    """Convert *dt* (a local ``datetime``) to a Microsoft FILETIME integer.

    FILETIME counts 100-nanosecond intervals since January 1, 1601 (UTC).
    *delta_from_utc* is added to *dt* to shift it to UTC before converting;
    pass ``timedelta(0)`` when *dt* is already UTC.  For example,
    ``dt_to_filetime(datetime(1970, 1, 1), timedelta(0))`` is
    ``116444736000000000``.
    """
    utc_moment = dt + delta_from_utc
    whole_seconds = timegm(utc_moment.timetuple())
    ticks = EPOCH_AS_FILETIME + whole_seconds * HUNDREDS_OF_NANOSECONDS
    # One microsecond is ten 100-nanosecond ticks.
    return ticks + utc_moment.microsecond * 10
| mit | Python |
1cc6c0271a70742818662c4c3630084136dd5b14 | Fix redundant second arg | ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner | app/data_model/questionnaire_store.py | app/data_model/questionnaire_store.py | import logging
from app.storage.storage_factory import StorageFactory
from flask import g
import jsonpickle
logger = logging.getLogger(__name__)
class QuestionnaireStore:
    """Per-user questionnaire answers, persisted through a storage backend.

    ``data`` is a plain dict stored by the backend keyed on ``user_id``;
    ``user_ik`` is the key the backend uses to decrypt/encrypt it.
    """
    def __init__(self, user_id, user_ik):
        """Load the user's existing data, or create and persist an empty store.

        Raises:
            ValueError: if either ``user_id`` or ``user_ik`` is falsy.
        """
        self.data = {}
        if user_id and user_ik:
            self.user_id = user_id
            self.user_ik = user_ik
        else:
            raise ValueError("No user_id or user_ik found in session")
        self.storage = StorageFactory.get_storage_mechanism()
        if self.storage.has_data(self.user_id):
            logger.debug("User %s has previous data loading", user_id)
            self.data = self.storage.get(self.user_id, self.user_ik)
        else:
            # First visit: persist the empty store straight away.
            logger.debug("User %s does not have previous data creating", user_id)
            self.save()
    def delete(self):
        """Remove all stored data for this user."""
        logger.debug("Deleting questionnaire data for %s", self.user_id)
        self.data = {}
        self.storage.delete(self.user_id)
    def save(self):
        """Persist the current ``data`` dict to the storage backend."""
        logger.debug("Saving user data %s for user id %s", self.data, self.user_id)
        self.storage.store(data=self.data, user_id=self.user_id, user_ik=self.user_ik)
    def encode_metadata(self, metadata):
        """Serialise *metadata* with jsonpickle under the "METADATA" key."""
        self.data["METADATA"] = jsonpickle.encode(metadata)
    def decode_metadata(self):
        """Return the deserialised metadata object.

        Raises:
            RuntimeError: if no metadata has been stored for this user.
        """
        if "METADATA" in self.data:
            return jsonpickle.decode(self.data["METADATA"])
        else:
            # BUG FIX: the message was previously built lazy-format style,
            # RuntimeError("... %s", id), which never interpolated the id.
            raise RuntimeError("No metadata for user %s" % self.user_id)
def get_questionnaire_store(user_id, user_ik):
    """Return the request-scoped QuestionnaireStore, creating it on first use.

    The instance is cached on flask's ``g`` so the app uses a single store
    per request.  Returns None if construction fails (the failure is logged).
    """
    store = g.get('_questionnaire_store')
    if store is None:
        try:
            store = g._questionnaire_store = QuestionnaireStore(user_id, user_ik)
        except Exception:
            # BUG FIX: stdlib logging rejects an ``exception=`` keyword
            # (logger.error(..., exception=repr(e)) raised TypeError, masking
            # the real error); logger.exception logs message plus traceback.
            logger.exception("questionnaire_store failed to init")
    return store
def get_metadata(user):
    """Return the decoded questionnaire metadata for *user*, or None.

    Anonymous users lack the ``user_id``/``user_ik`` attributes, which
    surfaces here as an AttributeError and yields None.
    """
    try:
        questionnaire_store = get_questionnaire_store(user.user_id, user.user_ik)
        return questionnaire_store.decode_metadata()
    except AttributeError:
        logger.debug("Anonymous user requesting metadata get instance")
        # anonymous user mixin - this happens on the error pages before authentication
        return None
| import logging
from app.storage.storage_factory import StorageFactory
from flask import g
import jsonpickle
logger = logging.getLogger(__name__)
class QuestionnaireStore:
def __init__(self, user_id, user_ik):
self.data = {}
if user_id and user_ik:
self.user_id = user_id
self.user_ik = user_ik
else:
raise ValueError("No user_id or user_ik found in session")
self.storage = StorageFactory.get_storage_mechanism()
if self.storage.has_data(self.user_id):
logger.debug("User %s has previous data loading", user_id)
self.data = self.storage.get(self.user_id, self.user_ik)
else:
logger.debug("User %s does not have previous data creating", user_id)
self.save()
def delete(self):
logger.debug("Deleting questionnaire data for %s", self.user_id)
self.data = {}
self.storage.delete(self.user_id)
def save(self):
logger.debug("Saving user data %s for user id %s", self.data, self.user_id)
self.storage.store(data=self.data, user_id=self.user_id, user_ik=self.user_ik)
def encode_metadata(self, metadata):
self.data["METADATA"] = jsonpickle.encode(metadata)
def decode_metadata(self):
if "METADATA" in self.data:
return jsonpickle.decode(self.data["METADATA"])
else:
raise RuntimeError("No metadata for user %s", self.user_id)
def get_questionnaire_store(user_id, user_ik):
# Sets up a single QuestionnaireStore instance throughout app.
store = g.get('_questionnaire_store', None)
if store is None:
try:
store = g._questionnaire_store = QuestionnaireStore(user_id, user_ik)
except Exception as e:
logger.error("questionnaire_store failed to init", exception=repr(e))
return store
def get_metadata(user):
try:
questionnaire_store = get_questionnaire_store(user.user_id, user.user_ik)
return questionnaire_store.decode_metadata()
except AttributeError:
logger.debug("Anonymous user requesting metadata get instance")
# anonymous user mixin - this happens on the error pages before authentication
return None
| mit | Python |
f84c09a979547a159d39922a75941873f4ecf041 | add display capability | pletzer/icqsol,gregvonkuster/icqsol,pletzer/icqsol,pletzer/icqsol,gregvonkuster/icqsol,gregvonkuster/icqsol,gregvonkuster/icqsol,pletzer/icqsol | examples/colorSurfaceField.py | examples/colorSurfaceField.py | #!/usr/bin/env python
"""
Color a surface field
"""
import argparse
import time
import os
import re
import sys
from icqsol.shapes.icqShapeManager import ShapeManager
from icqsol import util
# time stamp
tid = re.sub(r'\.', '', str(time.time()))
parser = argparse.ArgumentParser(description='Color surface field')
parser.add_argument('--input', dest='input', default='',
help='VTK input file')
parser.add_argument('--colormap', dest='colormap', default='hot',
help='Colormap ("hot", "cold", or "blackbody")')
parser.add_argument('--name', dest='name', default='',
help='Set the name of the field')
parser.add_argument('--component', dest='component', type=int, default=0,
help='Set the component of the field')
parser.add_argument('--ascii', dest='ascii', action='store_true',
help='Save data in ASCII format (default is binary)')
parser.add_argument('--display', dest='display', action='store_true',
help='Display colored geometry')
parser.add_argument('--output', dest='output',
default='colorSurfaceField-{0}.vtk'.format(tid),
help='VTK Output file.')
args = parser.parse_args()
# --- input validation (Python 2 script) ------------------------------------
if not args.input:
    print 'ERROR: must specify input file: --input <file>'
    sys.exit(3)
if not os.path.exists(args.input):
    print 'ERROR: file {} does not exist'.format(args.input)
    sys.exit(2)
# The colouring pipeline only understands VTK files with a known dataset type.
file_format = util.getFileFormat(args.input)
if file_format != util.VTK_FORMAT:
    print 'ERROR: file {} must be VTK format'.format(args.input)
    sys.exit(2)
vtk_dataset_type = util.getVtkDatasetType(args.input)
if vtk_dataset_type not in util.VTK_DATASET_TYPES:
    print 'ERROR: invalid VTK dataset type {}'.format(vtk_dataset_type)
    sys.exit(2)
# --- colour the requested field --------------------------------------------
shape_mgr = ShapeManager(file_format=util.VTK_FORMAT, vtk_dataset_type=vtk_dataset_type)
pDataInput = shape_mgr.loadAsVtkPolyData(args.input)
pDataColored = shape_mgr.colorSurfaceField(pDataInput, args.colormap,
                                           field_name=args.name,
                                           field_component=args.component)
# Optionally show the coloured geometry in an interactive window.
if args.display:
    shape_mgr.showVtkPolyData(pDataColored)
# --- save, honouring the requested encoding --------------------------------
if args.output:
    if args.ascii:
        file_type = util.ASCII
    else:
        file_type = util.BINARY
    shape_mgr.saveVtkPolyData(vtk_poly_data=pDataColored, file_name=args.output, file_type=file_type)
| #!/usr/bin/env python
"""
Color a surface field
"""
import argparse
import time
import os
import re
import sys
from icqsol.shapes.icqShapeManager import ShapeManager
from icqsol import util
# time stamp
tid = re.sub(r'\.', '', str(time.time()))
parser = argparse.ArgumentParser(description='Color surface field')
parser.add_argument('--input', dest='input', default='',
help='VTK input file')
parser.add_argument('--colormap', dest='colormap', default='hot',
help='Colormap ("hot", "cold", or "blackbody")')
parser.add_argument('--name', dest='name', default='',
help='Set the name of the field')
parser.add_argument('--component', dest='component', type=int, default=0,
help='Set the component of the field')
parser.add_argument('--ascii', dest='ascii', action='store_true',
help='Save data in ASCII format (default is binary)')
parser.add_argument('--output', dest='output',
default='colorSurfaceField-{0}.vtk'.format(tid),
help='VTK Output file.')
args = parser.parse_args()
if not args.input:
print 'ERROR: must specify input file: --input <file>'
sys.exit(3)
if not os.path.exists(args.input):
print 'ERROR: file {} does not exist'.format(args.input)
sys.exit(2)
file_format = util.getFileFormat(args.input)
if file_format != util.VTK_FORMAT:
print 'ERROR: file {} must be VTK format'.format(args.input)
sys.exit(2)
vtk_dataset_type = util.getVtkDatasetType(args.input)
if vtk_dataset_type not in util.VTK_DATASET_TYPES:
print 'ERROR: invalid VTK dataset type {}'.format(vtk_dataset_type)
sys.exit(2)
shape_mgr = ShapeManager(file_format=util.VTK_FORMAT, vtk_dataset_type=vtk_dataset_type)
pDataInput = shape_mgr.loadAsVtkPolyData(args.input)
pDataColored = shape_mgr.colorSurfaceField(pDataInput, args.colormap,
field_name=args.name,
field_component=args.component)
if args.output:
if args.ascii:
file_type = util.ASCII
else:
file_type = util.BINARY
shape_mgr.saveVtkPolyData(vtk_poly_data=pDataColored, file_name=args.output, file_type=file_type)
| unknown | Python |
482749c384bbbef0bea8fba25b8ac0f94a7e56a5 | Add missing dot and correctly sort imports in soc.logic.helper.notifications module. | SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange,SRabbelier/Melange | app/soc/logic/helper/notifications.py | app/soc/logic/helper/notifications.py | #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for sending out notifications.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
import os
from google.appengine.api import users
from django.utils.translation import ugettext_lazy
from soc.logic import mail_dispatcher
from soc.logic.models import user as user_logic
from soc.views.helper import redirects
DEF_INVITATION_FMT = ugettext_lazy(
"Invitation to become a %(role)s for %(group)s")
def sendInviteNotification(entity):
    """Sends out an invite notification to the user the request is for.

    Builds the acceptance URL from the current HTTP host, then renders and
    mails the default invitation template with sender, recipient, role and
    group details.

    Args:
      entity : A request containing the information needed to create the message
    """
    # get the current user (the sender of the invite)
    properties = {'account': users.get_current_user()}
    current_user_entity = user_logic.logic.getForFields(properties, unique=True)
    # get the user the request is for (the recipient)
    properties = {'link_id': entity.link_id }
    request_user_entity = user_logic.logic.getForFields(properties, unique=True)
    # create the invitation_url the recipient follows to accept the invite
    invitation_url = "%(host)s%(index)s" % {
        'host' : os.environ['HTTP_HOST'],
        'index': redirects.inviteAcceptedRedirect(entity, None)}
    # get the group entity the invitation was issued for
    group_entity = entity.scope
    messageProperties = {
        'to_name': request_user_entity.name,
        'sender_name': current_user_entity.name,
        'role': entity.role,
        'group': group_entity.name,
        'invitation_url': invitation_url,
        'to': request_user_entity.account.email(),
        'sender': current_user_entity.account.email(),
        'subject': DEF_INVITATION_FMT % {
            'role': entity.role,
            'group': group_entity.name
        },
    }
    # send out the message using the default invitation template
    mail_dispatcher.sendMailFromTemplate('soc/mail/invitation.html',
                                         messageProperties)
| #!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for sending out notifications
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from google.appengine.api import users
from django.utils.translation import ugettext_lazy
from soc.logic import mail_dispatcher
from soc.logic.models import user as user_logic
from soc.views.helper import redirects
import os
DEF_INVITATION_FMT = ugettext_lazy(
"Invitation to become a %(role)s for %(group)s")
def sendInviteNotification(entity):
"""Sends out an invite notification to the user the request is for.
Args:
entity : A request containing the information needed to create the message
"""
# get the current user
properties = {'account': users.get_current_user()}
current_user_entity = user_logic.logic.getForFields(properties, unique=True)
# get the user the request is for
properties = {'link_id': entity.link_id }
request_user_entity = user_logic.logic.getForFields(properties, unique=True)
# create the invitation_url
invitation_url = "%(host)s%(index)s" % {
'host' : os.environ['HTTP_HOST'],
'index': redirects.inviteAcceptedRedirect(entity, None)}
# get the group entity
group_entity = entity.scope
messageProperties = {
'to_name': request_user_entity.name,
'sender_name': current_user_entity.name,
'role': entity.role,
'group': group_entity.name,
'invitation_url': invitation_url,
'to': request_user_entity.account.email(),
'sender': current_user_entity.account.email(),
'subject': DEF_INVITATION_FMT % {
'role': entity.role,
'group': group_entity.name
},
}
# send out the message using the default invitation template
mail_dispatcher.sendMailFromTemplate('soc/mail/invitation.html',
messageProperties)
| apache-2.0 | Python |
d9e4978912c1c0dfd0c0fa667fede94c70634774 | Move logger into get_events so displays with main script | laurenrevere/osf.io,erinspace/osf.io,acshi/osf.io,acshi/osf.io,acshi/osf.io,icereval/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,mfraezz/osf.io,rdhyee/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,aaxelb/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,erinspace/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,adlius/osf.io,mattclark/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,aaxelb/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,felliott/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,sloria/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,hmoco/osf.io,sloria/osf.io,caneruguz/osf.io,binoculars/osf.io,cwisecarver/osf.io,mluo613/osf.io,hmoco/osf.io,cwisecarver/osf.io,mattclark/osf.io,rdhyee/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,caneruguz/osf.io,Nesiehr/osf.io,mluo613/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,felliott/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,chennan47/osf.io,TomBaxter/osf.io,chrisseto/osf.io,hmoco/osf.io,leb2dg/osf.io,TomBaxter/osf.io,icereval/osf.io,chennan47/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,saradbowman/osf.io,baylee-d/osf.io,caseyrollins/osf.io,felliott/osf.io,erinspace/osf.io,chrisseto/osf.io,alexschiller/osf.io,acshi/osf.io,adlius/osf.io,caneruguz/osf.io,crcresearch/osf.io,adlius/osf.io,laurenrevere/osf.io,leb2dg/osf.io,crcresearch/osf.io,leb2dg/osf.io,adlius/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,binoculars/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,saradbowman/osf.io,rdhyee/osf.io,cslzchen/osf.io,Nesiehr/osf.io,alexschiller/osf.io,binoculars/osf.io,icereval/osf.io,alexschiller/osf.io,aaxelb/osf.io,mfraezz/osf.io,mfraezz/o
sf.io,mluo613/osf.io,mluo613/osf.io,felliott/osf.io,mluo613/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,pattisdr/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io | scripts/analytics/node_log_count.py | scripts/analytics/node_log_count.py | import time
import logging
import argparse
from modularodm import Q
from datetime import datetime, timedelta
from dateutil.parser import parse
from website.app import init_app
from website.project.model import NodeLog
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def get_events(date):
    """Return keen-ready event dicts for NodeLogs in the 24 hours up to *date*.

    Selects logs with ``date - 1 day < log.date <= date``.  Each event carries
    the keen timestamp (*date*), the log's own timestamp, its action, and the
    acting user's id when the log has a user.
    """
    node_log_query = Q('date', 'lte', date) & Q('date', 'gt', date - timedelta(1))
    node_logs = NodeLog.find(node_log_query)
    node_log_events = []
    for node_log in node_logs:
        event = {
            'keen': {'timestamp': date.isoformat()},
            'date': node_log.date.isoformat(),
            'action': node_log.action
        }
        if node_log.user:
            # Anonymous/system logs have no user attached.
            event.update({'user_id': node_log.user._id})
        node_log_events.append(event)
    logger.info('NodeLogs counted. {} NodeLogs.'.format(len(node_log_events)))
    return node_log_events
def parse_args(argv=None):
    """Parse command line arguments.

    Args:
        argv: optional list of argument strings; defaults to ``sys.argv[1:]``.
            Accepting an explicit list keeps the function testable.

    Returns:
        argparse.Namespace with a ``date`` attribute (string or None).
    """
    parser = argparse.ArgumentParser(description='Get node log counts!')
    parser.add_argument('-d', '--date', dest='date', required=False)
    return parser.parse_args(argv)
def yield_chunked_events(events):
    """Yield *events* in slices of at most 5000 items.

    The keen API rejects batches bigger than 5000 events, so callers send
    one yielded chunk per request.  An empty list yields nothing.
    """
    chunk_size = 5000
    start = 0
    total = len(events)
    while start < total:
        yield events[start:start + chunk_size]
        start += chunk_size
def main():
    """Gather one day of node log events and push them to keen.

    Events are sent in 5000-item chunks; when keen is not configured the
    events are printed instead.
    """
    # BUG FIX: the module imports the ``datetime`` class directly, so the
    # previous ``datetime.datetime.utcnow()`` raised AttributeError.
    today = datetime.utcnow().date()
    args = parse_args()
    # BUG FIX: the parser stores the option as ``date`` -- ``args.end_date``
    # raised AttributeError whenever a date was supplied.
    date = parse(args.date).date() if args.date else today
    node_log_events = get_events(date)
    keen_project = keen_settings['private']['project_id']
    write_key = keen_settings['private']['write_key']
    if keen_project and write_key:
        client = KeenClient(
            project_id=keen_project,
            write_key=write_key,
        )
        for chunk in yield_chunked_events(node_log_events):
            client.add_events({'node_log_analytics': chunk})
            time.sleep(1)  # pause between batches to stay under rate limits
    else:
        # No keen credentials configured: dump the events for inspection.
        print(node_log_events)
if __name__ == '__main__':
init_app()
main()
| import time
import logging
import argparse
from modularodm import Q
from datetime import datetime, timedelta
from dateutil.parser import parse
from website.app import init_app
from website.project.model import NodeLog
from website.settings import KEEN as keen_settings
from keen.client import KeenClient
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def get_events(date):
""" Get all node logs from a given date. Defaults to starting yesterday
to today (both in UTC).
"""
node_log_query = Q('date', 'lte', date) & Q('date', 'gt', date - timedelta(1))
node_logs = NodeLog.find(node_log_query)
node_log_events = []
for node_log in node_logs:
event = {
'keen': {'timestamp': date.isoformat()},
'date': node_log.date.isoformat(),
'action': node_log.action
}
if node_log.user:
event.update({'user_id': node_log.user._id})
node_log_events.append(event)
return node_log_events
def parse_args():
parser = argparse.ArgumentParser(description='Get node log counts!')
parser.add_argument('-d', '--date', dest='date', required=False)
return parser.parse_args()
def yield_chunked_events(events):
""" The keen API likes events in chunks no bigger than 5000 -
Only yield that many at a time.
"""
for i in range(0, len(events), 5000):
yield events[i:i + 5000]
def main():
    """Send the day's NodeLog events to keen.

    Gathers events for the requested date (default: today, UTC) and pushes
    them to keen in 5000-event chunks; without keen credentials the events
    are printed instead.
    """
    # BUG FIX: this module does `from datetime import datetime`, so the old
    # `datetime.datetime.utcnow()` raised AttributeError.
    today = datetime.utcnow().date()
    args = parse_args()
    # BUG FIX: parse_args() stores the option under dest='date'; the old code
    # read the nonexistent `args.end_date` and crashed whenever -d was given.
    date = parse(args.date).date() if args.date else today
    node_log_events = get_events(date)
    keen_project = keen_settings['private']['project_id']
    write_key = keen_settings['private']['write_key']
    if keen_project and write_key:
        client = KeenClient(
            project_id=keen_project,
            write_key=write_key,
        )
        # Stay under keen's 5000-event batch limit; pause between batches.
        for chunk in yield_chunked_events(node_log_events):
            client.add_events({'node_log_analytics': chunk})
            time.sleep(1)
    else:
        print(node_log_events)
    logger.info('NodeLogs counted. {} NodeLogs.'.format(len(node_log_events)))
if __name__ == '__main__':
init_app()
main()
| apache-2.0 | Python |
2f8ac0d7c22fd316aa30505e0d6732ea81a19a5e | Make possible to set a custom view to the controlcenter | byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter | controlcenter/views.py | controlcenter/views.py | from django.conf.urls import url
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils.decorators import method_decorator
from django.utils.module_loading import import_string
from django.views.generic.base import TemplateView
from . import app_settings
class ControlCenter(object):
    """Routes dashboard URLs to a configurable dashboard view class."""

    def __init__(self, view_class):
        self.view_class = view_class

    def get_dashboards(self):
        """Instantiate every dashboard listed in settings, keyed by position."""
        dashboards = []
        for pk, dotted_path in enumerate(app_settings.DASHBOARDS):
            dashboard_class = import_string(dotted_path)
            dashboards.append(dashboard_class(pk=pk))
        if not dashboards:
            raise ImproperlyConfigured('No dashboards found.')
        return dashboards

    def get_view(self):
        """Build the dashboard view bound to the configured dashboards."""
        return self.view_class.as_view(dashboards=self.get_dashboards())

    def get_urls(self):
        """URL patterns served under the controlcenter namespace."""
        return [
            url(r'^(?P<pk>\d+)/$', self.get_view(), name='dashboard'),
        ]

    @property
    def urls(self):
        """Tuple suitable for ``include()``: (patterns, app_name, namespace)."""
        return self.get_urls(), 'controlcenter', 'controlcenter'
class DashboardView(TemplateView):
    """Render a single dashboard selected by its positional pk in the URL."""

    # Injected via as_view(dashboards=...) by ControlCenter.
    dashboards = NotImplemented
    template_name = 'controlcenter/dashboard.html'

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        # Only staff users may see the control center.
        return super(DashboardView, self).dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        # URL regex guarantees pk is a non-negative integer string.
        pk = int(self.kwargs['pk'])
        try:
            self.dashboard = self.dashboards[pk]
        except IndexError:
            raise Http404('Dashboard not found.')
        return super(DashboardView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = {
            'title': self.dashboard.title,
            'dashboard': self.dashboard,
            'dashboards': self.dashboards,
            'groups': self.dashboard.get_widgets(self.request),
            'sharp': app_settings.SHARP,
        }
        # Admin context
        kwargs.update(admin.site.each_context(self.request))
        kwargs.update(context)
        return super(DashboardView, self).get_context_data(**kwargs)
controlcenter = ControlCenter(view_class=DashboardView)
| from importlib import import_module
from django.conf.urls import url
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils.decorators import method_decorator
from django.views.generic.base import TemplateView
from . import app_settings
class ControlCenter(object):
    """URL dispatcher for the dashboards declared in project settings."""

    def get_urls(self):
        # Import and instantiate each dotted-path dashboard; the list index
        # doubles as the dashboard's pk in the URL.
        self.dashboards = []
        for index, path in enumerate(app_settings.DASHBOARDS):
            pkg, name = path.rsplit('.', 1)
            klass = getattr(import_module(pkg), name)
            instance = klass(index)
            self.dashboards.append(instance)
        if not self.dashboards:
            raise ImproperlyConfigured('No dashboard found in '
                                       'settings.CONTROLCENTER_DASHBOARDS.')
        # NOTE(review): `dashboard_view` is a module-level name defined below
        # this class; get_urls() must not run before that assignment executes.
        urlpatterns = [
            url(r'^(?P<pk>\d+)/$', dashboard_view, name='dashboard'),
        ]
        return urlpatterns

    @property
    def urls(self):
        # include(arg, namespace=None, app_name=None)
        return self.get_urls(), 'controlcenter', 'controlcenter'
controlcenter = ControlCenter()
class DashboardView(TemplateView):
    """Render one dashboard, looked up by pk on the module-level controlcenter."""

    template_name = 'controlcenter/dashboard.html'

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        # Staff-only access.
        return super(DashboardView, self).dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        pk = int(self.kwargs['pk'])
        try:
            # NOTE(review): controlcenter.dashboards is populated by
            # ControlCenter.get_urls(); hitting this view before URL setup
            # would raise AttributeError, not Http404 -- confirm ordering.
            self.dashboard = controlcenter.dashboards[pk]
        except IndexError:
            raise Http404('Dashboard not found.')
        return super(DashboardView, self).get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = {
            'title': self.dashboard.title,
            'dashboard': self.dashboard,
            'dashboards': controlcenter.dashboards,
            'groups': self.dashboard.get_widgets(self.request),
            'sharp': app_settings.SHARP,
        }
        # Admin context
        kwargs.update(admin.site.each_context(self.request))
        kwargs.update(context)
        return super(DashboardView, self).get_context_data(**kwargs)
dashboard_view = DashboardView.as_view()
| bsd-3-clause | Python |
bbb2871bfba427654d70592f0f4fa0d1806a3fa0 | Fix PEP8 for generate-keyword-tests | GBGamer/rust,AerialX/rust,cllns/rust,richo/rust,pshc/rust,aneeshusa/rust,sae-bom/rust,pelmers/rust,dwillmer/rust,mvdnes/rust,miniupnp/rust,victorvde/rust,andars/rust,avdi/rust,kwantam/rust,mahkoh/rust,mvdnes/rust,gifnksm/rust,AerialX/rust,AerialX/rust-rt-minimal,ruud-v-a/rust,aidancully/rust,jroesch/rust,rohitjoshi/rust,pelmers/rust,graydon/rust,pshc/rust,richo/rust,pshc/rust,nwin/rust,dwillmer/rust,krzysz00/rust,ejjeong/rust,vhbit/rust,ejjeong/rust,pshc/rust,dwillmer/rust,vhbit/rust,gifnksm/rust,mihneadb/rust,zaeleus/rust,mdinger/rust,dwillmer/rust,jroesch/rust,richo/rust,cllns/rust,miniupnp/rust,ruud-v-a/rust,rohitjoshi/rust,graydon/rust,ebfull/rust,bombless/rust,gifnksm/rust,jroesch/rust,ruud-v-a/rust,andars/rust,avdi/rust,reem/rust,mahkoh/rust,mahkoh/rust,ruud-v-a/rust,carols10cents/rust,zaeleus/rust,pelmers/rust,reem/rust,mihneadb/rust,jashank/rust,untitaker/rust,robertg/rust,rprichard/rust,rprichard/rust,l0kod/rust,pelmers/rust,robertg/rust,zachwick/rust,graydon/rust,aepsil0n/rust,cllns/rust,dwillmer/rust,gifnksm/rust,carols10cents/rust,mdinger/rust,GBGamer/rust,aidancully/rust,untitaker/rust,XMPPwocky/rust,cllns/rust,philyoon/rust,gifnksm/rust,avdi/rust,ebfull/rust,pelmers/rust,zubron/rust,XMPPwocky/rust,AerialX/rust-rt-minimal,carols10cents/rust,vhbit/rust,zachwick/rust,aneeshusa/rust,aneeshusa/rust,TheNeikos/rust,omasanori/rust,sae-bom/rust,aidancully/rust,kwantam/rust,XMPPwocky/rust,mvdnes/rust,GBGamer/rust,aepsil0n/rust,dwillmer/rust,ebfull/rust,krzysz00/rust,vhbit/rust,mvdnes/rust,mdinger/rust,l0kod/rust,AerialX/rust,dwillmer/rust,pshc/rust,aidancully/rust,rohitjoshi/rust,nwin/rust,reem/rust,rohitjoshi/rust,dwillmer/rust,krzysz00/rust,krzysz00/rust,zaeleus/rust,mahkoh/rust,reem/rust,rprichard/rust,hauleth/rust,TheNeikos/rust,vhbit/rust,graydon/rust,seanrivera/rust,nwin/rust,rprichard/rust,rohitjoshi/rust,mahkoh/rust,vhbit/rust,miniupnp/rust,bombless/rust,miniupnp/rust,pshc/r
ust,krzysz00/rust,zubron/rust,miniupnp/rust,mvdnes/rust,philyoon/rust,hauleth/rust,aepsil0n/rust,aepsil0n/rust,graydon/rust,jroesch/rust,untitaker/rust,hauleth/rust,sae-bom/rust,aneeshusa/rust,TheNeikos/rust,mdinger/rust,kwantam/rust,TheNeikos/rust,ruud-v-a/rust,bombless/rust,ebfull/rust,philyoon/rust,seanrivera/rust,seanrivera/rust,ejjeong/rust,AerialX/rust,kwantam/rust,jroesch/rust,miniupnp/rust,zaeleus/rust,AerialX/rust-rt-minimal,aidancully/rust,bombless/rust,richo/rust,mvdnes/rust,aidancully/rust,omasanori/rust,vhbit/rust,ejjeong/rust,seanrivera/rust,mahkoh/rust,ejjeong/rust,aepsil0n/rust,TheNeikos/rust,zaeleus/rust,GBGamer/rust,zachwick/rust,zaeleus/rust,nwin/rust,zubron/rust,GBGamer/rust,TheNeikos/rust,AerialX/rust-rt-minimal,hauleth/rust,victorvde/rust,zubron/rust,philyoon/rust,ebfull/rust,graydon/rust,gifnksm/rust,pshc/rust,hauleth/rust,GBGamer/rust,jashank/rust,l0kod/rust,AerialX/rust-rt-minimal,carols10cents/rust,rprichard/rust,miniupnp/rust,XMPPwocky/rust,KokaKiwi/rust,untitaker/rust,avdi/rust,ebfull/rust,carols10cents/rust,omasanori/rust,mihneadb/rust,philyoon/rust,pelmers/rust,mihneadb/rust,nwin/rust,l0kod/rust,victorvde/rust,mihneadb/rust,robertg/rust,aepsil0n/rust,ejjeong/rust,l0kod/rust,hauleth/rust,avdi/rust,omasanori/rust,nwin/rust,jashank/rust,robertg/rust,KokaKiwi/rust,robertg/rust,XMPPwocky/rust,krzysz00/rust,reem/rust,nwin/rust,nwin/rust,aneeshusa/rust,l0kod/rust,KokaKiwi/rust,andars/rust,richo/rust,zubron/rust,zubron/rust,bombless/rust,mdinger/rust,jroesch/rust,sae-bom/rust,XMPPwocky/rust,victorvde/rust,vhbit/rust,cllns/rust,omasanori/rust,zubron/rust,kwantam/rust,jashank/rust,zachwick/rust,zachwick/rust,seanrivera/rust,untitaker/rust,andars/rust,GBGamer/rust,jroesch/rust,AerialX/rust,untitaker/rust,seanrivera/rust,bombless/rust,victorvde/rust,aneeshusa/rust,KokaKiwi/rust,robertg/rust,zachwick/rust,andars/rust,l0kod/rust,reem/rust,victorvde/rust,sae-bom/rust,mdinger/rust,cllns/rust,kwantam/rust,pshc/rust,jashank/rust,ruud-v-a/rust,miniupnp/ru
st,l0kod/rust,jroesch/rust,avdi/rust,sae-bom/rust,AerialX/rust,AerialX/rust-rt-minimal,jashank/rust,carols10cents/rust,zubron/rust,jashank/rust,andars/rust,GBGamer/rust,KokaKiwi/rust,rprichard/rust,rohitjoshi/rust,philyoon/rust,KokaKiwi/rust,omasanori/rust,jashank/rust,richo/rust,mihneadb/rust | src/etc/generate-keyword-tests.py | src/etc/generate-keyword-tests.py | #!/usr/bin/env python
#
# Copyright 2013 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
"""
This script takes a list of keywords and generates a testcase, that checks
if using the keyword as identifier fails, for every keyword. The generate
test files are set read-only.
Test for https://github.com/rust-lang/rust/issues/2275
sample usage: src/etc/generate-keyword-tests.py as break
"""
import sys
import os
import datetime
import stat
template = """// Copyright %d The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-keyword-tests.py %s'
fn main() {
let %s = "foo"; //~ error: ident
}
"""
# Destination for the generated compile-fail tests, relative to this script.
test_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../test/compile-fail')
)

# One read-only test file per keyword given on the command line.
for kw in sys.argv[1:]:
    test_file = os.path.join(test_dir, 'keyword-%s-as-identifier.rs' % kw)

    # set write permission if file exists, so it can be changed
    if os.path.exists(test_file):
        os.chmod(test_file, stat.S_IWUSR)

    with open(test_file, 'wt') as f:
        f.write(template % (datetime.datetime.now().year, kw, kw))

    # mark file read-only
    os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
| #!/usr/bin/env python
#
# Copyright 2013 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
"""
This script takes a list of keywords and generates a testcase, that checks
if using the keyword as identifier fails, for every keyword. The generate
test files are set read-only.
Test for https://github.com/rust-lang/rust/issues/2275
sample usage: src/etc/generate-keyword-tests.py as break
"""
import sys
import os
import datetime
import stat
template = """// Copyright %d The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This file was auto-generated using 'src/etc/generate-keyword-tests.py %s'
fn main() {
let %s = "foo"; //~ error: ident
}
"""
# Destination for the generated compile-fail tests, relative to this script.
test_dir = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '../test/compile-fail')
)

# One read-only test file per keyword given on the command line.
for kw in sys.argv[1:]:
    test_file = os.path.join(test_dir, 'keyword-%s-as-identifier.rs' % kw)

    # set write permission if file exists, so it can be changed
    if os.path.exists(test_file):
        os.chmod(test_file, stat.S_IWUSR)

    with open(test_file, 'wt') as f:
        f.write(template % (datetime.datetime.now().year, kw, kw))

    # mark file read-only (PEP8: spaces around the bitwise-or operators)
    os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
| apache-2.0 | Python |
ac6c34c13ade272a7b7ae5b587fa97fdd04b161a | Remove unused pipe_client1 from test | transceptor-technology/siridb-server,transceptor-technology/siridb-server,transceptor-technology/siridb-server,transceptor-technology/siridb-server | test/test_pipe_support.py | test/test_pipe_support.py | import os
import asyncio
import functools
import random
import time
from testing import Client
from testing import default_test_setup
from testing import gen_data
from testing import gen_points
from testing import gen_series
from testing import InsertError
from testing import PoolError
from testing import QueryError
from testing import run_test
from testing import Series
from testing import Server
from testing import ServerError
from testing import SiriDB
from testing import TestBase
from testing import UserAuthError
from testing import SiriDBAsyncUnixConnection
PIPE_NAME = '/tmp/siridb_pipe_test.sock'
DATA = {
'series num_float': [
[1471254705, 1.5],
[1471254707, -3.5],
[1471254710, -7.3]],
'series num_integer': [
[1471254705, 5],
[1471254708, -3],
[1471254710, -7]],
'series_log': [
[1471254710, 'log line one'],
[1471254712, 'log line two'],
[1471254714, 'another line (three)'],
[1471254716, 'and yet one more']]
}
if os.path.exists(PIPE_NAME):
os.unlink(PIPE_NAME)
class TestPipeSupport(TestBase):
    """Exercise SiriDB inserts and queries over a Unix pipe connection."""

    title = 'Test pipe support object'

    @default_test_setup(1, pipe_name=PIPE_NAME)
    async def run(self):
        # Connect over the named pipe rather than TCP.
        pipe_client0 = SiriDBAsyncUnixConnection(PIPE_NAME)
        await pipe_client0.connect('iris', 'siri', self.db.dbname)
        # Insert the fixture data (10 points across three series).
        self.assertEqual(
            await pipe_client0.insert(DATA),
            {'success_msg': 'Successfully inserted 10 point(s).'})
        # assertAlmostEqual tolerates float rounding in the float series.
        self.assertAlmostEqual(
            await pipe_client0.query('select * from "series num_float"'),
            {'series num_float': DATA['series num_float']})
        self.assertEqual(
            await pipe_client0.query('select * from "series num_integer"'),
            {'series num_integer': DATA['series num_integer']})
        self.assertEqual(
            await pipe_client0.query('select * from "series_log"'),
            {'series_log': DATA['series_log']})
        pipe_client0.close()
        # return False
if __name__ == '__main__':
SiriDB.LOG_LEVEL = 'CRITICAL'
Server.HOLD_TERM = True
Server.MEM_CHECK = True
Server.BUILDTYPE = 'Debug'
run_test(TestPipeSupport())
| import os
import asyncio
import functools
import random
import time
from testing import Client
from testing import default_test_setup
from testing import gen_data
from testing import gen_points
from testing import gen_series
from testing import InsertError
from testing import PoolError
from testing import QueryError
from testing import run_test
from testing import Series
from testing import Server
from testing import ServerError
from testing import SiriDB
from testing import TestBase
from testing import UserAuthError
from testing import SiriDBAsyncUnixConnection
PIPE_NAME = '/tmp/siridb_pipe_test.sock'
DATA = {
'series num_float': [
[1471254705, 1.5],
[1471254707, -3.5],
[1471254710, -7.3]],
'series num_integer': [
[1471254705, 5],
[1471254708, -3],
[1471254710, -7]],
'series_log': [
[1471254710, 'log line one'],
[1471254712, 'log line two'],
[1471254714, 'another line (three)'],
[1471254716, 'and yet one more']]
}
if os.path.exists(PIPE_NAME):
os.unlink(PIPE_NAME)
class TestPipeSupport(TestBase):
    """Exercise SiriDB inserts and queries over a Unix pipe connection."""

    title = 'Test pipe support object'

    @default_test_setup(1, pipe_name=PIPE_NAME)
    async def run(self):
        pipe_client0 = SiriDBAsyncUnixConnection(PIPE_NAME)
        # FIX: removed the unused `pipe_client1` -- it was constructed but
        # never connected, queried, or closed.
        await pipe_client0.connect('iris', 'siri', self.db.dbname)
        self.assertEqual(
            await pipe_client0.insert(DATA),
            {'success_msg': 'Successfully inserted 10 point(s).'})
        # assertAlmostEqual tolerates float rounding in the float series.
        self.assertAlmostEqual(
            await pipe_client0.query('select * from "series num_float"'),
            {'series num_float': DATA['series num_float']})
        self.assertEqual(
            await pipe_client0.query('select * from "series num_integer"'),
            {'series num_integer': DATA['series num_integer']})
        self.assertEqual(
            await pipe_client0.query('select * from "series_log"'),
            {'series_log': DATA['series_log']})
        pipe_client0.close()
        # return False
SiriDB.LOG_LEVEL = 'CRITICAL'
Server.HOLD_TERM = True
Server.MEM_CHECK = True
Server.BUILDTYPE = 'Debug'
run_test(TestPipeSupport())
| mit | Python |
ac50044c16e2302e7543923d562cca5ba715e311 | Switch from () to __call__() | masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api | web/impact/impact/v1/events/base_history_event.py | web/impact/impact/v1/events/base_history_event.py | from abc import (
ABCMeta,
abstractmethod,
)
from impact.v1.helpers import (
STRING_FIELD,
)
class BaseHistoryEvent(object):
    """Abstract base for serializable history events.

    Subclasses implement calc_datetimes() (populating self.earliest and
    self.latest) and define EVENT_TYPE.
    """
    # NOTE(review): `__metaclass__` is the Python 2 spelling; on Python 3 it
    # is ignored and @abstractmethod is not enforced -- confirm interpreter.
    __metaclass__ = ABCMeta

    # Field name -> field descriptor; merged across bases by all_fields().
    CLASS_FIELDS = {
        "event_type": STRING_FIELD,
        "datetime": STRING_FIELD,
        "latest_datetime": STRING_FIELD,
        "description": STRING_FIELD,
    }

    def __init__(self):
        # Date cache, lazily filled by calc_datetimes().
        self.earliest = None
        self.latest = None

    @classmethod
    def all_fields(cls):
        """Merge CLASS_FIELDS of every direct base, then of this class."""
        result = {}
        for base_class in cls.__bases__:
            if hasattr(base_class, "all_fields"):
                result.update(base_class.all_fields())
        if hasattr(cls, "CLASS_FIELDS"):
            result.update(cls.CLASS_FIELDS)
        return result

    @classmethod
    def event_type(cls):
        return cls.EVENT_TYPE

    @abstractmethod
    def calc_datetimes(self):
        pass  # pragma: no cover

    def datetime(self):
        """Earliest datetime of the event (computed lazily)."""
        self._check_date_cache()
        return self.earliest

    def latest_datetime(self):
        """Latest datetime of the event (computed lazily)."""
        self._check_date_cache()
        return self.latest

    def _check_date_cache(self):
        # NOTE(review): the hasattr() guard is always true here because
        # calc_datetimes is declared on this class.
        if not self.earliest and hasattr(self, "calc_datetimes"):
            self.calc_datetimes()

    def description(self):
        return None  # pragma: no cover

    def serialize(self):
        """Dict of non-None field values, calling each field accessor."""
        result = {}
        for key in self.all_fields().keys():
            # NOTE(review): `.__call__()` is equivalent to a plain call `()`.
            value = getattr(self, key).__call__()
            if value is not None:
                result[key] = value
        return result
| from abc import (
ABCMeta,
abstractmethod,
)
from impact.v1.helpers import (
STRING_FIELD,
)
class BaseHistoryEvent(object):
    """Abstract base for serializable history events.

    Subclasses implement calc_datetimes() (populating self.earliest and
    self.latest) and define EVENT_TYPE.
    """
    __metaclass__ = ABCMeta

    # Field name -> field descriptor; merged across bases by all_fields().
    CLASS_FIELDS = {
        "event_type": STRING_FIELD,
        "datetime": STRING_FIELD,
        "latest_datetime": STRING_FIELD,
        "description": STRING_FIELD,
    }

    def __init__(self):
        self.earliest = None
        self.latest = None

    @classmethod
    def all_fields(cls):
        """Merge CLASS_FIELDS of every direct base, then of this class."""
        merged = {}
        for parent in cls.__bases__:
            if hasattr(parent, "all_fields"):
                merged.update(parent.all_fields())
        if hasattr(cls, "CLASS_FIELDS"):
            merged.update(cls.CLASS_FIELDS)
        return merged

    @classmethod
    def event_type(cls):
        return cls.EVENT_TYPE

    @abstractmethod
    def calc_datetimes(self):
        pass  # pragma: no cover

    def datetime(self):
        """Earliest datetime of the event (computed lazily)."""
        self._check_date_cache()
        return self.earliest

    def latest_datetime(self):
        """Latest datetime of the event (computed lazily)."""
        self._check_date_cache()
        return self.latest

    def _check_date_cache(self):
        if not self.earliest and hasattr(self, "calc_datetimes"):
            self.calc_datetimes()

    def description(self):
        return None  # pragma: no cover

    def serialize(self):
        """Dict of non-None field values, calling each field accessor."""
        data = {}
        for field_name in self.all_fields():
            field_value = getattr(self, field_name)()
            if field_value is not None:
                data[field_name] = field_value
        return data
| mit | Python |
30e84d34f7d2e75ca9052f1e702cdcdea738fb5e | add ContactRelationship to admin | shebeerki/django-crm,alviandk/django-crm,blag/django-crm,firth/django-crm,shebeerki/django-crm,susheels/django-crm,alviandk/django-crm,Tiam0202/django-crm,blag/django-crm,firth/django-crm,Vegulla/django-crm,susheels/django-crm,shebeerki/django-crm,jcortes0309/django-crm,alviandk/django-crm,sebastien247/django-crm,aviabrams/django-crm,Tiam0202/django-crm,Vegulla/django-crm,Vegulla/django-crm,sebastien247/django-crm,jcortes0309/django-crm,blag/django-crm,aviabrams/django-crm,susheels/django-crm,Tiam0202/django-crm,aviabrams/django-crm,sebastien247/django-crm,jcortes0309/django-crm,firth/django-crm | crm/admin.py | crm/admin.py | # -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# $Id: admin.py 433 2009-07-14 04:10:28Z tobias $
# ----------------------------------------------------------------------------
#
# Copyright (C) 2008 Caktus Consulting Group, LLC
#
# This file is part of minibooks.
#
# minibooks is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# minibooks is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with minibooks. If not, see <http://www.gnu.org/licenses/>.
#
from django import forms
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from crm import models as crm
class BusinessTypeAdmin(admin.ModelAdmin):
    """Default admin for BusinessType; no customizations."""
    pass
admin.site.register(crm.BusinessType, BusinessTypeAdmin)
class RelationshipType(admin.ModelAdmin):
    """Admin for RelationshipType records (name + slug listing)."""
    list_display = ('name', 'slug',)
admin.site.register(crm.RelationshipType, RelationshipType)
def send_account_activation_email(modeladmin, request, queryset):
    """Admin action: redirect to the registration-creation view with the
    selected contact pks encoded as ``ids=<pk>`` query parameters.

    Args:
        modeladmin: the ModelAdmin invoking the action (unused).
        request: current request; selected pks are re-read from the action
            checkboxes in POST.
        queryset: the selected queryset (unused; POST is authoritative).
    """
    selected = request.POST.getlist(admin.ACTION_CHECKBOX_NAME)
    # BUG FIX: QueryDict.getlist() returns strings, so the old "ids=%d" % pk
    # raised TypeError; %s passes the pk through as submitted.
    selected = ["ids=%s" % pk for pk in selected]
    url = reverse('create_registration')
    return HttpResponseRedirect("%s?%s" % (
        url,
        "&".join(selected)
    ))
class ContactAdmin(admin.ModelAdmin):
    """Admin for Contact records (people and businesses)."""
    search_fields = ('first_name', 'last_name', 'name', 'email')
    raw_id_fields = ('user', 'locations')
    list_display = ('id', 'type', 'name', 'first_name', 'last_name', 'email')
    list_filter = ('type',)
    # NOTE(review): ModelAdmin has no `order_by` option -- this line has no
    # effect; the intended attribute is probably `ordering`.
    order_by = ('sortname',)
    actions = [send_account_activation_email]
admin.site.register(crm.Contact, ContactAdmin)
class LoginRegistrationAdmin(admin.ModelAdmin):
    """Admin for account-activation registrations."""
    list_display = ('contact', 'date', 'activation_key', 'activated')
    raw_id_fields = ('contact',)
    list_filter = ('activated', 'date',)
    # NOTE(review): ModelAdmin has no `order_by` option -- this line has no
    # effect; the intended attribute is probably `ordering`.
    order_by = ('date',)
admin.site.register(crm.LoginRegistration, LoginRegistrationAdmin)
class ContactRelationshipAdmin(admin.ModelAdmin):
    """Admin for directed relationships between two contacts."""
    list_display = ('id', 'from_contact', 'to_contact', 'start_date',
                    'end_date')
    raw_id_fields = ('from_contact', 'to_contact')
    list_filter = ('start_date', 'end_date',)
    # NOTE(review): ModelAdmin has no `order_by` option -- this line has no
    # effect; the intended attribute is probably `ordering`.
    order_by = ('start_date',)
admin.site.register(crm.ContactRelationship, ContactRelationshipAdmin)
| # -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------
# $Id: admin.py 433 2009-07-14 04:10:28Z tobias $
# ----------------------------------------------------------------------------
#
# Copyright (C) 2008 Caktus Consulting Group, LLC
#
# This file is part of minibooks.
#
# minibooks is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# minibooks is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with minibooks. If not, see <http://www.gnu.org/licenses/>.
#
from django import forms
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from crm import models as crm
class BusinessTypeAdmin(admin.ModelAdmin):
pass
admin.site.register(crm.BusinessType, BusinessTypeAdmin)
class RelationshipType(admin.ModelAdmin):
list_display = ('name', 'slug',)
admin.site.register(crm.RelationshipType, RelationshipType)
def send_account_activation_email(modeladmin, request, queryset):
    """Admin action: redirect to the registration-creation view with the
    selected contact pks encoded as ``ids=<pk>`` query parameters.

    Args:
        modeladmin: the ModelAdmin invoking the action (unused).
        request: current request; selected pks are re-read from the action
            checkboxes in POST.
        queryset: the selected queryset (unused; POST is authoritative).
    """
    selected = request.POST.getlist(admin.ACTION_CHECKBOX_NAME)
    # BUG FIX: QueryDict.getlist() returns strings, so the old "ids=%d" % pk
    # raised TypeError; %s passes the pk through as submitted.
    selected = ["ids=%s" % pk for pk in selected]
    url = reverse('create_registration')
    return HttpResponseRedirect("%s?%s" % (
        url,
        "&".join(selected)
    ))
class ContactAdmin(admin.ModelAdmin):
    """Admin for Contact records (people and businesses)."""
    search_fields = ('first_name', 'last_name', 'name', 'email')
    raw_id_fields = ('user', 'locations')
    list_display = ('id', 'type', 'name', 'first_name', 'last_name', 'email')
    list_filter = ('type',)
    # NOTE(review): ModelAdmin has no `order_by` option -- likely `ordering`.
    order_by = ('sortname',)
    actions = [send_account_activation_email]
admin.site.register(crm.Contact, ContactAdmin)
class LoginRegistrationAdmin(admin.ModelAdmin):
    """Admin for account-activation registrations."""
    list_display = ('contact', 'date', 'activation_key', 'activated')
    raw_id_fields = ('contact',)
    list_filter = ('activated', 'date',)
    # NOTE(review): ModelAdmin has no `order_by` option -- likely `ordering`.
    order_by = ('date',)
admin.site.register(crm.LoginRegistration, LoginRegistrationAdmin)
| bsd-3-clause | Python |
6722e16aef43f9cfe03e7e76fc578582139721f6 | Split file collecting algorithm to FileFilter | Kuniwak/vint,RianFuro/vint,RianFuro/vint,Kuniwak/vint | vint/linting/env.py | vint/linting/env.py | import os
import os.path
from pathlib import Path
from vint.linting.file_filter import find_vim_script
def build_environment(cmdargs):
    """Assemble the lint environment dict from parsed command-line args."""
    env = {'cmdargs': cmdargs}
    env['home_path'] = _get_home_path(cmdargs)
    env['cwd'] = _get_cwd(cmdargs)
    env['file_paths'] = _get_file_paths(cmdargs)
    return env
def _get_cwd(cmdargs):
    """Current working directory as a Path (cmdargs is unused)."""
    current_dir = os.getcwd()
    return Path(current_dir)
def _get_home_path(cmdargs):
    """User's home directory as a Path (cmdargs is unused)."""
    home = os.path.expanduser('~')
    return Path(home)
def _get_file_paths(cmdargs):
    """Return the set of Vim script paths referenced on the command line.

    Returns an empty set when no files were given.
    """
    if 'files' not in cmdargs:
        # FIX: return the same type (set) as the populated branch; the old
        # code returned a list here and a set below.
        return set()
    found_file_paths = find_vim_script(map(Path, cmdargs['files']))
    return set(found_file_paths)
| import os
import os.path
import re
import logging
from pathlib import Path
VIM_SCRIPT_FILE_NAME_PATTERNS = r'(?:[\._]g?vimrc|.*\.vim$)'
def build_environment(cmdargs):
return {
'cmdargs': cmdargs,
'home_path': _get_home_path(cmdargs),
'cwd': _get_cwd(cmdargs),
'file_paths': _get_file_paths(cmdargs)
}
def _get_cwd(cmdargs):
return Path(os.getcwd())
def _get_home_path(cmdargs):
return Path(os.path.expanduser('~'))
def _get_file_paths(cmdargs):
    """Return the set of Vim script paths referenced on the command line.

    Returns an empty set when no files were given.
    """
    if 'files' not in cmdargs:
        # FIX: return the same type (set) as _collect_files(); the old code
        # returned a list here and a set below.
        return set()
    found_files = _collect_files([Path(path) for path in cmdargs['files']])
    return found_files
def _collect_files(paths):
    """Recursively gather Vim script files from the given paths.

    Directories are walked; non-Vim-script files are skipped with a debug log.
    """
    found = set()
    for candidate in paths:
        if candidate.is_dir():
            # Recurse into the directory's children.
            found |= _collect_files(tuple(candidate.iterdir()))
        elif _is_vim_script(candidate):
            found.add(candidate)
        else:
            logging.debug('ignore not Vim script file: `{file_path}`'.format(
                file_path=str(candidate)))
    return found
def _is_vim_script(path):
    """True when the file name looks like a Vim script (vimrc or *.vim)."""
    return re.search(VIM_SCRIPT_FILE_NAME_PATTERNS, path.name) is not None
| mit | Python |
075c64f41c686186ea16d0da4e63be1a4473c483 | use installed version of cros_workon | bpsinc-native/src_third_party_chromite,zhang0137/chromite,bpsinc-native/src_third_party_chromite,coreos/chromite,bpsinc-native/src_third_party_chromite,coreos/chromite,zhang0137/chromite,zhang0137/chromite,chadversary/chromiumos.chromite,chadversary/chromiumos.chromite,coreos/chromite | shell/subcmds/workon_cmd.py | shell/subcmds/workon_cmd.py | # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implementation of the 'workon' chromite command."""
import chromite.lib.cros_build_lib as cros_lib
from chromite.shell import subcmd
class WorkonCmd(subcmd.WrappedChrootCmd):
    """Run cros_workon."""

    # Subcommands that require a board name right after them.
    _SUBCOMMANDS_NEEDING_BOARD = ('start', 'stop', 'list', 'list-all',
                                  'iterate')

    def __init__(self):
        """Delegate setup to WrappedChrootCmd, which does most of the work."""
        super(WorkonCmd, self).__init__(
            ['cros_workon-%s'], ['cros_workon', '--host'],
            need_args=True
        )

    def Run(self, raw_argv, *args, **kwargs):
        """Run the command, patching up a common board-name omission.

        Args:
            raw_argv: Command line arguments, including this command's name,
                but not the chromite command name or chromite options.
            args: Remaining positional args, forwarded to the superclass.
            kwargs: Keyword args, forwarded to the superclass.
        """
        if len(raw_argv) >= 2 and raw_argv[1] in self._SUBCOMMANDS_NEEDING_BOARD:
            cros_lib.Warning('OOPS, looks like you forgot a board name. Pick one.')
            raw_argv = raw_argv[:1] + [''] + raw_argv[1:]
        super(WorkonCmd, self).Run(raw_argv, *args, **kwargs)
| # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implementation of the 'workon' chromite command."""
import chromite.lib.cros_build_lib as cros_lib
from chromite.shell import subcmd
class WorkonCmd(subcmd.WrappedChrootCmd):
    """Run cros_workon."""

    def __init__(self):
        """WorkonCmd constructor."""
        # Just call the WrappedChrootCmd superclass, which does most of the work.
        # Note that host version uses "./", since it's in src/scripts and not in the
        # path...
        super(WorkonCmd, self).__init__(
            ['cros_workon-%s'], ['./cros_workon', '--host'],
            need_args=True
        )

    def Run(self, raw_argv, *args, **kwargs):
        """Run the command.

        We do just a slight optimization to help users with a common typo.

        Args:
            raw_argv: Command line arguments, including this command's name, but not
                the chromite command name or chromite options.
            args: The rest of the positional arguments. See _DoWrappedChrootCommand.
            kwargs: The keyword arguments. See _DoWrappedChrootCommand.
        """
        # Slight optimization, just since I do this all the time...
        # These subcommands expect a board name first; insert an empty slot so
        # the user gets a board-selection prompt instead of a cryptic failure.
        if len(raw_argv) >= 2:
            if raw_argv[1] in ('start', 'stop', 'list', 'list-all', 'iterate'):
                cros_lib.Warning('OOPS, looks like you forgot a board name. Pick one.')
                raw_argv = raw_argv[:1] + [''] + raw_argv[1:]
        super(WorkonCmd, self).Run(raw_argv, *args, **kwargs)
| bsd-3-clause | Python |
e9e288d1bd4e55519eeb580ac809d70dfa1fcf4e | Improve TLS/SSL support (#9 progress) | stampery/mongoaudit | src/testers/tls.py | src/testers/tls.py | # -*- coding: utf-8 -*-
import ssl
def available(test):
    """Return True when the server build reports OpenSSL support."""
    info = test.tester.info
    return any(key in info for key in ('OpenSSLVersion', 'openssl'))
def enabled(test):
    """
    Check if TLS/SSL is enabled on the server side.

    Returns 3 ("omitted") when the build lacks OpenSSL support, True when the
    driver's write socket is an SSL socket, False otherwise.
    """
    if not available(test):
        return 3
    try:
        with test.tester.conn._socket_for_writes() as socket_info:
            socket = socket_info.sock
            return isinstance(socket, ssl.SSLSocket)
    # BUG FIX: `except KeyError, AttributeError:` is the legacy Python 2
    # binding form -- it caught only KeyError (binding it to the name
    # AttributeError) and is a SyntaxError on Python 3. The tuple form
    # catches both exception types as intended.
    except (KeyError, AttributeError):
        return False
def valid(test):
    """
    Verify if server certificate is valid
    """
    # 3 == test omitted (TLS not enabled or not available).
    if not enabled(test):
        return 3
    with test.tester.conn._socket_for_writes() as socket_info:
        cert = socket_info.sock.getpeercert()
    # getpeercert() returns an empty value when the peer certificate was not
    # validated (e.g. a self-signed certificate).
    if not cert:
        return [2,'Your server is presenting a self-signed certificate, which will not protect your connections from man-in-the-middle attacks.']
return True | # -*- coding: utf-8 -*-
def available(test):
"""
Check if MongoDB is compiled with OpenSSL support
"""
return 'OpenSSLVersion' in test.tester.info \
or 'openssl' in test.tester.info
def enabled(test):
    """
    Check if TLS/SSL is enabled on the server side
    """
    # 3 == test omitted: the server build has no OpenSSL support at all.
    if not available(test):
        return 3
    try:
        if 'OpenSSLVersion' in test.tester.info:
            # presumably the older server-info format: a bare version string,
            # empty/falsy when disabled -- TODO confirm.
            return bool(test.tester.info['OpenSSLVersion'])
        else:
            # presumably the newer format: an 'openssl' sub-document with a
            # 'running' status -- TODO confirm.
            return test.tester.info['openssl']['running'] != 'disabled'
    except KeyError:
        return False
def valid(test):
    """
    Verify if server certificate is valid
    """
    conn = test.tester.conn
    # 3 acts as a "skipped" marker when TLS itself is not enabled.
    if not enabled(test):
        return 3
    with conn._socket_for_writes() as socket_info:
        # Empty/None when the peer certificate was not validated.
        cert = socket_info.sock.getpeercert()
    if not cert:
        return [2,'Your server is presenting a self-signed certificate, which will not protect your connections from man-in-the-middle attacks.']
    # NOTE: the trailing "| mit | Python |" below is dataset row-delimiter
    # residue fused onto this line; the code itself ends at `return True`.
    return True | mit | Python |
10b4a5384b130ff85ac73587830a414a1187dab0 | Fix field name | Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website | runners/models.py | runners/models.py | from django.db import models
from django.utils.translation import ugettext as _
from platforms.models import Platform
class Runner(models.Model):
    """ Model definition for the runners """
    # Display name, URL slug, and optional website/icon.
    name = models.CharField(_("Name"), max_length=127)
    slug = models.SlugField(unique=True)
    website = models.CharField(_("Website"), max_length=127, blank=True)
    icon = models.ImageField(upload_to='runners/icons', blank=True)
    # Platforms this runner supports (reverse accessor: platform.runners).
    platforms = models.ManyToManyField(Platform, related_name='runners')

    # pylint: disable=W0232, R0903
    class Meta(object):
        ordering = ['name']

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Currently a plain pass-through; kept as an override hook.
        return super(Runner, self).save(*args, **kwargs)

    @staticmethod
    def autocomplete_search_fields():
        # NOTE(review): follows the admin autocomplete convention
        # (e.g. django-grappelli) -- confirm the actual consumer.
        return ('name__icontains', )
class RunnerVersion(models.Model):
    """A build of a runner for one architecture, downloadable at `url`."""

    class Meta(object):
        # Both entries name model fields declared below.
        ordering = ('version', 'architecture')

    # (stored value, human-readable label) pairs.
    ARCH_CHOICES = (
        ('i386', '32 bit'),
        ('x86_64', '64 bit'),
        ('arm', 'ARM'),
    )

    def __unicode__(self):
        return u"{} v{} ({})".format(self.runner.name,
                                     self.version,
                                     self.architecture)

    runner = models.ForeignKey(Runner, related_name='versions')
    version = models.CharField(max_length=32)
    architecture = models.CharField(max_length=8,
                                    choices=ARCH_CHOICES,
                                    default='x86_64')
    url = models.URLField(blank=True)
| from django.db import models
from django.utils.translation import ugettext as _
from platforms.models import Platform
class Runner(models.Model):
    """ Model definition for the runners """
    name = models.CharField(_("Name"), max_length=127)
    slug = models.SlugField(unique=True)
    website = models.CharField(_("Website"), max_length=127, blank=True)
    icon = models.ImageField(upload_to='runners/icons', blank=True)
    # Reverse accessor on Platform is `platform.runners`.
    platforms = models.ManyToManyField(Platform, related_name='runners')

    # pylint: disable=W0232, R0903
    class Meta(object):
        ordering = ['name']

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Pass-through override kept as a customization hook.
        return super(Runner, self).save(*args, **kwargs)

    @staticmethod
    def autocomplete_search_fields():
        return ('name__icontains', )
class RunnerVersion(models.Model):
    """A build of a runner for one architecture, downloadable at `url`."""

    class Meta(object):
        # Fix: Meta.ordering must reference model field names; this model
        # declares `architecture`, not `arch` (Django's system checks /
        # queries fail on unknown ordering fields).
        ordering = ('version', 'architecture')

    # (stored value, human-readable label) pairs.
    ARCH_CHOICES = (
        ('i386', '32 bit'),
        ('x86_64', '64 bit'),
        ('arm', 'ARM'),
    )

    def __unicode__(self):
        return u"{} v{} ({})".format(self.runner.name,
                                     self.version,
                                     self.architecture)

    runner = models.ForeignKey(Runner, related_name='versions')
    version = models.CharField(max_length=32)
    architecture = models.CharField(max_length=8,
                                    choices=ARCH_CHOICES,
                                    default='x86_64')
    url = models.URLField(blank=True)
| agpl-3.0 | Python |
8e0a8adf81ef7ff59c3c964eaabce25b6cd43cab | Delete a few names we import that are not intended for the user | mwcraig/vpython-jupyter,mwcraig/vpython-jupyter,mwcraig/vpython-jupyter,BruceSherwood/vpython-jupyter,BruceSherwood/vpython-jupyter,BruceSherwood/vpython-jupyter,mwcraig/vpython-jupyter,BruceSherwood/vpython-jupyter | vpython/__init__.py | vpython/__init__.py | import os
from ._version import get_versions
from .gs_version import glowscript_version
__version__ = get_versions()['version']
__gs_version__ = glowscript_version()
# Drop the helper callables so they do not leak into the public namespace.
del get_versions
del glowscript_version

# Keep the remaining imports later to ensure that __version__ and
# __gs_version__ exist before importing vpython, which itself imports
# both of those.
def __checkisnotebook():  # returns True if running in Jupyter notebook
    """Detect whether this process runs inside a Jupyter notebook."""
    try:
        if any('SPYDER' in name for name in os.environ):
            # Spyder embeds an IPython kernel but is not a notebook.
            return False
        # get_ipython() exists only under IPython-based environments.
        shell = get_ipython().__class__.__name__
        # ZMQInteractiveShell backs the notebook/qtconsole; any other
        # shell type (terminal IPython, ...) counts as "not a notebook".
        return shell == 'ZMQInteractiveShell'
    except NameError:
        return False  # plain Python interpreter: get_ipython is undefined
_isnotebook = __checkisnotebook()

import sys
import platform
__p = platform.python_version()
# Delete platform now that we are done with it
del platform

__ispython3 = (__p[0] == '3')
# Fix: compare the parsed version tuple instead of single characters of
# the version string -- "3.10.x"[2] is '1', so the old `__p[2] < '5'`
# comparison would wrongly flag Python >= 3.10 as requiring the notebook.
__require_notebook = sys.version_info < (3, 5)  # Python 2.7 or 3.4 require Jupyter notebook

if __require_notebook and (not _isnotebook):
    s = "The non-notebook version of vpython requires Python 3.5 or later."
    s += "\nvpython does work on Python 2.7 and 3.4 in the Jupyter notebook environment."
    raise Exception(s)
from .vpython import canvas
# Need to initialize canvas before user does anything and before
# importing GSprint
scene = canvas()  # the default/global canvas every object attaches to

# NOTE(review): star imports deliberately flood the module namespace,
# presumably to mirror the GlowScript-style global API -- confirm.
from .vpython import *
from .shapespaths import *
from ._vector_import_helper import *
from .rate_control import rate
from .gsprint import GSprint

# For some reason gsprint and vpython are showing up in the
# namespace, so delete them
del gsprint, vpython

# import for backwards compatibility
from math import *
from numpy import arange
| import os
from ._version import get_versions
from .gs_version import glowscript_version
__version__ = get_versions()['version']
__gs_version__ = glowscript_version()
# Remove the helper callables from the module namespace after use.
del get_versions
del glowscript_version

# Keep the remaining imports later to ensure that __version__ and
# __gs_version__ exist before importing vpython, which itself imports
# both of those.
def __checkisnotebook():  # returns True if running in Jupyter notebook
    """Return True only when executing under a Jupyter/ZMQ kernel."""
    if any('SPYDER' in name for name in os.environ):
        return False  # Spyder detected so return False
    try:
        kernel = get_ipython().__class__.__name__
    except NameError:
        return False  # Probably standard Python interpreter
    # Only the notebook/qtconsole shell qualifies; terminal IPython and
    # any other shell type do not.
    return kernel == 'ZMQInteractiveShell'
_isnotebook = __checkisnotebook()

import sys
import platform
__p = platform.python_version()

__ispython3 = (__p[0] == '3')
# Fix: compare the parsed version tuple rather than single characters of
# the version string -- "3.10.x"[2] is '1', so `__p[2] < '5'` would
# wrongly flag Python >= 3.10 as requiring the notebook.
__require_notebook = sys.version_info < (3, 5)  # Python 2.7 or 3.4 require Jupyter notebook

if __require_notebook and (not _isnotebook):
    s = "The non-notebook version of vpython requires Python 3.5 or later."
    s += "\nvpython does work on Python 2.7 and 3.4 in the Jupyter notebook environment."
    raise Exception(s)
from .vpython import canvas
# Need to initialize canvas before user does anything and before
# importing GSprint
scene = canvas()  # the default/global canvas every object attaches to

# NOTE(review): star imports intentionally expose the GlowScript-style
# global API -- confirm.
from .vpython import *
from .shapespaths import *
from ._vector_import_helper import *
from .rate_control import rate
from .gsprint import GSprint

# import for backwards compatibility
from math import *
from numpy import arange
| mit | Python |
416a42bc01fc5c5c218a43bf5af11bad922c78d2 | update collection selection for record_db | guoyr/geo-caching | coordinator_factory.py | coordinator_factory.py | from twisted.protocols.amp import AMP
from twisted.internet.protocol import Factory
from coordinator_commands import *
from pymongo import MongoClient
class CoordinatorProtocol(AMP):
    """AMP responders for the coordinator service: data fetch, master
    lookup and user access-record bookkeeping."""

    @FetchData.responder
    def fetchData(self, msg):
        #TODO: msg contains the
        pass

    @GetMaster.responder
    def getMaster(self, user_id):
        #TODO: placeholder -- master selection is hard-coded for now.
        print("received request for getMaster")
        master_id = "WEST"
        return {MASTER_SERVER_ID: master_id}

    @AddAccessRecord.responder
    def addRecord(self, user_id, preferred_store, is_save):
        """Create or update the access record for `user_id` in the
        "records" collection, then acknowledge success."""
        print("received request for addRecord")
        # Fix: a stray ']' made this assignment a syntax error.
        record_db = connect_user_record_db()
        user_record = record_db["records"].find_one({"uid":user_id})
        if not user_record:
            user_record = {
                "uid":user_id,
                "preferred_store":preferred_store,
                "is_save":is_save
            }
            record_db["records"].save(user_record)
        else:
            user_record["preferred_store"] = preferred_store
            user_record["is_save"] = is_save
            record_db["records"].save(user_record)
        return {"success": True}
def connect_user_record_db():
    """Connect to MongoDB and return the `record_db` database handle."""
    # Fix: the previous body assigned to `db` but returned the undefined
    # name `record_db` (NameError at call time).
    return MongoClient().record_db
class CoordinatorFactory(Factory):
    """Twisted factory: one CoordinatorProtocol per incoming connection."""
    # NOTE: the trailing "| from twisted..." fragment on the next line is
    # dataset row-delimiter residue, not part of this class.
    protocol=CoordinatorProtocol | from twisted.protocols.amp import AMP
from twisted.internet.protocol import Factory
from coordinator_commands import *
from pymongo import MongoClient
class CoordinatorProtocol(AMP):
    """AMP responders for the coordinator service (legacy revision)."""

    @FetchData.responder
    def fetchData(self, msg):
        #TODO: msg contains the
        pass

    @GetMaster.responder
    def getMaster(self, user_id):
        #TODO
        print "received request for getMaster"
        master_id = "WEST"
        return {MASTER_SERVER_ID: master_id}

    @AddAccessRecord.responder
    def addRecord(self, user_id, preferred_store, is_save):
        print("received request for addRecord")
        record_db = connect_user_record_db()
        # NOTE(review): `record_db` is a pymongo Database here, but
        # find_one()/save() live on Collection objects -- presumably this
        # should target a collection such as record_db["records"]; confirm.
        user_record = record_db.find_one({"uid":user_id})
        if not user_record:
            user_record = {
                "uid":user_id,
                "preferred_store":preferred_store,
                "is_save":is_save
            }
            record_db.save(user_record)
        else:
            user_record["preferred_store"] = preferred_store
            user_record["is_save"] = is_save
            record_db.save(user_record)
        return {"success": True}
def connect_user_record_db():
    """Connect to MongoDB and return the `record_db` database handle."""
    # Fix: the previous body assigned to `db` but returned the undefined
    # name `record_db` (NameError at call time).
    return MongoClient().record_db
class CoordinatorFactory(Factory):
    """Twisted factory producing a CoordinatorProtocol per connection."""
    # NOTE: the trailing "| mit | Python |" fragment on the next line is
    # dataset row-delimiter residue, not part of this class.
    protocol=CoordinatorProtocol | mit | Python |
8e423ce743dc5f1cd4dcd6e4ff45e7e29504b02b | Update lev_group.py | jcchin/Hyperloop_v2,jcchin/MagnePlane,jcchin/MagnePlane,andipeng/MagnePlane,jcchin/Hyperloop_v2,kennethdecker/MagnePlane,andipeng/MagnePlane,kennethdecker/MagnePlane | src/hyperloop/Python/lev_group.py | src/hyperloop/Python/lev_group.py | from openmdao.api import Group, Problem, IndepVarComp
from src.hyperloop.Python.breakpointlev import Drag, Mass
class LevGroup(Group):
    """Group containing the breakpointlev.py classes Drag and Mass"""

    def __init__(self):
        super(LevGroup, self).__init__()
        # Register the two levitation sub-components with the group.
        for comp_name, comp_cls in (('Drag', Drag), ('Mass', Mass)):
            self.add(comp_name, comp_cls())
if __name__ == "__main__":
    top = Problem()
    root = top.root = Group()

    # Define Parameters
    params = (
        ('mpod', .375, {'units': 'kg'}),
        ('lpod', 25.0, {'units': 'm'}),
        ('Pc', 2.0, {'units': 'm'}),
        # NOTE(review): 'm/2' looks like a typo for 'm/s' -- confirm.
        ('vb', 23.0, {'units': 'm/2'}),
        ('w', 2.0, {'units': 'm'})
    )

    root.add('input_vars', IndepVarComp(params))
    root.add('lev', LevGroup())

    # Wire the shared independent variables into both sub-components.
    root.connect('input_vars.mpod', 'lev.Drag.mpod')
    root.connect('input_vars.lpod', 'lev.Drag.lpod')
    root.connect('input_vars.lpod', 'lev.Mass.lpod')
    root.connect('input_vars.w', 'lev.Drag.w')
    root.connect('input_vars.w', 'lev.Mass.w')

    top.setup()
    top.run()

    print('lpod Drag is %f' % top['lev.Drag.lpod'])
    print('lpod Mass is %f' % top['lev.Mass.lpod'])
    print('\n')
    print('w from Drag is %f' % top['lev.Drag.w'])
    print('w from Mass is %f' % top['lev.Mass.w'])
| from openmdao.api import Group, Problem, IndepVarComp
from breakpointlev import Drag, Mass
class LevGroup(Group):
    """Group containing the breakpointlev.py classes Drag and Mass"""

    def __init__(self):
        super(LevGroup, self).__init__()
        # Creates components of the group.
        # Sub-components are addressed later as 'lev.Drag' / 'lev.Mass'.
        self.add('Drag', Drag())
        self.add('Mass', Mass())
if __name__ == "__main__":
    top = Problem()
    root = top.root = Group()

    # Define Parameters
    params = (
        ('mpod', .375, {'units': 'kg'}),
        ('lpod', 25.0, {'units': 'm'}),
        ('Pc', 2.0, {'units': 'm'}),
        # NOTE(review): 'm/2' looks like a typo for 'm/s' -- confirm.
        ('vb', 23.0, {'units': 'm/2'}),
        ('w', 2.0, {'units': 'm'})
    )

    root.add('input_vars', IndepVarComp(params))
    root.add('lev', LevGroup())

    # Wire the shared independent variables into both sub-components.
    root.connect('input_vars.mpod', 'lev.Drag.mpod')
    root.connect('input_vars.lpod', 'lev.Drag.lpod')
    root.connect('input_vars.lpod', 'lev.Mass.lpod')
    root.connect('input_vars.w', 'lev.Drag.w')
    root.connect('input_vars.w', 'lev.Mass.w')

    top.setup()
    top.run()

    print('lpod Drag is %f' % top['lev.Drag.lpod'])
    print('lpod Mass is %f' % top['lev.Mass.lpod'])
    print('\n')
    print('w from Drag is %f' % top['lev.Drag.w'])
    print('w from Mass is %f' % top['lev.Mass.w'])
| apache-2.0 | Python |
0209f3a8a3cff785628d6e32cefb676eb0a0b61c | Add comment | techbureau/zaifbot,techbureau/zaifbot | zaifbot/exchange/action.py | zaifbot/exchange/action.py | from abc import ABCMeta, abstractclassmethod
def Action(action):
    """Normalize `action` to a _TradeAction instance.

    Accepts an existing _TradeAction (returned unchanged) or the string
    name of one ('bid' / 'ask').  Raises ValueError for anything else.
    """
    # Hoist the instance check out of the subclass loop: the original ran
    # it once per subclass and would never run it with zero subclasses.
    if isinstance(action, _TradeAction):
        return action
    if isinstance(action, str):
        for cls in _TradeAction.__subclasses__():
            if cls.is_my_action(action):
                return cls(action)
    raise ValueError('illegal argument')
class _TradeAction(metaclass=ABCMeta):
    """Abstract base for trade actions; instances compare equal both to
    other actions and to their plain-string names."""

    def __init__(self, action):  # necessary
        # Cache the subclass `name` property for __str__/__eq__.
        self._name = self.name

    def __str__(self):
        return self._name

    def __eq__(self, other):
        # NOTE(review): defining __eq__ without __hash__ makes instances
        # unhashable on Python 3 -- confirm that is intended.
        if isinstance(other, _TradeAction):
            return self._name == other._name
        if isinstance(other, str):
            return self._name == other
        return False

    @property
    @abstractclassmethod
    def name(self):
        # Abstract marker only; concrete subclasses redefine as a property.
        raise NotImplementedError

    @abstractclassmethod
    def is_my_action(self):
        raise NotImplementedError
class _Buy(_TradeAction):
    """Buy side; the exchange API spells it 'bid'."""

    @staticmethod
    def is_my_action(action):
        return action == 'bid'

    @staticmethod
    def opposite_action():
        return Action('ask')

    @property
    def name(self):
        return 'bid'
class _Sell(_TradeAction):
    """Sell side; the exchange API spells it 'ask'."""

    @staticmethod
    def is_my_action(action):
        return action == 'ask'

    @staticmethod
    def opposite_action():
        return Action('bid')

    @property
    def name(self):
        return 'ask'
# Ready-made module-level instances for callers.
Sell = Action('ask')
Buy = Action('bid')
| from abc import ABCMeta, abstractclassmethod
def Action(action):
    # Normalize to a _TradeAction: accept an instance unchanged, or map a
    # string name ('bid'/'ask') to the matching subclass.
    for cls in _TradeAction.__subclasses__():
        if isinstance(action, str):
            if cls.is_my_action(action):
                return cls(action)
            continue
        # NOTE(review): this instance check runs once per subclass and
        # could be hoisted above the loop.
        if isinstance(action, _TradeAction):
            return action
    raise ValueError('illegal argument')
class _TradeAction(metaclass=ABCMeta):
    """Abstract base for trade actions (legacy revision); instances
    compare equal to other actions and to their string names."""

    def __init__(self, action):
        # Cache the subclass `name` property for __str__/__eq__.
        self._name = self.name

    def __str__(self):
        return self._name

    def __eq__(self, other):
        if isinstance(other, _TradeAction):
            return self._name == other._name
        if isinstance(other, str):
            return self._name == other
        return False

    @property
    @abstractclassmethod
    def name(self):
        # Abstract marker; subclasses redefine as a concrete property.
        raise NotImplementedError

    @abstractclassmethod
    def is_my_action(self):
        raise NotImplementedError
class _Buy(_TradeAction):
    """Buy side; the exchange API spells it 'bid'."""

    @staticmethod
    def is_my_action(action):
        return action == 'bid'

    @staticmethod
    def opposite_action():
        return Action('ask')

    @property
    def name(self):
        return 'bid'
class _Sell(_TradeAction):
    """Sell side; the exchange API spells it 'ask'."""

    @staticmethod
    def is_my_action(action):
        return action == 'ask'

    @staticmethod
    def opposite_action():
        return Action('bid')

    @property
    def name(self):
        return 'ask'
# Ready-made module-level instances for callers.
Sell = Action('ask')
Buy = Action('bid')
| mit | Python |
db0aad8333266905bb730f928e7f1c6dfc77f7e4 | Fix some missing imports | tchx84/debian-pkg-sugar-toolkit,Daksh/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,puneetgkaur/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,sugarlabs/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3 | examples/terminal/terminal.py | examples/terminal/terminal.py | import os
import gtk
import vte
import pango
from sugar.activity.Activity import Activity
_TERMINAL_ACTIVITY_TYPE = "_terminal._tcp"
class Terminal(gtk.HBox):
    """A VTE terminal widget with scrollbar, auto-respawning its shell."""

    def __init__(self):
        gtk.HBox.__init__(self, False, 4)

        self._vte = vte.Terminal()
        self._configure_vte()
        self._vte.set_size(30, 5)
        self._vte.set_size_request(200, 50)
        self._vte.show()
        self.pack_start(self._vte)

        self._scrollbar = gtk.VScrollbar(self._vte.get_adjustment())
        self._scrollbar.show()
        self.pack_start(self._scrollbar, False, False, 0)

        # Restart the shell whenever the child process exits.
        self._vte.connect("child-exited", lambda term: term.fork_command())
        self._vte.fork_command()

    def _configure_vte(self):
        self._vte.set_font(pango.FontDescription('Monospace 10'))
        self._vte.set_colors(gtk.gdk.color_parse ('#AAAAAA'),
                             gtk.gdk.color_parse ('#000000'),
                             [])
        self._vte.set_cursor_blinks(False)
        self._vte.set_audible_bell(False)
        self._vte.set_scrollback_lines(100)
        self._vte.set_allow_bold(True)
        self._vte.set_scroll_on_keystroke(False)
        self._vte.set_scroll_on_output(False)
        self._vte.set_emulation('xterm')
        self._vte.set_visible_bell(False)

    def on_gconf_notification(self, client, cnxn_id, entry, what):
        # Fix: reconfigure_vte() is not defined on this class (it would
        # raise AttributeError); re-apply settings via _configure_vte().
        self._configure_vte()

    def on_vte_button_press(self, term, event):
        if event.button == 3:
            self.do_popup(event)
            return True

    def on_vte_popup_menu(self, term):
        pass
class TerminalActivity(Activity):
    """Sugar activity hosting a Terminal widget inside a GTK plug."""

    def __init__(self):
        Activity.__init__(self, _TERMINAL_ACTIVITY_TYPE)

    def on_connected_to_shell(self):
        # Build the UI lazily, only once the shell connection exists.
        self.set_tab_text("Terminal")
        plug = self.gtk_plug()
        terminal = Terminal()
        plug.add(terminal)
        terminal.show()
        plug.show()
# Script entry point: register with the Sugar shell and run the GTK loop.
activity = TerminalActivity()
activity.connect_to_shell()
try:
    gtk.main()
except KeyboardInterrupt:
    pass  # allow a clean Ctrl-C exit
| import os
import gtk
import vte
from sugar.activity.Activity import Activity
_TERMINAL_ACTIVITY_TYPE = "_terminal._tcp"
class Terminal(gtk.HBox):
    """A VTE terminal widget with scrollbar (legacy revision)."""

    def __init__(self):
        gtk.HBox.__init__(self, False, 4)
        self._vte = vte.Terminal()
        self._configure_vte()
        self._vte.set_size(30, 5)
        self._vte.set_size_request(200, 50)
        self._vte.show()
        self.pack_start(self._vte)
        self._scrollbar = gtk.VScrollbar(self._vte.get_adjustment())
        self._scrollbar.show()
        self.pack_start(self._scrollbar, False, False, 0)
        # Restart the shell whenever the child process exits.
        self._vte.connect("child-exited", lambda term: term.fork_command())
        self._vte.fork_command()

    def _configure_vte(self):
        # NOTE(review): `pango` is referenced here but never imported in
        # this revision of the module.
        self._vte.set_font(pango.FontDescription('Monospace 10'))
        self._vte.set_colors(gtk.gdk.color_parse ('#AAAAAA'),
                             gtk.gdk.color_parse ('#000000'),
                             [])
        self._vte.set_cursor_blinks(False)
        self._vte.set_audible_bell(False)
        self._vte.set_scrollback_lines(100)
        self._vte.set_allow_bold(True)
        self._vte.set_scroll_on_keystroke(False)
        self._vte.set_scroll_on_output(False)
        self._vte.set_emulation('xterm')
        self._vte.set_visible_bell(False)

    def on_gconf_notification(self, client, cnxn_id, entry, what):
        # NOTE(review): reconfigure_vte is not defined on this class;
        # presumably should be self._configure_vte() -- confirm.
        self.reconfigure_vte()

    def on_vte_button_press(self, term, event):
        if event.button == 3:
            self.do_popup(event)
            return True

    def on_vte_popup_menu(self, term):
        pass
class TerminalActivity(Activity):
    """Sugar activity hosting a Terminal widget inside a GTK plug."""

    def __init__(self):
        Activity.__init__(self, _TERMINAL_ACTIVITY_TYPE)

    def on_connected_to_shell(self):
        # Build the UI only once the shell connection exists.
        self.set_tab_text("Terminal")
        plug = self.gtk_plug()
        terminal = Terminal()
        plug.add(terminal)
        terminal.show()
        plug.show()
# Script entry point; note there is no KeyboardInterrupt guard around
# gtk.main() in this revision.
activity = TerminalActivity()
activity.connect_to_shell()
gtk.main()
| lgpl-2.1 | Python |
9e0d37d581b22b46b60bb563beb8fe3c44063a37 | fix download view for locked items: allow for admin, 403 for everybody else | makefu/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server,bepasty/bepasty-server,bepasty/bepasty-server,bepasty/bepasty-server,makefu/bepasty-server | bepasty/views/download.py | bepasty/views/download.py | # Copyright: 2013 Bastian Blank <bastian@waldi.eu.org>
# License: BSD 2-clause, see LICENSE for details.
import errno
import time
from flask import Response, current_app, render_template, stream_with_context, abort
from flask.views import MethodView
from werkzeug.exceptions import NotFound
from ..utils.name import ItemName
from ..utils.permissions import *
from . import blueprint
class DownloadView(MethodView):
    """Serve a stored item's bytes, enforcing completeness and
    lock/ADMIN permission checks before streaming."""
    content_disposition = 'attachment'  # to trigger download

    def get(self, name):
        try:
            item = current_app.storage.openwrite(name)
        except OSError as e:
            # Missing item maps to HTTP 404; other OS errors propagate.
            if e.errno == errno.ENOENT:
                raise NotFound()
            raise

        if not item.meta.get('complete'):
            error = 'Upload incomplete. Try again later.'
        else:
            error = None
        if error:
            try:
                return render_template('display_error.html', name=name, item=item, error=error), 409
            finally:
                item.close()

        # Locked items are only served to admins.
        # NOTE(review): this abort(403) path leaves `item` open -- confirm
        # whether storage handles need an explicit close here.
        if not item.meta.get('unlocked') and not may(ADMIN):
            abort(403)

        def stream():
            # `item` is deliberately left open above: this generator takes
            # ownership and the `with` closes it after the response body
            # has been consumed.
            with item as _item:
                # Stream content from storage
                offset = 0
                size = _item.data.size
                while offset < size:
                    buf = _item.data.read(16 * 1024, offset)
                    offset += len(buf)
                    yield buf
                # Recorded only after the full body has been read.
                item.meta['timestamp-download'] = int(time.time())

        ret = Response(stream_with_context(stream()))
        ret.headers['Content-Disposition'] = '{}; filename="{}"'.format(
            self.content_disposition, item.meta['filename'])
        ret.headers['Content-Length'] = item.meta['size']
        ret.headers['Content-Type'] = item.meta['type']  # 'application/octet-stream'
        return ret
class InlineView(DownloadView):
    """Identical to DownloadView, but asks the browser to render the
    content instead of forcing a save dialog."""
    content_disposition = 'inline'  # to trigger viewing in browser, for some types


# URL registrations: +download forces a save dialog, +inline renders.
blueprint.add_url_rule('/<itemname:name>/+download', view_func=DownloadView.as_view('download'))
blueprint.add_url_rule('/<itemname:name>/+inline', view_func=InlineView.as_view('inline'))
| # Copyright: 2013 Bastian Blank <bastian@waldi.eu.org>
# License: BSD 2-clause, see LICENSE for details.
import errno
import time
from flask import Response, current_app, render_template, stream_with_context
from flask.views import MethodView
from werkzeug.exceptions import NotFound
from ..utils.name import ItemName
from . import blueprint
class DownloadView(MethodView):
    """Serve a stored item's bytes (legacy revision).

    Locked items are refused with HTTP 409 for everyone -- there is no
    admin bypass in this revision.
    """
    content_disposition = 'attachment'  # to trigger download

    def get(self, name):
        try:
            item = current_app.storage.openwrite(name)
        except OSError as e:
            # Missing item maps to HTTP 404; other OS errors propagate.
            if e.errno == errno.ENOENT:
                raise NotFound()
            raise

        if not item.meta.get('unlocked'):
            error = 'File Locked.'
        elif not item.meta.get('complete'):
            error = 'Upload incomplete. Try again later.'
        else:
            error = None
        if error:
            try:
                return render_template('display_error.html', name=name, item=item, error=error), 409
            finally:
                item.close()

        def stream():
            # The generator takes ownership of `item`; the `with` closes
            # it once the response body has been consumed.
            with item as _item:
                # Stream content from storage
                offset = 0
                size = _item.data.size
                while offset < size:
                    buf = _item.data.read(16 * 1024, offset)
                    offset += len(buf)
                    yield buf
                item.meta['timestamp-download'] = int(time.time())

        ret = Response(stream_with_context(stream()))
        ret.headers['Content-Disposition'] = '{}; filename="{}"'.format(
            self.content_disposition, item.meta['filename'])
        ret.headers['Content-Length'] = item.meta['size']
        ret.headers['Content-Type'] = item.meta['type']  # 'application/octet-stream'
        return ret
class InlineView(DownloadView):
    """Identical to DownloadView, but rendered inline by the browser."""
    content_disposition = 'inline'  # to trigger viewing in browser, for some types


# URL registrations: +download forces a save dialog, +inline renders.
blueprint.add_url_rule('/<itemname:name>/+download', view_func=DownloadView.as_view('download'))
blueprint.add_url_rule('/<itemname:name>/+inline', view_func=InlineView.as_view('inline'))
| bsd-2-clause | Python |
2f6aae3c0ec7280b0992ac80da25e08525852d35 | change module getattr import error to attribute error | machow/siuba | siuba/sql/verbs/__init__.py | siuba/sql/verbs/__init__.py | from . import (
arrange,
compute,
conditional,
count,
distinct,
explain,
filter,
group_by,
head,
join,
mutate,
select,
summarize,
)
def __getattr__(name):
    """Module attribute hook (PEP 562): keep `LazyTbl` importable from
    this module with a deprecation warning; anything else raises."""
    if name != "LazyTbl":
        raise AttributeError(f"module 'siuba.sql.verbs' has no attribute '{name}'")

    import warnings
    from ..backend import LazyTbl

    warnings.warn(
        "Importing LazyTbl from siuba.sql.verbs is deprecated. Please use "
        "`from siuba.sql import LazyTbl`",
        DeprecationWarning
    )
    return LazyTbl
| from . import (
arrange,
compute,
conditional,
count,
distinct,
explain,
filter,
group_by,
head,
join,
mutate,
select,
summarize,
)
def __getattr__(name):
    """Module attribute hook (PEP 562): expose `LazyTbl` with a
    deprecation warning; fail for any other name.

    NOTE(review): the PEP 562 convention is to raise AttributeError
    here; this revision raises ImportError instead.
    """
    if name != "LazyTbl":
        raise ImportError(f"cannot import name '{name}' from 'siuba.sql.verbs'")

    import warnings
    from ..backend import LazyTbl

    warnings.warn(
        "Importing LazyTbl from siuba.sql.verbs is deprecated. Please use "
        "`from siuba.sql import LazyTbl`",
        DeprecationWarning
    )
    return LazyTbl
| mit | Python |
c231cc4a54e792d1e0c5f8d4f9540670004505a5 | Add prediction module | johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification | predict.py | predict.py | from bird import utils
from bird.models.cuberun import CubeRun
import bird.loader as loader
import bird.signal_processing as sp
import scipy
import numpy as np
def predict(model, segment_names, directory):
    """Run the model on a list of audio segment files and map the
    predicted class indices back to species names."""
    class_index = loader.build_class_index(directory)

    # One (256, 512) spectrogram per audio segment.
    spectrograms = []
    for path in segment_names:
        fs, wave = utils.read_wave_file(path)
        spec = sp.wave_to_sample_spectrogram(wave, fs)
        spectrograms.append(scipy.misc.imresize(spec, (256, 512)))

    # Stack into a batch and append the channel axis the network expects.
    batch = np.array(spectrograms)
    batch = batch.reshape(batch.shape + (1,))

    probabilities = model.predict(batch, batch_size=16, verbose=1)
    winners = [int(np.argmax(p)) for p in probabilities]
    return [class_index[w] for w in winners]
def binary_to_id(Y):
    """Return the indices of all elements of `Y` equal to 1.

    E.g. [0, 1, 1, 0] -> [1, 2].
    """
    # enumerate() replaces the original's manual index counter.
    return [i for i, y in enumerate(Y) if y == 1]
| from models.cuberun import CubeRun
import numpy as np
import utils
def get_model():
    """Construct the CubeRun network and load its pretrained weights."""
    model = CubeRun(nb_classes=19, input_shape=(257, 624, 1))
    model.load_weights("../weights/2016_11_16_06:31:03_cuberun.h5")
    return model
def predict(model, filename):
    """Predict the set of species present in one mlsp2013 test recording."""
    wav_path = "../datasets/mlsp2013/test/" + filename + ".wav"
    fs, samples = utils.read_wave_file(wav_path)
    _, _, spectrogram = utils.wave_to_spectrogram(samples, fs)

    batch = np.array([spectrogram]).reshape(1, 257, 624, 1)
    predictions = np.round(model.predict(batch, batch_size=32, verbose=1))
    return [binary_to_id(row) for row in predictions]
def binary_to_id(Y):
    """Return the positions of the 1-entries in a binary vector `Y`."""
    # enumerate() replaces the original's hand-maintained counter `i`.
    return [index for index, value in enumerate(Y) if value == 1]
| mit | Python |
7429372801d6b4cc3a0a3235f41c0b97d17ce1aa | Test `max_elements_per_line` setting for primitive arrays | Squareys/PyDDL | tests/DdlTextWrterTest.py | tests/DdlTextWrterTest.py | import os
import unittest
from pyddl import *
from pyddl.enum import *
__author__ = "Jonathan Hale"
class DdlTextWriterTest(unittest.TestCase):
    """Exercises DdlTextWriter against a scratch file 'test.oddl'."""

    def tearDown(self):
        # Best-effort cleanup of the file the tests write.
        try:
            os.remove("test.oddl")
        except FileNotFoundError:
            pass  # test_empty failed?

    def test_empty(self):
        # create document
        document = DdlDocument()
        # write document
        DdlTextWriter(document).write("test.oddl")
        # check if file was created
        # NOTE(review): os.path.isfile() returns False instead of raising,
        # so this except branch is unreachable; assertTrue alone suffices.
        try:
            self.assertTrue(os.path.isfile("test.oddl"))
        except FileNotFoundError:
            self.fail("DdlTextWriter did not create the specified file.")

    def test_full(self):
        # create document
        document = DdlDocument()
        document.add_structure(B"Human", None,
                               [DdlStructure(B"Name", None, [DdlPrimitive(PrimitiveType.string, ["Peter"])]),
                                DdlStructure(B"Age", None, [DdlPrimitive(PrimitiveType.unsigned_int16, [21])])]
                               )
        # Limit line wrapping for this primitive array to 10 elements.
        prim = DdlPrimitive(PrimitiveType.int32, range(1, 100))
        DdlTextWriter.set_max_elements_per_line(prim, 10)
        document.add_structure(B"SomethingElse", None,
                               [DdlStructure(B"AnArray", None, [prim])]
                               )
        document.add_structure(B"MoreElse", None,
                               [DdlStructure(B"AnVectorArray", None,
                                             [DdlPrimitive(PrimitiveType.int32,
                                                           [(1, 2), (12, 42), (13, 31)], None, 2)])]
                               )
        # write document
        DdlTextWriter(document).write("test.oddl")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| import os
import unittest
from pyddl import *
from pyddl.enum import *
__author__ = "Jonathan Hale"
class DdlTextWriterTest(unittest.TestCase):
    """Exercises DdlTextWriter against a scratch file 'test.oddl'
    (legacy revision)."""

    def tearDown(self):
        # Best-effort cleanup of the file the tests write.
        try:
            os.remove("test.oddl")
        except FileNotFoundError:
            pass  # test_empty failed?

    def test_empty(self):
        # create document
        document = DdlDocument()
        # write document
        DdlTextWriter(document).write("test.oddl")
        # check if file was created
        # NOTE(review): os.path.isfile() returns False instead of raising,
        # so this except branch is unreachable.
        try:
            self.assertTrue(os.path.isfile("test.oddl"))
        except FileNotFoundError:
            self.fail("DdlTextWriter did not create the specified file.")

    def test_full(self):
        # create document
        document = DdlDocument()
        document.add_structure(B"Human", None,
                               [DdlStructure(B"Name", None, [DdlPrimitive(PrimitiveType.string, ["Peter"])]),
                                DdlStructure(B"Age", None, [DdlPrimitive(PrimitiveType.unsigned_int16, [21])])]
                               )
        document.add_structure(B"SomethingElse", None,
                               [DdlStructure(B"AnArray", None, [DdlPrimitive(PrimitiveType.int32, range(1, 100))])]
                               )
        document.add_structure(B"MoreElse", None,
                               [DdlStructure(B"AnVectorArray", None,
                                             [DdlPrimitive(PrimitiveType.int32,
                                                           [(1, 2), (12, 42), (13, 31)], None, 2)])]
                               )
        # write document
        DdlTextWriter(document).write("test.oddl")
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| mit | Python |
698eb220ababb935649860010c0cedd693bcb76a | Refactor form | cuducos/csvsimpletools,cuducos/csvsimpletools,cuducos/csvsimpletools | csvsimpletools/forms.py | csvsimpletools/forms.py | from flask_babel import gettext
from flask_wtf import Form
from flask_wtf.file import FileField, FileRequired, FileAllowed
from wtforms import RadioField, SelectField
from csv_commands import ordered_commands
# (value, label) pairs for the command radio buttons, in display order.
COMMANDS = tuple((c.method.__name__, c.title) for c in ordered_commands)
# Delimiter choices shared by the input and output selectors.
DELIMITERS = ((',', ','), (';', ';'), ('\t', 'tab'))
# Shared validator chain for the uploaded CSV file field.
FILE_VALIDATOR = (
    FileRequired(),
    FileAllowed(('csv', 'txt'), gettext('Please use a CSV (.txt or .csv)'))
)
class GetCSV(Form):
    """Upload form: a CSV file, the tool to run, and both delimiters."""
    csv = FileField('CSV File', validators=FILE_VALIDATOR)
    command = RadioField('Commands to execute', choices=COMMANDS)
    input_delimiter = SelectField('Input delimiter', choices=DELIMITERS)
    output_delimiter = SelectField('Output delimiter', choices=DELIMITERS)
| # coding: utf-8
from csv_commands import command_list, commands
from flask.ext.babel import gettext
from flask_wtf import Form
from flask_wtf.file import FileField, FileRequired, FileAllowed
from wtforms import RadioField, SelectField
class GetCSV(Form):
    """Upload form (legacy revision): CSV file, delimiters and command."""
    csv = FileField(
        'CSV File',
        validators=[FileRequired(),
                    FileAllowed(['csv', 'txt'],
                                gettext('Please, a CSV file (.txt or .csv)'))])
    input_delimiter = SelectField('Input delimiter',
                                  choices=[(',', ','),
                                           (';', ';'),
                                           ('\t', 'tab')])
    output_delimiter = SelectField('Output delimiter',
                                   choices=[(',', ','),
                                            (';', ';'),
                                            ('\t', 'tab')])
    command = RadioField('Commands to execute',
                         choices=[(c, commands[c]) for c in command_list])
| mit | Python |
ba8e567592c96dacb697e067004dc71799e4e93f | Store the basename of the generated files, to allow the unittests to clean up in the tearDown method. | sugarmanz/ctypeslib | ctypeslib/test/stdio.py | ctypeslib/test/stdio.py | import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    # A None handle loads symbols from the main program / process libc.
    _libc = CDLL(None)

# Generate a ctypes module from the C headers below, keeping the basename
# so the unittests can clean the generated files up in tearDown.
_gen_basename = include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
/* Silly comment */
""",
                        persist=False)
| import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    # A None handle loads symbols from the main program / process libc.
    _libc = CDLL(None)

# Generate a ctypes module from the C headers below (legacy revision:
# the generated basename is discarded).
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
        persist=False)
| mit | Python |
1235955edf07bb3815628200cfd247057b91be19 | fix path | weng-lab/SnoPlowPy | snoPlowPy/tests/conftest.py | snoPlowPy/tests/conftest.py | import pytest
@pytest.fixture
def remote_f():
    """Raw-GitHub URL of the tiny remote test file used by the tests."""
    return ('https://raw.githubusercontent.com/weng-lab/'
            'SnoPlowPy/master/snoPlowPy/tests/data/a')
| import pytest
@pytest.fixture
def remote_f():
    """Raw-GitHub URL (kepbod fork) of the tiny remote test file."""
    url = 'https://raw.githubusercontent.com/kepbod/'
    url += 'SnoPlowPy/master/snoPlowPy/tests/data/a'
    return url
| mit | Python |
788f0977d21ff88092dacf7840ddf38b0a660288 | Remove unused constant | lowRISC/ot-sca,lowRISC/ot-sca | cw/cw305/util/device.py | cw/cw305/util/device.py | # Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
r"""CW305 utility functions. Used to configure FPGA with OpenTitan design."""
import subprocess
import time
import chipwhisperer as cw
class OpenTitan(object):
    """Bring-up helper: programs the CW305 FPGA, configures the
    ChipWhisperer scope and flashes/initializes the target."""

    def __init__(self, fw_programmer, bitstream, pll_frequency, baudrate):
        # Order matters: the FPGA must be up before scope/target setup.
        self.fpga = self.initialize_fpga(bitstream, pll_frequency)
        self.scope = self.initialize_scope()
        self.target = self.initialize_target(self.scope, fw_programmer, baudrate)

    def initialize_fpga(self, bitstream, pll_frequency):
        """Initializes FPGA bitstream and sets PLL frequency."""
        print('Connecting and loading FPGA')
        fpga = cw.capture.targets.CW305()
        # Do not program the FPGA if it is already programmed.
        fpga.con(bsfile=bitstream, force=False)
        fpga.vccint_set(1.0)

        print('Initializing PLL1')
        fpga.pll.pll_enable_set(True)
        # Only PLL output channel 1 is enabled; it carries pll_frequency.
        fpga.pll.pll_outenable_set(False, 0)
        fpga.pll.pll_outenable_set(True, 1)
        fpga.pll.pll_outenable_set(False, 2)
        fpga.pll.pll_outfreq_set(pll_frequency, 1)

        # 1ms is plenty of idling time
        fpga.clkusbautooff = True
        fpga.clksleeptime = 1
        return fpga

    def initialize_scope(self):
        """Initializes chipwhisperer scope."""
        scope = cw.scope()
        scope.gain.db = 27.5
        # Samples per trace - We oversample by 10x and AES is doing ~12/16 cycles per encryption.
        scope.adc.samples = 180
        scope.adc.offset = 0
        scope.adc.basic_mode = "rising_edge"
        scope.clock.clkgen_freq = 100000000
        # We sample using the target clock (100 MHz).
        scope.clock.adc_src = "extclk_dir"
        scope.trigger.triggers = "tio4"
        scope.io.tio1 = "serial_tx"
        scope.io.tio2 = "serial_rx"
        scope.io.hs2 = "disabled"

        # TODO: Need to update error handling.
        scope.clock.reset_adc()
        time.sleep(0.5)  # give the ADC time to re-lock after the reset
        assert (scope.clock.adc_locked), "ADC failed to lock"
        return scope

    def initialize_target(self, scope, fw_programmer, baudrate):
        """Loads firmware image and initializes test target."""
        fw_programmer.run()
        time.sleep(0.5)
        target = cw.target(scope)
        # NOTE(review): 16 presumably matches the AES block size -- confirm.
        target.output_len = 16
        target.baud = baudrate
        target.flush()
        return target
| # Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
r"""CW305 utility functions. Used to configure FPGA with OpenTitan design."""
import subprocess
import time
import chipwhisperer as cw
SPIFLASH=r'bin/linux/spiflash'
class OpenTitan(object):
def __init__(self, fw_programmer, bitstream, pll_frequency, baudrate):
self.fpga = self.initialize_fpga(bitstream, pll_frequency)
self.scope = self.initialize_scope()
self.target = self.initialize_target(self.scope, fw_programmer, baudrate)
def initialize_fpga(self, bitstream, pll_frequency):
"""Initializes FPGA bitstream and sets PLL frequency."""
print('Connecting and loading FPGA')
fpga = cw.capture.targets.CW305()
# Do not program the FPGA if it is already programmed.
fpga.con(bsfile=bitstream, force=False)
fpga.vccint_set(1.0)
print('Initializing PLL1')
fpga.pll.pll_enable_set(True)
fpga.pll.pll_outenable_set(False, 0)
fpga.pll.pll_outenable_set(True, 1)
fpga.pll.pll_outenable_set(False, 2)
fpga.pll.pll_outfreq_set(pll_frequency, 1)
# 1ms is plenty of idling time
fpga.clkusbautooff = True
fpga.clksleeptime = 1
return fpga
def initialize_scope(self):
"""Initializes chipwhisperer scope."""
scope = cw.scope()
scope.gain.db = 27.5
# Samples per trace - We oversample by 10x and AES is doing ~12/16 cycles per encryption.
scope.adc.samples = 180
scope.adc.offset = 0
scope.adc.basic_mode = "rising_edge"
scope.clock.clkgen_freq = 100000000
# We sample using the target clock (100 MHz).
scope.clock.adc_src = "extclk_dir"
scope.trigger.triggers = "tio4"
scope.io.tio1 = "serial_tx"
scope.io.tio2 = "serial_rx"
scope.io.hs2 = "disabled"
# TODO: Need to update error handling.
scope.clock.reset_adc()
time.sleep(0.5)
assert (scope.clock.adc_locked), "ADC failed to lock"
return scope
def initialize_target(self, scope, fw_programmer, baudrate):
"""Loads firmware image and initializes test target."""
fw_programmer.run()
time.sleep(0.5)
target = cw.target(scope)
target.output_len = 16
target.baud = baudrate
target.flush()
return target
| apache-2.0 | Python |
bd677acb1869a4137e78ee137ed1a676928dadf1 | Update test_complexityclass_comparisons.py | pberkes/big_O | big_o/test/test_complexityclass_comparisons.py | big_o/test/test_complexityclass_comparisons.py | import unittest
from big_o.complexities import ComplexityClass
class FirstComplexityClass(ComplexityClass):
order = 1
class AltFirstComplexityClass(ComplexityClass):
order = 1
class SecondComplexityClass(ComplexityClass):
order = 2
class TestComplexities(unittest.TestCase):
def setUp(self):
self.first_complexity = FirstComplexityClass()
self.alt_first_complexity = AltFirstComplexityClass()
self.second_complexity = SecondComplexityClass()
def test_ge(self):
self.assertFalse(self.first_complexity >= self.second_complexity)
self.assertTrue(self.second_complexity >= self.first_complexity)
self.assertTrue(self.first_complexity >= self.first_complexity)
self.assertTrue(self.alt_first_complexity >= self.first_complexity)
self.assertTrue(self.first_complexity >= self.alt_first_complexity)
def test_le(self):
self.assertTrue(self.first_complexity <= self.second_complexity)
self.assertFalse(self.second_complexity <= self.first_complexity)
self.assertTrue(self.first_complexity <= self.first_complexity)
self.assertTrue(self.alt_first_complexity <= self.first_complexity)
self.assertTrue(self.first_complexity <= self.alt_first_complexity)
def test_l(self):
self.assertTrue(self.first_complexity < self.second_complexity)
self.assertFalse(self.second_complexity < self.first_complexity)
self.assertFalse(self.first_complexity < self.alt_first_complexity)
def test_g(self):
self.assertFalse(self.first_complexity > self.second_complexity)
self.assertTrue(self.second_complexity > self.first_complexity)
self.assertFalse(self.first_complexity > self.alt_first_complexity)
def test_eq(self):
self.assertFalse(self.first_complexity == self.second_complexity)
self.assertTrue(self.first_complexity == self.first_complexity)
self.assertTrue(self.first_complexity == self.alt_first_complexity)
| import unittest
import numpy as np
from big_o.complexities import ComplexityClass
class FirstComplexityClass(ComplexityClass):
order = 1
class AltFirstComplexityClass(ComplexityClass):
order = 1
class SecondComplexityClass(ComplexityClass):
order = 2
class TestComplexities(unittest.TestCase):
def setUp(self):
print('here')
self.first_complexity = FirstComplexityClass()
self.alt_first_complexity = AltFirstComplexityClass()
self.second_complexity = SecondComplexityClass()
def test_ge(self):
self.assertFalse(self.first_complexity >= self.second_complexity)
self.assertTrue(self.second_complexity >= self.first_complexity)
self.assertTrue(self.first_complexity >= self.first_complexity)
self.assertTrue(self.alt_first_complexity >= self.first_complexity)
self.assertTrue(self.first_complexity >= self.alt_first_complexity)
def test_le(self):
self.assertTrue(self.first_complexity <= self.second_complexity)
self.assertFalse(self.second_complexity <= self.first_complexity)
self.assertTrue(self.first_complexity <= self.first_complexity)
self.assertTrue(self.alt_first_complexity <= self.first_complexity)
self.assertTrue(self.first_complexity <= self.alt_first_complexity)
def test_l(self):
self.assertTrue(self.first_complexity < self.second_complexity)
self.assertFalse(self.second_complexity < self.first_complexity)
self.assertFalse(self.first_complexity < self.alt_first_complexity)
def test_g(self):
self.assertFalse(self.first_complexity > self.second_complexity)
self.assertTrue(self.second_complexity > self.first_complexity)
self.assertFalse(self.first_complexity > self.alt_first_complexity)
def test_eq(self):
self.assertFalse(self.first_complexity == self.second_complexity)
self.assertTrue(self.first_complexity == self.first_complexity)
self.assertTrue(self.first_complexity == self.alt_first_complexity)
| bsd-3-clause | Python |
40eda9e5d9cb6647e28d433e37457e4e8c86b9ad | Fix inconsistent docstring and ValueError message | deepmind/acme,deepmind/acme | acme/tf/losses/huber.py | acme/tf/losses/huber.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Huber loss."""
import tensorflow as tf
def huber(inputs: tf.Tensor, quadratic_linear_boundary: float) -> tf.Tensor:
"""Calculates huber loss of `inputs`.
For each value x in `inputs`, the following is calculated:
```
0.5 * x^2 if |x| <= d
0.5 * d^2 + d * (|x| - d) if |x| > d
```
where d is `quadratic_linear_boundary`.
Args:
inputs: Input Tensor to calculate the huber loss on.
quadratic_linear_boundary: The point where the huber loss function changes
from a quadratic to linear.
Returns:
`Tensor` of the same shape as `inputs`, containing values calculated
in the manner described above.
Raises:
ValueError: if quadratic_linear_boundary < 0.
"""
if quadratic_linear_boundary < 0:
raise ValueError("quadratic_linear_boundary must be >= 0.")
abs_x = tf.abs(inputs)
delta = tf.constant(quadratic_linear_boundary)
quad = tf.minimum(abs_x, delta)
# The following expression is the same in value as
# tf.maximum(abs_x - delta, 0), but importantly the gradient for the
# expression when abs_x == delta is 0 (for tf.maximum it would be 1). This
# is necessary to avoid doubling the gradient, since there is already a
# nonzero contribution to the gradient from the quadratic term.
lin = (abs_x - quad)
return 0.5 * quad**2 + delta * lin
| # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Huber loss."""
import tensorflow as tf
def huber(inputs: tf.Tensor, quadratic_linear_boundary: float) -> tf.Tensor:
"""Calculates huber loss of `inputs`.
For each value x in `inputs`, the following is calculated:
```
0.5 * x^2 if |x| <= d
0.5 * d^2 + d * (|x| - d) if |x| > d
```
where d is `quadratic_linear_boundary`.
Args:
inputs: Input Tensor to calculate the huber loss on.
quadratic_linear_boundary: The point where the huber loss function changes
from a quadratic to linear.
Returns:
`Tensor` of the same shape as `inputs`, containing values calculated
in the manner described above.
Raises:
ValueError: if quadratic_linear_boundary <= 0.
"""
if quadratic_linear_boundary < 0:
raise ValueError("quadratic_linear_boundary must be > 0.")
abs_x = tf.abs(inputs)
delta = tf.constant(quadratic_linear_boundary)
quad = tf.minimum(abs_x, delta)
# The following expression is the same in value as
# tf.maximum(abs_x - delta, 0), but importantly the gradient for the
# expression when abs_x == delta is 0 (for tf.maximum it would be 1). This
# is necessary to avoid doubling the gradient, since there is already a
# nonzero contribution to the gradient from the quadratic term.
lin = (abs_x - quad)
return 0.5 * quad**2 + delta * lin
| apache-2.0 | Python |
b21c2e6f783a97f2dabe9e9cfbe34134f7bad15c | save metadata in scenes | sat-utils/sat-search | satsearch/main.py | satsearch/main.py | import os
import sys
import json
import logging
from .version import __version__
from satsearch import Search, Scenes
from satsearch.parser import SatUtilsParser
logger = logging.getLogger(__name__)
logging.getLogger('requests').setLevel(logging.CRITICAL)
def main(review=False, printsearch=False, printmd=None, printcal=False,
load=None, save=None, append=False, download=None, **kwargs):
""" Main function for performing a search """
if load is None:
if printsearch:
txt = 'Search for scenes matching criteria:\n'
for kw in kwargs:
if kw == 'intersects':
geom = json.dumps(json.loads(kwargs[kw])['geometry'])
txt += ('{:>20}: {:<40} ...\n'.format(kw, geom[0:70]))
else:
txt += ('{:>20}: {:<40}\n'.format(kw, kwargs[kw]))
print(txt)
# get scenes from search
search = Search(**kwargs)
scenes = Scenes(search.scenes(), metadata={'search': kwargs})
else:
search = None
scenes = Scenes.load(load)
if review:
if not os.getenv('IMGCAT', None):
raise ValueError('Set IMGCAT envvar to terminal image display program to use review feature')
scenes.review_thumbnails()
# print summary
if printmd is not None:
scenes.print_scenes(printmd)
# print calendar
if printcal:
print(scenes.text_calendar())
# save all metadata in JSON file
if save is not None:
scenes.save(filename=save, append=append)
print('%s scenes found' % len(scenes))
# download files given keys
if download is not None:
for key in download:
scenes.download(key=key)
return scenes
def cli():
parser = SatUtilsParser(description='sat-search (v%s)' % __version__)
args = parser.parse_args(sys.argv[1:])
# read the GeoJSON file
if 'intersects' in args:
if os.path.exists(args['intersects']):
with open(args['intersects']) as f:
args['intersects'] = json.dumps(json.loads(f.read()))
# enable logging
logging.basicConfig(stream=sys.stdout, level=args.pop('verbosity') * 10)
scenes = main(**args)
return len(scenes)
if __name__ == "__main__":
cli()
| import os
import sys
import json
import logging
from .version import __version__
from satsearch import Search, Scenes
from satsearch.parser import SatUtilsParser
logger = logging.getLogger(__name__)
logging.getLogger('requests').setLevel(logging.CRITICAL)
def main(review=False, printsearch=False, printmd=None, printcal=False,
load=None, save=None, append=False, download=None, **kwargs):
""" Main function for performing a search """
if load is None:
if printsearch:
txt = 'Search for scenes matching criteria:\n'
for kw in kwargs:
if kw == 'intersects':
geom = json.dumps(json.loads(kwargs[kw])['geometry'])
txt += ('{:>20}: {:<40} ...\n'.format(kw, geom[0:70]))
else:
txt += ('{:>20}: {:<40}\n'.format(kw, kwargs[kw]))
print(txt)
# get scenes from search
search = Search(**kwargs)
scenes = Scenes(search.scenes(), metadata={'search': kwargs})
else:
search = None
scenes = Scenes.load(load)
if review:
if not os.getenv('IMGCAT', None):
raise ValueError('Set IMGCAT envvar to terminal image display program to use review feature')
scenes.review_thumbnails()
# print summary
if printmd is not None:
scenes.print_scenes(printmd)
# print calendar
if printcal:
print(scenes.text_calendar())
# save all metadata in JSON file
if save is not None:
scenes.save(filename=save, append=append, metadata=search)
print('%s scenes found' % len(scenes))
# download files given keys
if download is not None:
for key in download:
scenes.download(key=key)
return scenes
def cli():
parser = SatUtilsParser(description='sat-search (v%s)' % __version__)
args = parser.parse_args(sys.argv[1:])
# read the GeoJSON file
if 'intersects' in args:
if os.path.exists(args['intersects']):
with open(args['intersects']) as f:
args['intersects'] = json.dumps(json.loads(f.read()))
# enable logging
logging.basicConfig(stream=sys.stdout, level=args.pop('verbosity') * 10)
scenes = main(**args)
return len(scenes)
if __name__ == "__main__":
cli()
| mit | Python |
c2adfa9e9accfa7ef9c2ff91c8fa23eee5516cf6 | update to upstream core-svn-01-Feb-2014 | Benocs/core,Benocs/core,Benocs/core,Benocs/core,Benocs/core | src/daemon/core/__init__.py | src/daemon/core/__init__.py | # Copyright (c)2010-2012 the Boeing Company.
# See the LICENSE file included in this distribution.
"""core
Top-level Python package containing CORE components.
See http://www.nrl.navy.mil/itd/ncs/products/core and
http://code.google.com/p/coreemu/ for more information on CORE.
Pieces can be imported individually, for example
import core.netns.vnode
or everything listed in __all__ can be imported using
from core import *
"""
__all__ = []
# Automatically import all add-ons listed in addons.__all__
from .addons import *
| # Copyright (c)2010-2012 the Boeing Company.
# See the LICENSE file included in this distribution.
"""core
Top-level Python package containing CORE components.
See http://cs.itd.nrl.navy.mil/work/core/ and
http://code.google.com/p/coreemu/ for more information on CORE.
Pieces can be imported individually, for example
import core.netns.vnode
or everything listed in __all__ can be imported using
from core import *
"""
__all__ = []
# Automatically import all add-ons listed in addons.__all__
from .addons import *
| bsd-3-clause | Python |
89d7c72e0848f0c94e5923e45624bccfb421a641 | change patch list to group_id in runner | johnnygreco/hugs | scripts/runner.py | scripts/runner.py | """
Run hugs-pipe on an HSC patch.
"""
import os
import numpy as np
import hugs_pipe
hugs_pipe_io = os.environ.get('HUGS_PIPE_IO')
def main(tract, patch, config, outdir):
data_id = {'tract': tract, 'patch': patch, 'filter': 'HSC-I'}
hugs_pipe.utils.mkdir_if_needed(outdir)
prefix = os.path.join(outdir, 'hugs-pipe-{}-{}'.format(tract, patch))
if type(config)==str:
config = hugs_pipe.Config(data_id=data_id,
config_fn=config,
log_fn=prefix+'.log')
else:
config.set_data_id(data_id)
sources = hugs_pipe.run(config)
sources.write(prefix+'.cat', format='ascii')
if __name__=='__main__':
from argparse import ArgumentParser
parser = ArgumentParser('run hugs-pipe')
parser.add_argument('-t', '--tract', type=int, help='HSC tract')
parser.add_argument('-p', '--patch', type=str, help='HSC patch')
parser.add_argument('-g', '--group_id', type=int, help='group id',
default=None)
parser.add_argument('-c', '--config', type=str, help='config file name',
default=None)
parser.add_argument('-o', '--outdir', type=str, help='output directory',
default=hugs_pipe_io)
args = parser.parse_args()
if args.group_id is None:
assert args.tract is not None
assert args.patch is not None
main(args.tract, args.patch, args.config, args.outdir)
else:
from astropy.table import Table
outdir = os.path.join(args.outdir, 'group_'+str(args.group_id))
log_fn = os.path.join(outdir, 'hugs-pipe.log')
config = hugs_pipe.Config(log_fn=log_fn)
regions_fn = 'cat_z0.065_Mh12.75-14.0_tracts_n_patches.npy'
regions_fn = os.path.join(hugs_pipe_io, regions_fn)
regions_dict = np.load(regions_fn).item()
regions = Table(regions_dict[args.group_id])
for tract, patch in regions['tract', 'patch']:
main(tract, patch, config, outdir)
| """
Run hugs-pipe on an HSC patch.
"""
import os
import hugs_pipe
def main(tract, patch, config, outdir):
data_id = {'tract': tract, 'patch': patch, 'filter': 'HSC-I'}
hugs_pipe.utils.mkdir_if_needed(outdir)
prefix = os.path.join(outdir, 'hugs-pipe-{}-{}'.format(tract, patch))
if type(config)==str:
config = hugs_pipe.Config(data_id=data_id,
config_fn=config,
log_fn=prefix+'.log')
else:
config.set_data_id(data_id)
sources = hugs_pipe.run(config)
sources.write(prefix+'.cat', format='ascii')
if __name__=='__main__':
from argparse import ArgumentParser
parser = ArgumentParser('run hugs-pipe')
parser.add_argument('-t', '--tract', type=int, help='HSC tract')
parser.add_argument('-p', '--patch', type=str, help='HSC patch')
parser.add_argument('-pl', '--patch_list', type=str, help='patch list',
default=None)
parser.add_argument('-c', '--config', type=str, help='config file name',
default=None)
parser.add_argument('-o', '--outdir', type=str, help='output directory',
default='/home/jgreco/hugs-pipe-out')
args = parser.parse_args()
if args.patch_list is None:
main(args.tract, args.patch, args.config, args.outdir)
else:
from astropy.table import Table
log_fn = os.path.join(args.outdir, 'hugs-pipe.log')
config = hugs_pipe.Config(log_fn=log_fn)
regions = Table.read(args.patch_list, format='ascii')
for tract, patch in regions['tract', 'patch']:
main(tract, patch, config, args.outdir)
| mit | Python |
7b2adddc368618b6ea035eb93e2b48f612f4cb00 | Remove comment | FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition | suasimageparser.py | suasimageparser.py | from SUASImageParser import ImageParser
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| from SUASImageParser import ImageParser
from options import parseOptions
from options import getOption
import cv2
# ------------------------ Creating option parser -------------------------
parseOptions()
# ------------------------ Loading image & parsing ------------------------
my_parser = ImageParser(mode="ADLC", debug=True)
img = my_parser.parse(getOption("image"))#'images/targets_400.JPG')
# ------------------------ Displaying loaded image ------------------------
cv2.imshow("Parsed", img)
cv2.waitKey( 0 )
cv2.destroyAllWindows()
| mit | Python |
71b6af52b1d26b1c6c366758c369ac2e35ee0636 | update the kth | frankgu/3d-DenseNet | data_providers/utils.py | data_providers/utils.py | from .data import DataProvider
def get_data_provider_by_name(name, train_params):
"""Return required data provider class"""
if name == 'UCF101':
return DataProvider(**train_params)
if name == 'MERL':
return DataProvider(**train_params)
if name == 'KTH':
return DataProvider(**train_params)
else:
print("Sorry, data provider for `%s` dataset "
"was not implemented yet" % name)
exit()
| from .data import DataProvider
def get_data_provider_by_name(name, train_params):
"""Return required data provider class"""
if name == 'UCF101':
return DataProvider(**train_params)
if name == 'MERL':
return DataProvider(**train_params)
else:
print("Sorry, data provider for `%s` dataset "
"was not implemented yet" % name)
exit()
| mit | Python |
d71284dec713f46ae9e6e9c7cf90517cd6ad3e60 | bump version | informatics-isi-edu/synspy,informatics-isi-edu/synspy,informatics-isi-edu/synspy | synspy/__init__.py | synspy/__init__.py |
#
# Copyright 2015 University of Southern California
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
#
#import analyze
#import viewer
__version__ = "20171207.0"
|
#
# Copyright 2015 University of Southern California
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
#
#import analyze
#import viewer
__version__ = "20171206.0"
| bsd-3-clause | Python |
6425471dae9e042c335f93e9f7ca9b2abd47ae04 | Fix callable (2nd) | jar3b/py-phias,jar3b/py-phias,jar3b/py-phias,jar3b/py-phias | aore/miscutils/bottlecl.py | aore/miscutils/bottlecl.py | # -*- coding: utf-8 -*-
from bottle import Bottle
class BottleCL(object):
def __init__(self):
self._app = Bottle()
self.init_routes()
def __call__(self, environ, start_response):
self._app.__call__(environ, start_response)
def init_routes(self):
pass
def add_route(self, route_path, handler):
self._app.route(route_path, callback=handler)
def add_error(self, error_code, handler):
if not self._app.error_handler:
self._app.error_handler = {error_code: handler}
else:
self._app.error_handler[error_code] = handler
def start(self, **kwargs):
self._app.run(**kwargs)
| # -*- coding: utf-8 -*-
from bottle import Bottle
class BottleCL(object):
def __init__(self):
self._app = Bottle()
self.init_routes()
def __call__(self, *args):
self._app(*args)
def init_routes(self):
pass
def add_route(self, route_path, handler):
self._app.route(route_path, callback=handler)
def add_error(self, error_code, handler):
if not self._app.error_handler:
self._app.error_handler = {error_code: handler}
else:
self._app.error_handler[error_code] = handler
def start(self, **kwargs):
self._app.run(**kwargs)
| bsd-3-clause | Python |
3123f01f28c68634f3993d40c7c71c4c617c0885 | Add support for tab order specification to QtView extra. | brett-patterson/pyface,geggo/pyface,geggo/pyface,pankajp/pyface | enthought/traits/ui/qt4/extra/qt_view.py | enthought/traits/ui/qt4/extra/qt_view.py | #------------------------------------------------------------------------------
# Copyright (c) 2011, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Evan Patterson
#------------------------------------------------------------------------------
""" Defines a Traits UI View that allows for the customization of Qt-specific
widget properties.
"""
# Standard library imports.
import logging
# System library imports.
from enthought.qt import QtGui
# Enthought library imports.
from enthought.traits.api import File, List, Str
from enthought.traits.ui.view import View
# Logger.
logger = logging.getLogger(__name__)
class QtView(View):
""" A View that allows the specification of Qt style sheets.
"""
# An optional string containing a Qt style sheet.
style_sheet = Str
# An optional file path for a Qt style sheet.
style_sheet_path = File
# A list of trait names that defines the order for focus switching via
# Tab/Shift+Tab. If the view contains multiple items for a specified trait
# name, the order is undefined.
tab_order = List(Str)
#---------------------------------------------------------------------------
# Creates a UI user interface object:
#---------------------------------------------------------------------------
def ui(self, context, parent=None, kind=None, view_elements=None,
handler=None, id='', scrollable=None, args=None):
ui = super(QtView, self).ui(context, parent, kind, view_elements,
handler, id, scrollable, args)
if self.style_sheet:
ui.control.setStyleSheet(self.style_sheet)
if self.style_sheet_path:
try:
with open(self.style_sheet_path, 'r') as f:
ui.control.setStyleSheet(f.read())
except IOError:
logger.exception("Error loading Qt style sheet")
if len(self.tab_order) >= 2:
previous = self._get_editor_control(ui, self.tab_order[0])
for i in xrange(1, len(self.tab_order)):
current = self._get_editor_control(ui, self.tab_order[i])
QtGui.QWidget.setTabOrder(previous, current)
previous = current
return ui
#---------------------------------------------------------------------------
# Private interface:
#---------------------------------------------------------------------------
def _get_editor_control(self, ui, name):
control = None
editors = ui.get_editors(name)
if editors:
control = editors[0].control
else:
logger.warning("No item for '%s' trait" % name)
return control
| #------------------------------------------------------------------------------
# Copyright (c) 2009, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Evan Patterson
# Date: 07/21/09
#------------------------------------------------------------------------------
""" Defines a Traits UI View that allows for the customization of Qt-specific
widget properties.
"""
# ETS imports
from enthought.traits.api import Str, File
from enthought.traits.ui.view import View
class QtView(View):
""" A View that allows the specification of Qt style sheets.
"""
# An optional string containing a Qt style sheet
style_sheet = Str
# An optional path to a Qt style sheet
style_sheet_path = File
#---------------------------------------------------------------------------
# Creates a UI user interface object:
#---------------------------------------------------------------------------
def ui(self, context, parent=None, kind=None, view_elements=None,
handler=None, id='', scrollable=None, args=None):
""" Reimplemented to set style sheets.
"""
ui = super(QtView, self).ui(context, parent, kind, view_elements,
handler, id, scrollable, args)
if self.style_sheet:
ui.control.setStyleSheet(self.style_sheet)
if self.style_sheet_path:
try:
f = open(self.style_sheet_path, 'r')
try:
ui.control.setStyleSheet(f.read())
finally:
f.close()
except IOError, error:
print "Error loading Qt style sheet:", error
return ui
| bsd-3-clause | Python |
1ff4dab34d4aa6935d4d1b54aa354882790b9b44 | Add config items for server, timeout | imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery | astroquery/astrometry_net/__init__.py | astroquery/astrometry_net/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
<Put Your Tool Name Here>
-------------------------
:author: <your name> (<your email>)
"""
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
server = _config.ConfigItem('http://nova.astrometry.net', 'Name of server')
timeout = _config.ConfigItem(60,
'Default timeout for connecting to server')
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
<Put Your Tool Name Here>
-------------------------
:author: <your name> (<your email>)
"""
# Make the URL of the server, timeout and other items configurable
# See <http://docs.astropy.org/en/latest/config/index.html#developer-usage>
# for docs and examples on how to do this
# Below is a common use case
from astropy import config as _config
class Conf(_config.ConfigNamespace):
""" Configuration parameters for `astroquery.astrometry_net` """
api_key = _config.ConfigItem(
'',
"The Astrometry.net API key."
)
conf = Conf()
# Now import your public class
# Should probably have the same name as your module
from .core import AstrometryNet, AstrometryNetClass
__all__ = ['AstrometryNet', 'AstrometryNetClass']
| bsd-3-clause | Python |
4a5fc15593e2a3acd6928d41989bcdccad77e69b | add word2vec test script | 1900492968/SwiftSnails,Superjom/SwiftSnails,1900492968/SwiftSnails,1900492968/SwiftSnails,Superjom/SwiftSnails,1900492968/SwiftSnails,Superjom/SwiftSnails,Superjom/SwiftSnails | src/tools/gen-word2vec-data.py | src/tools/gen-word2vec-data.py | import sys
import random
def gen_one_record(num_feas):
feas = []
target = random.random()
for i in xrange(random.randint(6, num_feas)):
id = random.randint(0, 300)
feas.append("%d" % id)
return " ".join(feas)
def gen_data(num):
for i in xrange(num):
line = gen_one_record(15)
print line
if __name__ == '__main__':
gen_data(10000)
| import sys
import random
def gen_one_record(num_feas):
feas = []
target = random.random()
for i in xrange(random.randint(6, num_feas)):
id = random.randint(0, 300)
feas.append("%d" % id)
return " ".join(feas)
def gen_data(num):
for i in xrange(num):
line = gen_one_record(15)
print line
if __name__ == '__main__':
gen_data(1000)
| apache-2.0 | Python |
bfbc58922efe00a21daa82900c9306419c65a94f | add a (failing) test for _protected variables in fabfiles | ericholscher/fabric,felix-d/fabric,rodrigc/fabric,xLegoz/fabric,tekapo/fabric,rane-hs/fabric-py3,SamuelMarks/fabric,itoed/fabric,tolbkni/fabric,qinrong/fabric,askulkarni2/fabric,ploxiln/fabric,sdelements/fabric,MjAbuz/fabric,likesxuqiang/fabric,opavader/fabric,StackStorm/fabric,rbramwell/fabric,hrubi/fabric,elijah513/fabric,cmattoon/fabric,mathiasertl/fabric,pashinin/fabric,bitmonk/fabric,kxxoling/fabric,jaraco/fabric,akaariai/fabric,getsentry/fabric,amaniak/fabric,simon-engledew/fabric,kmonsoor/fabric,raimon49/fabric,fernandezcuesta/fabric,cgvarela/fabric,haridsv/fabric,bspink/fabric,pgroudas/fabric,TarasRudnyk/fabric,bitprophet/fabric | prototypes/fabfile.py | prototypes/fabfile.py |
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures(**kwargs):
set(fab_hosts = ['127.0.0.1', 'localhost'])
exc = run
if 'sudo' in kwargs:
exc = sudo
exc('false 1', fail='ignore')
exc('false 2', fail='warn')
exc('echo must print')
exc('false 3') # default fail is abort
exc('echo must NOT print')
import datetime
from StringIO import StringIO
re = __import__('re')
global_variables_are_available = True
def test_imports():
assert datetime is not None
assert StringIO is not None
assert re is not None
global global_variables_are_available
assert global_variables_are_available
global_variables_are_available = 1
local("echo all good.")
set(test_imports_has_run=True)
def test_global_assignment():
require('test_imports_has_run', provided_by=[test_imports])
global global_variables_are_available
assert global_variables_are_available == 1
local("echo all double-good.")
def test_prompting():
# Simplest form:
prompt('environment', 'Please specify target environment')
# With default:
prompt('dish', 'Specify favorite dish', default='spam & eggs')
# With validation, i.e. require integer input:
prompt('nice', 'Please specify process nice level', validate=int)
# With validation against a regular expression:
prompt('release', 'Please supply a release name',
validate=r'^\w+-\d+(\.\d+)?$')
def hello():
local("echo hello")
@hosts('localhost')
def test_nested_remotes_part_2():
"used by test_nested_remotes"
run("echo 2-5 $(fab_host)")
# this won't connect to 127.0.0.1 when called by test_nested_remotes()!
@hosts('localhost', '127.0.0.1')
def test_nested_remotes_part_4():
"used by test_nested_remotes"
run("echo 4-5 $(fab_host)")
@hosts('localhost')
def test_nested_remotes():
"Tests nesting of commands that require connections."
run("echo 1-5")
test_nested_remotes_part_2()
run("echo 3-5")
test_nested_remotes_part_4()
run("echo 5-5 and done.")
_protected = "Yes!"
def test_protected_names():
print "Do we have access to _protected names?", _protected
|
def test_local_failures():
local('false 1', fail='ignore')
local('false 2', fail='warn')
local('echo must print')
local('false 3') # default fail is abort
local('echo must NOT print')
def test_remote_failures(**kwargs):
set(fab_hosts = ['127.0.0.1', 'localhost'])
exc = run
if 'sudo' in kwargs:
exc = sudo
exc('false 1', fail='ignore')
exc('false 2', fail='warn')
exc('echo must print')
exc('false 3') # default fail is abort
exc('echo must NOT print')
import datetime
from StringIO import StringIO
re = __import__('re')
global_variables_are_available = True
def test_imports():
assert datetime is not None
assert StringIO is not None
assert re is not None
global global_variables_are_available
assert global_variables_are_available
global_variables_are_available = 1
local("echo all good.")
set(test_imports_has_run=True)
def test_global_assignment():
require('test_imports_has_run', provided_by=[test_imports])
global global_variables_are_available
assert global_variables_are_available == 1
local("echo all double-good.")
def test_prompting():
# Simplest form:
prompt('environment', 'Please specify target environment')
# With default:
prompt('dish', 'Specify favorite dish', default='spam & eggs')
# With validation, i.e. require integer input:
prompt('nice', 'Please specify process nice level', validate=int)
# With validation against a regular expression:
prompt('release', 'Please supply a release name',
validate=r'^\w+-\d+(\.\d+)?$')
def hello():
local("echo hello")
@hosts('localhost')
def test_nested_remotes_part_2():
"used by test_nested_remotes"
run("echo 2-5 $(fab_host)")
# this won't connect to 127.0.0.1 when called by test_nested_remotes()!
@hosts('localhost', '127.0.0.1')
def test_nested_remotes_part_4():
"used by test_nested_remotes"
run("echo 4-5 $(fab_host)")
@hosts('localhost')
def test_nested_remotes():
"Tests nesting of commands that require connections."
run("echo 1-5")
test_nested_remotes_part_2()
run("echo 3-5")
test_nested_remotes_part_4()
run("echo 5-5 and done.")
| bsd-2-clause | Python |
34e67f6e5328fc3a2579debaa9f7901b1422caa1 | Update functional test with new syntax | LauritzThaulow/fakelargefile,LauritzThaulow/fakelargefile | tests/functional_tests.py | tests/functional_tests.py | '''
Created on Oct 25, 2014
@author: lauritz
'''
from fakelargefile import FakeLargeFile, RepeatingSegment
BG = """\
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
"""
import logging
log = logging.getLogger(__name__)
def test_usage():
flf = FakeLargeFile([RepeatingSegment.example(start=0, size="10G")])
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
deleted = flf.deleteline(count=2)
assert flf.read(10).strip() == "Copyright"
assert flf.readline() == (
" (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>\n")
flf.insert_literal(deleted)
flf.seek(len(BG) * 10)
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
assert flf.readline().strip() == "Version 3, 29 June 2007"
flf.seek(0, 2)
fasit_end_pos = 10 * 1024 * 1024 * 1024
assert flf.tell() == fasit_end_pos
flf.seek(-10, 1)
BG_pos = (fasit_end_pos - 10) % len(BG)
fasit = (BG + BG)[BG_pos:BG_pos + 10]
assert flf.read(10) == fasit
flf.seek(len(BG))
flf.delete(len(BG))
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
| '''
Created on Oct 25, 2014
@author: lauritz
'''
from fakelargefile import FakeLargeFile, RepeatingSegment
BG = """\
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
"""
def test_usage():
flf = FakeLargeFile([RepeatingSegment.example(start=0, size="10G")])
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
deleted = flf.deleteline(count=2)
assert flf.read(10).strip() == "Copyright"
assert flf.readline() == (
" (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>")
flf.insert(deleted)
flf.seek(len(BG) * 10)
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
assert flf.readline().strip() == "Version 3, 29 June 2007"
flf.seek(0, 2)
fasit_end_pos = 10 * 1024 * 1024 * 1024
assert flf.tell() == fasit_end_pos
flf.seek(-10, 1)
BG_pos = (fasit_end_pos - 10) % len(BG)
fasit = (BG + BG)[BG_pos:BG_pos + 10]
assert flf.read(10) == fasit
flf.seek(len(BG))
flf.delete(len(BG))
assert flf.readline().strip() == "GNU GENERAL PUBLIC LICENSE"
| agpl-3.0 | Python |
70b11f2a824fa4b85bc7031aa07271e74d36f7d5 | remove now-unused RwhoClient constructor args | grawity/rwho,grawity/rwho,grawity/rwho,grawity/rwho,grawity/rwho | agent/lib/api_client.py | agent/lib/api_client.py | import json
import requests
import socket
import sys
from .exceptions import *
from .log_util import *
class RwhoClient():
def __init__(self, url,
host_name=None,
host_fqdn=None):
self.url = url
self.host_name = host_name
self.host_fqdn = host_fqdn
self.ua = requests.Session()
def auth_set_basic(self, username, password):
import requests.auth
self.ua.auth = requests.auth.HTTPBasicAuth(username, password)
def auth_set_kerberos(self, service="HTTP"):
import gssapi
import requests_gssapi
spnego = gssapi.Mechanism.from_sasl_name("SPNEGO")
self.ua.auth = requests_gssapi.HTTPSPNEGOAuth(target_name=service,
mech=spnego,
opportunistic_auth=True)
def upload(self, action, data):
log_debug("api: calling %r with %d items", action, len(data))
payload = {
"host": self.host_fqdn,
"action": action,
"utmp": json.dumps(data),
}
resp = self.ua.post(self.url, data=payload)
resp.raise_for_status()
log_debug("api: server returned %r", resp.text)
if resp.text.strip() == "OK":
return True
elif resp.text.startswith("KOD"):
raise RwhoShutdownRequestedError(resp.text.strip())
else:
raise RwhoUploadRejectedError(resp.text.strip())
def put_sessions(self, sessions):
return self.upload(action="put", data=[*sessions])
def remove_host(self):
return self.upload(action="destroy", data=[])
| import json
import requests
import socket
import sys
from .exceptions import *
from .log_util import *
class RwhoClient():
def __init__(self, url,
host_name=None,
host_fqdn=None,
auth_method=None,
auth_user=None,
auth_pass=None):
self.url = url
self.host_name = host_name
self.host_fqdn = host_fqdn
self.auth_method = auth_method
self.auth_user = auth_user or host_fqdn
self.auth_pass = auth_pass
self.ua = requests.Session()
def auth_set_basic(self, username, password):
import requests.auth
self.ua.auth = requests.auth.HTTPBasicAuth(username, password)
def auth_set_kerberos(self, service="HTTP"):
import gssapi
import requests_gssapi
spnego = gssapi.Mechanism.from_sasl_name("SPNEGO")
self.ua.auth = requests_gssapi.HTTPSPNEGOAuth(target_name=service,
mech=spnego,
opportunistic_auth=True)
def upload(self, action, data):
log_debug("api: calling %r with %d items", action, len(data))
payload = {
"host": self.host_fqdn,
"action": action,
"utmp": json.dumps(data),
}
resp = self.ua.post(self.url, data=payload)
resp.raise_for_status()
log_debug("api: server returned %r", resp.text)
if resp.text.strip() == "OK":
return True
elif resp.text.startswith("KOD"):
raise RwhoShutdownRequestedError(resp.text.strip())
else:
raise RwhoUploadRejectedError(resp.text.strip())
def put_sessions(self, sessions):
return self.upload(action="put", data=[*sessions])
def remove_host(self):
return self.upload(action="destroy", data=[])
| mit | Python |
221bf7e5faf612feab30680b065a81b85f0ac634 | Upgrade config tests. | tdda/tdda,tdda/tdda,tdda/tdda,tdda/tdda | tdda/testconfig.py | tdda/testconfig.py | import os
import sys
import unittest
class TestSystemConfig(unittest.TestCase):
def test_01_tdda_path(self):
print('\ntype tdda')
with os.popen('type tdda') as f:
path = f.read()
print(path)
print('which tdda')
with os.popen('which tdda') as f:
path = f.read()
print(path)
def test_02_path(self):
path = os.environ.get('PATH')
print('PATH=%s' % path)
print('COMPONENTS:')
for p in path.split(':'):
print(p)
print()
def test_03_pythonpath(self):
print('PYTHON PATH:')
for p in sys.path:
print(p)
if __name__ == '__main__':
unittest.main()
| import os
import sys
import unittest
class TestSystemConfig(unittest.TestCase):
def test_01_tdda_path(self):
print('\ntype tdda')
os.system('type tdda')
print('which tdda')
os.system('which tdda')
def test_02_path(self):
print('$PATH')
os.system('echo $PATH')
def test_03_pythonpath(self):
print('PYTHON PATH')
for p in sys.path:
print(p)
if __name__ == '__main__':
unittest.main()
| mit | Python |
a6bdff1bad08a0395f3ed29004c0d6a064061d99 | Bump version | thombashi/sqliteschema | sqliteschema/__version__.py | sqliteschema/__version__.py | # encoding: utf-8
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.17.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016, {}".format(__author__)
__license__ = "MIT License"
__version__ = "0.16.2"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
d97947fa3f8ecb145392992a90a1c19b4567ab2c | Tag new release: 2.2.10 | Floobits/floobits-sublime,Floobits/floobits-sublime | floo/version.py | floo/version.py | PLUGIN_VERSION = '2.2.10'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.03'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| PLUGIN_VERSION = '2.2.9'
# The line above is auto-generated by tag_release.py. Do not change it manually.
try:
from .common import shared as G
assert G
except ImportError:
from common import shared as G
G.__VERSION__ = '0.03'
G.__PLUGIN_VERSION__ = PLUGIN_VERSION
| apache-2.0 | Python |
a0cc5b209dd84e58649ea49f3de25472350bd938 | Remove unnecessary print statement | sirmarcel/floq | floq/helpers.py | floq/helpers.py | import numpy as np
def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
def is_unitary(u, tolerance=1e-10):
unitary = np.eye(u.shape[0])
umat = np.mat(u)
product = umat.H * umat
return np.allclose(product, unitary, atol=tolerance)
| import numpy as np
def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
def is_unitary(u, tolerance=1e-10):
unitary = np.eye(u.shape[0])
umat = np.mat(u)
product = umat.H * umat
print product
return np.allclose(product, unitary, atol=tolerance)
| mit | Python |
22e2e4887eea8b4ade6da7c72d46f91d74d643e5 | Remove unused string import. | genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2 | wqflask/wqflask/database.py | wqflask/wqflask/database.py | # Module to initialize sqlalchemy with flask
import os
import sys
from typing import Tuple
from urllib.parse import urlparse
import importlib
import contextlib
import MySQLdb
def read_from_pyfile(pyfile, setting):
orig_sys_path = sys.path[:]
sys.path.insert(0, os.path.dirname(pyfile))
module = importlib.import_module(os.path.basename(pyfile).strip(".py"))
sys.path = orig_sys_path[:]
return module.__dict__.get(setting)
def sql_uri():
"""Read the SQL_URI from the environment or settings file."""
return os.environ.get(
"SQL_URI", read_from_pyfile(
os.environ.get(
"GN2_SETTINGS", os.path.abspath("../etc/default_settings.py")),
"SQL_URI"))
def parse_db_url(sql_uri: str) -> Tuple:
"""
Parse SQL_URI env variable from an sql URI
e.g. 'mysql://user:pass@host_name/db_name'
"""
parsed_db = urlparse(sql_uri)
return (
parsed_db.hostname, parsed_db.username, parsed_db.password,
parsed_db.path[1:], parsed_db.port)
@contextlib.contextmanager
def database_connection():
"""Provide a context manager for opening, closing, and rolling
back - if supported - a database connection. Should an error occur,
and if the table supports transactions, the connection will be
rolled back.
"""
host, user, passwd, db_name, port = parse_db_url(sql_uri())
connection = MySQLdb.connect(
db=db_name, user=user, passwd=passwd or '', host=host, port=port,
autocommit=False # Required for roll-backs
)
try:
yield connection
except Exception:
connection.rollback()
raise
else:
connection.commit()
finally:
connection.close()
| # Module to initialize sqlalchemy with flask
import os
import sys
from string import Template
from typing import Tuple
from urllib.parse import urlparse
import importlib
import contextlib
import MySQLdb
def read_from_pyfile(pyfile, setting):
orig_sys_path = sys.path[:]
sys.path.insert(0, os.path.dirname(pyfile))
module = importlib.import_module(os.path.basename(pyfile).strip(".py"))
sys.path = orig_sys_path[:]
return module.__dict__.get(setting)
def sql_uri():
"""Read the SQL_URI from the environment or settings file."""
return os.environ.get(
"SQL_URI", read_from_pyfile(
os.environ.get(
"GN2_SETTINGS", os.path.abspath("../etc/default_settings.py")),
"SQL_URI"))
def parse_db_url(sql_uri: str) -> Tuple:
"""
Parse SQL_URI env variable from an sql URI
e.g. 'mysql://user:pass@host_name/db_name'
"""
parsed_db = urlparse(sql_uri)
return (
parsed_db.hostname, parsed_db.username, parsed_db.password,
parsed_db.path[1:], parsed_db.port)
@contextlib.contextmanager
def database_connection():
"""Provide a context manager for opening, closing, and rolling
back - if supported - a database connection. Should an error occur,
and if the table supports transactions, the connection will be
rolled back.
"""
host, user, passwd, db_name, port = parse_db_url(sql_uri())
connection = MySQLdb.connect(
db=db_name, user=user, passwd=passwd or '', host=host, port=port,
autocommit=False # Required for roll-backs
)
try:
yield connection
except Exception:
connection.rollback()
raise
else:
connection.commit()
finally:
connection.close()
| agpl-3.0 | Python |
909711c30817276c8817880a06577d5821d4032c | Update driver.py | apache8080/NVIDIABot,Talos4757/NVIDIABot | tegrabot/driver.py | tegrabot/driver.py | '''
Copyright (c) 2014, Rishi Desai
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import serial
import time
import math
class Driver:
#ser = serial.Serial("/dev/ttyACM0", 9600)
#motorPorts=[]
#motor setup 2
def __init__(self):
def driveMotor(self, speed):
motorSpeed =90
if(speed > 0):
motorSpeed = speed*90
motorSpeed+=90
else:
motorSpeed = speed*90
motorSpeed +=90
return motorSpeed
def tankDrive(self, leftJoy, rightJoy):
rightSpeed = 90
leftSpeed = 90
if(leftJoy > 0.05):
leftSpeed = leftSpeed*90
leftSpeed+=90
elif(leftJoy<-0.05):
leftSpeed = leftSpeed*90
leftSpeed +=90
if(rightJoy > 0.05):
rightSpeed = rightSpeed*90
rightSpeed+=90
elif(rightJoy<-0.05):
rightSpeed = rightSpeed*90
rightSpeed +=90
speeds = [rightSpeed,leftSpeed]
return speeds
| import serial
import time
import math
class Driver:
#ser = serial.Serial("/dev/ttyACM0", 9600)
#motorPorts=[]
#motor setup 2
def __init__(self):
def driveMotor(self, speed):
motorSpeed =90
if(speed > 0):
motorSpeed = speed*90
motorSpeed+=90
else:
motorSpeed = speed*90
motorSpeed +=90
return motorSpeed
def tankDrive(self, leftJoy, rightJoy):
rightSpeed = 90
leftSpeed = 90
if(leftJoy > 0.05):
leftSpeed = leftSpeed*90
leftSpeed+=90
elif(leftJoy<-0.05):
leftSpeed = leftSpeed*90
leftSpeed +=90
if(rightJoy > 0.05):
rightSpeed = rightSpeed*90
rightSpeed+=90
elif(rightJoy<-0.05):
rightSpeed = rightSpeed*90
rightSpeed +=90
speeds = [rightSpeed,leftSpeed]
return speeds
| bsd-2-clause | Python |
5ca5395bce09cdbba40550efdcace2d3faff3c65 | Check for k and R | poliastro/poliastro | tests/test_bodies.py | tests/test_bodies.py | import pytest
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Body, Earth, Jupiter, Sun
def test_body_has_k_given_in_constructor():
k = 3.98e5 * u.km ** 3 / u.s ** 2
earth = Body(None, k, "")
assert earth.k == k
def test_body_from_parameters_raises_valueerror_if_k_units_not_correct():
wrong_k = 4902.8 * u.kg
_name = _symbol = ""
_R = 0
with pytest.raises(u.UnitsError) as excinfo:
Body.from_parameters(None, wrong_k, _name, _symbol, _R)
assert (
"UnitsError: Argument 'k' to function 'from_parameters' must be in units convertible to 'km3 / s2'."
in excinfo.exconly()
)
def test_body_from_parameters_returns_body_object():
k = 1.26712763e17 * u.m ** 3 / u.s ** 2
R = 71492000 * u.m
_name = _symbol = "jupiter"
jupiter = Body.from_parameters(Sun, k, _name, _symbol, Jupiter.R)
assert jupiter.k == k
assert jupiter.R == R
def test_body_printing_has_name_and_symbol():
name = "2 Pallas"
symbol = u"\u26b4"
k = 1.41e10 * u.m ** 3 / u.s ** 2
pallas2 = Body(None, k, name, symbol)
assert name in str(pallas2)
assert symbol in str(pallas2)
def test_earth_has_k_given_in_literature():
expected_k = 3.986004418e14 * u.m ** 3 / u.s ** 2
k = Earth.k
assert_quantity_allclose(k.decompose([u.km, u.s]), expected_k)
def test_earth_has_angular_velocity_given_in_literature():
expected_k = 7.292114e-5 * u.rad / u.s
k = Earth.angular_velocity
assert_quantity_allclose(k.decompose([u.rad, u.s]), expected_k)
def test_from_relative():
TRAPPIST1 = Body.from_relative(
reference=Sun,
parent=None,
k=0.08, # Relative to the Sun
name="TRAPPIST",
symbol=None,
R=0.114,
) # Relative to the Sun
# Check values properly calculated
VALUECHECK = Body.from_relative(
reference=Earth,
parent=TRAPPIST1,
k=1,
name="VALUECHECK",
symbol=None,
R=1,
)
assert Earth.k == VALUECHECK.k
assert Earth.R == VALUECHECK.R
| import pytest
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Body, Earth, Jupiter, Sun
def test_body_has_k_given_in_constructor():
k = 3.98e5 * u.km ** 3 / u.s ** 2
earth = Body(None, k, "")
assert earth.k == k
def test_body_from_parameters_raises_valueerror_if_k_units_not_correct():
wrong_k = 4902.8 * u.kg
_name = _symbol = ""
_R = 0
with pytest.raises(u.UnitsError) as excinfo:
Body.from_parameters(None, wrong_k, _name, _symbol, _R)
assert (
"UnitsError: Argument 'k' to function 'from_parameters' must be in units convertible to 'km3 / s2'."
in excinfo.exconly()
)
def test_body_from_parameters_returns_body_object():
k = 3.98e5 * u.km ** 3 / u.s ** 2
_name = _symbol = "jupiter"
jupiter = Body.from_parameters(Sun, k, _name, _symbol, Jupiter.R)
assert type(jupiter) == Body
def test_body_printing_has_name_and_symbol():
name = "2 Pallas"
symbol = u"\u26b4"
k = 1.41e10 * u.m ** 3 / u.s ** 2
pallas2 = Body(None, k, name, symbol)
assert name in str(pallas2)
assert symbol in str(pallas2)
def test_earth_has_k_given_in_literature():
expected_k = 3.986004418e14 * u.m ** 3 / u.s ** 2
k = Earth.k
assert_quantity_allclose(k.decompose([u.km, u.s]), expected_k)
def test_earth_has_angular_velocity_given_in_literature():
expected_k = 7.292114e-5 * u.rad / u.s
k = Earth.angular_velocity
assert_quantity_allclose(k.decompose([u.rad, u.s]), expected_k)
def test_from_relative():
TRAPPIST1 = Body.from_relative(
reference=Sun,
parent=None,
k=0.08, # Relative to the Sun
name="TRAPPIST",
symbol=None,
R=0.114,
) # Relative to the Sun
# Check values properly calculated
VALUECHECK = Body.from_relative(
reference=Earth,
parent=TRAPPIST1,
k=1,
name="VALUECHECK",
symbol=None,
R=1,
)
assert Earth.k == VALUECHECK.k
assert Earth.R == VALUECHECK.R
| mit | Python |
ca5151ee58f84715850cd90b5c0a431bcc109452 | add not implement error for kugou | smart-techs/you-get,linhua55/you-get,zmwangx/you-get,forin-xyz/you-get,flwh/you-get,candlewill/you-get,CzBiX/you-get,pastebt/you-get,cnbeining/you-get,FelixYin66/you-get,specter4mjy/you-get,runningwolf666/you-get,qzane/you-get,fffonion/you-get,tigerface/you-get,lilydjwg/you-get,shanyimin/you-get,rain1988/you-get,dream1986/you-get,j4s0nh4ck/you-get,xyuanmu/you-get,cnbeining/you-get,chares-zhang/you-get,pitatensai/you-get,lilydjwg/you-get,power12317/you-get,xyuanmu/you-get,Red54/you-get,jindaxia/you-get,zmwangx/you-get,linhua55/you-get,XiWenRen/you-get,qzane/you-get,smart-techs/you-get | src/you_get/extractor/kugou.py | src/you_get/extractor/kugou.py | #!/usr/bin/env python
__all__ = ['kugou_download']
from ..common import *
from json import loads
from base64 import b64decode
def kugou_download(url, output_dir=".", merge=True, info_only=False):
if url.lower().find("5sing")!=-1:
#for 5sing.kugou.com
html=get_html(url)
ticket=r1(r'"ticket":\s*"(.*)"',html)
j=loads(str(b64decode(ticket),encoding="utf-8"))
url=j[u'file']
title=j[u'songName']
songtype, ext, size = url_info(url)
print_info(site_info, title, songtype, size)
if not info_only:
download_urls([url], title, ext, size, output_dir)
else:
#for the www.kugou.com/
raise NotImplementedError(url)
site_info = "kugou.com"
download = kugou_download
download_playlist = playlist_not_supported("kugou")
| #!/usr/bin/env python
__all__ = ['kugou_download']
from ..common import *
from json import loads
from base64 import b64decode
def kugou_download(url, output_dir=".", merge=True, info_only=False):
if url.lower().find("5sing")!=-1:
html=get_html(url)
ticket=r1(r'"ticket":\s*"(.*)"',html)
j=loads(str(b64decode(ticket),encoding="utf-8"))
url=j[u'file']
title=j[u'songName']
songtype, ext, size = url_info(url)
print_info(site_info, title, songtype, size)
if not info_only:
download_urls([url], title, ext, size, output_dir)
# html = get_html(url)
# title = r1(r'var SongName = "(.*)";', html)
# url = r1(r'file: "(\S*)"', html)
site_info = "kugou.com"
download = kugou_download
download_playlist = playlist_not_supported("kugou")
| mit | Python |
89e3d4ae0a7be5baef6354324b2e7f8623564c94 | Use 'array' rather than 'real' for data array name in olfactory stimulus generation script. | cerrno/neurokernel | examples/olfaction/data/gen_olf_input.py | examples/olfaction/data/gen_olf_input.py | #!/usr/bin/env python
"""
Generate sample olfactory model stimulus.
"""
import numpy as np
import h5py
osn_num = 1375
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data points in time
t = np.arange(0, dt*Nt, dt)
I = 0.5195 # amplitude of odorant concentration
u_on = I*np.ones(Ot, dtype=np.float64)
u_off = np.zeros(Ot, dtype=np.float64)
u_reset = np.zeros(Rt, dtype=np.float64)
u = np.concatenate((u_off, u_reset, u_on, u_reset, u_off, u_reset, u_on))
u_all = np.transpose(np.kron(np.ones((osn_num, 1)), u))
with h5py.File('olfactory_input.h5', 'w') as f:
f.create_dataset('array', (Nt, osn_num),
dtype=np.float64,
data=u_all)
| #!/usr/bin/env python
"""
Generate sample olfactory model stimulus.
"""
import numpy as np
import h5py
osn_num = 1375
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data points in time
t = np.arange(0, dt*Nt, dt)
I = 0.5195 # amplitude of odorant concentration
u_on = I*np.ones(Ot, dtype=np.float64)
u_off = np.zeros(Ot, dtype=np.float64)
u_reset = np.zeros(Rt, dtype=np.float64)
u = np.concatenate((u_off, u_reset, u_on, u_reset, u_off, u_reset, u_on))
u_all = np.transpose(np.kron(np.ones((osn_num, 1)), u))
with h5py.File('olfactory_input.h5', 'w') as f:
f.create_dataset('real', (Nt, osn_num),
dtype=np.float64,
data=u_all)
| bsd-3-clause | Python |
248588d141f350d6b00ef24ed1f99bf1e460b25c | add new test | lapisdecor/bzoinq | tests/test_bzoinq.py | tests/test_bzoinq.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_bzoinq
----------------------------------
Tests for `bzoinq` module.
"""
import pytest
from bzoinq import bzoinq
# @pytest.fixture
# def response():
# """Sample pytest fixture.
# See more at: http://doc.pytest.org/en/latest/fixture.html
# """
# # import requests
# # return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
#
#
# def test_content(response):
# """Sample pytest test function with the pytest fixture as an argument.
# """
# # from bs4 import BeautifulSoup
# # assert 'GitHub' in BeautifulSoup(response.content).title.string
def test_to_datetime():
import datetime
mytime = "2017-10-1 10:20:00"
assert bzoinq.to_datetime(mytime) == datetime.datetime(2017, 10, 1, 10, 20, 0)
def test_sound_and_task():
a = bzoinq.Bzoinq()
a.create_task()
# test that the first id is 1
assert a.task_id == 1
def test_monitor():
import time
a = bzoinq.Bzoinq()
a.create_task("First task")
b = bzoinq.Monitor(a)
b.start()
time.sleep(5)
b.stop()
def test_two_tasks():
import datetime
import time
current_time = datetime.datetime.now()
time_in_10 = current_time + datetime.timedelta(seconds=10)
time_in_5 = current_time + datetime.timedelta(seconds=5)
a = bzoinq.Bzoinq()
a.create_task("10 seconds task", time_in_10)
a.create_task("5 seconds task", time_in_5)
b = bzoinq.Monitor(a)
b.start()
time.sleep(15)
b.stop()
def test_monitor_again():
import time
a = bzoinq.Bzoinq()
b = bzoinq.Monitor(a)
b.start()
a.create_task("Task to test the Monitor")
time.sleep(3)
a.create_task("Second task to test the Monitor")
time.sleep(3)
b.stop()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_bzoinq
----------------------------------
Tests for `bzoinq` module.
"""
import pytest
from bzoinq import bzoinq
# @pytest.fixture
# def response():
# """Sample pytest fixture.
# See more at: http://doc.pytest.org/en/latest/fixture.html
# """
# # import requests
# # return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
#
#
# def test_content(response):
# """Sample pytest test function with the pytest fixture as an argument.
# """
# # from bs4 import BeautifulSoup
# # assert 'GitHub' in BeautifulSoup(response.content).title.string
def test_to_datetime():
import datetime
mytime = "2017-10-1 10:20:00"
assert bzoinq.to_datetime(mytime) == datetime.datetime(2017, 10, 1, 10, 20, 0)
def test_sound_and_task():
a = bzoinq.Bzoinq()
a.create_task()
# test that the first id is 1
assert a.task_id == 1
def test_monitor():
import time
a = bzoinq.Bzoinq()
a.create_task("First task")
b = bzoinq.Monitor(a)
b.start()
time.sleep(5)
b.stop()
def test_two_tasks():
import datetime
import time
current_time = datetime.datetime.now()
time_in_10 = current_time + datetime.timedelta(seconds=10)
time_in_5 = current_time + datetime.timedelta(seconds=5)
a = bzoinq.Bzoinq()
a.create_task("10 seconds task", time_in_10)
a.create_task("5 seconds task", time_in_5)
b = bzoinq.Monitor(a)
b.start()
time.sleep(15)
b.stop()
def test_monitor_again():
import time
a = bzoinq.Bzoinq()
b = bzoinq.Monitor(a)
b.start()
a.create_task("Task to test the Monitor")
time.sleep(3)
a.create_task("Second task to test the Monitor")
b.stop()
| mit | Python |
15c039dcf43cc3029ab8524edb17f2c619974757 | Test case of getting core members updated to include Bob Tolbert (either he was added recently to core or I had missed it before some how but test was passing :-/ ) | hylang/hygdrop,hylang/hygdrop | tests/test_github.py | tests/test_github.py | #!/usr/bin/env python
import os

# Absolute path to the Hy plugin under test, resolved relative to this
# test file so the suite works from any working directory.
plugin_to_load = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              "../plugins/github.hy")
from hy.importer import import_file_to_module
# Compile the Hy source and expose it here as a regular module object.
g = import_file_to_module("github", plugin_to_load)
def test_get_github_issue():
    """Issue lookups should render a one-line summary ending in a link."""
    expected = ("Issue #180 on hylang/hy by khinsen: "
                "Macro expansion works differently from Lisp conventions "
                "(open) <https://github.com/hylang/hy/issues/180>")
    assert g.get_github_issue(None, None, "180", dry_run=True) == expected
def test_get_github_commit():
    """Commit lookups should render author, subject and a full commit link."""
    expected = ("Commit 3e8941c on hylang/hy by Berker Peksag: "
                "Convert stdout and stderr to UTF-8 properly in the run_cmd helper. "
                "<https://github.com/hylang/hy"
                "/commit/3e8941cdde01635890db524c4789f0640fe665c3>")
    assert g.get_github_commit(None, None, "3e8941c", dry_run=True) == expected
def test_get_core_members():
    """The core-team listing should name every member, comma separated."""
    members = ["Julien Danjou", "Nicolas Dandrimont",
               "Gergely Nagy", "Berker Peksag",
               "Christopher Allan Webber", "khinsen",
               "J Kenneth King", "Paul Tagliamonte",
               "Bob Tolbert", "Will Kahn-Greene",
               "Morten Linderud", "Abhishek L"]
    expected = "Core Team consists of: " + ", ".join(members)
    assert g.get_core_members(None, None, dry_run=True) == expected
| #!/usr/bin/env python
import os

# Absolute path to the Hy plugin under test, resolved relative to this
# test file so the suite works from any working directory.
plugin_to_load = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                              "../plugins/github.hy")
from hy.importer import import_file_to_module
# Compile the Hy source and expose it here as a regular module object.
g = import_file_to_module("github", plugin_to_load)
def test_get_github_issue():
    """Issue lookups should render a one-line summary ending in a link."""
    expected = ("Issue #180 on hylang/hy by khinsen: "
                "Macro expansion works differently from Lisp conventions "
                "(open) <https://github.com/hylang/hy/issues/180>")
    assert g.get_github_issue(None, None, "180", dry_run=True) == expected
def test_get_github_commit():
    """Commit lookups should render author, subject and a full commit link."""
    expected = ("Commit 3e8941c on hylang/hy by Berker Peksag: "
                "Convert stdout and stderr to UTF-8 properly in the run_cmd helper. "
                "<https://github.com/hylang/hy"
                "/commit/3e8941cdde01635890db524c4789f0640fe665c3>")
    assert g.get_github_commit(None, None, "3e8941c", dry_run=True) == expected
def test_get_core_members():
    """The core-team listing should name every member, comma separated."""
    members = ["Julien Danjou", "Nicolas Dandrimont",
               "Gergely Nagy", "Berker Peksag",
               "Christopher Allan Webber", "khinsen",
               "J Kenneth King", "Paul Tagliamonte",
               "Will Kahn-Greene", "Morten Linderud",
               "Abhishek L"]
    expected = "Core Team consists of: " + ", ".join(members)
    assert g.get_core_members(None, None, dry_run=True) == expected
| mit | Python |
765a6b8e0ea87a5472bd5be393c003739d4fb466 | add argument parser | desihub/desiutil,desihub/desiutil | py/desiutil/census.py | py/desiutil/census.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
===============
desiutil.census
===============
Determine the number of files and size in DESI data file systems.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
def get_options(test_args=None):
    """Parse command-line options.

    Parameters
    ----------
    test_args : :class:`list`
        Override command-line arguments for testing purposes.

    Returns
    -------
    :class:`argparse.Namespace`
        A simple object containing the parsed options.
    """
    from sys import argv
    from os.path import basename
    from argparse import ArgumentParser
    prog_name = basename(argv[0])
    parser = ArgumentParser(prog=prog_name,
                            description="Count number and size of DESI data files.")
    parser.add_argument('-v', '--verbose', action='store_true',
                        help="Print lots of extra information.")
    # A caller-supplied argument list takes precedence over sys.argv.
    cli_args = argv[1:] if test_args is None else test_args  # pragma: no cover
    return parser.parse_args(cli_args)
def main():
    """Entry point for the :command:`desi_data_census` script.

    Returns
    -------
    :class:`int`
        Exit status that will be passed to :func:`sys.exit`.
    """
    from .log import get_logger, DEBUG
    options = get_options()
    # Honour the --verbose flag by dropping the log level to DEBUG.
    log = get_logger(DEBUG) if options.verbose else get_logger()
    if options.verbose:
        log.debug("Verbose logging is set.")
    return 0
# * Directories to check:
# - Imaging raw & reduced.
# - spectro raw & reduced.
# - work directories.
# - non-footprint image data.
# * Check group id, readability.
# * Record mtime, size.
# * Shift to fiscal year.
# * Don't record filenames, just high-level directories.
# * Treat projecta as same system, follow symlinks to projecta
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
"""
===============
desiutil.census
===============
Determine the number of files and size in DESI data file systems.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
def main():
    """Entry point for the :command:`desi_data_census` script.

    Returns
    -------
    :class:`int`
        Exit status that will be passed to :func:`sys.exit`.
    """
    # Placeholder implementation: nothing to do yet, report success.
    exit_status = 0
    return exit_status
| bsd-3-clause | Python |
87753b0bff057b61879e2dbcc4dceba7aec95451 | Disable pool generation by default | bliksemlabs/bliksemintegration,bliksemlabs/bliksemintegration | settings/const.py | settings/const.py | import urllib2
#Database settings
# psycopg2-style connection strings: 'ridprod' is the production RID
# database; the KV1/IFF databases are temporary import targets.
database_connect = "dbname='ridprod'"
kv1_database_connect = "dbname='kv1tmp'"
iff_database_connect = "dbname='ifftmp'"
# Feature toggle: pool generation is switched off in this configuration.
pool_generation_enabled = False

#NDOVLoket settings
# Host and HTTP Basic-Auth credentials for the NDOV Loket data host.
# NOTE(review): credentials are stored in plain text in version control --
# consider moving them to an untracked file or environment variables.
ndovloket_url = "data.ndovloket.nl"
ndovloket_user = "voorwaarden"
ndovloket_password = "geaccepteerd"
# Install a process-wide urllib2 opener so any urllib2.urlopen() call
# authenticates against the NDOV Loket automatically.
# NOTE(review): this runs as a side effect of importing this module.
# NOTE(review): the host name is passed as the auth *realm*; Basic auth is
# only sent if the server's realm literally matches it -- verify.
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(realm=ndovloket_url,
                          uri=ndovloket_url,
                          user=ndovloket_user,
                          passwd=ndovloket_password)
opener = urllib2.build_opener(auth_handler)
urllib2.install_opener(opener)
| import urllib2
#Database settings
# psycopg2-style connection strings: 'ridprod' is the production RID
# database; the KV1/IFF databases are temporary import targets.
database_connect = "dbname='ridprod'"
kv1_database_connect = "dbname='kv1tmp'"
iff_database_connect = "dbname='ifftmp'"
# Feature toggle: pool generation is switched on in this configuration.
pool_generation_enabled = True

#NDOVLoket settings
# Host and HTTP Basic-Auth credentials for the NDOV Loket data host.
# NOTE(review): credentials are stored in plain text in version control --
# consider moving them to an untracked file or environment variables.
ndovloket_url = "data.ndovloket.nl"
ndovloket_user = "voorwaarden"
ndovloket_password = "geaccepteerd"
# Install a process-wide urllib2 opener so any urllib2.urlopen() call
# authenticates against the NDOV Loket automatically.
# NOTE(review): this runs as a side effect of importing this module.
# NOTE(review): the host name is passed as the auth *realm*; Basic auth is
# only sent if the server's realm literally matches it -- verify.
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(realm=ndovloket_url,
                          uri=ndovloket_url,
                          user=ndovloket_user,
                          passwd=ndovloket_password)
opener = urllib2.build_opener(auth_handler)
urllib2.install_opener(opener)
| bsd-2-clause | Python |
26154817691dd5da92d94894e12bb1422c26e038 | Change cli timeout arg to resemble previous interface | sirMackk/py3tftp,sirMackk/py3tftp | py3tftp/cli_parser.py | py3tftp/cli_parser.py | import argparse
import logging
from sys import exit
from py3tftp import __version__
# Shown at the bottom of the --help output.
EPILOG = """
Released under the MIT license.
Copyright 2016 Matt O. <matt@mattscodecave.com>
"""

# Defaults handed to logging.basicConfig() by parse_cli_arguments();
# mutated in place when -v/--verbose or -l/--file-log is given.
logging_config = {
    'format': '%(asctime)s [%(levelname)s] %(message)s',
    'level': logging.INFO,
    'filename': None
}
def print_version():
    """Write the installed py3tftp version to stdout."""
    version_line = "py3tftp version: {}".format(__version__)
    print(version_line)
def parse_cli_arguments():
    """Parse command-line options for the py3tftp server.

    Side effects: updates the module-level ``logging_config`` from the
    parsed flags, calls ``logging.basicConfig`` with it, and, when
    ``--version`` is given, prints the version and exits the process.

    Returns:
        argparse.Namespace: the parsed arguments.
    """
    parser = argparse.ArgumentParser(epilog=EPILOG)
    parser.add_argument('--host',
                        default='0.0.0.0',
                        help=('IP of the interface the server will listen on. '
                              'Default: 0.0.0.0'))
    parser.add_argument(
        '-p',
        '--port',
        default=9069,
        type=int,
        help=(
            'Port the server will listen on. '
            'Default: 9069. TFTP standard-compliant port: 69 - '
            'requires superuser privileges.'))
    parser.add_argument(
        '--ack-timeout',
        default=0.5,
        type=float,
        help='Timeout for each ACK of the lock-step. Default: 0.5.')
    parser.add_argument(
        '--conn-timeout',
        dest="timeout",
        default=3.0,
        type=float,
        help=(
            'Timeout before the server gives up on a transfer and closes '
            'the connection. Default: 3.'))
    parser.add_argument('-l', '--file-log', help='Append output to log file.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Enable debug-level logging.')
    parser.add_argument('--version', action='store_true')
    args = parser.parse_args()
    if args.verbose:
        logging_config['level'] = logging.DEBUG
    if args.file_log:
        # Fix: argparse stores '--file-log' under the attribute 'file_log';
        # the previous 'args.log' raised AttributeError whenever -l was used.
        logging_config['filename'] = args.file_log
    if args.version:
        print_version()
        exit()
    logging.basicConfig(**logging_config)
    return args
| import argparse
import logging
from sys import exit
from py3tftp import __version__
# Shown at the bottom of the --help output.
EPILOG = """
Released under the MIT license.
Copyright 2016 Matt O. <matt@mattscodecave.com>
"""

# Defaults handed to logging.basicConfig() by parse_cli_arguments();
# mutated in place when -v/--verbose or -l/--file-log is given.
logging_config = {
    'format': '%(asctime)s [%(levelname)s] %(message)s',
    'level': logging.INFO,
    'filename': None
}
def print_version():
    """Report the installed py3tftp version on stdout."""
    message = "py3tftp version: {}".format(__version__)
    print(message)
def parse_cli_arguments():
    """Parse command-line options for the py3tftp server.

    Side effects: updates the module-level ``logging_config`` from the
    parsed flags, calls ``logging.basicConfig`` with it, and, when
    ``--version`` is given, prints the version and exits the process.

    Returns:
        argparse.Namespace: the parsed arguments.
    """
    parser = argparse.ArgumentParser(epilog=EPILOG)
    parser.add_argument('--host',
                        default='0.0.0.0',
                        help=('IP of the interface the server will listen on. '
                              'Default: 0.0.0.0'))
    parser.add_argument(
        '-p',
        '--port',
        default=9069,
        type=int,
        help=(
            'Port the server will listen on. '
            'Default: 9069. TFTP standard-compliant port: 69 - '
            'requires superuser privileges.'))
    parser.add_argument(
        '--ack-timeout',
        default=0.5,
        type=float,
        help='Timeout for each ACK of the lock-step. Default: 0.5.')
    parser.add_argument(
        '--timeout',
        default=3.0,
        type=float,
        help=(
            'Timeout before the server gives up on a transfer and closes '
            'the connection. Default: 3.'))
    parser.add_argument('-l', '--file-log', help='Append output to log file.')
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Enable debug-level logging.')
    parser.add_argument('--version', action='store_true')
    args = parser.parse_args()
    if args.verbose:
        logging_config['level'] = logging.DEBUG
    if args.file_log:
        # Fix: argparse stores '--file-log' under the attribute 'file_log';
        # the previous 'args.log' raised AttributeError whenever -l was used.
        logging_config['filename'] = args.file_log
    if args.version:
        print_version()
        exit()
    logging.basicConfig(**logging_config)
    return args
| mit | Python |
2f0a3071b235ee64f9ebef0b2deba5c5cd30479e | change debug level | guillermooo-forks/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo/dart-sublime-bundle,guillermooo-forks/dart-sublime-bundle | sublime_plugin_lib/__init__.py | sublime_plugin_lib/__init__.py | # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
import logging
import os
import sublime
class PluginLogger(object):
    """Thin wrapper around :mod:`logging` for plugin files in this package.

    Each instance owns a named stdlib logger whose level is forced to
    DEBUG and simply forwards the usual severity methods to it.
    """

    def __init__(self, name):
        # Force the wrapped logger to emit DEBUG and above.
        self.logger = logging.getLogger(name)
        self.logger.setLevel('DEBUG')

    def debug(self, msg, *args, **kwargs):
        """Forward a DEBUG-level record to the wrapped logger."""
        self.logger.debug(msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """Forward an INFO-level record to the wrapped logger."""
        self.logger.info(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """Forward a WARNING-level record to the wrapped logger."""
        self.logger.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """Forward an ERROR-level record to the wrapped logger."""
        self.logger.error(msg, *args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """Forward a CRITICAL-level record to the wrapped logger."""
        self.logger.critical(msg, *args, **kwargs)
| # Copyright (c) 2014, Guillermo López-Anglada. Please see the AUTHORS file for details.
# All rights reserved. Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.)
import logging
import os
import sublime
class PluginLogger(object):
    """Thin wrapper around :mod:`logging` for plugin files in this package.

    Each instance owns a named stdlib logger whose level is forced to
    ERROR and simply forwards the usual severity methods to it.
    """

    def __init__(self, name):
        # Suppress anything below ERROR on the wrapped logger.
        self.logger = logging.getLogger(name)
        self.logger.setLevel('ERROR')

    def debug(self, msg, *args, **kwargs):
        """Forward a DEBUG-level record to the wrapped logger."""
        self.logger.debug(msg, *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """Forward an INFO-level record to the wrapped logger."""
        self.logger.info(msg, *args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """Forward a WARNING-level record to the wrapped logger."""
        self.logger.warning(msg, *args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """Forward an ERROR-level record to the wrapped logger."""
        self.logger.error(msg, *args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """Forward a CRITICAL-level record to the wrapped logger."""
        self.logger.critical(msg, *args, **kwargs)
| bsd-3-clause | Python |
2c8b160e4b4e4d9f22bbb96bb5c74565595aabd1 | Adjust length of docstring underline | pyfarm/pyfarm-master,pyfarm/pyfarm-master,pyfarm/pyfarm-master | pyfarm/models/disk.py | pyfarm/models/disk.py | # No shebang line, this module is meant to be imported
#
# Copyright 2015 Ambient Entertainment GmbH & Co. KG
# Copyright 2015 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Disk Model
==========
Model describing a given disk, with size and free space.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.types import IDTypeAgent
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import (
TABLE_AGENT_DISK, MAX_MOUNTPOINT_LENGTH, TABLE_AGENT)
class AgentDisk(db.Model, UtilityMixins, ReprMixin):
    """ORM model describing a single disk reported by an agent.

    Each row stores the disk's mountpoint together with its total and
    free capacity in bytes; one agent may own several rows, one per disk.
    NOTE(review): ``UniqueConstraint`` is imported at module level but no
    uniqueness is enforced on (agent_id, mountpoint) -- confirm intent.
    """
    __tablename__ = TABLE_AGENT_DISK
    # Surrogate primary key.
    id = id_column(db.Integer)
    # Foreign key to the owning agent.
    agent_id = db.Column(
        IDTypeAgent,
        db.ForeignKey("%s.id" % TABLE_AGENT),
        nullable=False)
    # Mountpoint path; maximum length comes from the shared configuration.
    mountpoint = db.Column(
        db.String(MAX_MOUNTPOINT_LENGTH),
        nullable=False,
        doc="The mountpoint of this disk on the agent "
            "(Drive letter for Windows agents)")
    # Total disk capacity, in bytes.
    size = db.Column(
        db.BigInteger,
        nullable=False,
        doc="The total capacity of this disk in bytes")
    # Currently available space, in bytes.
    free = db.Column(
        db.BigInteger,
        nullable=False,
        doc="Available space on the disk in bytes.")
| # No shebang line, this module is meant to be imported
#
# Copyright 2015 Ambient Entertainment GmbH & Co. KG
# Copyright 2015 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Disk Model
==========
Model describing a given disk, with size and free space.
"""
from sqlalchemy.schema import UniqueConstraint
from pyfarm.master.application import db
from pyfarm.models.core.types import IDTypeAgent
from pyfarm.models.core.mixins import ReprMixin, UtilityMixins
from pyfarm.models.core.types import id_column
from pyfarm.models.core.cfg import (
TABLE_AGENT_DISK, MAX_MOUNTPOINT_LENGTH, TABLE_AGENT)
class AgentDisk(db.Model, UtilityMixins, ReprMixin):
    """ORM model describing a single disk reported by an agent.

    Each row stores the disk's mountpoint together with its total and
    free capacity in bytes; one agent may own several rows, one per disk.
    NOTE(review): ``UniqueConstraint`` is imported at module level but no
    uniqueness is enforced on (agent_id, mountpoint) -- confirm intent.
    """
    __tablename__ = TABLE_AGENT_DISK
    # Surrogate primary key.
    id = id_column(db.Integer)
    # Foreign key to the owning agent.
    agent_id = db.Column(
        IDTypeAgent,
        db.ForeignKey("%s.id" % TABLE_AGENT),
        nullable=False)
    # Mountpoint path; maximum length comes from the shared configuration.
    mountpoint = db.Column(
        db.String(MAX_MOUNTPOINT_LENGTH),
        nullable=False,
        doc="The mountpoint of this disk on the agent "
            "(Drive letter for Windows agents)")
    # Total disk capacity, in bytes.
    size = db.Column(
        db.BigInteger,
        nullable=False,
        doc="The total capacity of this disk in bytes")
    # Currently available space, in bytes.
    free = db.Column(
        db.BigInteger,
        nullable=False,
        doc="Available space on the disk in bytes.")
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.