commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
927776d61abe9aae55689ffd7bb6ea12114b31fd | add main package file | thomashuang/angel,whiteclover/angel | angel/__init__.py | angel/__init__.py | from angel.html import HTML
from angel.css import CSS
from angel.dom import DOM
__version__ = '0.1.0'
VERSION = tuple(map(int, __version__.split('.')))
__all__ = ['HTML', 'CSS', 'DOM']
| mit | Python | |
f9cc8e04255d07c82d1a96682eebf044a6cdb977 | Fix conflicting migrations (#101) | UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator | projects/migrations/0003_auto_20160928_2137.py | projects/migrations/0003_auto_20160928_2137.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0002_auto_20151208_1553'),
]
operations = [
migrations.AlterField(
model_name='project',
name='status',
field=models.CharField(max_length=1, choices=[('p', 'proposition'), ('i', 'in progress'), ('f', 'finished'), ('a', 'ants are gone')], verbose_name='État'),
),
]
| agpl-3.0 | Python | |
37704a2e905342bf867225fb5a8a3fec0c55a9fd | Add strDiff problem and tests. Minor tweak to setup.py | HKuz/Test_Code | Problems/stringDiff.py | Problems/stringDiff.py | #!/Applications/anaconda/envs/Python3/bin
def main():
# Test suite
tests = [
[None, None, None], # Should throw a TypeError
['abcd', 'abcde', 'e'],
['aaabbcdd', 'abdbacade', 'e'],
['abdbacade', 'aaabbcdd', 'e']
]
for item in tests:
try:
temp_result = find_diff(item[0], item[1])
if temp_result[0] == item[2]:
print('PASSED: find_diff({}, {}) returned {}'.format(item[0], item[1], temp_result))
else:
print('FAILED: find_diff({}, {}) returned {}, should have returned {}'.format(item[0], item[1], temp_result, item[2]))
except TypeError:
print('PASSED TypeError test')
return 0
return
def find_diff(str1, str2):
'''
Finds the one additional character in str 2 vs. str1
Input: two strings
Output: char (one additional character in str2)
Assumes str2 contains all characters from str1, with one additional one
'''
if str1 is None or str2 is None:
raise TypeError
shorter = str1 if len(str1) < len(str2) else str2
longer = str1 if len(str1) >= len(str2) else str2
result = set(longer) - set(shorter)
return result.pop()
if __name__ == '__main__':
main()
| mit | Python | |
dd751e5477629adddedcba2734c33c9839506257 | Create mailer file | KpaBap/streetview_datechange_emailer | street_view_mailer.py | street_view_mailer.py | try:
from selenium import webdriver
except:
raise RuntimeError("Install Selenium for Python 3.x please")
import re
import smtplib
import mimetypes
from email.mime.multipart import MIMEMultipart
from email import encoders
from email.mime.image import MIMEImage
from email.mime.base import MIMEBase
"""
Author: Iavor Todorov (kpabap@gmail.com)
Requires: Python 3.x, Selenium, and Chrome WebDriver
Install selenium with - pip install selenium
Install: https://sites.google.com/a/chromium.org/chromedriver/ to /usr/local/bin or however your OS does it
"""
def send_email_with_attachment(email_from, email_to, subject, smtp_ip_port, smtp_username, smtp_password, attachment_filename):
"""This function sends an email with an attachment using the provided credentials and filename """
email = MIMEMultipart()
email["From"] = email_from
email["To"] = email_to
email['Subject'] = subject
email.preamble = email['Subject']
content_type, encoding = mimetypes.guess_type(attachment_filename)
if content_type is None or encoding is not None:
content_type = "application/octet-stream"
main_type, sub_type = content_type.split("/", 1)
if main_type == "image":
file_pointer = open(attachment_filename, "rb")
attachment = MIMEImage(file_pointer.read(), _subtype=sub_type)
file_pointer.close()
else:
file_pointer = open(attachment_filename, "rb")
attachment = MIMEBase(main_type, sub_type)
attachment.set_payload(file_pointer.read())
file_pointer.close()
encoders.encode_base64(attachment)
attachment.add_header("Content-Disposition", "attachment", filename=attachment_filename)
email.attach(attachment)
server = smtplib.SMTP(smtp_ip_port)
server.starttls()
server.login(smtp_username, smtp_password)
server.sendmail(email_from, email_to, email.as_string())
server.quit()
def load_page_and_save_screenshot_on_match(url, search_string, screenshot_filename):
"""This function loads a URL in Chrome Webdriver and tries to find the label "Image Capture" to determine when a particular
StreetView picture was taken - if a match is found, a screenshot file is saved"""
try:
browser = webdriver.Chrome()
except:
raise RuntimeError("Probably need to install Chromedriver")
browser.implicitly_wait(10) # seconds
browser.maximize_window()
browser.get(url)
image_capture_tag = browser.find_elements_by_xpath("//*[contains(text(), 'Image capture')]")[0]
image_capture_date = image_capture_tag.text
if re.match(".*{}".format(search_string), image_capture_date):
browser.save_screenshot(screenshot_filename)
print ("Search string was found. Saved screenshot file: {}".format(screenshot_filename))
browser.close()
return True
else:
browser.close()
return False
if __name__ == "__main__":
# Set these parameters for the page to load and the string to try and look for
screenshot_filename = "screenshot.png"
url = "https://www.google.com/maps/place/Golden+Gate+Bridge/@37.819352,-122.4783739,3a,60y,90t/data=!3m6!1e1!3m4!1sanDJxWsNq6y0PtH6JzfuZw!2e0!7i13312!8i6656!4m5!3m4!1s0x808586deffffffc3:0xcded139783705509!8m2!3d37.8199286!4d-122.4782551!6m1!1e1"
search_string = "2016"
# These are required to send emails
smtp_ip_port = "smtp.gmail.com:587"
smtp_username = "username@gmail.com "
smtp_password = "abcedfgh123"
email_from = smtp_username
email_to = "email@wherever.com"
subject = "Google StreetView image updated to {}!".format(search_string)
if load_page_and_save_screenshot_on_match(url, search_string, screenshot_filename):
send_email_with_attachment(email_from, email_to, subject, smtp_ip_port,smtp_username,smtp_password,screenshot_filename)
print ("Email sent.")
else:
print ("Sorry, the image does not seem to be updated yet.")
| mpl-2.0 | Python | |
ecd834d482ef001989d4ce60042e4d018a5ac569 | Add two test cases to exercise many expression commands: o test_many_expr_commands() o test_expr_commands_can_handle_quotes() | llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb | test/expression_command/test/TestExprs.py | test/expression_command/test/TestExprs.py | """
Test many basic expression commands.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
class BasicExprCommandsTestCase(TestBase):
mydir = os.path.join("expression_command", "test")
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break for main.c.
self.line = line_number('main.cpp',
'// Please test many expressions while stopped at this line:')
def test_many_expr_commands(self):
"""These basic expression commands should work as expected."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
self.expect("breakpoint set -f main.cpp -l %d" % self.line,
BREAKPOINT_CREATED,
startstr = "Breakpoint created: 1: file ='main.cpp', line = %d" %
self.line)
self.runCmd("run", RUN_SUCCEEDED)
self.expect("expression 2",
patterns = ["\(int\) \$.* = 2"])
# (int) $0 = 1
self.expect("expression 2ull",
patterns = ["\(unsigned long long\) \$.* = 2"])
# (unsigned long long) $1 = 2
self.expect("expression 2.234f",
patterns = ["\(float\) \$.* = 2\.234"])
# (float) $2 = 2.234
self.expect("expression 2.234",
patterns = ["\(double\) \$.* = 2\.234"])
# (double) $3 = 2.234
self.expect("expression 2+3",
patterns = ["\(int\) \$.* = 5"])
# (int) $4 = 5
self.expect("expression argc",
patterns = ["\(int\) \$.* = 1"])
# (int) $5 = 1
self.expect("expression argc + 22",
patterns = ["\(int\) \$.* = 23"])
# (int) $6 = 23
self.expect("expression argv",
patterns = ["\(const char \*\*\) \$.* = 0x"])
# (const char *) $7 = ...
self.expect("expression argv[0]",
substrs = ["(const char *)",
os.path.join(self.mydir, "a.out")])
# (const char *) $8 = 0x... "/Volumes/data/lldb/svn/trunk/test/expression_command/test/a.out"
@unittest2.expectedFailure
# rdar://problem/8686536
# CommandInterpreter::HandleCommand is stripping \'s from input for WantsRawCommand commands
def test_expr_commands_can_handle_quotes(self):
"""Throw some expression commands with quotes at lldb."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
self.expect("breakpoint set -f main.cpp -l %d" % self.line,
BREAKPOINT_CREATED,
startstr = "Breakpoint created: 1: file ='main.cpp', line = %d" %
self.line)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("expression 'a'")
self.runCmd('expression printf("\t\x68\n")')
self.runCmd('expression printf("\"\n")')
self.runCmd('expression printf("\'\n")')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
| apache-2.0 | Python | |
856a47b3b570da26a5cdf02b0e3dc0c2b1980307 | Add PLL routing fuzzer | gatecat/prjoxide,gatecat/prjoxide,gatecat/prjoxide | fuzzers/LIFCL/120-pll-routing/fuzzer.py | fuzzers/LIFCL/120-pll-routing/fuzzer.py | from fuzzconfig import FuzzConfig
from interconnect import fuzz_interconnect
import re
configs = [
{
"cfg": FuzzConfig(job="PLLULC", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R0C1:GPLL_ULC"]),
"rc": (1, 1),
},
{
"cfg": FuzzConfig(job="PLLLLC", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R55C0:GPLL_LLC"]),
"rc": (55, 1),
},
{
"cfg": FuzzConfig(job="PLLLRC", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R53C87:GPLL_LRC"]),
"rc": (53, 86),
},
]
ignore_tiles = set([
"CIB_R50C86:CIB_LR"
"CIB_R51C86:CIB_LR",
"CIB_R52C86:CIB_LR",
"CIB_R53C86:CIB_LR",
"CIB_R54C86:CIB_LR",
"CIB_R55C86:CIB",
"CIB_R55C2:CIB",
"CIB_R55C1:CIB",
"CIB_R54C1:CIB_LR",
"CIB_R53C1:CIB_LR",
"CIB_R52C1:CIB_LR",
"CIB_R2C1:CIB_LR",
"CIB_R1C1:CIB_T",
"CIB_R1C2:CIB_T",
"CIB_R1C3:CIB_T"
])
def main():
for config in configs:
cfg = config["cfg"]
cfg.setup()
r, c = config["rc"]
nodes = ["R{}C{}_*".format(r, c)]
def nodename_filter(x, nodes):
return ("R{}C{}_".format(r, c) in x) and ("PLL_CORE" in x or "REFMUX_CORE" in x)
fuzz_interconnect(config=cfg, nodenames=nodes, nodename_predicate=nodename_filter, regex=True, bidir=True, ignore_tiles=ignore_tiles)
if __name__ == "__main__":
main()
| isc | Python | |
e120f5fac68e2daf7cdf6e9d7b17b1f63a330595 | Fix ExpressionNode names that changed in django 1.5 | django-nonrel/djangoappengine,Implisit/djangoappengine,dwdraju/djangoappengine | djangoappengine/db/expressions.py | djangoappengine/db/expressions.py | from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.expressions import ExpressionNode
OPERATION_MAP = {
ExpressionNode.ADD: lambda x, y: x + y,
ExpressionNode.SUB: lambda x, y: x - y,
ExpressionNode.MUL: lambda x, y: x * y,
ExpressionNode.DIV: lambda x, y: x / y,
ExpressionNode.MOD: lambda x, y: x % y,
ExpressionNode.BITAND: lambda x, y: x & y,
ExpressionNode.BITOR: lambda x, y: x | y,
}
class ExpressionEvaluator(SQLEvaluator):
def __init__(self, expression, query, entity, allow_joins=True):
super(ExpressionEvaluator, self).__init__(expression, query,
allow_joins)
self.entity = entity
##################################################
# Vistor methods for final expression evaluation #
##################################################
def evaluate_node(self, node, qn, connection):
values = []
for child in node.children:
if hasattr(child, 'evaluate'):
value = child.evaluate(self, qn, connection)
else:
value = child
if value is not None:
values.append(value)
return OPERATION_MAP[node.connector](*values)
def evaluate_leaf(self, node, qn, connection):
return self.entity[qn(self.cols[node][1])]
| from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.expressions import ExpressionNode
OPERATION_MAP = {
ExpressionNode.ADD: lambda x, y: x + y,
ExpressionNode.SUB: lambda x, y: x - y,
ExpressionNode.MUL: lambda x, y: x * y,
ExpressionNode.DIV: lambda x, y: x / y,
ExpressionNode.MOD: lambda x, y: x % y,
ExpressionNode.AND: lambda x, y: x & y,
ExpressionNode.OR: lambda x, y: x | y,
}
class ExpressionEvaluator(SQLEvaluator):
def __init__(self, expression, query, entity, allow_joins=True):
super(ExpressionEvaluator, self).__init__(expression, query,
allow_joins)
self.entity = entity
##################################################
# Vistor methods for final expression evaluation #
##################################################
def evaluate_node(self, node, qn, connection):
values = []
for child in node.children:
if hasattr(child, 'evaluate'):
value = child.evaluate(self, qn, connection)
else:
value = child
if value is not None:
values.append(value)
return OPERATION_MAP[node.connector](*values)
def evaluate_leaf(self, node, qn, connection):
return self.entity[qn(self.cols[node][1])]
| bsd-3-clause | Python |
69f7bbc9cd0715d0076a78f7cc4f8c044ba7199b | add examples directory and idaho image viewer example | michaelconnor00/gbdxtools,michaelconnor00/gbdxtools | examples/view_idaho_image.py | examples/view_idaho_image.py | from gbdxtools import Interface
import json
gi = Interface()
#catid = '101001000DB2FB00'
#catid = '1020010013C4CF00'
catid = '10400100120FEA00'
idaho_images = gi.get_idaho_images_by_catid(catid)
description = gi.describe_idaho_images(idaho_images)
print json.dumps(description, indent=4, sort_keys=True)
gi.create_idaho_leaflet_viewer(idaho_images, 'outputmap.html') | mit | Python | |
03971196d5399851b17ff05b7ad03f438797f973 | Fix regen of EventInterfaces.in by depending on list | Bysmyyr/blink-crosswalk,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,jtg-gg/blink,PeterWangIntel/blink-crosswalk,XiaosongWei/blink-crosswalk,nwjs/blink,jtg-gg/blink,hgl888/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,jtg-gg/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,PeterWangIntel/blink-crosswalk,hgl888/blink-crosswalk-efl,Pluto-tv/blink-crosswalk,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,PeterWangIntel/blink-crosswalk,smishenk/blink-crosswalk,hgl888/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,kurli/blink-crosswalk,ondra-novak/blink,modulexcite/blink,kurli/blink-crosswalk,Bysmyyr/blink-crosswalk,nwjs/blink,jtg-gg/blink,Pluto-tv/blink-crosswalk,hgl888/blink-crosswalk-efl,modulexcite/blink,XiaosongWei/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,jtg-gg/blink,hgl888/blink-crosswalk-efl,jtg-gg/blink,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,ondra-novak/blink,crosswalk-project/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,nwjs/blink,jtg-gg/blink,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,ondra-novak/blink,ondra-novak/blink,kurli/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl,smishenk/blink-crosswalk,modulexcite/blink,ondra-novak/blink,nwjs/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,ondra-novak/blink,jtg-gg/blink,nwjs/blink,PeterWangIntel/blink-crosswalk,modulexcite/blink,nwjs/blink,kurli/blink-crosswalk,XiaosongWei/blink-crosswalk,Pluto-tv/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,jtg-gg/blink,Bysmyyr/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,modulexcite/blink,kurli/blink-crosswalk,ondra-novak/blink,hgl888/blink-crosswalk-efl,smishenk/blink-crosswalk,Bysmyyr/blink-crosswalk,nwjs/blink,ondra-novak/blink,PeterWangIntel/blink-crosswalk,modulexcite/blink,XiaosongWei/blink-crosswalk,kurli/blink-crosswalk,ondra-novak/blink,kurli/blink-crosswalk,PeterWangIntel/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,XiaosongWei/blink-crosswalk,jtg-gg/blink,smishenk/blink-crosswalk,nwjs/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,XiaosongWei/blink-crosswalk,Pluto-tv/blink-crosswalk,smishenk/blink-crosswalk,kurli/blink-crosswalk,kurli/blink-crosswalk,Pluto-tv/blink-crosswalk,XiaosongWei/blink-crosswalk,modulexcite/blink,smishenk/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,nwjs/blink,nwjs/blink,crosswalk-project/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl,modulexcite/blink,XiaosongWei/blink-crosswalk | Source/bindings/core_bindings_generated.gyp | Source/bindings/core_bindings_generated.gyp | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Generate EventInterfaces.in, used by core/ but depends on modules/,
# hence placed in bindings/ to avoid direct core/ -> modules/ dependency.
{
'includes': [
'bindings.gypi',
'../core/core.gypi',
'../modules/modules.gypi',
],
'targets': [
{
'target_name': 'core_bindings_generated',
'type': 'none',
'actions': [
{
'action_name': 'event_interfaces',
'variables': {
'event_idl_files': [
'<@(core_event_idl_files)',
'<@(modules_event_idl_files)',
],
'event_idl_files_list':
'<|(event_idl_files_list.tmp <@(event_idl_files))',
},
'inputs': [
'../bindings/scripts/generate_event_interfaces.py',
'../bindings/scripts/utilities.py',
'<(event_idl_files_list)',
'<@(event_idl_files)',
],
'outputs': [
'<(blink_output_dir)/EventInterfaces.in',
],
'action': [
'python',
'../bindings/scripts/generate_event_interfaces.py',
'--event-idl-files-list',
'<(event_idl_files_list)',
'--event-interfaces-file',
'<(blink_output_dir)/EventInterfaces.in',
'--write-file-only-if-changed',
'<(write_file_only_if_changed)',
],
},
],
},
], # targets
}
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Generate EventInterfaces.in, used by core/ but depends on modules/,
# hence placed in bindings/ to avoid direct core/ -> modules/ dependency.
{
'includes': [
'bindings.gypi',
'../core/core.gypi',
'../modules/modules.gypi',
],
'targets': [
{
'target_name': 'core_bindings_generated',
'type': 'none',
'actions': [
{
'action_name': 'event_interfaces',
'variables': {
'event_idl_files': [
'<@(core_event_idl_files)',
'<@(modules_event_idl_files)',
],
'event_idl_files_list':
'<|(event_idl_files_list.tmp <@(event_idl_files))',
},
'inputs': [
'../bindings/scripts/generate_event_interfaces.py',
'../bindings/scripts/utilities.py',
'<@(event_idl_files)',
],
'outputs': [
'<(blink_output_dir)/EventInterfaces.in',
],
'action': [
'python',
'../bindings/scripts/generate_event_interfaces.py',
'--event-idl-files-list',
'<(event_idl_files_list)',
'--event-interfaces-file',
'<(blink_output_dir)/EventInterfaces.in',
'--write-file-only-if-changed',
'<(write_file_only_if_changed)',
],
},
],
},
], # targets
}
| bsd-3-clause | Python |
de350de6093600cf891385fafd875485051219c3 | Add core urls to project urls | Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel | apps/core/urls.py | apps/core/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.HomeView.as_view(), name='home'),
]
| bsd-3-clause | Python | |
31f16844dd98516b1f57e3913d0fdba3e5715aa8 | Delete method now softdeletep's all FK related objects | pinax/pinax-models,angvp/django-logical-delete,Ubiwhere/pinax-models,angvp/django-logical-delete,naringas/pinax-models | logicaldelete/models.py | logicaldelete/models.py | import datetime
from django.db import models
from logicaldelete import managers
class Model(models.Model):
"""
This base model provides date fields and functionality to enable logical
delete functionality in derived models.
"""
date_created = models.DateTimeField(default=datetime.datetime.now)
date_modified = models.DateTimeField(default=datetime.datetime.now)
date_removed = models.DateTimeField(null=True, blank=True)
objects = managers.LogicalDeletedManager()
def active(self):
return self.date_removed == None
active.boolean = True
def delete(self):
'''
Soft delete all fk related objects that
inherit from logicaldelete class
'''
# Fetch related models
related_objs = [relation.get_accessor_name() for
relation in self._meta.get_all_related_objects()]
for objs_model in related_objs:
# Retrieve all related objects
objs = getattr(self, objs_model).all()
for obj in objs:
# Checking if inherits from logicaldelete
if not issubclass(obj.__class__, Model):
break
obj.delete()
# Soft delete the object
self.date_removed = timezone.now()
self.save()
class Meta:
abstract = True
| import datetime
from django.db import models
from logicaldelete import managers
class Model(models.Model):
"""
This base model provides date fields and functionality to enable logical
delete functionality in derived models.
"""
date_created = models.DateTimeField(default=datetime.datetime.now)
date_modified = models.DateTimeField(default=datetime.datetime.now)
date_removed = models.DateTimeField(null=True, blank=True)
objects = managers.LogicalDeletedManager()
def active(self):
return self.date_removed == None
active.boolean = True
def delete(self):
self.date_removed = datetime.datetime.now()
self.save()
class Meta:
abstract = True
| mit | Python |
3dfc4bfbb71d1e97a6b8213f338df487fbae5fcc | Add auto migration for minor model changes | andychase/codebook,andychase/codebook | topics/migrations/0016_auto_20151221_0923.py | topics/migrations/0016_auto_20151221_0923.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('topics', '0015_auto_20151218_1823'),
]
operations = [
migrations.AlterField(
model_name='link',
name='pub_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='date published'),
),
migrations.AlterField(
model_name='link',
name='tags',
field=models.ManyToManyField(blank=True, to='topics.Tag'),
),
migrations.AlterField(
model_name='link',
name='title',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='tag',
name='pub_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='date published'),
),
migrations.AlterField(
model_name='topicsite',
name='create_date',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created'),
),
]
| mit | Python | |
9ce0f7d3dad6b588f5e494edf68070e6c80c3339 | Add conversion to SAT | jaanos/LVR-2016,jaanos/LVR-2016 | tutorials/2017/thursday/graphColouring.py | tutorials/2017/thursday/graphColouring.py | from boolean import *
def graphColouring2SAT(G, k):
conj = []
for i in range(len(G)):
conj.append(Or(*((i, j) for j in range(k))))
for j in range(k):
for jj in range(j+1, k):
conj.append(Or(Not((i, j)), Not((i, jj))))
for ii in G[i]:
for j in range(k):
conj.append(Or(Not((i, j)), Not((ii, j))))
return And(*conj)
| mit | Python | |
48a0fe0a4f841fb58783e2c021d29bd8f2b657ab | Create Settings.py | maximx1/PyPlatformerEngine | pyplatformerengine/utilities/Settings.py | pyplatformerengine/utilities/Settings.py | import pygame
"""
Settings manager
"""
class Settings:
"""
Initializes the object
"""
def __init__(self,path):
self.settings = {}
settingsFile = open(path, "r")
for line in settingsFile:
settingName,settingValue = line.split("=")
self.settings[settingName] = settingValue.rstrip('\n')
def changeSetting(self,settingName,change):
if self.settings.get(settingName):
self.settings[settingName] = change
def fetchSetting(self,settingName):
if self.settings.get(settingName):
return self.settings[settingName]
else:
return "-1"
| mit | Python | |
2207351e805f11c39f843ca0e0eff261a7a5bde8 | Add half-naive solution to problem 10 | gidj/euler,gidj/euler | python/010_summation_of_primes/primes.py | python/010_summation_of_primes/primes.py | from math import sqrt
from typing import Generator
def prime_generator(limit: int) -> Generator[int, None, int]:
if limit < 2:
return
else:
yield 2
primes = [2]
for x in range(3, limit + 1, 2):
if any(map(lambda divisor: x % divisor == 0, primes[: int(sqrt(x))])):
continue
else:
primes.append(x)
yield x
primes_20 = tuple(prime_generator(20))
primes_two_milliot = prime_generator(2000000)
sum_primes = sum(primes_two_million)
| bsd-3-clause | Python | |
5980018bdf6989f1d86e9eabb24bf55b26df5115 | add forms.py for the group router | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | go/routers/group/forms.py | go/routers/group/forms.py | from django import forms
class GroupEditForm(forms.Form):
groups = forms.MultipleChoiceField(
label="Select Groups",
choices=[]
)
def __init__(self, *args, **kwargs):
groups = kwargs.pop('groups', [])
super(GroupEditForm, self).__init__(*args, **kwargs)
self.fields['groups'] = forms.MultipleChoiceField(
label="Select Groups",
choices=[(group.key, group.name) for group in groups]
)
@staticmethod
def initial_from_config(data):
return {'groups': data['groups']}
def to_config(self):
groups = {}
if self.is_valid():
groups = self.cleaned_data['groups']
return {'groups': groups}
| bsd-3-clause | Python | |
94c8be385a08df24269008f7e7a1f4548387f763 | Create generate_struct.py | vnpy/vnpy,bigdig/vnpy,bigdig/vnpy,bigdig/vnpy,vnpy/vnpy,bigdig/vnpy | vnpy/api/tap/generator/generate_struct.py | vnpy/api/tap/generator/generate_struct.py | """"""
import importlib
class StructGenerator:
"""Struct生成器"""
def __init__(self, filename: str, prefix: str):
"""Constructor"""
self.filename = filename
self.prefix = prefix
self.typedefs = {}
self.load_constant()
def load_constant(self):
""""""
module_name = f"{self.prefix}_typedef"
module = importlib.import_module(module_name)
for name in dir(module):
if "__" not in name:
self.typedefs[name] = getattr(module, name)
def run(self):
"""运行生成"""
self.f_cpp = open(self.filename, "r")
self.f_struct = open(f"{self.prefix}_struct.py", "w")
for line in self.f_cpp:
self.process_line(line)
self.f_cpp.close()
self.f_struct.close()
print("Struct生成成功")
def process_line(self, line: str):
"""处理每行"""
line = line.replace(";", "")
line = line.replace("\n", "")
if line.startswith("struct"):
self.process_declare(line)
elif line.startswith("{"):
self.process_start(line)
elif line.startswith("}"):
self.process_end(line)
elif "\t" in line and "///" not in line:
self.process_member(line)
def process_declare(self, line: str):
"""处理声明"""
words = line.split(" ")
name = words[1]
end = "{"
new_line = f"{name} = {end}\n"
self.f_struct.write(new_line)
def process_start(self, line: str):
"""处理开始"""
pass
def process_end(self, line: str):
"""处理结束"""
new_line = "}\n\n"
self.f_struct.write(new_line)
def process_member(self, line: str):
"""处理成员"""
words = line.split("\t")
words = [word for word in words if word]
py_type = self.typedefs[words[0]]
name = words[1]
new_line = f" \"{name}\": \"{py_type}\",\n"
self.f_struct.write(new_line)
if __name__ == "__main__":
generator = StructGenerator("../include/ctp/ThostFtdcUserApiStruct.h", "ctp")
generator.run()
| mit | Python | |
747ff2fbaf9e6216ba932f446418819723611174 | Add solution for problem 12 | rlucioni/project-euler | euler/solutions/solution_12.py | euler/solutions/solution_12.py | """Highly divisible triangular number
The sequence of triangle numbers is generated by adding the natural numbers.
The 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28.
The first ten terms would be:
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
Let us list the factors of the first seven triangle numbers:
1: 1
3: 1, 3
6: 1, 2, 3, 6
10: 1, 2, 5, 10
15: 1, 3, 5, 15
21: 1, 3, 7, 21
28: 1, 2, 4, 7, 14, 28
We can see that 28 is the first triangle number to have over five divisors.
What is the value of the first triangle number to have over five hundred divisors?
"""
import math
def triangle_number_generator():
"""Generator yielding the sequence of triangle numbers."""
i = 0
while True:
i += 1
yield int(i * (i + 1) / 2)
def check_divisors(target):
"""Return the value of the first triangle number to have greater than the target number of divisors."""
triangles = triangle_number_generator()
for triangle in triangles:
divisors = 0
for i in range(1, int(math.sqrt(triangle) + 1)):
if triangle % i == 0:
divisors += 1
if i*i != triangle:
divisors += 1
if divisors > target:
return triangle
def check_divisors_alternate(target):
"""Return the value of the first triangle number to have greater than the target number of divisors.
Uses prime factorizations. Any integer N can be expressed as
N = p_0^a_0 + p_1^a_1 + ... + p_n^a_n,
where p_n is a distinct prime number and a_n is its exponent. The number of divisors D(N) of any integer
N can be computed as
D(N) = (a_0 + 1) * (a_1 + 1) * ... * (a_n + 1)
"""
triangles = triangle_number_generator()
for triangle in triangles:
divisors = 1
number = triangle
for candidate in range(2, triangle):
exponent = 0
while number % candidate == 0:
exponent += 1
number /= candidate
divisors *= exponent + 1
if divisors > target:
return triangle
if number == 1:
break
| mit | Python | |
65aee742ea0e95b200152f8a90d9ad5ee86b2512 | Add a Locust load testing script | ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod | scripts/locustfile.py | scripts/locustfile.py | from locust import HttpLocust, TaskSet, task
import urllib
from faker import Faker
fake = Faker()
class SPARQLQueryTasks(TaskSet):
@task
def query_simple(self):
self.client.get("/sparql/?query=select+%2A+where+%7B+%3Fs+%3Fp+%3Fo+%7D+limit+0")
@task
def query_realistic(self):
self.client.get("/sparql/?query=select+%2A+where+%7B+%3Fs+%3Fp+%3Fo+%7D+limit+0")
class SPARQLInsertTasks(TaskSet):
@task
def insert_simple(self):
self.client.get("/sparql/?query=INSERT+DATA+%7B+GRAPH+%3Chttps%3A%2F%2Fdataone.org%3E+%7B+%3Chttp%3A%2F%2Fexample.com%2FX%3E+%3Chttp%3A%2F%2Fexample.com%2FisA%3E+%3Chttp%3A%2F%2Fexample.com%2FY%3E+%7D%7D")
@task
def insert_realistic(self):
self.client.get("/sparql/?query=INSERT+DATA+%7B+GRAPH+%3Chttps%3A%2F%2Fdataone.org%3E+%7B+%3Chttp%3A%2F%2Fexample.com%2FX%3E+%3Chttp%3A%2F%2Fexample.com%2FisA%3E+%22{}%22+%7D%7D".format(urllib.quote(fake.name())))
class QueryLocust(HttpLocust):
weight = 5
task_set = SPARQLQueryTasks
class InsertLocust(HttpLocust):
weight = 3
task_set = SPARQLInsertTasks | apache-2.0 | Python | |
e84d811f13347da3625e3a30d200e836f6393e9d | Store OBO data into a suitable queryable format | jawrainey/healthchat,jawrainey/healthchat | scripts/obo_to_sql.py | scripts/obo_to_sql.py | # Note: based on - http://blog.adimian.com/2014/10/cte-and-closure-tables/
import sqlite3
conn = sqlite3.connect('test.db')
cursor = conn.cursor()
def create_tables():
'''
Creates the two tables used to store the ontology concepts and terms.
- 'nodes' stores the .obo content.
- 'closure' stores the hierarchy in a transitive closure representation.
'''
cursor.execute(
'CREATE TABLE IF NOT EXISTS nodes ('
'id INTEGER NOT NULL PRIMARY KEY,'
'parent INTEGER REFERENCES nodes(id),'
'name VARCHAR(100))')
cursor.execute(
'CREATE TABLE IF NOT EXISTS closure ('
'parent INTEGER REFERENCES nodes(id), '
'child INTEGER REFERENCES nodes(id), '
'depth INTEGER)')
conn.commit()
def add_unknown_concepts_to_db(obo_content):
'''
Inserts concepts/terms into the database.
Moreover, the transitive closure table is also updated upon insertion
of an element to ensure it's retrievable later...
Args:
obo_content (list): a list of Stanzas, i.e. dictionaries containing
the relevant obo structure, such as id, name, and relationship.
'''
known_ids = [r[0] for r in
cursor.execute('SELECT id FROM nodes').fetchall()]
for i in obo_content:
_id = int(str(i.tags['id'][0]).split(':')[1])
# The root element does not have a parent. Assign it a zero.
_pid = (int(str(i.tags['is_a'][0]).split(':')[1])
if 'is_a' in str(i) else 0)
_name = str(i.tags['name'][0])
# Only add NEW terms to the database.
if _id not in known_ids:
# Add ontological term to node table.
cursor.execute('INSERT INTO nodes VALUES (?, ?, ?)',
(_id, _pid, _name))
last_id = cursor.lastrowid
# Collect ancestor of parent, and insert into closure table.
cursor.execute('SELECT parent, ? as child, depth+1 FROM closure '
'WHERE child = ?', (_id, _pid))
stm = 'INSERT INTO closure (parent, child, depth) VALUES (?, ?, ?)'
cursor.executemany(stm, cursor.fetchall())
cursor.execute(stm, (last_id, last_id, 0))
conn.commit()
if __name__ == "__main__":
import obo
create_tables()
obo_content = [i for i in obo.Parser('../data/structure.obo')]
add_unknown_concepts_to_db(obo_content)
| mit | Python | |
5649a284826b4e85af29854f443a1fc421613932 | add solution for Binary Tree Right Side View | zhyu/leetcode,zhyu/leetcode | src/binaryTreeRightSideView.py | src/binaryTreeRightSideView.py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return a list of integers
def rightSideView(self, root):
res = []
def dfs(root, level):
if not root:
return
if len(res) == level:
res.append(root.val)
dfs(root.right, level+1)
dfs(root.left, level+1)
dfs(root, 0)
return res
| mit | Python | |
fd0b8035035d23276496428747c35329741a49e1 | fix group invitation handler | manastech/de-bee,manastech/de-bee,manastech/de-bee,manastech/de-bee | groupInvitationHandler.py | groupInvitationHandler.py | import cgi
from google.appengine.ext import webapp
from model import *
from google.appengine.api import users
import registration as registration
import os
from google.appengine.ext.webapp import template
class GroupInvitationHandler(webapp.RequestHandler):
def get(self):
pass | mit | Python | |
7d3114e1cc934cc122de641f864690f81794d10b | add scripts folder to fill-app folder | sup/fill,sup/fill,sup/fill | fill-app/scripts/fetch_gae_sdk.py | fill-app/scripts/fetch_gae_sdk.py | #!/usr/bin/env python
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Retrieved from https://github.com/Google/oauth2client
"""Fetch the most recent GAE SDK and decompress it in the current directory.
Usage:
fetch_gae_sdk.py [<dest_dir>]
Current releases are listed here:
https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured
"""
import json
import os
import StringIO
import sys
import urllib2
import zipfile
_SDK_URL = (
'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured')
def get_gae_versions():
try:
version_info_json = urllib2.urlopen(_SDK_URL).read()
except:
return {}
try:
version_info = json.loads(version_info_json)
except:
return {}
return version_info.get('items', {})
def _version_tuple(v):
version_string = os.path.splitext(v['name'])[0].rpartition('_')[2]
return tuple(int(x) for x in version_string.split('.'))
def get_sdk_urls(sdk_versions):
python_releases = [
v for v in sdk_versions
if v['name'].startswith('featured/google_appengine')]
current_releases = sorted(
python_releases, key=_version_tuple, reverse=True)
return [release['mediaLink'] for release in current_releases]
def main(argv):
if len(argv) > 2:
print('Usage: {} [<destination_dir>]'.format(argv[0]))
return 1
dest_dir = argv[1] if len(argv) > 1 else '.'
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
if os.path.exists(os.path.join(dest_dir, 'google_appengine')):
print('GAE SDK already installed at {}, exiting.'.format(dest_dir))
return 0
sdk_versions = get_gae_versions()
if not sdk_versions:
print('Error fetching GAE SDK version info')
return 1
sdk_urls = get_sdk_urls(sdk_versions)
for sdk_url in sdk_urls:
try:
sdk_contents = StringIO.StringIO(urllib2.urlopen(sdk_url).read())
break
except:
pass
else:
print('Could not read SDK from any of {}'.format(sdk_urls))
return 1
sdk_contents.seek(0)
try:
zip_contents = zipfile.ZipFile(sdk_contents)
zip_contents.extractall(dest_dir)
print('GAE SDK Installed to {}.'.format(dest_dir))
except:
print('Error extracting SDK contents')
return 1
if __name__ == '__main__':
sys.exit(main(sys.argv[:]))
| mit | Python | |
9b48cb18980ae2e55ce02a84576f65f0bd8a27bb | Add wiki macro for the Like button. | thijstriemstra/trac-facebook-plugins | FacebookPlugins.py | FacebookPlugins.py | """
@note: enable X
"""
from trac.core import Component
from trac.wiki.macros import WikiMacroBase
class FacebookPlugins(Component):
"""
Support for Facebook plugins.
"""
revision = "$Rev$"
url = "$URL$"
class LikeButton(WikiMacroBase):
"""
The [http://developers.facebook.com/docs/reference/plugins/like Like button] lets
users share pages from your site back to their Facebook profile with one click.
Examples:
{{{
[[LikeButton]] # current page
[[LikeButton(http://google.nl)]] # google.nl with default layout
[[LikeButton(http://google.com,button)]] # google.com with button layout
[[LikeButton(http://google.com,box)]] # google.com with box layout
}}}
"""
revision = "$Rev$"
url = "$URL$"
def expand_macro(self, formatter, name, args):
"""Description here.
@param name: the actual name of the macro
@param args: text enclosed in parenthesis at the call of the macro
"""
options = unicode(args).split(",")
href = self.url
layout = 'standard' # options: 'button_count', 'box_count'
show_faces = 'true'
width = '450'
height = '80'
colorscheme = 'light' # or 'dark'
action = 'like' # or 'recommend'
if len(options) > 0:
href = options[0]
if len(options) > 1:
layout = options[1] + "_count"
iframe_code = '<iframe src="http://www.facebook.com/plugins/like.php?href=%s&layout=%s&show_faces=%s&width=%s&action=%s&colorscheme=%s&height=%s" scrolling="no" frameborder="0" style="border:none; overflow:hidden; width:%spx; height:%spx;" allowTransparency="true"></iframe>' % (
href, layout, show_faces, width, action, colorscheme, height, width, height)
return iframe_code
| mit | Python | |
fd032cf6b7b8793be5b6653c72286d5e5a458d25 | support for trigger Maker IFTTT | deisi/home-assistant,deisi/home-assistant,happyleavesaoc/home-assistant,morphis/home-assistant,luxus/home-assistant,ct-23/home-assistant,g12mcgov/home-assistant,sdague/home-assistant,balloob/home-assistant,justyns/home-assistant,tmm1/home-assistant,jamespcole/home-assistant,happyleavesaoc/home-assistant,molobrakos/home-assistant,justyns/home-assistant,aequitas/home-assistant,partofthething/home-assistant,hexxter/home-assistant,tboyce1/home-assistant,jaharkes/home-assistant,DavidLP/home-assistant,hexxter/home-assistant,stefan-jonasson/home-assistant,eagleamon/home-assistant,nevercast/home-assistant,Smart-Torvy/torvy-home-assistant,persandstrom/home-assistant,betrisey/home-assistant,titilambert/home-assistant,oandrew/home-assistant,tmm1/home-assistant,alanbowman/home-assistant,jnewland/home-assistant,MartinHjelmare/home-assistant,keerts/home-assistant,Zac-HD/home-assistant,turbokongen/home-assistant,MartinHjelmare/home-assistant,leoc/home-assistant,Theb-1/home-assistant,titilambert/home-assistant,w1ll1am23/home-assistant,sffjunkie/home-assistant,aoakeson/home-assistant,dorant/home-assistant,betrisey/home-assistant,mikaelboman/home-assistant,mKeRix/home-assistant,oandrew/home-assistant,open-homeautomation/home-assistant,ma314smith/home-assistant,Danielhiversen/home-assistant,xifle/home-assistant,GenericStudent/home-assistant,instantchow/home-assistant,tboyce1/home-assistant,florianholzapfel/home-assistant,lukas-hetzenecker/home-assistant,mahendra-r/home-assistant,Julian/home-assistant,Cinntax/home-assistant,pschmitt/home-assistant,FreekingDean/home-assistant,nkgilley/home-assistant,robjohnson189/home-assistant,sffjunkie/home-assistant,joopert/home-assistant,keerts/home-assistant,open-homeautomation/home-assistant,Zyell/home-assistant,keerts/home-assistant,SEJeff/home-assistant,sfam/home-assistant,varunr047/homefile,open-homeautomation/home-assistant,bdfoster/blumate,ct-23/home-assistant,nugget/home-assistant,florianholzapfel/home-assistant,maddox/home-assistant,philipbl/home-assistant,bdfoster/blumate,alexmogavero/home-assistant,Smart-Torvy/torvy-home-assistant,mKeRix/home-assistant,w1ll1am23/home-assistant,deisi/home-assistant,emilhetty/home-assistant,maddox/home-assistant,hmronline/home-assistant,sffjunkie/home-assistant,kennedyshead/home-assistant,alanbowman/home-assistant,Zyell/home-assistant,DavidLP/home-assistant,devdelay/home-assistant,ma314smith/home-assistant,g12mcgov/home-assistant,ct-23/home-assistant,PetePriority/home-assistant,auduny/home-assistant,keerts/home-assistant,balloob/home-assistant,mikaelboman/home-assistant,dmeulen/home-assistant,srcLurker/home-assistant,eagleamon/home-assistant,morphis/home-assistant,Julian/home-assistant,auduny/home-assistant,leoc/home-assistant,stefan-jonasson/home-assistant,jamespcole/home-assistant,emilhetty/home-assistant,fbradyirl/home-assistant,soldag/home-assistant,dmeulen/home-assistant,sfam/home-assistant,dmeulen/home-assistant,kyvinh/home-assistant,sdague/home-assistant,Teagan42/home-assistant,Zac-HD/home-assistant,LinuxChristian/home-assistant,stefan-jonasson/home-assistant,coteyr/home-assistant,pottzer/home-assistant,LinuxChristian/home-assistant,xifle/home-assistant,ct-23/home-assistant,JshWright/home-assistant,MungoRae/home-assistant,tinloaf/home-assistant,leoc/home-assistant,luxus/home-assistant,morphis/home-assistant,dorant/home-assistant,hexxter/home-assistant,sander76/home-assistant,alexkolar/home-assistant,alanbowman/home-assistant,maddox/home-assistant,oandrew/home-assistant,aoakeson/home-assistant,Zac-HD/home-assistant,rohitranjan1991/home-assistant,LinuxChristian/home-assistant,mKeRix/home-assistant,Danielhiversen/home-assistant,eagleamon/home-assistant,ma314smith/home-assistant,adrienbrault/home-assistant,tomduijf/home-assistant,miniconfig/home-assistant,mikaelboman/home-assistant,ewandor/home-assistant,srcLurker/home-assistant,srcLurker/home-assistant,deisi/home-assistant,betrisey/home-assistant,HydrelioxGitHub/home-assistant,hexxter/home-assistant,kyvinh/home-assistant,GenericStudent/home-assistant,PetePriority/home-assistant,emilhetty/home-assistant,bdfoster/blumate,HydrelioxGitHub/home-assistant,ct-23/home-assistant,instantchow/home-assistant,srcLurker/home-assistant,jnewland/home-assistant,alexmogavero/home-assistant,coteyr/home-assistant,ewandor/home-assistant,MungoRae/home-assistant,jabesq/home-assistant,Duoxilian/home-assistant,nnic/home-assistant,qedi-r/home-assistant,deisi/home-assistant,nugget/home-assistant,miniconfig/home-assistant,ewandor/home-assistant,morphis/home-assistant,jawilson/home-assistant,aequitas/home-assistant,philipbl/home-assistant,molobrakos/home-assistant,shaftoe/home-assistant,shaftoe/home-assistant,tomduijf/home-assistant,shaftoe/home-assistant,aronsky/home-assistant,tchellomello/home-assistant,MungoRae/home-assistant,florianholzapfel/home-assistant,Duoxilian/home-assistant,alexkolar/home-assistant,bdfoster/blumate,DavidLP/home-assistant,mahendra-r/home-assistant,emilhetty/home-assistant,partofthething/home-assistant,hmronline/home-assistant,nnic/home-assistant,rohitranjan1991/home-assistant,alexkolar/home-assistant,stefan-jonasson/home-assistant,badele/home-assistant,xifle/home-assistant,happyleavesaoc/home-assistant,caiuspb/home-assistant,mKeRix/home-assistant,sander76/home-assistant,MartinHjelmare/home-assistant,adrienbrault/home-assistant,kyvinh/home-assistant,nkgilley/home-assistant,badele/home-assistant,kyvinh/home-assistant,tinloaf/home-assistant,Zac-HD/home-assistant,robjohnson189/home-assistant,devdelay/home-assistant,jabesq/home-assistant,mezz64/home-assistant,eagleamon/home-assistant,luxus/home-assistant,emilhetty/home-assistant,Smart-Torvy/torvy-home-assistant,Zyell/home-assistant,MungoRae/home-assistant,qedi-r/home-assistant,mikaelboman/home-assistant,shaftoe/home-assistant,jabesq/home-assistant,coteyr/home-assistant,jawilson/home-assistant,home-assistant/home-assistant,tboyce1/home-assistant,caiuspb/home-assistant,robbiet480/home-assistant,hmronline/home-assistant,LinuxChristian/home-assistant,mikaelboman/home-assistant,devdelay/home-assistant,LinuxChristian/home-assistant,nevercast/home-assistant,jaharkes/home-assistant,instantchow/home-assistant,fbradyirl/home-assistant,persandstrom/home-assistant,robjohnson189/home-assistant,fbradyirl/home-assistant,sfam/home-assistant,nnic/home-assistant,HydrelioxGitHub/home-assistant,pottzer/home-assistant,Theb-1/home-assistant,philipbl/home-assistant,Duoxilian/home-assistant,mezz64/home-assistant,varunr047/homefile,joopert/home-assistant,tboyce1/home-assistant,toddeye/home-assistant,aoakeson/home-assistant,ma314smith/home-assistant,persandstrom/home-assistant,robjohnson189/home-assistant,tomduijf/home-assistant,JshWright/home-assistant,miniconfig/home-assistant,betrisey/home-assistant,auduny/home-assistant,alexmogavero/home-assistant,tchellomello/home-assistant,varunr047/homefile,Julian/home-assistant,dmeulen/home-assistant,happyleavesaoc/home-assistant,Smart-Torvy/torvy-home-assistant,home-assistant/home-assistant,Julian/home-assistant,Teagan42/home-assistant,jaharkes/home-assistant,rohitranjan1991/home-assistant,philipbl/home-assistant,bdfoster/blumate,caiuspb/home-assistant,Duoxilian/home-assistant,aequitas/home-assistant,aronsky/home-assistant,nevercast/home-assistant,sffjunkie/home-assistant,xifle/home-assistant,MungoRae/home-assistant,JshWright/home-assistant,open-homeautomation/home-assistant,SEJeff/home-assistant,varunr047/homefile,tboyce021/home-assistant,JshWright/home-assistant,g12mcgov/home-assistant,devdelay/home-assistant,leppa/home-assistant,toddeye/home-assistant,leoc/home-assistant,hmronline/home-assistant,miniconfig/home-assistant,tboyce021/home-assistant,varunr047/homefile,FreekingDean/home-assistant,SEJeff/home-assistant,balloob/home-assistant,leppa/home-assistant,badele/home-assistant,sffjunkie/home-assistant,florianholzapfel/home-assistant,jnewland/home-assistant,lukas-hetzenecker/home-assistant,kennedyshead/home-assistant,PetePriority/home-assistant,tinloaf/home-assistant,justyns/home-assistant,hmronline/home-assistant,pottzer/home-assistant,oandrew/home-assistant,jamespcole/home-assistant,robbiet480/home-assistant,postlund/home-assistant,pschmitt/home-assistant,alexmogavero/home-assistant,Cinntax/home-assistant,turbokongen/home-assistant,jaharkes/home-assistant,Theb-1/home-assistant,dorant/home-assistant,postlund/home-assistant,tmm1/home-assistant,molobrakos/home-assistant,nugget/home-assistant,soldag/home-assistant,mahendra-r/home-assistant | homeassistant/components/ifttt.py | homeassistant/components/ifttt.py | """
homeassistant.components.ifttt
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This component enable you to trigger Maker IFTTT recipes.
Check https://ifttt.com/maker for details.
Configuration:
To use Maker IFTTT you will need to add something like the following to your
config/configuration.yaml.
ifttt:
key: xxxxx-x-xxxxxxxxxxxxx
Variables:
key
*Required
Your api key
"""
import logging
import requests
from homeassistant.helpers import validate_config
_LOGGER = logging.getLogger(__name__)
DOMAIN = "ifttt"
SERVICE_TRIGGER = 'trigger'
ATTR_EVENT = 'event'
ATTR_VALUE1 = 'value1'
ATTR_VALUE2 = 'value2'
ATTR_VALUE3 = 'value3'
DEPENDENCIES = []
REQUIREMENTS = ['pyfttt==0.1']
def trigger(hass, event, value1=None, value2=None, value3=None):
""" Trigger a Maker IFTTT recipe """
data = {
ATTR_EVENT: event,
ATTR_VALUE1: value1,
ATTR_VALUE2: value2,
ATTR_VALUE3: value3,
}
hass.services.call(DOMAIN, SERVICE_TRIGGER, data)
def setup(hass, config):
""" Setup the ifttt service component """
if not validate_config(config, {DOMAIN: ['key']}, _LOGGER):
return False
key = config[DOMAIN]['key']
def trigger_service(call):
""" Handle ifttt trigger service calls. """
event = call.data.get(ATTR_EVENT)
value1 = call.data.get(ATTR_VALUE1)
value2 = call.data.get(ATTR_VALUE2)
value3 = call.data.get(ATTR_VALUE3)
if event is None:
return
try:
import pyfttt as pyfttt
r = pyfttt.send_event(key, event, value1, value2, value3)
except requests.exceptions.ConnectionError:
_LOGGER.error("Could not connect to IFTTT")
except requests.exceptions.HTTPError:
_LOGGER.error("Received invalid response")
except requests.exceptions.Timeout:
_LOGGER.error("Request timed out")
except requests.exceptions.TooManyRedirects:
_LOGGER.error("Too many redirects")
except requests.exceptions.RequestException as e:
_LOGGER.error("{e}".format(e=e))
if r.status_code != requests.codes.ok:
try:
j = r.json()
except ValueError:
_LOGGER.error('Could not parse response. Event not sent!')
for e in j['errors']:
_LOGGER.error('{}'.format(e['message']))
hass.services.register(DOMAIN, SERVICE_TRIGGER, trigger_service)
return True
| apache-2.0 | Python | |
7cb546ba28a53b50969db384e79dc6bb394de4ad | use explicit relative import on djangoitem tests | hyrole/scrapy,Cnfc19932/scrapy,lacrazyboy/scrapy,wenyu1001/scrapy,gnemoug/scrapy,jorik041/scrapy,Digenis/scrapy,CodeJuan/scrapy,tagatac/scrapy,yidongliu/scrapy,dracony/scrapy,kmike/scrapy,kmike/scrapy,barraponto/scrapy,YeelerG/scrapy,hansenDise/scrapy,livepy/scrapy,beni55/scrapy,ramiro/scrapy,taito/scrapy,wujuguang/scrapy,pombredanne/scrapy,nowopen/scrapy,webmakin/scrapy,WilliamKinaan/scrapy,olorz/scrapy,Ryezhang/scrapy,pfctdayelise/scrapy,Allianzcortex/scrapy,ylcolala/scrapy,hwsyy/scrapy,bmess/scrapy,ENjOyAbLE1991/scrapy,huoxudong125/scrapy,gnemoug/scrapy,huoxudong125/scrapy,Djlavoy/scrapy,jorik041/scrapy,scrapy/scrapy,Digenis/scrapy,ndemir/scrapy,kimimj/scrapy,cyrixhero/scrapy,coderabhishek/scrapy,wzyuliyang/scrapy,mlyundin/scrapy,jdemaeyer/scrapy,wenyu1001/scrapy,stenskjaer/scrapy,avtoritet/scrapy,hbwzhsh/scrapy,elijah513/scrapy,sigma-random/scrapy,coderabhishek/scrapy,CodeJuan/scrapy,Ryezhang/scrapy,snowdream1314/scrapy,Geeglee/scrapy,1yvT0s/scrapy,agreen/scrapy,ylcolala/scrapy,ssteo/scrapy,famorted/scrapy,Preetwinder/scrapy,smaty1/scrapy,livepy/scrapy,agusc/scrapy,Preetwinder/scrapy,Zephor5/scrapy,dacjames/scrapy,fafaman/scrapy,fqul/scrapy,Slater-Victoroff/scrapy,w495/scrapy,Zephor5/scrapy,wujuguang/scrapy,smaty1/scrapy,fpy171/scrapy,starrify/scrapy,emschorsch/scrapy,famorted/scrapy,irwinlove/scrapy,KublaikhanGeek/scrapy,chekunkov/scrapy,ArturGaspar/scrapy,1yvT0s/scrapy,joshlk/scrapy,olorz/scrapy,raphaelfruneaux/scrapy,nett55/scrapy,ssh-odoo/scrapy,eLRuLL/scrapy,fafaman/scrapy,livepy/scrapy,kashyap32/scrapy,cursesun/scrapy,profjrr/scrapy,kazitanvirahsan/scrapy,songfj/scrapy,hyrole/scrapy,webmakin/scrapy,devGregA/scrapy,ndemir/scrapy,irwinlove/scrapy,rolando-contrib/scrapy,csalazar/scrapy,rootAvish/scrapy,huoxudong125/scrapy,ENjOyAbLE1991/scrapy,jeffreyjinfeng/scrapy,tntC4stl3/scrapy,amboxer21/scrapy,joshlk/scrapy,pablohoffman/scrapy,Parlin-Galanodel/scrapy,amboxer21/scrapy,OpenWhere/scrapy,rahulsharma1991/scrapy,jc0n/scrapy,ssh-odoo/scrapy,dacjames/scrapy,profjrr/scrapy,YeelerG/scrapy,scorphus/scrapy,rolando/scrapy,snowdream1314/scrapy,tliber/scrapy,elijah513/scrapy,zjuwangg/scrapy,github-account-because-they-want-it/scrapy,devGregA/scrapy,GregoryVigoTorres/scrapy,agusc/scrapy,beni55/scrapy,hwsyy/scrapy,Geeglee/scrapy,jdemaeyer/scrapy,fpy171/scrapy,IvanGavran/scrapy,aivarsk/scrapy,Allianzcortex/scrapy,nikgr95/scrapy,ssh-odoo/scrapy,nguyenhongson03/scrapy,YeelerG/scrapy,nett55/scrapy,pfctdayelise/scrapy,shaform/scrapy,hectoruelo/scrapy,z-fork/scrapy,pawelmhm/scrapy,arush0311/scrapy,songfj/scrapy,darkrho/scrapy-scrapy,xiao26/scrapy,tliber/scrapy,scorphus/scrapy,Partoo/scrapy,pranjalpatil/scrapy,tliber/scrapy,eLRuLL/scrapy,eLRuLL/scrapy,CENDARI/scrapy,URXtech/scrapy,lacrazyboy/scrapy,Lucifer-Kim/scrapy,carlosp420/scrapy,moraesnicol/scrapy,kalessin/scrapy,avtoritet/scrapy,amboxer21/scrapy,nowopen/scrapy,ArturGaspar/scrapy,zhangtao11/scrapy,farhan0581/scrapy,moraesnicol/scrapy,wangjun/scrapy,rahulsharma1991/scrapy,dangra/scrapy,cursesun/scrapy,dhenyjarasandy/scrapy,dacjames/scrapy,cyrixhero/scrapy,zhangtao11/scrapy,OpenWhere/scrapy,fqul/scrapy,liyy7/scrapy,yusofm/scrapy,dgillis/scrapy,kashyap32/scrapy,mlyundin/scrapy,tagatac/scrapy,mlyundin/scrapy,Allianzcortex/scrapy,chekunkov/scrapy,fontenele/scrapy,Slater-Victoroff/scrapy,mgedmin/scrapy,IvanGavran/scrapy,nett55/scrapy,nowopen/scrapy,jc0n/scrapy,Timeship/scrapy,emschorsch/scrapy,zackslash/scrapy,sardok/scrapy,Cnfc19932/scrapy,rahulsharma1991/scrapy,rahul-c1/scrapy,KublaikhanGeek/scrapy,dgillis/scrapy,CENDARI/scrapy,cyberplant/scrapy,cleydson/scrapy,GregoryVigoTorres/scrapy,jiezhu2007/scrapy,rolando/scrapy,kalessin/scrapy,smaty1/scrapy,darkrho/scrapy-scrapy,dhenyjarasandy/scrapy,gbirke/scrapy,mgedmin/scrapy,nfunato/scrapy,csalazar/scrapy,yarikoptic/scrapy,umrashrf/scrapy,ArturGaspar/scrapy,nfunato/scrapy,carlosp420/scrapy,z-fork/scrapy,Partoo/scrapy,heamon7/scrapy,ashishnerkar1/scrapy,redapple/scrapy,jorik041/scrapy,pawelmhm/scrapy,johnardavies/scrapy,fontenele/scrapy,Timeship/scrapy,hyrole/scrapy,legendtkl/scrapy,elacuesta/scrapy,nfunato/scrapy,elacuesta/scrapy,crasker/scrapy,pombredanne/scrapy,liyy7/scrapy,Zephor5/scrapy,Adai0808/scrapy-1,finfish/scrapy,hwsyy/scrapy,yidongliu/scrapy,cyrixhero/scrapy,farhan0581/scrapy,foromer4/scrapy,codebhendi/scrapy,rklabs/scrapy,eliasdorneles/scrapy,hansenDise/scrapy,yidongliu/scrapy,dangra/scrapy,nguyenhongson03/scrapy,URXtech/scrapy,Bourneer/scrapy,rootAvish/scrapy,zorojean/scrapy,pranjalpatil/scrapy,legendtkl/scrapy,WilliamKinaan/scrapy,dangra/scrapy,shaform/scrapy,JacobStevenR/scrapy,sardok/scrapy,WilliamKinaan/scrapy,raphaelfruneaux/scrapy,Timeship/scrapy,jamesblunt/scrapy,kimimj/scrapy,zackslash/scrapy,Parlin-Galanodel/scrapy,beni55/scrapy,pablohoffman/scrapy,rdowinton/scrapy,umrashrf/scrapy,yarikoptic/scrapy,rdowinton/scrapy,GregoryVigoTorres/scrapy,kalessin/scrapy,zorojean/scrapy,taito/scrapy,scorphus/scrapy,URXtech/scrapy,curita/scrapy,AaronTao1990/scrapy,avtoritet/scrapy,ylcolala/scrapy,agreen/scrapy,wzyuliyang/scrapy,joshlk/scrapy,zackslash/scrapy,AaronTao1990/scrapy,arush0311/scrapy,barraponto/scrapy,IvanGavran/scrapy,1yvT0s/scrapy,wangjun/scrapy,jiezhu2007/scrapy,moraesnicol/scrapy,godfreyy/scrapy,olorz/scrapy,ashishnerkar1/scrapy,curita/scrapy,haiiiiiyun/scrapy,olafdietsche/scrapy,w495/scrapy,hansenDise/scrapy,kazitanvirahsan/scrapy,johnardavies/scrapy,haiiiiiyun/scrapy,mouadino/scrapy,Chenmxs/scrapy,wenyu1001/scrapy,JacobStevenR/scrapy,wangjun/scrapy,stenskjaer/scrapy,redapple/scrapy,godfreyy/scrapy,codebhendi/scrapy,umrashrf/scrapy,github-account-because-they-want-it/scrapy,cyberplant/scrapy,rklabs/scrapy,curita/scrapy,ramiro/scrapy,jiezhu2007/scrapy,hbwzhsh/scrapy,xiao26/scrapy,olafdietsche/scrapy,Preetwinder/scrapy,foromer4/scrapy,nikgr95/scrapy,Bourneer/scrapy,finfish/scrapy,kmike/scrapy,lacrazyboy/scrapy,rolando-contrib/scrapy,yarikoptic/scrapy,bmess/scrapy,pranjalpatil/scrapy,dgillis/scrapy,TarasRudnyk/scrapy,hectoruelo/scrapy,Djlavoy/scrapy,CENDARI/scrapy,Ryezhang/scrapy,emschorsch/scrapy,eliasdorneles/scrapy,heamon7/scrapy,Chenmxs/scrapy,Cnfc19932/scrapy,aivarsk/scrapy,AaronTao1990/scrapy,TarasRudnyk/scrapy,wujuguang/scrapy,zjuwangg/scrapy,starrify/scrapy,starrify/scrapy,profjrr/scrapy,pawelmhm/scrapy,jeffreyjinfeng/scrapy,nguyenhongson03/scrapy,coderabhishek/scrapy,kazitanvirahsan/scrapy,tntC4stl3/scrapy,jc0n/scrapy,carlosp420/scrapy,rdowinton/scrapy,godfreyy/scrapy,ramiro/scrapy,rootAvish/scrapy,JacobStevenR/scrapy,barraponto/scrapy,pfctdayelise/scrapy,yusofm/scrapy,zhangtao11/scrapy,cursesun/scrapy,hbwzhsh/scrapy,cleydson/scrapy,ENjOyAbLE1991/scrapy,aivarsk/scrapy,yusofm/scrapy,rolando/scrapy,rahul-c1/scrapy,Adai0808/scrapy-1,shaform/scrapy,OpenWhere/scrapy,fpy171/scrapy,snowdream1314/scrapy,tntC4stl3/scrapy,Geeglee/scrapy,eliasdorneles/scrapy,jdemaeyer/scrapy,KublaikhanGeek/scrapy,raphaelfruneaux/scrapy,mgedmin/scrapy,elacuesta/scrapy,Lucifer-Kim/scrapy,Djlavoy/scrapy,fqul/scrapy,dracony/scrapy,Digenis/scrapy,Bourneer/scrapy,foromer4/scrapy,chekunkov/scrapy,nikgr95/scrapy,redapple/scrapy,kimimj/scrapy,ssteo/scrapy,liyy7/scrapy,cleydson/scrapy,fontenele/scrapy,rolando-contrib/scrapy,elijah513/scrapy,codebhendi/scrapy,bmess/scrapy,arush0311/scrapy,cyberplant/scrapy,crasker/scrapy,tagatac/scrapy,legendtkl/scrapy,famorted/scrapy,zjuwangg/scrapy,xiao26/scrapy,rahul-c1/scrapy,farhan0581/scrapy,crasker/scrapy,csalazar/scrapy,agreen/scrapy,TarasRudnyk/scrapy,wzyuliyang/scrapy,mouadino/scrapy,Chenmxs/scrapy,w495/scrapy,hectoruelo/scrapy,scrapy/scrapy,stenskjaer/scrapy,pablohoffman/scrapy,Partoo/scrapy,webmakin/scrapy,johnardavies/scrapy,taito/scrapy,pombredanne/scrapy,haiiiiiyun/scrapy,irwinlove/scrapy,ssteo/scrapy,devGregA/scrapy,agusc/scrapy,rklabs/scrapy,dracony/scrapy,z-fork/scrapy,Parlin-Galanodel/scrapy,heamon7/scrapy,finfish/scrapy,jamesblunt/scrapy,jeffreyjinfeng/scrapy,darkrho/scrapy-scrapy,gbirke/scrapy,olafdietsche/scrapy,github-account-because-they-want-it/scrapy,dhenyjarasandy/scrapy,zorojean/scrapy,scrapy/scrapy,Slater-Victoroff/scrapy,sigma-random/scrapy,songfj/scrapy,fafaman/scrapy,CodeJuan/scrapy,Adai0808/scrapy-1,Lucifer-Kim/scrapy,kashyap32/scrapy | scrapy/tests/test_djangoitem/__init__.py | scrapy/tests/test_djangoitem/__init__.py | import os
from twisted.trial import unittest
from scrapy.contrib_exp.djangoitem import DjangoItem, Field
os.environ['DJANGO_SETTINGS_MODULE'] = 'scrapy.tests.test_djangoitem.settings'
try:
import django
except ImportError:
django = None
if django:
from .models import Person
else:
Person = None
class BasePersonItem(DjangoItem):
django_model = Person
class NewFieldPersonItem(BasePersonItem):
other = Field()
class OverrideFieldPersonItem(BasePersonItem):
age = Field(default=1)
class DjangoItemTest(unittest.TestCase):
def setUp(self):
if not django:
raise unittest.SkipTest("Django is not available")
def test_base(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
def test_new_fields(self):
i = NewFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'other', 'name'])
def test_override_field(self):
i = OverrideFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
self.assertEqual(i.fields['age'], {'default': 1})
def test_save(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
i['name'] = 'John'
i['age'] = '22'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, '22')
def test_override_save(self):
i = OverrideFieldPersonItem()
i['name'] = 'John'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, 1)
| import os
from twisted.trial import unittest
from scrapy.contrib_exp.djangoitem import DjangoItem, Field
os.environ['DJANGO_SETTINGS_MODULE'] = 'scrapy.tests.test_djangoitem.settings'
try:
import django
except ImportError:
django = None
if django:
from models import Person
else:
Person = None
class BasePersonItem(DjangoItem):
django_model = Person
class NewFieldPersonItem(BasePersonItem):
other = Field()
class OverrideFieldPersonItem(BasePersonItem):
age = Field(default=1)
class DjangoItemTest(unittest.TestCase):
def setUp(self):
if not django:
raise unittest.SkipTest("Django is not available")
def test_base(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
def test_new_fields(self):
i = NewFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'other', 'name'])
def test_override_field(self):
i = OverrideFieldPersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
self.assertEqual(i.fields['age'], {'default': 1})
def test_save(self):
i = BasePersonItem()
self.assertEqual(i.fields.keys(), ['age', 'name'])
i['name'] = 'John'
i['age'] = '22'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, '22')
def test_override_save(self):
i = OverrideFieldPersonItem()
i['name'] = 'John'
person = i.save(commit=False)
self.assertEqual(person.name, 'John')
self.assertEqual(person.age, 1)
| bsd-3-clause | Python |
aa8f3d13f4718cbb0ce4a45b6e30bb1a676f7216 | Add migration | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | fellowms/migrations/0039_auto_20160720_1002.py | fellowms/migrations/0039_auto_20160720_1002.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-07-20 10:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0038_auto_20160717_2144'),
]
operations = [
migrations.AlterField(
model_name='fellow',
name='research_area_code',
field=models.CharField(choices=[('A100', 'Pre-clinical medicine'), ('A200', 'Pre-clinical dentistry'), ('A300', 'Clinical medicine'), ('A400', 'Clinical dentistry'), ('A900', 'Others in medicine & dentistry'), ('A000', 'Medicine & dentistry'), ('B100', 'Anatomy, physiology & pathology'), ('B200', 'Pharmacology, toxicology & pharmacy'), ('B300', 'Complementary medicines, therapies & well-being'), ('B400', 'Nutrition'), ('B500', 'Ophthalmics'), ('B600', 'Aural & oral sciences'), ('B700', 'Nursing'), ('B800', 'Medical technology'), ('B900', 'Others in subjects allied to medicine'), ('B000', 'Subjects allied to medicine'), ('C100', 'Biology'), ('C200', 'Botany'), ('C300', 'Zoology'), ('C400', 'Genetics'), ('C500', 'Microbiology'), ('C600', 'Sport & exercise science'), ('C700', 'Molecular biology, biophysics & biochemistry'), ('C800', 'Psychology'), ('C900', 'Others in Biological Sciences'), ('C000', 'Biological sciences'), ('D100', 'Pre-clinical veterinary medicine'), ('D200', 'Clinical veterinary medicine & dentistry'), ('D300', 'Animal science'), ('D400', 'Agriculture'), ('D500', 'Forestry & arboriculture'), ('D600', 'Food & beverage studies'), ('D700', 'Agricultural sciences'), ('D900', 'Others in veterinary sciences, agriculture & related subjects'), ('D000', 'Veterinary sciences, agriculture & related subjects'), ('F100', 'Chemistry'), ('F200', 'Materials science'), ('F300', 'Physics'), ('F400', 'Forensic & archaeological sciences'), ('F500', 'Astronomy'), ('F600', 'Geology'), ('F700', 'Science of aquatic & terrestrial environments'), ('F800', 'Physical geographical sciences'), ('F900', 'Others in physical sciences'), ('F000', 'Physical sciences'), ('G100', 'Mathematics'), ('G200', 'Operational research'), ('G300', 'Statistics'), ('G900', 'Others in mathematical sciences'), ('H100', 'General engineering'), ('H200', 'Civil engineering'), ('H300', 'Mechanical engineering'), ('H400', 'Aerospace engineering'), ('H500', 'Naval architecture'), ('H600', 'Electronic & electrical engineering'), ('H700', 'Production & manufacturing engineering'), ('H800', 'Chemical, process & energy engineering'), ('H900', 'Others in engineering'), ('H000', 'Engineering'), ('I100', 'Computer science'), ('I200', 'Information systems'), ('I300', 'Software engineering'), ('I400', 'Artificial intelligence'), ('I500', 'Health informatics'), ('I600', 'Games'), ('I700', 'Computer generated visual & audio effects'), ('I900', 'Others in Computer sciences'), ('J100', 'Minerals technology'), ('J200', 'Metallurgy'), ('J300', 'Ceramics & glass'), ('J400', 'Polymers & textiles'), ('J500', 'Materials technology not otherwise specified'), ('J600', 'Maritime technology'), ('J700', 'Biotechnology'), ('J900', 'Others in technology'), ('J000', 'Technologies'), ('K100', 'Architecture'), ('K200', 'Building'), ('K300', 'Landscape & garden design'), ('K400', 'Planning (urban, rural & regional)'), ('K900', 'Others in architecture, building & planning'), ('K000', 'Architecture, building & planning'), ('L100', 'Economics'), ('L200', 'Politics'), ('L300', 'Sociology'), ('L391', 'Sociology of science & technology'), ('L400', 'Social policy'), ('L500', 'Social work'), ('L600', 'Anthropology'), ('L700', 'Human & social geography'), ('L800', 'Development studies'), ('L900', 'Others in social studies'), ('L000', 'Social studies'), ('M000', 'Law'), ('N100', 'Business studies'), ('N200', 'Management studies'), ('N300', 'Finance'), ('N400', 'Accounting'), ('N500', 'Marketing'), ('N600', 'Human resource management'), ('N700', 'Office skills'), ('N800', 'Hospitality, leisure, sport, tourism & transport'), ('N900', 'Others in business & administrative studies'), ('N000', 'Business & administrative studies'), ('P100', 'Information services'), ('P200', 'Publicity studies'), ('P300', 'Media studies'), ('P400', 'Publishing'), ('P500', 'Journalism'), ('P900', 'Others in mass communications & documentation'), ('Q100', 'Linguistics'), ('Q200', 'Comparative literary studies'), ('Q300', 'English studies'), ('Q400', 'Ancient language studies'), ('Q500', 'Celtic studies'), ('Q600', 'Latin studies'), ('Q700', 'Classical Greek studies'), ('Q800', 'Classical studies'), ('Q900', 'Others in linguistics, classics & related subjects'), ('Q000', 'Linguistics, classics & related subjects'), ('R100', 'French studies'), ('R200', 'German studies'), ('R300', 'Italian studies'), ('R400', 'Spanish studies'), ('R500', 'Portuguese studies'), ('R600', 'Scandinavian studies'), ('R700', 'Russian & East European studies'), ('R800', 'European studies'), ('R900', 'Others in European languages, literature & related subjects'), ('R000', 'European languages, literature & related subjects'), ('T100', 'Chinese studies'), ('T200', 'Japanese studies'), ('T300', 'South Asian studies'), ('T400', 'Other Asian studies'), ('T500', 'African studies'), ('T600', 'Modern Middle Eastern studies'), ('T700', 'American studies'), ('T800', 'Australasian studies'), ('T900', 'Others in Eastern, Asiatic, African, American & Australasian languages, literature & related subjects'), ('T000', 'Eastern, Asiatic, African, American & Australasian languages, literature & related subjects'), ('V100', 'History by period'), ('V200', 'History by area'), ('V300', 'History by topic'), ('V400', 'Archaeology'), ('V500', 'Philosophy'), ('V600', 'Theology & religious studies'), ('V700', 'Heritage studies'), ('V900', 'Others in historical & philosophical studies'), ('V000', 'Historical & philosophical studies'), ('W100', 'Fine art'), ('W200', 'Design studies'), ('W300', 'Music'), ('W400', 'Drama'), ('W500', 'Dance'), ('W600', 'Cinematics & photography'), ('W700', 'Crafts'), ('W800', 'Imaginative writing'), ('W900', 'Others in creative arts & design'), ('W000', 'Creative arts & design'), ('X100', 'Training teachers'), ('X200', 'Research & study skills in education'), ('X300', 'Academic studies in education'), ('X900', 'Others in education'), ('X000', 'Education'), ('Y000', 'Combined/general subject unspecified')], default='Y000', max_length=4),
),
]
| bsd-3-clause | Python | |
b3dc3a0e2125e44da7d5ca8d0caa2a6098891997 | Add objects/utils.py copied from Ironic | LaynePeng/magnum,sajuptpm/magnum,eshijia/magnum,paulczar/magnum,eshijia/SUR,mjbrewer/testindex,openstack/magnum,mjbrewer/testindex,eshijia/magnum,eshijia/SUR,mjbrewer/testindex,ddepaoli3/magnum,dimtruck/magnum,ramielrowe/magnum,ChengTiesheng/magnum,ffantast/magnum,ddepaoli3/magnum,sajuptpm/magnum,paulczar/magnum,LaynePeng/magnum,annegentle/magnum,Tennyson53/magnum,jay-lau/magnum,hongbin/magnum,dimtruck/magnum,annegentle/magnum,ffantast/magnum,openstack/magnum,Alzon/SUR,ChengTiesheng/magnum,ramielrowe/magnum,ArchiFleKs/magnum,Tennyson53/magnum,hongbin/magnum,Alzon/SUR,Tennyson53/SUR,ArchiFleKs/magnum,Tennyson53/SUR | magnum/objects/utils.py | magnum/objects/utils.py | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utility methods for objects"""
import ast
import datetime
import iso8601
import netaddr
from oslo.utils import timeutils
import six
from magnum.openstack.common._i18n import _
def datetime_or_none(dt):
"""Validate a datetime or None value."""
if dt is None:
return None
elif isinstance(dt, datetime.datetime):
if dt.utcoffset() is None:
# NOTE(danms): Legacy objects from sqlalchemy are stored in UTC,
# but are returned without a timezone attached.
# As a transitional aid, assume a tz-naive object is in UTC.
return dt.replace(tzinfo=iso8601.iso8601.Utc())
else:
return dt
raise ValueError(_("A datetime.datetime is required here"))
def datetime_or_str_or_none(val):
if isinstance(val, six.string_types):
return timeutils.parse_isotime(val)
return datetime_or_none(val)
def int_or_none(val):
"""Attempt to parse an integer value, or None."""
if val is None:
return val
else:
return int(val)
def str_or_none(val):
"""Attempt to stringify a value to unicode, or None."""
if val is None:
return val
else:
return six.text_type(val)
def dict_or_none(val):
"""Attempt to dictify a value, or None."""
if val is None:
return {}
elif isinstance(val, six.string_types):
return dict(ast.literal_eval(val))
else:
try:
return dict(val)
except ValueError:
return {}
def list_or_none(val):
"""Attempt to listify a value, or None."""
if val is None:
return []
elif isinstance(val, six.string_types):
return list(ast.literal_eval(val))
else:
try:
return list(val)
except ValueError:
return []
def ip_or_none(version):
"""Return a version-specific IP address validator."""
def validator(val, version=version):
if val is None:
return val
else:
return netaddr.IPAddress(val, version=version)
return validator
def nested_object_or_none(objclass):
def validator(val, objclass=objclass):
if val is None or isinstance(val, objclass):
return val
raise ValueError(_("An object of class %s is required here")
% objclass)
return validator
def dt_serializer(name):
"""Return a datetime serializer for a named attribute."""
def serializer(self, name=name):
if getattr(self, name) is not None:
return timeutils.isotime(getattr(self, name))
else:
return None
return serializer
def dt_deserializer(instance, val):
"""A deserializer method for datetime attributes."""
if val is None:
return None
else:
return timeutils.parse_isotime(val)
def obj_serializer(name):
def serializer(self, name=name):
if getattr(self, name) is not None:
return getattr(self, name).obj_to_primitive()
else:
return None
return serializer
| apache-2.0 | Python | |
4f2a481d0739b7eab5826a7198e768f364e97cdf | introduce configurable base type | openaps/openaps,openaps/openaps | openaps/configurable.py | openaps/configurable.py | import json
class Configurable (object):
name = None
required = [ ]
optional = [ ]
prefix = '{name:s}'
url_template = ""
def __init__ (self, name, **kwargs):
self.name = name
self.fields = kwargs
def section_name (self):
return '%s "%s"' % (self.prefix, self.name)
def add_option (self, k, v):
# section = self.section_name( )
# self._config.set(section, k, v)
self.fields[k] = v
if k not in self.required + self.optional:
self.optional.append(k)
def items (self):
return self.fields.items( )
def format_url (self):
return self.url_template.format(name=self.name, **self.fields)
def store (self, config):
if not config.has_section(self.section_name( )):
config.add_device(self)
else:
for k, v in self.items( ):
config.set(self.section_name( ), k, v)
def remove (self, config):
config.remove_device(self)
@classmethod
def FromConfig (klass, config):
items = [ ]
for candidate in config.sections( ):
if candidate.startswith(klass.prefix):
name = json.loads(candidate.split(' ').pop( ))
# for f in klass.required:
fields = dict(config.items(candidate))
report = klass(report=name, **fields)
items.append(report)
return items
| mit | Python | |
fc72ca537e6eaf5f4bf04ed4511ec1acdd9eae11 | Add check script to test 'deprecated' decorator | aleju/imgaug,aleju/imgaug,aleju/ImageAugmenter | checks/check_deprecation_warning.py | checks/check_deprecation_warning.py | from __future__ import print_function, absolute_import, division
import imgaug as ia
class Dummy1(object):
@ia.deprecated(alt_func="Foo")
def __init__(self):
pass
class Dummy2(object):
@ia.deprecated(alt_func="Foo", comment="Some example comment.")
def __init__(self):
pass
class Dummy3(object):
def __init__(self):
pass
@ia.deprecated(alt_func="bar()",
comment="Some example comment.")
def foo(self):
pass
@ia.deprecated(alt_func="bar()", comment="Some example comment.")
def foo():
pass
def main():
Dummy1()
Dummy2()
Dummy3()
foo()
if __name__ == "__main__":
main()
| mit | Python | |
957d93639a4877258a55ec31c1817569da61c14f | Add scaletree script | kdmurray91/kwip-experiments,kdmurray91/kwip-experiments,kdmurray91/kwip-experiments | coalescent/scripts/scaletree.py | coalescent/scripts/scaletree.py | #!/usr/bin/env python3
from __future__ import division, print_function
from ete3 import Tree
import numpy as np
from numpy import median, mean
import itertools as itl
from math import ceil, log
from string import ascii_uppercase
METRICS = [
'mean',
'median',
'min',
'max',
]
def pwdist(tree):
'''Finds the (off-diagonal) pairwise distances between all tips of `tree`.
'''
dists = []
for a, b in itl.combinations(tree.get_leaf_names(), 2):
a = tree&a
dists.append(a.get_distance(b))
return np.array(dists)
def normalise_tree(tree, to=1.0, metric='mean'):
'''
Normalise branch lengths of `tree` such that `metric` of the pairwise
distances is `to`.
By default, normalise such that the mean of all pairwise distances is 1.0.
'''
print(metric)
dists = pwdist(tree)
print(dists)
assert metric in METRICS
current = eval('{}(dists)'.format(metric))
print(current)
for node in tree.iter_descendants():
node.dist /= current
return tree
def alphbetise_names(tree):
'''Replace numeric tip labels with alphabetic ones. 1 -> A, 2 -> B etc.
If there are more than 26 tips, labels are AA, AB, ..., ZZ and so forth for
any number of tips.
'''
label_len = ceil(log(len(tree)) / log(26)) # how many letters do we need?
labels = [''.join(letters)
for letters in itl.product(ascii_uppercase, repeat=label_len)]
tiplabels = list(sorted(tree.get_leaf_names(), key=int))
for i, leaf in enumerate(tiplabels):
node = tree&leaf
node.name = labels[i]
return tree
def main(treefile, to, metric):
with open(treefile) as fh:
for treeline in fh:
tree = Tree(treeline)
print(tree.write(format=5))
tree = alphbetise_names(tree)
tree = normalise_tree(tree, to, metric)
print(tree.write(format=5))
CLI = '''
USAGE:
scaletree [options] TREEFILE
OPTIONS:
-t TO Scale tree metric to TO. [default: 1.0]
-m METRIC Metric for scaling. Must be one of mean, min, max.
[default: mean]
'''
if __name__ == '__main__':
from docopt import docopt
opts = docopt(CLI)
treefile = opts['TREEFILE']
to = float(opts['-t'])
metric = opts['-m']
main(treefile, to, metric)
| mit | Python | |
33a5f8394265770bcdcae152a7e9635572fe6e09 | Change the revision number for downloading chrome 29 from continuous archive. | PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,patrickm/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,patrickm/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,patrickm/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,M4sse/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,Chilledheart/chromium,anirudhSK/chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,ltilve/chromium,jaruba/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,patrickm/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,mogoweb/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,M4sse/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,anirudhSK/chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,Chilledheart/chromium,jaruba/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Chilledheart/chromium,ltilve/chromium,littlstar/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,patrickm/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,ondra-novak/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Chilledheart/chromium,patrickm/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,krieger-od/nwjs_chromium.src | chrome/test/chromedriver/archive.py | chrome/test/chromedriver/archive.py | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads items from the Chromium continuous archive."""
import os
import platform
import urllib
import util
CHROME_27_REVISION = '190466'
CHROME_28_REVISION = '198276'
CHROME_29_REVISION = '208261'
_SITE = 'http://commondatastorage.googleapis.com'
class Site(object):
CONTINUOUS = _SITE + '/chromium-browser-continuous'
SNAPSHOT = _SITE + '/chromium-browser-snapshots'
def GetLatestRevision(site=Site.CONTINUOUS):
"""Returns the latest revision (as a string) available for this platform.
Args:
site: the archive site to check against, default to the continuous one.
"""
url = site + '/%s/LAST_CHANGE'
return urllib.urlopen(url % _GetDownloadPlatform()).read()
def DownloadChrome(revision, dest_dir, site=Site.CONTINUOUS):
"""Downloads the packaged Chrome from the archive to the given directory.
Args:
revision: the revision of Chrome to download.
dest_dir: the directory to download Chrome to.
site: the archive site to download from, default to the continuous one.
Returns:
The path to the unzipped Chrome binary.
"""
def GetZipName():
if util.IsWindows():
return 'chrome-win32'
elif util.IsMac():
return 'chrome-mac'
elif util.IsLinux():
return 'chrome-linux'
def GetChromePathFromPackage():
if util.IsWindows():
return 'chrome.exe'
elif util.IsMac():
return 'Chromium.app/Contents/MacOS/Chromium'
elif util.IsLinux():
return 'chrome'
zip_path = os.path.join(dest_dir, 'chrome-%s.zip' % revision)
if not os.path.exists(zip_path):
url = site + '/%s/%s/%s.zip' % (_GetDownloadPlatform(), revision,
GetZipName())
print 'Downloading', url, '...'
urllib.urlretrieve(url, zip_path)
util.Unzip(zip_path, dest_dir)
return os.path.join(dest_dir, GetZipName(), GetChromePathFromPackage())
def _GetDownloadPlatform():
"""Returns the name for this platform on the archive site."""
if util.IsWindows():
return 'Win'
elif util.IsMac():
return 'Mac'
elif util.IsLinux():
if platform.architecture()[0] == '64bit':
return 'Linux_x64'
else:
return 'Linux'
| # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads items from the Chromium continuous archive."""
import os
import platform
import urllib
import util
CHROME_27_REVISION = '190466'
CHROME_28_REVISION = '198276'
CHROME_29_REVISION = '208334'
_SITE = 'http://commondatastorage.googleapis.com'
class Site(object):
CONTINUOUS = _SITE + '/chromium-browser-continuous'
SNAPSHOT = _SITE + '/chromium-browser-snapshots'
def GetLatestRevision(site=Site.CONTINUOUS):
"""Returns the latest revision (as a string) available for this platform.
Args:
site: the archive site to check against, default to the continuous one.
"""
url = site + '/%s/LAST_CHANGE'
return urllib.urlopen(url % _GetDownloadPlatform()).read()
def DownloadChrome(revision, dest_dir, site=Site.CONTINUOUS):
"""Downloads the packaged Chrome from the archive to the given directory.
Args:
revision: the revision of Chrome to download.
dest_dir: the directory to download Chrome to.
site: the archive site to download from, default to the continuous one.
Returns:
The path to the unzipped Chrome binary.
"""
def GetZipName():
if util.IsWindows():
return 'chrome-win32'
elif util.IsMac():
return 'chrome-mac'
elif util.IsLinux():
return 'chrome-linux'
def GetChromePathFromPackage():
if util.IsWindows():
return 'chrome.exe'
elif util.IsMac():
return 'Chromium.app/Contents/MacOS/Chromium'
elif util.IsLinux():
return 'chrome'
zip_path = os.path.join(dest_dir, 'chrome-%s.zip' % revision)
if not os.path.exists(zip_path):
url = site + '/%s/%s/%s.zip' % (_GetDownloadPlatform(), revision,
GetZipName())
print 'Downloading', url, '...'
urllib.urlretrieve(url, zip_path)
util.Unzip(zip_path, dest_dir)
return os.path.join(dest_dir, GetZipName(), GetChromePathFromPackage())
def _GetDownloadPlatform():
"""Returns the name for this platform on the archive site."""
if util.IsWindows():
return 'Win'
elif util.IsMac():
return 'Mac'
elif util.IsLinux():
if platform.architecture()[0] == '64bit':
return 'Linux_x64'
else:
return 'Linux'
| bsd-3-clause | Python |
3026a02f1b8739eee42a53906db3169f8fc9653e | Add fibonacci-modified | vikasgorur/hackerrank,vikasgorur/hackerrank,vikasgorur/hackerrank | fibonacci-modified/solution.py | fibonacci-modified/solution.py | import sys
a, b, n = [int(x) for x in sys.stdin.readline().split()]
c = b*b + a
for i in range(3, n):
a, b, c = b, c, c*c+b
print(c) | mit | Python | |
594902c5f2dea525889e79bfabfb6e3f20abc4ab | Fix initial tuple for ignorable nexts in mezzanine.utils.login_redirect. | geodesign/mezzanine,stbarnabas/mezzanine,mush42/mezzanine,frankchin/mezzanine,orlenko/sfpirg,SoLoHiC/mezzanine,theclanks/mezzanine,dekomote/mezzanine-modeltranslation-backport,batpad/mezzanine,scarcry/snm-mezzanine,mush42/mezzanine,stephenmcd/mezzanine,Cajoline/mezzanine,ryneeverett/mezzanine,viaregio/mezzanine,damnfine/mezzanine,tuxinhang1989/mezzanine,stbarnabas/mezzanine,dustinrb/mezzanine,sjdines/mezzanine,geodesign/mezzanine,readevalprint/mezzanine,douglaskastle/mezzanine,readevalprint/mezzanine,spookylukey/mezzanine,jjz/mezzanine,nikolas/mezzanine,dovydas/mezzanine,sjuxax/mezzanine,dovydas/mezzanine,jjz/mezzanine,cccs-web/mezzanine,industrydive/mezzanine,agepoly/mezzanine,adrian-the-git/mezzanine,frankier/mezzanine,orlenko/plei,biomassives/mezzanine,Cicero-Zhao/mezzanine,biomassives/mezzanine,PegasusWang/mezzanine,frankchin/mezzanine,theclanks/mezzanine,batpad/mezzanine,ZeroXn/mezzanine,sjuxax/mezzanine,agepoly/mezzanine,theclanks/mezzanine,readevalprint/mezzanine,eino-makitalo/mezzanine,molokov/mezzanine,molokov/mezzanine,orlenko/sfpirg,douglaskastle/mezzanine,vladir/mezzanine,christianwgd/mezzanine,wrwrwr/mezzanine,wyzex/mezzanine,mush42/mezzanine,SoLoHiC/mezzanine,webounty/mezzanine,Skytorn86/mezzanine,viaregio/mezzanine,industrydive/mezzanine,guibernardino/mezzanine,dsanders11/mezzanine,wyzex/mezzanine,jerivas/mezzanine,promil23/mezzanine,saintbird/mezzanine,orlenko/sfpirg,promil23/mezzanine,gbosh/mezzanine,adrian-the-git/mezzanine,stephenmcd/mezzanine,nikolas/mezzanine,molokov/mezzanine,PegasusWang/mezzanine,jjz/mezzanine,guibernardino/mezzanine,geodesign/mezzanine,AlexHill/mezzanine,webounty/mezzanine,promil23/mezzanine,dustinrb/mezzanine,gradel/mezzanine,orlenko/plei,wbtuomela/mezzanine,wbtuomela/mezzanine,ZeroXn/mezzanine,jerivas/mezzanine,vladir/mezzanine,webounty/mezzanine,frankier/mezzanine,emile2016/mezzanine,Kniyl/mezzanine,spookylukey/mezzanine,scarcry/snm-mezzanine,joshcartme/mezzanine,fusionbox/mezzanine,dekomote/mezzanine-modeltranslation-backport,AlexHill/mezzanine,dovydas/mezzanine,Cajoline/mezzanine,Kniyl/mezzanine,gbosh/mezzanine,christianwgd/mezzanine,eino-makitalo/mezzanine,ryneeverett/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,Skytorn86/mezzanine,Cajoline/mezzanine,gbosh/mezzanine,damnfine/mezzanine,industrydive/mezzanine,orlenko/plei,cccs-web/mezzanine,Kniyl/mezzanine,scarcry/snm-mezzanine,nikolas/mezzanine,sjdines/mezzanine,sjdines/mezzanine,frankchin/mezzanine,christianwgd/mezzanine,frankier/mezzanine,tuxinhang1989/mezzanine,saintbird/mezzanine,ryneeverett/mezzanine,gradel/mezzanine,spookylukey/mezzanine,sjuxax/mezzanine,damnfine/mezzanine,wbtuomela/mezzanine,tuxinhang1989/mezzanine,Cicero-Zhao/mezzanine,dekomote/mezzanine-modeltranslation-backport,dustinrb/mezzanine,emile2016/mezzanine,viaregio/mezzanine,eino-makitalo/mezzanine,vladir/mezzanine,wrwrwr/mezzanine,ZeroXn/mezzanine,PegasusWang/mezzanine,SoLoHiC/mezzanine,douglaskastle/mezzanine,biomassives/mezzanine,emile2016/mezzanine,stephenmcd/mezzanine,dsanders11/mezzanine,adrian-the-git/mezzanine,agepoly/mezzanine,saintbird/mezzanine,dsanders11/mezzanine,fusionbox/mezzanine,gradel/mezzanine,jerivas/mezzanine,wyzex/mezzanine,joshcartme/mezzanine | mezzanine/utils/urls.py | mezzanine/utils/urls.py |
import re
import unicodedata
from django.core.urlresolvers import reverse, NoReverseMatch
from django.shortcuts import redirect
from django.utils.encoding import smart_unicode
from mezzanine.conf import settings
from mezzanine.utils.importing import import_dotted_path
def admin_url(model, url, object_id=None):
"""
Returns the URL for the given model and admin url name.
"""
opts = model._meta
url = "admin:%s_%s_%s" % (opts.app_label, opts.object_name.lower(), url)
args = ()
if object_id is not None:
args = (object_id,)
return reverse(url, args=args)
def slugify(s):
"""
Loads the callable defined by the ``SLUGIFY`` setting, which defaults
to the ``slugify_unicode`` function.
"""
return import_dotted_path(settings.SLUGIFY)(s)
def slugify_unicode(s):
"""
Replacement for Django's slugify which allows unicode chars in
slugs, for URLs in Chinese, Russian, etc.
Adopted from https://github.com/mozilla/unicode-slugify/
"""
chars = []
for char in smart_unicode(s):
cat = unicodedata.category(char)[0]
if cat in "LN" or char in "-_~":
chars.append(char)
elif cat == "Z":
chars.append(" ")
return re.sub("[-\s]+", "-", "".join(chars).strip()).lower()
def login_redirect(request):
"""
Returns the redirect response for login/signup. Favors:
- next param
- LOGIN_REDIRECT_URL setting
- homepage
"""
ignorable_nexts = ("",)
if "mezzanine.accounts" in settings.INSTALLED_APPS:
from mezzanine.accounts import urls
ignorable_nexts += (urls.SIGNUP_URL, urls.LOGIN_URL, urls.LOGOUT_URL)
next = request.REQUEST.get("next", "")
if next in ignorable_nexts:
try:
next = reverse(settings.LOGIN_REDIRECT_URL)
except NoReverseMatch:
next = "/"
return redirect(next)
def path_to_slug(path):
"""
Removes everything from the given URL path, including
``PAGES_SLUG`` if it is set, returning a slug that would match a
``Page`` instance's slug.
"""
from mezzanine.urls import PAGES_SLUG
for prefix in (settings.SITE_PREFIX, PAGES_SLUG):
path = path.strip("/").replace(prefix, "", 1)
return path or "/"
|
import re
import unicodedata
from django.core.urlresolvers import reverse, NoReverseMatch
from django.shortcuts import redirect
from django.utils.encoding import smart_unicode
from mezzanine.conf import settings
from mezzanine.utils.importing import import_dotted_path
def admin_url(model, url, object_id=None):
"""
Returns the URL for the given model and admin url name.
"""
opts = model._meta
url = "admin:%s_%s_%s" % (opts.app_label, opts.object_name.lower(), url)
args = ()
if object_id is not None:
args = (object_id,)
return reverse(url, args=args)
def slugify(s):
"""
Loads the callable defined by the ``SLUGIFY`` setting, which defaults
to the ``slugify_unicode`` function.
"""
return import_dotted_path(settings.SLUGIFY)(s)
def slugify_unicode(s):
"""
Replacement for Django's slugify which allows unicode chars in
slugs, for URLs in Chinese, Russian, etc.
Adopted from https://github.com/mozilla/unicode-slugify/
"""
chars = []
for char in smart_unicode(s):
cat = unicodedata.category(char)[0]
if cat in "LN" or char in "-_~":
chars.append(char)
elif cat == "Z":
chars.append(" ")
return re.sub("[-\s]+", "-", "".join(chars).strip()).lower()
def login_redirect(request):
"""
Returns the redirect response for login/signup. Favors:
- next param
- LOGIN_REDIRECT_URL setting
- homepage
"""
ignorable_nexts = ("")
if "mezzanine.accounts" in settings.INSTALLED_APPS:
from mezzanine.accounts import urls
ignorable_nexts += (urls.SIGNUP_URL, urls.LOGIN_URL, urls.LOGOUT_URL)
next = request.REQUEST.get("next", "")
if next in ignorable_nexts:
try:
next = reverse(settings.LOGIN_REDIRECT_URL)
except NoReverseMatch:
next = "/"
return redirect(next)
def path_to_slug(path):
"""
Removes everything from the given URL path, including
``PAGES_SLUG`` if it is set, returning a slug that would match a
``Page`` instance's slug.
"""
from mezzanine.urls import PAGES_SLUG
for prefix in (settings.SITE_PREFIX, PAGES_SLUG):
path = path.strip("/").replace(prefix, "", 1)
return path or "/"
| bsd-2-clause | Python |
5e55978a81cc306ee92ba12e557a1652c7f80ad5 | Create dimension_reduction.py | gdarnell/arsvd | dimension_reduction.py | dimension_reduction.py | #!/usr/bin/python
import sys
import time
import numpy as np
from scipy import linalg, stats
def rsvd(X, dstar, power_iters=2):
""" Perform rsvd algorithm on input matrix.
Method must be supplied dstar.
Returns truncated svd (U,S,V).
Parameters
----------
X : int matrix
Matrix of n x m integers, where m <= n. If n < m,
matrix will be transposed to enforce m <= n.
dstar : int
The latent (underlying) matrix rank that will be
used to truncate the larger dimension (m).
power_iters : int
default: 2
Number of power iterations used (random matrix multiplications)
Returns
-------
int matrix
Matrix of left singular vectors.
int matrix
Matrix of singular values.
int matrix
Matrix of right singular vectors.
"""
if(X.shape[0] < X.shape[1]):
X = X.T # transpose X
if(power_iters < 1):
power_iters = 1
# follows manuscript notation as closely as possible
P = np.random.randn(X.shape[1],dstar)
for i in range(power_iters):
P = np.dot(X.T,P)
P = np.dot(X,P)
Q,R = np.linalg.qr(P)
B = np.dot(Q.T,X)
U,S,V = linalg.svd(B)
U = np.dot(Q,U)
return U,S,V
def stabilityMeasure(X, d_max, B=5, power_iters=2):
""" Calculate stability of
Parameters
----------
X : int matrix
input matrix to determine rank of
d_max : int
upper bound rank to estimate
B : int
default: 5
number of projections to correlate
power_iters : int
default: 2
Number of power iterations used (random matrix multiplications)
Returns
-------
int
Latent (lower-dimensional) matrix rank
"""
singular_basis = np.zeros((B,X.shape[0],d_max))
# calculate singular basis under multiple projections
for i in range(B):
U = rsvd(X,d_max)[0]
singular_basis[i,:,:] = U[:,0:d_max]
# calculate score for each singular vector
stability_vec = np.zeros((d_max))
for k in range(d_max):
stability = 0
for i in range(0,B-1):
for j in range(1,B):
corr = stats.spearmanr(singular_basis[i,:,k],singular_basis[j,:,k])[0]
stability = stability + abs(corr)
N = B*(B-1)/2
stability = stability/N
stability_vec[k] = stability
# wilcoxon rank-sum test p-values
p_vals = np.zeros(d_max-2)
for k in range(2,d_max):
p_vals[k-2] = stats.ranksums(stability_vec[0:k-1],stability_vec[k-1:d_max])[1]
dstar = np.argmin(p_vals)
return dstar
| mit | Python | |
e58a84208a46cf749f92d8170ed81794ecfa3137 | Update downloadable clang to r347933 | davidzchen/tensorflow,karllessard/tensorflow,jbedorf/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,theflofly/tensorflow,paolodedios/tensorflow,adit-chandra/tensorflow,aam-at/tensorflow,annarev/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,aldian/tensorflow,ghchinoy/tensorflow,chemelnucfin/tensorflow,ppwwyyxx/tensorflow,aam-at/tensorflow,renyi533/tensorflow,cxxgtxy/tensorflow,DavidNorman/tensorflow,jhseu/tensorflow,freedomtan/tensorflow,ghchinoy/tensorflow,xzturn/tensorflow,gunan/tensorflow,aldian/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,theflofly/tensorflow,apark263/tensorflow,petewarden/tensorflow,annarev/tensorflow,xzturn/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,DavidNorman/tensorflow,arborh/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,jbedorf/tensorflow,adit-chandra/tensorflow,hfp/tensorflow-xsmm,davidzchen/tensorflow,hfp/tensorflow-xsmm,Intel-Corporation/tensorflow,adit-chandra/tensorflow,cxxgtxy/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,Bismarrck/tensorflow,theflofly/tensorflow,arborh/tensorflow,aldian/tensorflow,aldian/tensorflow,jbedorf/tensorflow,renyi533/tensorflow,Intel-Corporation/tensorflow,ppwwyyxx/tensorflow,alsrgv/tensorflow,apark263/tensorflow,renyi533/tensorflow,arborh/tensorflow,theflofly/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,apark263/tensorflow,alsrgv/tensorflow,ghchinoy/tensorflow,yongtang/tensorflow,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_saved_model,ghchinoy/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,hfp/tensorflow-xsmm,apark263/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,arborh/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,gunan/tensorflow,aldian/tensorflow,petewarden/tensorflow,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,frreiss/tensorflow-fred,adit-chandra/tensorflow,ppwwyyxx/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jendap/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,annarev/tensorflow,ageron/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,Bismarrck/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,Bismarrck/tensorflow,yongtang/tensorflow,jhseu/tensorflow,ageron/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,aldian/tensorflow,davidzchen/tensorflow,jendap/tensorflow,sarvex/tensorflow,arborh/tensorflow,jbedorf/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow,kevin-coder/tensorflow-fork,freedomtan/tensorflow,yongtang/tensorflow,jhseu/tensorflow,Bismarrck/tensorflow,kevin-coder/tensorflow-fork,gunan/tensorflow,arborh/tensorflow,apark263/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,kevin-coder/tensorflow-fork,petewarden/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,cxxgtxy/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,apark263/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,jendap/tensorflow,DavidNorman/tensorflow,davidzchen/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,jbedorf/tensorflow,paolodedios/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,jendap/tensorflow,jendap/tensorflow,kevin-coder/tensorflow-fork,jendap/tensorflow,paolodedios/tensorflow,cxxgtxy/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,cxxgtxy/tensorflow,aam-at/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,jhseu/tensorflow,xzturn/tensorflow,alsrgv/tensorflow,jhseu/tensorflow,renyi533/tensorflow,ageron/tensorflow,davidzchen/tensorflow,alsrgv/tensorflow,apark263/tensorflow,alsrgv/tensorflow,xzturn/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,Intel-tensorflow/tensorflow,annarev/tensorflow,jbedorf/tensorflow,cxxgtxy/tensorflow,chemelnucfin/tensorflow,Intel-tensorflow/tensorflow,kevin-coder/tensorflow-fork,Bismarrck/tensorflow,ageron/tensorflow,jendap/tensorflow,gautam1858/tensorflow,ageron/tensorflow,yongtang/tensorflow,ppwwyyxx/tensorflow,petewarden/tensorflow,aam-at/tensorflow,hfp/tensorflow-xsmm,petewarden/tensorflow,annarev/tensorflow,sarvex/tensorflow,ageron/tensorflow,chemelnucfin/tensorflow,asimshankar/tensorflow,ghchinoy/tensorflow,Intel-Corporation/tensorflow,ageron/tensorflow,gunan/tensorflow,jbedorf/tensorflow,sarvex/tensorflow,tensorflow/tensorflow,freedomtan/tensorflow,adit-chandra/tensorflow,sarvex/tensorflow,jendap/tensorflow,apark263/tensorflow,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,arborh/tensorflow,aam-at/tensorflow,karllessard/tensorflow,theflofly/tensorflow,tensorflow/tensorflow,freedomtan/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,Bismarrck/tensorflow,aam-at/tensorflow,alsrgv/tensorflow,hfp/tensorflow-xsmm,jhseu/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,davidzchen/tensorflow,chemelnucfin/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,petewarden/tensorflow,adit-chandra/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,DavidNorman/tensorflow,Intel-Corporation/tensorflow,theflofly/tensorflow,asimshankar/tensorflow,cxxgtxy/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,gunan/tensorflow,alsrgv/tensorflow,xzturn/tensorflow,asimshankar/tensorflow,ageron/tensorflow,ageron/tensorflow,theflofly/tensorflow,hfp/tensorflow-xsmm,frreiss/tensorflow-fred,gautam1858/tensorflow,alsrgv/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,renyi533/tensorflow,arborh/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ppwwyyxx/tensorflow,theflofly/tensorflow,apark263/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,ghchinoy/tensorflow,ageron/tensorflow,ageron/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,asimshankar/tensorflow,yongtang/tensorflow,jbedorf/tensorflow,paolodedios/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xzturn/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,jendap/tensorflow,Bismarrck/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,alsrgv/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,ppwwyyxx/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,jhseu/tensorflow,ppwwyyxx/tensorflow,apark263/tensorflow,jendap/tensorflow,asimshankar/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,karllessard/tensorflow,hfp/tensorflow-xsmm,freedomtan/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,jbedorf/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,jhseu/tensorflow,xzturn/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,ghchinoy/tensorflow,yongtang/tensorflow,DavidNorman/tensorflow,jbedorf/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,tensorflow/tensorflow,hfp/tensorflow-xsmm,ageron/tensorflow,xzturn/tensorflow,xzturn/tensorflow,kevin-coder/tensorflow-fork,adit-chandra/tensorflow,yongtang/tensorflow,asimshankar/tensorflow,Bismarrck/tensorflow,jbedorf/tensorflow,arborh/tensorflow,apark263/tensorflow,theflofly/tensorflow,kevin-coder/tensorflow-fork,adit-chandra/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,DavidNorman/tensorflow,karllessard/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,chemelnucfin/tensorflow,sarvex/tensorflow,sarvex/tensorflow,adit-chandra/tensorflow,chemelnucfin/tensorflow,petewarden/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,alsrgv/tensorflow,arborh/tensorflow,ghchinoy/tensorflow,sarvex/tensorflow,xzturn/tensorflow,ghchinoy/tensorflow,gautam1858/tensorflow,Bismarrck/tensorflow,renyi533/tensorflow,aam-at/tensorflow,jhseu/tensorflow,renyi533/tensorflow,annarev/tensorflow,jhseu/tensorflow,davidzchen/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,karllessard/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow | third_party/clang_toolchain/download_clang.bzl | third_party/clang_toolchain/download_clang.bzl | """ Helpers to download a recent clang release."""
def _get_platform_folder(os_name):
os_name = os_name.lower()
if os_name.startswith("windows"):
return "Win"
if os_name.startswith("mac os"):
return "Mac"
if not os_name.startswith("linux"):
fail("Unknown platform")
return "Linux_x64"
def _download_chromium_clang(
repo_ctx,
platform_folder,
package_version,
sha256,
out_folder):
cds_url = "https://commondatastorage.googleapis.com/chromium-browser-clang"
cds_file = "clang-%s.tgz" % package_version
cds_full_url = "{0}/{1}/{2}".format(cds_url, platform_folder, cds_file)
repo_ctx.download_and_extract(cds_full_url, output = out_folder, sha256 = sha256)
def download_clang(repo_ctx, out_folder):
""" Download a fresh clang release and put it into out_folder.
Clang itself will be located in 'out_folder/bin/clang'.
We currently download one of the latest releases of clang by the
Chromium project (see
https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
Args:
repo_ctx: An instance of repository_context object.
out_folder: A folder to extract the compiler into.
"""
# TODO(ibiryukov): we currently download and extract some extra tools in the
# clang release (e.g., sanitizers). We should probably remove the ones
# we don't need and document the ones we want provide in addition to clang.
# Latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
# can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
CLANG_REVISION = "347933"
CLANG_SUB_REVISION = 1
package_version = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION)
checksums = {
"Linux_x64": "cae3643fdf5d46fc9bc8731212bb37573547148d90b64b083165e090133d11b0",
"Mac": "083a0e91a38c06e568652313ac7372b17a101268f7d65533d721ca30413442b4",
"Win": "43160487cfc7e88076a369a2b6e8e4a0f42e104c28d8903f3aaa62d630aba949",
}
platform_folder = _get_platform_folder(repo_ctx.os.name)
_download_chromium_clang(
repo_ctx,
platform_folder,
package_version,
checksums[platform_folder],
out_folder,
)
| """ Helpers to download a recent clang release."""
def _get_platform_folder(os_name):
os_name = os_name.lower()
if os_name.startswith("windows"):
return "Win"
if os_name.startswith("mac os"):
return "Mac"
if not os_name.startswith("linux"):
fail("Unknown platform")
return "Linux_x64"
def _download_chromium_clang(
repo_ctx,
platform_folder,
package_version,
sha256,
out_folder):
cds_url = "https://commondatastorage.googleapis.com/chromium-browser-clang"
cds_file = "clang-%s.tgz" % package_version
cds_full_url = "{0}/{1}/{2}".format(cds_url, platform_folder, cds_file)
repo_ctx.download_and_extract(cds_full_url, output = out_folder, sha256 = sha256)
def download_clang(repo_ctx, out_folder):
""" Download a fresh clang release and put it into out_folder.
Clang itself will be located in 'out_folder/bin/clang'.
We currently download one of the latest releases of clang by the
Chromium project (see
https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
Args:
repo_ctx: An instance of repository_context object.
out_folder: A folder to extract the compiler into.
"""
# TODO(ibiryukov): we currently download and extract some extra tools in the
# clang release (e.g., sanitizers). We should probably remove the ones
# we don't need and document the ones we want provide in addition to clang.
# Latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
# can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
CLANG_REVISION = "346388"
CLANG_SUB_REVISION = 3
package_version = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION)
checksums = {
"Linux_x64": "d47b7ac4756c3f8e3bbfa0e81bf199ec8e9faa3a6b11573f0705e9c04af7ad51",
"Mac": "de2b0c701e19cda633ea02804866dd24d8506afb8cae51fbcce3415b76f4ded3",
"Win": "c7d27f13b41aa9eaaf9760903962e9b2b0f8261058df0d35170711dc60545a7d",
}
platform_folder = _get_platform_folder(repo_ctx.os.name)
_download_chromium_clang(
repo_ctx,
platform_folder,
package_version,
checksums[platform_folder],
out_folder,
)
| apache-2.0 | Python |
0bf2ed3b865180f3a46f3e34e3b90c9beccd720a | Create models.py | mcuringa/college-consultant,mcuringa/college-consultant | consultant/consultant/models.py | consultant/consultant/models.py | from django.db import models
from django import forms
from django.forms import ModelForm
class School(models.Model):
name = models.CharField(max_length=500)
four_year_college = models.Boolean(default=true)
is_public = models.Boolean(default=true)
population = models.IntergerField()
gender = models.CharField(max_length=2)
location = models.IntergerField(default=0)
zip_code = models.IntergerField(max_length=5)
state = models.CharField(max_length=2)
campus_style = models.IntergerField()
| agpl-3.0 | Python | |
b57add183aa5fb4466583756cd0a3c22b5ae8f0b | add feature extractor abstract class | eEcoLiDAR/eEcoLiDAR | laserchicken/feature_extractor.py | laserchicken/feature_extractor.py | class FeatureExtractor(object):
def get_requirements(self):
"""
Get a list of names of the point attributes that are needed for this feature extraction. For simple features,
this could be just x, y, and z. Other features can build on again other features to have been computed first.
:return: List of feature names
"""
NotImplementedError("Class %s doesn't implement aMethod()" % (self.__class__.__name__))
def get_names(self):
"""
Get a list of names of the feature values. This will return as many names as as the number feature values
that will be returned. For instance, if a feature extractor returns the first 3 Eigen values, this method
should return 3 names, for instance 'eigen_value_1', 'eigen_value_2' and 'eigen_value_3'.
:return: List of feature names
"""
NotImplementedError("Class %s doesn't implement aMethod()" % (self.__class__.__name__))
def extract_features(self, point_cloud, target):
"""
Extract the feature value(s) of the point cloud at location of the target.
:param point_cloud:
:param target:
:return:
"""
NotImplementedError("Class %s doesn't implement aMethod()" % (self.__class__.__name__))
| apache-2.0 | Python | |
c97ab456f22dca6a69e3775cc1353dbf3957389a | Add basic support for LimitlessLED | sfam/home-assistant,ErykB2000/home-assistant,open-homeautomation/home-assistant,Danielhiversen/home-assistant,toddeye/home-assistant,nugget/home-assistant,open-homeautomation/home-assistant,deisi/home-assistant,EricRho/home-assistant,tboyce021/home-assistant,theolind/home-assistant,mKeRix/home-assistant,Duoxilian/home-assistant,CCOSTAN/home-assistant,auduny/home-assistant,MungoRae/home-assistant,pottzer/home-assistant,EricRho/home-assistant,instantchow/home-assistant,JshWright/home-assistant,MartinHjelmare/home-assistant,hmronline/home-assistant,tboyce1/home-assistant,lukas-hetzenecker/home-assistant,bdfoster/blumate,eagleamon/home-assistant,Zac-HD/home-assistant,alexmogavero/home-assistant,postlund/home-assistant,HydrelioxGitHub/home-assistant,kennedyshead/home-assistant,oandrew/home-assistant,balloob/home-assistant,alanbowman/home-assistant,eagleamon/home-assistant,maddox/home-assistant,dmeulen/home-assistant,tinloaf/home-assistant,nevercast/home-assistant,bencmbrook/home-assistant,florianholzapfel/home-assistant,leoc/home-assistant,sanmiguel/home-assistant,partofthething/home-assistant,MungoRae/home-assistant,GenericStudent/home-assistant,betrisey/home-assistant,mKeRix/home-assistant,deisi/home-assistant,dorant/home-assistant,betrisey/home-assistant,g12mcgov/home-assistant,leoc/home-assistant,qedi-r/home-assistant,bdfoster/blumate,morphis/home-assistant,aequitas/home-assistant,ma314smith/home-assistant,bencmbrook/home-assistant,mikaelboman/home-assistant,vitorespindola/home-assistant,PetePriority/home-assistant,tchellomello/home-assistant,srcLurker/home-assistant,JshWright/home-assistant,MungoRae/home-assistant,xifle/home-assistant,molobrakos/home-assistant,aequitas/home-assistant,badele/home-assistant,hexxter/home-assistant,mahendra-r/home-assistant,soldag/home-assistant,jaharkes/home-assistant,keerts/home-assistant,partofthething/home-assistant,HydrelioxGitHub/home-assistant,Nzaga/home-assistant,shaftoe/home-assistant,adrienbrault/home-assistant,alexkolar/home-assistant,mezz64/home-assistant,fbradyirl/home-assistant,emilhetty/home-assistant,philipbl/home-assistant,emilhetty/home-assistant,michaelarnauts/home-assistant,eagleamon/home-assistant,ma314smith/home-assistant,aronsky/home-assistant,coteyr/home-assistant,miniconfig/home-assistant,luxus/home-assistant,teodoc/home-assistant,mikaelboman/home-assistant,theolind/home-assistant,happyleavesaoc/home-assistant,sffjunkie/home-assistant,jabesq/home-assistant,pschmitt/home-assistant,sffjunkie/home-assistant,Julian/home-assistant,sffjunkie/home-assistant,caiuspb/home-assistant,coteyr/home-assistant,happyleavesaoc/home-assistant,tboyce1/home-assistant,nkgilley/home-assistant,tomduijf/home-assistant,xifle/home-assistant,soldag/home-assistant,persandstrom/home-assistant,emilhetty/home-assistant,varunr047/homefile,luxus/home-assistant,sfam/home-assistant,mezz64/home-assistant,LinuxChristian/home-assistant,justyns/home-assistant,keerts/home-assistant,aoakeson/home-assistant,qedi-r/home-assistant,varunr047/homefile,w1ll1am23/home-assistant,robbiet480/home-assistant,caiuspb/home-assistant,GenericStudent/home-assistant,jnewland/home-assistant,kyvinh/home-assistant,philipbl/home-assistant,g12mcgov/home-assistant,fbradyirl/home-assistant,tmm1/home-assistant,sffjunkie/home-assistant,bdfoster/blumate,tmm1/home-assistant,titilambert/home-assistant,shaftoe/home-assistant,nugget/home-assistant,jaharkes/home-assistant,vitorespindola/home-assistant,jamespcole/home-assistant,xifle/home-assistant,g12mcgov/home-assistant,florianholzapfel/home-assistant,leppa/home-assistant,DavidLP/home-assistant,ma314smith/home-assistant,SEJeff/home-assistant,nnic/home-assistant,kennedyshead/home-assistant,sfam/home-assistant,dorant/home-assistant,Theb-1/home-assistant,happyleavesaoc/home-assistant,postlund/home-assistant,dorant/home-assistant,nkgilley/home-assistant,Smart-Torvy/torvy-home-assistant,PetePriority/home-assistant,turbokongen/home-assistant,sdague/home-assistant,miniconfig/home-assistant,ct-23/home-assistant,emilhetty/home-assistant,tchellomello/home-assistant,aoakeson/home-assistant,home-assistant/home-assistant,jawilson/home-assistant,jawilson/home-assistant,molobrakos/home-assistant,varunr047/homefile,varunr047/homefile,ct-23/home-assistant,morphis/home-assistant,home-assistant/home-assistant,kyvinh/home-assistant,dmeulen/home-assistant,robbiet480/home-assistant,rohitranjan1991/home-assistant,Cinntax/home-assistant,mikaelboman/home-assistant,justyns/home-assistant,auduny/home-assistant,JshWright/home-assistant,pottzer/home-assistant,devdelay/home-assistant,tboyce021/home-assistant,robjohnson189/home-assistant,nevercast/home-assistant,alanbowman/home-assistant,deisi/home-assistant,Julian/home-assistant,aoakeson/home-assistant,instantchow/home-assistant,michaelarnauts/home-assistant,Julian/home-assistant,hexxter/home-assistant,tomduijf/home-assistant,ewandor/home-assistant,theolind/home-assistant,Zac-HD/home-assistant,shaftoe/home-assistant,Zac-HD/home-assistant,open-homeautomation/home-assistant,Zyell/home-assistant,maddox/home-assistant,jnewland/home-assistant,PetePriority/home-assistant,mikaelboman/home-assistant,rohitranjan1991/home-assistant,Teagan42/home-assistant,betrisey/home-assistant,oandrew/home-assistant,jnewland/home-assistant,maddox/home-assistant,alexmogavero/home-assistant,srcLurker/home-assistant,pottzer/home-assistant,molobrakos/home-assistant,auduny/home-assistant,Zyell/home-assistant,robjohnson189/home-assistant,Duoxilian/home-assistant,persandstrom/home-assistant,EricRho/home-assistant,sdague/home-assistant,MartinHjelmare/home-assistant,tboyce1/home-assistant,Smart-Torvy/torvy-home-assistant,SEJeff/home-assistant,w1ll1am23/home-assistant,MungoRae/home-assistant,coteyr/home-assistant,joopert/home-assistant,teodoc/home-assistant,mahendra-r/home-assistant,DavidLP/home-assistant,LinuxChristian/home-assistant,florianholzapfel/home-assistant,oandrew/home-assistant,kyvinh/home-assistant,stefan-jonasson/home-assistant,Zyell/home-assistant,joopert/home-assistant,Zac-HD/home-assistant,alanbowman/home-assistant,deisi/home-assistant,aronsky/home-assistant,mKeRix/home-assistant,SEJeff/home-assistant,hmronline/home-assistant,varunr047/homefile,tinloaf/home-assistant,ewandor/home-assistant,keerts/home-assistant,nugget/home-assistant,persandstrom/home-assistant,philipbl/home-assistant,jabesq/home-assistant,miniconfig/home-assistant,open-homeautomation/home-assistant,mahendra-r/home-assistant,nnic/home-assistant,dmeulen/home-assistant,tomduijf/home-assistant,tmm1/home-assistant,mKeRix/home-assistant,jaharkes/home-assistant,miniconfig/home-assistant,stefan-jonasson/home-assistant,jamespcole/home-assistant,nnic/home-assistant,srcLurker/home-assistant,bdfoster/blumate,morphis/home-assistant,vitorespindola/home-assistant,Smart-Torvy/torvy-home-assistant,michaelarnauts/home-assistant,jabesq/home-assistant,sanmiguel/home-assistant,LinuxChristian/home-assistant,alexmogavero/home-assistant,nevercast/home-assistant,Duoxilian/home-assistant,balloob/home-assistant,betrisey/home-assistant,morphis/home-assistant,jaharkes/home-assistant,JshWright/home-assistant,emilhetty/home-assistant,jamespcole/home-assistant,leppa/home-assistant,HydrelioxGitHub/home-assistant,shaftoe/home-assistant,Cinntax/home-assistant,caiuspb/home-assistant,adrienbrault/home-assistant,alexkolar/home-assistant,ma314smith/home-assistant,rohitranjan1991/home-assistant,balloob/home-assistant,sanmiguel/home-assistant,titilambert/home-assistant,ct-23/home-assistant,robjohnson189/home-assistant,hexxter/home-assistant,CCOSTAN/home-assistant,lukas-hetzenecker/home-assistant,ErykB2000/home-assistant,bdfoster/blumate,aequitas/home-assistant,FreekingDean/home-assistant,fbradyirl/home-assistant,bencmbrook/home-assistant,mikaelboman/home-assistant,tboyce1/home-assistant,FreekingDean/home-assistant,toddeye/home-assistant,hexxter/home-assistant,stefan-jonasson/home-assistant,sander76/home-assistant,stefan-jonasson/home-assistant,alexkolar/home-assistant,justyns/home-assistant,eagleamon/home-assistant,robjohnson189/home-assistant,hmronline/home-assistant,Theb-1/home-assistant,happyleavesaoc/home-assistant,oandrew/home-assistant,teodoc/home-assistant,MartinHjelmare/home-assistant,luxus/home-assistant,keerts/home-assistant,alexmogavero/home-assistant,ct-23/home-assistant,badele/home-assistant,florianholzapfel/home-assistant,pschmitt/home-assistant,Nzaga/home-assistant,deisi/home-assistant,Duoxilian/home-assistant,srcLurker/home-assistant,ct-23/home-assistant,instantchow/home-assistant,badele/home-assistant,hmronline/home-assistant,LinuxChristian/home-assistant,CCOSTAN/home-assistant,DavidLP/home-assistant,MungoRae/home-assistant,ErykB2000/home-assistant,leoc/home-assistant,devdelay/home-assistant,devdelay/home-assistant,Danielhiversen/home-assistant,Julian/home-assistant,ewandor/home-assistant,LinuxChristian/home-assistant,turbokongen/home-assistant,philipbl/home-assistant,Theb-1/home-assistant,sander76/home-assistant,Teagan42/home-assistant,xifle/home-assistant,Smart-Torvy/torvy-home-assistant,kyvinh/home-assistant,tinloaf/home-assistant,devdelay/home-assistant,dmeulen/home-assistant,leoc/home-assistant,Nzaga/home-assistant,hmronline/home-assistant,sffjunkie/home-assistant | homeassistant/components/light/limitlessled.py | homeassistant/components/light/limitlessled.py | """
homeassistant.components.light.limitlessled
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for LimitlessLED bulbs, also known as...
EasyBulb
AppLight
AppLamp
MiLight
LEDme
dekolight
iLight
"""
import random
import logging
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF, DEVICE_DEFAULT_NAME
from homeassistant.components.light import ATTR_BRIGHTNESS
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
try:
import ledcontroller
except ImportError:
_LOGGER.exception("Error while importing dependency ledcontroller.")
return
led = ledcontroller.LedController(config['host'])
lights = []
for i in range(1, 5):
if 'group_%d_name' % (i) in config:
lights.append(
LimitlessLED(
led,
i,
config['group_%d_name' % (i)],
STATE_OFF
)
)
add_devices_callback(lights)
class LimitlessLED(ToggleEntity):
def __init__(self, led, group, name, state, brightness=180):
self.led = led
self.group = group
# LimitlessLEDs don't report state, we have track it ourselves.
self.led.off(self.group)
self._name = name or DEVICE_DEFAULT_NAME
self._state = state
self._brightness = brightness
@property
def should_poll(self):
""" No polling needed for a demo light. """
return False
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def state(self):
""" Returns the name of the device if any. """
return self._state
@property
def state_attributes(self):
""" Returns optional state attributes. """
if self.is_on:
return {
ATTR_BRIGHTNESS: self._brightness,
}
@property
def is_on(self):
""" True if device is on. """
return self._state == STATE_ON
def turn_on(self, **kwargs):
""" Turn the device on. """
self._state = STATE_ON
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
self.led.set_brightness(self._brightness, self.group)
def turn_off(self, **kwargs):
""" Turn the device off. """
self._state = STATE_OFF
self.led.off(self.group)
| apache-2.0 | Python | |
278a69902937812a87de0fe476787cc0f702231f | Create Triangle.py | SpAiNiOr/mystudy,SpAiNiOr/mystudy,SpAiNiOr/mystudy | learning/test/Triangle.py | learning/test/Triangle.py | class Triangle(object):
number_of_sides = 3
def __init__(self, angle1, angle2, angle3):
self.angle1 = angle1
self.angle2 = angle2
self.angle3 = angle3
def check_angles(self):
if (self.angle1+self.angle2+self.angle3) == 180:
return True
else:
return False
my_triangle = Triangle(90, 30, 60)
print my_triangle.number_of_sides
print my_triangle.check_angles()
class Equilateral(Triangle):
angle = 60
# means Equilateral is all the way same, so we don't need user to provide a arg
def __init__(self):
self.angle1 = 60
self.angle2 = 60
self.angle3 = 60
| apache-2.0 | Python | |
99c5cedcb910a8fe7919bb773f39da76da51332b | improve module description | charbeljc/account-financial-tools,diagramsoftware/account-financial-tools,factorlibre/account-financial-tools,andhit-r/account-financial-tools,andrius-preimantas/account-financial-tools,factorlibre/account-financial-tools,dvitme/account-financial-tools,luc-demeyer/account-financial-tools,damdam-s/account-financial-tools,Antiun/account-financial-tools,cysnake4713/account-financial-tools,OpenPymeMx/account-financial-tools,abstract-open-solutions/account-financial-tools,alhashash/account-financial-tools,nagyv/account-financial-tools,bringsvor/account-financial-tools,Pexego/account-financial-tools,Domatix/account-financial-tools,raycarnes/account-financial-tools,Nowheresly/account-financial-tools,syci/account-financial-tools,Endika/account-financial-tools,DarkoNikolovski/account-financial-tools,open-synergy/account-financial-tools,amoya-dx/account-financial-tools,andrius-preimantas/account-financial-tools,xpansa/account-financial-tools,VitalPet/account-financial-tools,pedrobaeza/account-financial-tools,VitalPet/account-financial-tools,cysnake4713/account-financial-tools,vauxoo-dev/account-financial-tools,luc-demeyer/account-financial-tools,taktik/account-financial-tools,OpenPymeMx/account-financial-tools,andhit-r/account-financial-tools,akretion/account-financial-tools,iDTLabssl/account-financial-tools,OpenPymeMx/account-financial-tools,abstract-open-solutions/account-financial-tools,credativUK/account-financial-tools,syci/account-financial-tools,rschnapka/account-financial-tools,yelizariev/account-financial-tools,raycarnes/account-financial-tools,taktik/account-financial-tools,diagramsoftware/account-financial-tools,lepistone/account-financial-tools,gurneyalex/account-financial-tools,open-synergy/account-financial-tools,Antiun/account-financial-tools,adhoc-dev/oca-account-financial-tools,damdam-s/account-financial-tools,akretion/account-financial-tools,acsone/account-financial-tools,dvitme/account-financial-tools,rschnapka/account-financial-tools,yelizariev/account-financial-tools,pedrobaeza/account-financial-tools,Domatix/account-financial-tools,ClearCorp-dev/account-financial-tools,Pexego/account-financial-tools,Domatix/account-financial-tools,lepistone/account-financial-tools,credativUK/account-financial-tools,gurneyalex/account-financial-tools,xpansa/account-financial-tools,DarkoNikolovski/account-financial-tools,vauxoo-dev/account-financial-tools,acsone/account-financial-tools,adhoc-dev/oca-account-financial-tools,Endika/account-financial-tools,iDTLabssl/account-financial-tools,nagyv/account-financial-tools,alhashash/account-financial-tools,acsone/account-financial-tools,Nowheresly/account-financial-tools,open-synergy/account-financial-tools,VitalPet/account-financial-tools,bringsvor/account-financial-tools,amoya-dx/account-financial-tools,charbeljc/account-financial-tools,ClearCorp-dev/account-financial-tools | account_journal_period_close/__openerp__.py | account_journal_period_close/__openerp__.py | # -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
"name": "Account Journal Period Close",
"version": "1.0",
"author": "ACSONE SA/NV",
"maintainer": "ACSONE SA/NV",
"website": "http://www.acsone.eu",
"images": [],
"category": "Accounting",
"depends": [
"account"],
"description": """
Close period per journal
========================
This module allows fine grained control of period closing.
Each journal can be closed independently for any period
(using buttons on the fiscal period view).
A common use case is letting accountants close the sale
and purchase journals when the VAT declaration is done for
a given period, while leaving the miscellaneous journal open.
From a technical standpoint, the module leverages the
account.journal.period model that is present in Odoo core.
""",
"data": ['view/account_view.xml'],
"demo": [],
"test": [],
"licence": "AGPL-3",
"installable": True,
"auto_install": False,
"application": True,
}
| # -*- coding: utf-8 -*-
#
#
# Authors: Adrien Peiffer
# Copyright (c) 2014 Acsone SA/NV (http://www.acsone.eu)
# All Rights Reserved
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contact a Free Software
# Service Company.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
"name": "Account Journal Period Close",
"version": "1.0",
"author": "ACSONE SA/NV",
"maintainer": "ACSONE SA/NV",
"website": "http://www.acsone.eu",
"images": [],
"category": "Accounting",
"depends": [
"account"],
"description": """
Account Journal Period Close
==============================
this module allows to add some account move in a close period.
To do this, you have to specify account journals on which will allow writing of account move on the period form view
""",
"data": ['view/account_view.xml'],
"demo": [],
"test": [],
"licence": "AGPL-3",
"installable": True,
"auto_install": False,
"application": True,
}
| agpl-3.0 | Python |
94d342cc643b89f33e637e86e064cce747635696 | Update release version | BryceLohr/authentic,adieu/authentic2,BryceLohr/authentic,pu239ppy/authentic2,adieu/authentic2,adieu/authentic2,adieu/authentic2,pu239ppy/authentic2,BryceLohr/authentic,pu239ppy/authentic2,BryceLohr/authentic,pu239ppy/authentic2 | authentic2/__init__.py | authentic2/__init__.py | import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), 'vendor'))
# The version of Authentic
VERSION = "2.0.2"
| import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), 'vendor'))
# The version of Authentic
VERSION = "2.0.1"
| agpl-3.0 | Python |
531ed8190dcc1c29c642867e9a916cf3aad89e5b | add module `models`, including 'ValidateModelMixin` | lookup/lu-dj-utils,lookup/lu-dj-utils | lu_dj_utils/models.py | lu_dj_utils/models.py | # coding: utf-8
"""Source: lookup_www.common.models (but just a subset)
"""
from __future__ import absolute_import, print_function, unicode_literals
###############################################################################
# MIXINS
###############################################################################
class ValidateModelMixin(object):
"""Make :meth:`save` call :meth:`full_clean`.
.. warning:
This should be the left-most mixin/super-class of a model.
Do you think Django models ``save`` method will validate all fields
(i.e. call ``full_clean``) before saving or any time at all? Wrong!
I discovered this awful truth when I couldn't understand why
a model object with an email field (without `blank=True`) could be
saved with an empty string as email address.
More info:
* "Why doesn't django's model.save() call full clean?"
http://stackoverflow.com/questions/4441539/
* "Model docs imply that ModelForm will call Model.full_clean(),
but it won't."
https://code.djangoproject.com/ticket/13100
"""
def save(self, *args, **kwargs):
"""Call :meth:`full_clean` before saving."""
self.full_clean()
super(ValidateModelMixin, self).save(*args, **kwargs)
| bsd-3-clause | Python | |
d1e61c154d38eb63c03ad5f3111fd230f33b1204 | Add politifact plugin. | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria | plumeria/plugins/politifact.py | plumeria/plugins/politifact.py | import re
import plumeria.util.http as http
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("politifact", "fact check", category="Search", params=[Text('name')])
@rate_limit()
async def politifact(message, name):
"""
Fact check for recently said/checked statements by a person.
Example::
fact check barack obama
"""
name_dashed = re.sub(" +", "-", name.lower())
r = await http.get("http://www.politifact.com/api/statements/truth-o-meter/people/{}/json/".format(name_dashed),
params={
"n": 20,
})
results = SafeStructure(r.json())
if not results:
raise CommandError("No results found. Either the person isn't fact checked by politifact.com, you "
"didn't write the person's entire name, or you misspelled the name.")
return build_mapping(
[(e.ruling.ruling, "{} ({})".format(e.ruling_headline, e.statement_context)) for e in results[:10]])
| mit | Python | |
6c142b44775007937903409b61829eb8dd594f99 | add migrator for previous bugs | lbryio/lbry,lbryio/lbry,lbryio/lbry | lbrynet/extras/daemon/migrator/migrate9to10.py | lbrynet/extras/daemon/migrator/migrate9to10.py | import sqlite3
import os
def do_migration(conf):
db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = "select stream_hash, sd_hash from main.stream"
for stream_hash, sd_hash in cursor.execute(query):
head_blob_hash = cursor.execute(
"select blob_hash from stream_blob where position = 0 and stream_hash = ?",
(stream_hash,)
).fetchone()
if not head_blob_hash:
continue
cursor.execute("update blob set should_announce=1 where blob_hash in (?, ?)", (sd_hash, head_blob_hash[0],))
connection.commit()
connection.close()
| mit | Python | |
c634f327d63a74f115408daaae40b8ffeff4a2e8 | add start of matchpixels | ajtag/ln2015 | utils/match_pixels.py | utils/match_pixels.py | __author__ = 'ajtag'
import csv
import xml.etree.ElementTree as ET
import os.path
import math
from collections import namedtuple
white = 255,255,255
Lamp = namedtuple("Lamp", ["x", "y", 'name', 'dmx', 'channel'])
def parse_imagemask_svg(x, y, scale, x_offset = 19, y_offset = 0):
tree = ET.parse('../Resources/LS-TRIN-0023 East Mall.svg')
root = tree.getroot()
groups = root.findall('{http://www.w3.org/2000/svg}g')
mnlx, mxlx, mnly, mxly = None, None, None, None
tmplamps = []
for g in groups:
paths = g.findall('{http://www.w3.org/2000/svg}path')
if mnlx is None:
mnlx = float(paths[0].attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cx'])
mxlx = float(paths[0].attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cx'])
mnly = float(paths[0].attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cy'])
mxly = float(paths[0].attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cy'])
for p in paths:
lampx = float(p.attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cx'])
lampy = float(p.attrib['{http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd}cy'])
mnlx = min(mnlx, lampx)
mxlx = max(mxlx, lampx)
mnly = min(mnly, lampy)
mxly = max(mxly, lampy)
tmplamps.append((lampx, lampy))
return [(x_offset + ((lamp[0] - mnlx)/(mxlx - mnlx) * x * scale), y_offset + ((lamp[1] - mnly)/(mxly - mnly) * y * scale)) for lamp in tmplamps]
f = open(os.path.join('..', 'Resources', 'pixels.csv'))
ch = csv.DictReader(f)
madrixlamps = [Lamp(i['X'], i['Y'], i['Name'], i['Universe'], i['Channel']) for i in ch]
f.close()
scale = 1
# missing light to the left of 19px in madrix lamps
planlamps = [Lamp(i[0], i[1], None, None, None) for i in parse_imagemask_svg(132, 70, scale)]
for i in enumerate(zip(madrixlamps, planlamps)):
print(i)
def distance(l1, l2):
return math.sqrt(pow(l1.x - l2.x, 2) + pow(l1.y - l2.y, 2, 2))
| mit | Python | |
6979f943c6320ba077eb0ff93159854e929cc11f | Bump version to 0.7.0 | thombashi/sqlitebiter,thombashi/sqlitebiter | sqlitebiter/_version.py | sqlitebiter/_version.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
VERSION = "0.7.0"
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
VERSION = "0.6.2"
| mit | Python |
d4a13cdea28d109cb2595fb6ab03c70b4c060483 | Remove unused import | vivek8943/tracker_project,vivek8943/tracker_project,abarto/tracker_project,abarto/tracker_project,abarto/tracker_project,vivek8943/tracker_project | tracker_project/tracker/sockets.py | tracker_project/tracker/sockets.py | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from kombu import BrokerConnection
from kombu.mixins import ConsumerMixin
from socketio.namespace import BaseNamespace
from socketio.sdjango import namespace
from .queues import notifications_queue
@namespace('/notifications')
class NotificationsNamespace(BaseNamespace):
def __init__(self, *args, **kwargs):
super(NotificationsNamespace, self).__init__(*args, **kwargs)
def get_initial_acl(self):
return ['recv_connect']
def recv_connect(self):
if self.request.user.is_authenticated():
self.lift_acl_restrictions()
self.spawn(self._dispatch)
else:
self.disconnect(silent=True)
def _dispatch(self):
with BrokerConnection(settings.AMPQ_URL) as connection:
NotificationsConsumer(connection, self.socket, self.ns_name).run()
class NotificationsConsumer(ConsumerMixin):
def __init__(self, connection, socket, ns_name):
self.connection = connection
self.socket = socket
self.ns_name = ns_name
def get_consumers(self, Consumer, channel):
return [Consumer(queues=[notifications_queue], callbacks=[self.process_notification])]
def process_notification(self, body, message):
self.socket.send_packet(dict(
type='event',
name='notification',
args=(body,),
endpoint=self.ns_name
))
message.ack() | from __future__ import absolute_import, unicode_literals
import logging
from django.conf import settings
from kombu import BrokerConnection
from kombu.mixins import ConsumerMixin
from socketio.namespace import BaseNamespace
from socketio.sdjango import namespace
from .queues import notifications_queue
@namespace('/notifications')
class NotificationsNamespace(BaseNamespace):
def __init__(self, *args, **kwargs):
super(NotificationsNamespace, self).__init__(*args, **kwargs)
def get_initial_acl(self):
return ['recv_connect']
def recv_connect(self):
if self.request.user.is_authenticated():
self.lift_acl_restrictions()
self.spawn(self._dispatch)
else:
self.disconnect(silent=True)
def _dispatch(self):
with BrokerConnection(settings.AMPQ_URL) as connection:
NotificationsConsumer(connection, self.socket, self.ns_name).run()
class NotificationsConsumer(ConsumerMixin):
def __init__(self, connection, socket, ns_name):
self.connection = connection
self.socket = socket
self.ns_name = ns_name
def get_consumers(self, Consumer, channel):
return [Consumer(queues=[notifications_queue], callbacks=[self.process_notification])]
def process_notification(self, body, message):
self.socket.send_packet(dict(
type='event',
name='notification',
args=(body,),
endpoint=self.ns_name
))
message.ack() | mit | Python |
6a999c5a006f603c2218a4cfbc1d6ccb38178bb5 | Fix up utils and node | Storj/downstream-node,Storj/downstream-node | downstream_node/lib/utils.py | downstream_node/lib/utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from heartbeat import Challenge
def query_to_list(query):
""" Takes a model query thing nad returns a list of dicts with the data
Example:
..:
result = utils.query_to_list(MyTable.query)
:param query: Query object
:return: List of dicts representing a model
"""
lst = []
for row in query.all():
row_dict = {}
for col in row.__mapper__.mapped_table.columns:
if col.name not in ['id', 'response']:
row_dict[col.name] = getattr(row, col.name)
lst.append(row_dict)
return lst
def load_heartbeat(heartbeat, query):
""" Loads a Heartbeat object with query data
:param heartbeat: Heartbeat instance object
:param query: query as a SQLAlchemy query result
:return: Loaded Heartbeat
"""
challenges = []
for row in query:
challenge = Challenge(row.block, row.seed)
challenge.response = row.response
challenges.append(challenge)
heartbeat.challenges = challenges
return heartbeat
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from heartbeat import Challenge
def model_to_json(model):
""" Returns a JSON representation of an SQLAlchemy-backed object.
From Zato: https://github.com/zatosource/zato
"""
_json = {}
_json['fields'] = {}
_json['pk'] = getattr(model, 'id')
for col in model._sa_class_manager.mapper.mapped_table.columns:
_json['fields'][col.name] = getattr(model, col.name)
return json.dumps([_json])
def query_to_list(query):
lst = []
for row in query.all():
row_dict = {}
for col in row.__mapper__.mapped_table.columns:
if col.name not in ['id', 'response']:
row_dict[col.name] = getattr(row, col.name)
lst.append(row_dict)
return lst
def load_heartbeat(heartbeat, query):
challenges = []
for row in query:
challenge = Challenge(row.block, row.seed)
challenge.response = row.response
challenges.append(challenge)
heartbeat.challenges = challenges
return heartbeat
| mit | Python |
3b5dc1cbc520fe59886c1e37c74a7f3e11f7150d | Remove of debugging code | SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC,SKA-ScienceDataProcessor/RC | MS1/ddp-erlang-style/create_floats.py | MS1/ddp-erlang-style/create_floats.py | #!/usr/bin/python
__author__ = 'mcsquaredjr'
from dna_lib import *
from string import Template
import subprocess
CMD = "../create-floats INPUT $ITEMCOUNT $CHUNKCOUNT $CHUNKNO"
DEBUG = False
lines = my_lines()
myip = get_ip()
chunks = chunk_numbers()
for i, line in enumerate(lines):
port = int(MIN_PORT) + i + 1
msg = "PYTHON DEBUG: IP: {0}\tPORT: {1}\tCHUNK NUMBER:{2}\t"
print msg.format(myip, port, chunks[i])
cmd_str = Template(CMD).substitute(ITEMCOUNT=ITEMCOUNT, CHUNKCOUNT=chunk_count(), CHUNKNO=chunks[i])
if DEBUG:
print cmd_str
else:
subprocess.call(cmd_str, shell=True)
| #!/usr/bin/python
__author__ = 'mcsquaredjr'
from dna_lib import *
from string import Template
import subprocess
CMD = "../create-floats INPUT $ITEMCOUNT $CHUNKCOUNT $CHUNKNO"
DEBUG = False
lines = my_lines()
myip = get_ip()
chunks = chunk_numbers()
for i, line in enumerate(lines):
port = int(MIN_PORT) + i + 1
msg = "PYTHON DEBUG: IP: {0}\tPORT: {1}\tCHUNK NUMBER:{2}\t"
print msg.format(myip, port, chunks[i])
cmd_str = Template(CMD).substitute(ITEMCOUNT=ITEMCOUNT, CHUNKCOUNT=chunk_count(), CHUNKNO=chunks[i])
if DEBUG:
print cmd_str
else:
if i == 0 or i == 11:
subprocess.call(cmd_str, shell=True)
| apache-2.0 | Python |
9f401479498b6694a6a6040a50de65518290a4cd | add wrap_nested for wrapping nested dicts | pankshok/xoinvader,pkulev/xoinvader | xoinvader/settings.py | xoinvader/settings.py | """
Module for handling settings.
"""
class Settings(dict):
"""Container for storing all game settings."""
def __init__(self, wrap_nested=False, *args, **kwargs):
super(Settings, self).__init__(*args, **kwargs)
if wrap_nested:
self._wrap_nested()
def _wrap_nested(self):
for key, value in self.iteritems():
if type(value) == dict:
self[key] = Settings(True, value)
def __getattr__(self, name):
return self[name]
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
| """
Module for handling settings.
"""
class Settings(dict):
"""Container for storing all game settings."""
def __init__(self, *args, **kwargs):
super(Settings, self).__init__(*args, **kwargs)
def __getattr__(self, name):
return self[name]
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
| mit | Python |
b5080ff954af99379b08cb7a6cec636f41073a34 | Allow piping from standard input | XueWei/gTTS,pndurette/gTTS,XueWei/gTTS | bin/gtts-cli.py | bin/gtts-cli.py | #! /usr/bin/python
from __future__ import print_function
from gtts import gTTS
from gtts import __version__
import sys
import argparse
import os
import codecs
def languages():
"""Sorted pretty printed string of supported languages"""
return ", ".join(sorted("{}: '{}'".format(gTTS.LANGUAGES[k], k) for k in gTTS.LANGUAGES))
# Args
desc = "Creates an mp3 file from spoken text via the Google Text-to-Speech API ({v})".format(v=__version__)
parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.RawTextHelpFormatter)
text_group = parser.add_mutually_exclusive_group(required=True)
text_group.add_argument('text', nargs='?', help="text to speak")
text_group.add_argument('-f', '--file', help="file to speak")
parser.add_argument("-o", '--destination', help="destination mp3 file", action='store')
parser.add_argument('-l', '--lang', default='en', help="ISO 639-1/IETF language tag to speak in:\n" + languages())
parser.add_argument('--debug', default=False, action="store_true")
args = parser.parse_args()
try:
if args.text:
if args.text == "-":
text = sys.stdin.read()
else:
text = args.text
else:
with codecs.open(args.file, "r", "utf-8") as f:
text = f.read()
# TTSTF (Text to Speech to File)
tts = gTTS(text=text, lang=args.lang, debug=args.debug)
if args.destination:
tts.save(args.destination)
else:
tts.write_to_fp(os.fdopen(sys.stdout.fileno(), "wb"))
except Exception as e:
if args.destination:
print(str(e))
else:
print("ERROR: ", e, file=sys.stderr)
| #! /usr/bin/python
from __future__ import print_function
from gtts import gTTS
from gtts import __version__
import sys
import argparse
import os
import codecs
def languages():
"""Sorted pretty printed string of supported languages"""
return ", ".join(sorted("{}: '{}'".format(gTTS.LANGUAGES[k], k) for k in gTTS.LANGUAGES))
# Args
desc = "Creates an mp3 file from spoken text via the Google Text-to-Speech API ({v})".format(v=__version__)
parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.RawTextHelpFormatter)
text_group = parser.add_mutually_exclusive_group(required=True)
text_group.add_argument('text', nargs='?', help="text to speak")
text_group.add_argument('-f', '--file', help="file to speak")
parser.add_argument("-o", '--destination', help="destination mp3 file", action='store')
parser.add_argument('-l', '--lang', default='en', help="ISO 639-1/IETF language tag to speak in:\n" + languages())
parser.add_argument('--debug', default=False, action="store_true")
args = parser.parse_args()
try:
if args.text:
text = args.text
else:
with codecs.open(args.file, "r", "utf-8") as f:
text = f.read()
# TTSTF (Text to Speech to File)
tts = gTTS(text=text, lang=args.lang, debug=args.debug)
if args.destination:
tts.save(args.destination)
else:
tts.write_to_fp(os.fdopen(sys.stdout.fileno(), "wb"))
except Exception as e:
if args.destination:
print(str(e))
else:
print("ERROR: ", e, file=sys.stderr)
| mit | Python |
3395e7f1954f939e6d7edfc9cf7886e4256ce6fc | Solve vehicle forces using a matrix. TODO: get results to match previous method of using the brute force equations. | kktse/uwfm | ymd/ymd_calculator.py | ymd/ymd_calculator.py | import numpy as np
import math
def tire_model(alpha):
""" returns tire force generated at a given slip angle
input - alpha (degree)
output - lateral tire force (N)
"""
# assume 600N/degree cornering stiffness
return 100*alpha
def main():
# define test conditions
#beta = math.radians(1) # degrees
#delta = math.radians(0) # degrees
beta = 1
delta = 0
# define vehicle parameters
wheelbase = 1000 # in milimetres [mm]
trackwidth = 700 # in milimetres [mm]
weightdist = .70 # percentage [%]
mass = 250 # kilograms [kg]
cornerstiff = 100
# calculate vehicle characteristics
a = wheelbase * weightdist / 1000
b = wheelbase/1000 - a
# solve for tire forces
force_f = tire_model(beta+delta)
force_r = tire_model(beta)
f_lat = force_f + force_r
a_lat = f_lat/mass
yaw = a*force_f - b*force_r
# develop matrix
# TODO: better naming perhaps?
a11 = cornerstiff*math.sin(delta)
a12 = cornerstiff*math.sin(delta)
a13 = cornerstiff
a14 = cornerstiff
a21 = cornerstiff*math.cos(delta)
a22 = cornerstiff*math.cos(delta)
a23 = 0
a24 = 0
a31 = cornerstiff*(trackwidth/1000/2*math.sin(delta) + a*math.cos(delta))
a32 = cornerstiff*(-trackwidth/1000/2*math.sin(delta) + a*math.cos(delta))
a33 = cornerstiff*-b
a34 = cornerstiff*-b
matrix = np.matrix([[a11, a12, a13, a14],
[a21, a22, a23, a24],
[a31, a32, a33, a34]])
slipangles = np.matrix('1;1;1;1')
result = matrix*slipangles
print(result)
print(f_lat)
print(a_lat)
print(yaw)
print('I like turtles')
x = np.array([1, 2, 3])
print(x)
force_fr = tire_model(2)
print (force_fr)
if __name__ == "__main__":
main()
| import numpy as np
def tire_model(alpha):
""" returns tire force generated at a given slip angle
input - alpha (degree)
output - lateral tire force (N)
"""
# assume 600N/degree cornering stiffness
return 100*alpha
def main():
# define test conditions
beta = 9 # degrees
delta = 0 # degrees
# define vehicle parameters
wheelbase = 1000 # in milimetres [mm]
trackwidth = 700 # in milimetres [mm]
weightdist = .50 # percentage [%]
mass = 250 # kilograms [kg]
# calculate vehicle characteristics
a = wheelbase * weightdist / 1000
b = wheelbase/1000 - a
# solve for tire forces
force_f = tire_model(beta+delta)
force_r = tire_model(beta)
f_lat = force_f + force_r
a_lat = f_lat/mass
yaw = a*force_f - b*force_r
print(f_lat)
print(a_lat)
print(yaw)
print('I like turtles')
x = np.array([1, 2, 3])
print(x)
force_fr = tire_model(2)
print (force_fr)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
ad2b8bce02c7b7b7ab477fc7fccc1b38fb60e63a | Allow for the case when a header isn't needed | Astroua/TurbuStat,e-koch/TurbuStat | turbustat/statistics/input_base.py | turbustat/statistics/input_base.py | # Licensed under an MIT open source license - see LICENSE
from astropy.io.fits import PrimaryHDU
from spectral_cube import SpectralCube
from spectral_cube.lower_dimensional_structures import LowerDimensionalObject
import numpy as np
def input_data(data, no_header=False):
'''
Accept a variety of input data forms and return those expected by the
various statistics.
Parameters
----------
data : astropy.io.fits.PrimaryHDU, SpectralCube,
spectral_cube.LowerDimensionalObject, np.ndarray or a tuple/list
with the data and the header
Data to be used with a given statistic or distance metric. no_header
must be enabled when passing only an array in.
no_header : bool, optional
When enabled, returns only the data without the header.
Returns
-------
ouput_data : tuple or np.ndarray
A tuple containing the data and the header. Or an array when no_header
is enabled.
'''
if isinstance(data, PrimaryHDU):
output_data = (data.data, data.header)
elif isinstance(data, SpectralCube):
output_data = (data.filled_data[:].value, data.header)
elif isinstance(data, LowerDimensionalObject):
output_data = (data.value, data.header)
elif isinstance(data, tuple) or isinstance(data, list):
if len(data) != 2:
raise TypeError("Must have two items: data and the header.")
output_data = data
elif isinstance(data, np.ndarray):
if not no_header:
raise TypeError("no_header must be enabled when giving data"
" without a header.")
output_data = (data, )
else:
raise TypeError("Input data is not of an accepted form:"
" astropy.io.fits.PrimaryHDU, SpectralCube,"
" spectral_cube.LowerDimensionalObject or a tuple or"
" list containing the data and header, in that order.")
if no_header:
return output_data[0]
return output_data
| # Licensed under an MIT open source license - see LICENSE
from astropy.io.fits import PrimaryHDU
from spectral_cube import SpectralCube
from spectral_cube.lower_dimensional_structures import LowerDimensionalObject
def input_data(data):
'''
Accept a variety of input data forms and return those expected by the
various statistics.
'''
if isinstance(data, PrimaryHDU):
return (data.data, data.header)
elif isinstance(data, SpectralCube):
return (data.filled_data[:].value, data.header)
elif isinstance(data, LowerDimensionalObject):
return (data.value, data.header)
elif isinstance(data, tuple) or isinstance(data, list):
if len(data) != 2:
raise TypeError("Must have two items: data and the header.")
return data
else:
raise TypeError("Input data is not of an accepted form:"
" astropy.io.fits.PrimaryHDU, SpectralCube,"
" spectral_cube.LowerDimensionalObject or a tuple or"
" list containing the data and header, in that order.")
| mit | Python |
ccba0b9af690d98f12e5ee36b024c6e781709cba | handle exception from rmtree of a tmpdir | conda/kapsel,conda/kapsel | project/internal/test/tmpfile_utils.py | project/internal/test/tmpfile_utils.py | from __future__ import print_function, absolute_import
import tempfile
import shutil
import os
import sys
from project.internal.makedirs import makedirs_ok_if_exists
from project.local_state_file import LocalStateFile
local_tmp = os.path.abspath("./build/tmp")
makedirs_ok_if_exists(local_tmp)
class TmpDir(object):
def __init__(self, prefix):
self._dir = tempfile.mkdtemp(prefix=prefix, dir=local_tmp)
def __exit__(self, type, value, traceback):
try:
shutil.rmtree(path=self._dir)
except Exception as e:
# prefer original exception to rmtree exception
if value is None:
print("Exception cleaning up TmpDir %s: %s" % (self._dir, str(e)), file=sys.stderr)
raise e
else:
print("Failed to clean up TmpDir %s: %s" % (self._dir, str(e)), file=sys.stderr)
raise value
def __enter__(self):
return self._dir
def with_directory_contents(contents, func):
with (TmpDir(prefix="test-")) as dirname:
for filename, file_content in contents.items():
path = os.path.join(dirname, filename)
makedirs_ok_if_exists(os.path.dirname(path))
f = open(path, 'w')
f.write(file_content)
f.flush()
f.close()
func(os.path.realpath(dirname))
def with_temporary_file(func, dir=None):
if dir is None:
dir = local_tmp
import tempfile
# Windows throws a permission denied if we use delete=True for
# auto-delete, and then try to open the file again ourselves
# with f.name. So we manually delete in the finally block
# below.
f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
try:
func(f)
finally:
f.close()
os.remove(f.name)
def with_file_contents(contents, func, dir=None):
def with_file_object(f):
f.write(contents.encode("UTF-8"))
f.flush()
# Windows will get mad if we try to rename it without closing,
# and some users of with_file_contents want to rename it.
f.close()
func(f.name)
with_temporary_file(with_file_object, dir=dir)
def tmp_local_state_file():
import tempfile
f = tempfile.NamedTemporaryFile(dir=local_tmp)
local_state = LocalStateFile(f.name)
f.close()
return local_state
| import tempfile
import shutil
import os
from project.internal.makedirs import makedirs_ok_if_exists
from project.local_state_file import LocalStateFile
local_tmp = os.path.abspath("./build/tmp")
makedirs_ok_if_exists(local_tmp)
class TmpDir(object):
def __init__(self, prefix):
self._dir = tempfile.mkdtemp(prefix=prefix, dir=local_tmp)
def __exit__(self, type, value, traceback):
shutil.rmtree(path=self._dir)
def __enter__(self):
return self._dir
def with_directory_contents(contents, func):
with (TmpDir(prefix="test-")) as dirname:
for filename, file_content in contents.items():
path = os.path.join(dirname, filename)
makedirs_ok_if_exists(os.path.dirname(path))
f = open(path, 'w')
f.write(file_content)
f.flush()
f.close()
func(os.path.realpath(dirname))
def with_temporary_file(func, dir=None):
if dir is None:
dir = local_tmp
import tempfile
# Windows throws a permission denied if we use delete=True for
# auto-delete, and then try to open the file again ourselves
# with f.name. So we manually delete in the finally block
# below.
f = tempfile.NamedTemporaryFile(dir=dir, delete=False)
try:
func(f)
finally:
f.close()
os.remove(f.name)
def with_file_contents(contents, func, dir=None):
def with_file_object(f):
f.write(contents.encode("UTF-8"))
f.flush()
# Windows will get mad if we try to rename it without closing,
# and some users of with_file_contents want to rename it.
f.close()
func(f.name)
with_temporary_file(with_file_object, dir=dir)
def tmp_local_state_file():
import tempfile
f = tempfile.NamedTemporaryFile(dir=local_tmp)
local_state = LocalStateFile(f.name)
f.close()
return local_state
| bsd-3-clause | Python |
fcdbb6f6a0dc102161f1f752202ae9b95a4afbc0 | simplify API | bluedynamics/bda.bfg.app,bluedynamics/bda.bfg.app | src/bda/bfg/app/browser/form.py | src/bda/bfg/app/browser/form.py | from yafowil.base import factory
from yafowil.controller import Controller
from paste.httpexceptions import HTTPFound
from bda.bfg.tile import Tile
from bda.bfg.app.browser.utils import make_url
class Form(Tile):
@property
def form(self):
"""Return yafowil compound.
Not implemented in base class.
"""
raise NotImplementedError(u"``form`` property must be provided "
u"by deriving object.")
def __call__(self, model, request):
self.model = model
self.request = request
return self._process_form()
def _process_form(self):
self.prepare()
if not self.show:
return ''
controller = Controller(self.form, self.request)
if not controller.next:
return controller.rendered
if isinstance(controller.next, HTTPFound):
self.redirect(controller.next.location())
return
return controller.next
class AddForm(Form):
"""form hooking the hidden value 'factory' to self.form on __call__
"""
def __call__(self, model, request):
self.model = model
self.request = request
form = self.form
form['factory'] = factory('proxy', value=request.params.get('factory'))
return self._process_form()
def next(self, request):
return HTTPFound(make_url(request.request, node=self.model.__parent__))
class EditForm(Form):
"""form hooking the hidden value 'from' to self.form on __call__
"""
def __call__(self, model, request):
self.model = model
self.request = request
form = self.form
form['from'] = factory('proxy', value=request.params.get('from'))
return self._process_form()
def next(self, request):
if request.get('from') == 'parent':
url = make_url(request.request, node=self.model.__parent__)
else:
url = make_url(request.request, node=self.model)
return HTTPFound(url) | from yafowil.base import factory
from yafowil.controller import Controller
from paste.httpexceptions import HTTPFound
from bda.bfg.tile import Tile
from bda.bfg.app.browser.utils import make_url
class Form(Tile):
@property
def form(self):
"""Return yafowil compound.
Not implemented in base class.
"""
raise NotImplementedError(u"``form`` property must be provided "
u"by deriving object.")
def __call__(self, model, request):
self.model = model
self.request = request
return self._process_form()
def _process_form(self, form=None):
self.prepare()
form = form or self.form
if not self.show:
return ''
controller = Controller(form, self.request)
if not controller.next:
return controller.rendered
if isinstance(controller.next, HTTPFound):
self.redirect(controller.next.location())
return
return controller.next
class AddForm(Form):
"""form hooking the hidden value 'factory' to self.form on __call__
"""
def __call__(self, model, request):
self.model = model
self.request = request
form = self.form
form['factory'] = factory('proxy', value=request.params.get('factory'))
return self._process_form(form)
def next(self, request):
return HTTPFound(make_url(request.request, node=self.model.__parent__))
class EditForm(Form):
"""form hooking the hidden value 'from' to self.form on __call__
"""
def __call__(self, model, request):
self.model = model
self.request = request
form = self.form
form['from'] = factory('proxy', value=request.params.get('from'))
return self._process_form(form)
def next(self, request):
if request.get('from') == 'parent':
url = make_url(request.request, node=self.model.__parent__)
else:
url = make_url(request.request, node=self.model)
return HTTPFound(url) | bsd-3-clause | Python |
21b6f0f94885493a74e5b28ac6861c3d80865a6e | Rename langlink to blango_lang, so it's passed to the template when calling locals(). | fiam/blangoblog,fiam/blangoblog,fiam/blangoblog | blango/views.py | blango/views.py | from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.db import connection
try:
from django import newforms as forms
except ImportError:
from django import forms
from datetime import date
from settings import LANGUAGE_CODE
from blango.models import *
def iso639_1(val):
return val.split('-')[0]
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('author', 'author_uri', 'author_email', 'body')
def save(self, entry):
self.instance.entry = entry
super(CommentForm, self).save()
def dates_for_language(language):
cursor = connection.cursor()
cursor.execute('''SELECT DISTINCT YEAR(published),MONTH(published) FROM
blango_entry WHERE language_id = %d ORDER BY
YEAR(published) DESC, MONTH(published) DESC
''' % language.id)
return [date(row[0], row[1], 1) for row in cursor.fetchall()]
def list_view(request, lang, tag_slug, year, month, page):
entries = Entry.objects.all()
if tag_slug:
tag = get_object_or_404(Tag, slug=tag_slug)
entries = entries.filter(tags=tag)
if year and month:
entries = entries.filter(published__year=year, published__month=month)
if lang:
blango_lang = lang + '/'
else:
lang = iso639_1(request.LANGUAGE_CODE)
blango_lang = ''
language = get_object_or_404(Language, iso639_1=lang)
entries = entries.filter(language=language)
dates = dates_for_language(language)
tags = Tag.for_language(language)
languages = Language.objects.all()
return render_to_response('blango/list.html', locals(),
context_instance=RequestContext(request))
def entry_view(request, entry_slug):
entry = get_object_or_404(Entry, slug=entry_slug)
dates = dates_for_language(entry.language)
tags = Tag.for_language(entry.language)
comment_form = CommentForm()
if request.method == 'POST':
try:
comment_form = CommentForm(request.POST)
comment_form.save(entry)
return HttpResponseRedirect(entry.get_absolute_url())
except ValueError:
pass
return render_to_response('blango/entry.html', locals(),
context_instance=RequestContext(request))
| from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.db import connection
try:
from django import newforms as forms
except ImportError:
from django import forms
from datetime import date
from settings import LANGUAGE_CODE
from blango.models import *
def iso639_1(val):
return val.split('-')[0]
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('author', 'author_uri', 'author_email', 'body')
def save(self, entry):
self.instance.entry = entry
super(CommentForm, self).save()
def dates_for_language(language):
cursor = connection.cursor()
cursor.execute('''SELECT DISTINCT YEAR(published),MONTH(published) FROM
blango_entry WHERE language_id = %d ORDER BY
YEAR(published) DESC, MONTH(published) DESC
''' % language.id)
return [date(row[0], row[1], 1) for row in cursor.fetchall()]
def list_view(request, lang, tag_slug, year, month, page):
entries = Entry.objects.all()
if tag_slug:
tag = get_object_or_404(Tag, slug=tag_slug)
entries = entries.filter(tags=tag)
if year and month:
entries = entries.filter(published__year=year, published__month=month)
if lang:
langlink = lang + '/'
else:
lang = iso639_1(request.LANGUAGE_CODE)
langlink = ''
language = get_object_or_404(Language, iso639_1=lang)
entries = entries.filter(language=language)
dates = dates_for_language(language)
tags = Tag.for_language(language)
languages = Language.objects.all()
return render_to_response('blango/list.html', locals(),
context_instance=RequestContext(request))
def entry_view(request, entry_slug):
entry = get_object_or_404(Entry, slug=entry_slug)
dates = dates_for_language(entry.language)
tags = Tag.for_language(entry.language)
comment_form = CommentForm()
if request.method == 'POST':
try:
comment_form = CommentForm(request.POST)
comment_form.save(entry)
return HttpResponseRedirect(entry.get_absolute_url())
except ValueError:
pass
return render_to_response('blango/entry.html', locals(),
context_instance=RequestContext(request))
| bsd-3-clause | Python |
47843e9effeb78b5facf259f732865970e886113 | Rename form sets | mbertheau/jquery.django-formset-example,mbertheau/jquery.django-formset-example | blocks/views.py | blocks/views.py | from .forms import BlockForm
from django.core.urlresolvers import reverse
from django.forms.models import inlineformset_factory
from django.views.generic import (
ListView,
CreateView,
UpdateView,
)
from nested_formset import nestedformset_factory
from blocks import models
class BlockView(object):
model = models.Block
# don't conflict with django's block template context variable
context_object_name = "Block"
form_class = BlockForm
def get_success_url(self):
return reverse('blocks-list')
class ListBlocksView(ListView):
model = models.Block
class CreateBlockView(BlockView, CreateView):
pass
NestedBlockFormSet = nestedformset_factory(
models.Block,
models.Building,
nested_formset=inlineformset_factory(
models.Building,
models.Tenant,
fields='__all__'
)
)
BlockFormSet = inlineformset_factory(models.Block, models.Building, fields='__all__')
class EditBuildingsView(BlockView, UpdateView):
template_name = 'blocks/building_form.html'
form_class = NestedBlockFormSet
class EditBuildingsDynamicView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic.html'
form_class = BlockFormSet
class EditBuildingsDynamicTabsView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic_tabs.html'
form_class = BlockFormSet
class EditBuildingsDynamicTabsNestedView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic_tabs_nested.html'
form_class = NestedBlockFormSet
| from .forms import BlockForm
from django.core.urlresolvers import reverse
from django.forms.models import inlineformset_factory
from django.views.generic import (
ListView,
CreateView,
UpdateView,
)
from nested_formset import nestedformset_factory
from blocks import models
class BlockView(object):
model = models.Block
# don't conflict with django's block template context variable
context_object_name = "Block"
form_class = BlockForm
def get_success_url(self):
return reverse('blocks-list')
class ListBlocksView(ListView):
model = models.Block
class CreateBlockView(BlockView, CreateView):
pass
NestedBlockForm = nestedformset_factory(
models.Block,
models.Building,
nested_formset=inlineformset_factory(
models.Building,
models.Tenant,
fields='__all__'
)
)
BlockForm = inlineformset_factory(models.Block, models.Building, fields='__all__')
class EditBuildingsView(BlockView, UpdateView):
template_name = 'blocks/building_form.html'
form_class = NestedBlockForm
class EditBuildingsDynamicView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic.html'
form_class = BlockForm
class EditBuildingsDynamicTabsView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic_tabs.html'
form_class = BlockForm
class EditBuildingsDynamicTabsNestedView(BlockView, UpdateView):
template_name = 'blocks/building_form_dynamic_tabs_nested.html'
form_class = NestedBlockForm
| mit | Python |
d9d754270972cc6d66d32b68240934a988fc9d54 | Use a temporal media directory when running the selenium tests | rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud | src/catalogue/tests/selenium.py | src/catalogue/tests/selenium.py | # -*- coding: utf-8 -*-
import time
from shutil import rmtree
from tempfile import mkdtemp
from django.conf import settings
import catalogue.utils
from commons.test import WirecloudSeleniumTestCase
from commons.wgt import WgtDeployer
__test__ = False
class CatalogueSeleniumTests(WirecloudSeleniumTestCase):
__test__ = True
def setUp(self):
self.old_CATALOGUE_MEDIA_ROOT = settings.CATALOGUE_MEDIA_ROOT
settings.CATALOGUE_MEDIA_ROOT = mkdtemp()
self.old_deployer = catalogue.utils.wgt_deployer
catalogue.utils.wgt_deployer = WgtDeployer(settings.CATALOGUE_MEDIA_ROOT)
super(CatalogueSeleniumTests, self).setUp()
def tearDown(self):
rmtree(settings.CATALOGUE_MEDIA_ROOT, ignore_errors=True)
settings.CATALOGUE_MEDIA_ROOT = self.old_CATALOGUE_MEDIA_ROOT
catalogue.utils.wgt_deployer = self.old_deployer
super(CatalogueSeleniumTests, self).tearDown()
def test_add_gadget_to_catalog_wgt(self):
driver = self.driver
self.login()
self.change_main_view("marketplace")
time.sleep(3)
self.add_wgt_gadget_to_catalogue('Morfeo_Calendar_Viewer.wgt', 'Calendar Viewer')
self.add_wgt_gadget_to_catalogue('Morfeo_Cliente_Correo.wgt', 'Cliente Correo')
self.add_wgt_gadget_to_catalogue('Morfeo_FeedList.wgt', 'FeedList')
self.add_wgt_gadget_to_catalogue('Morfeo_FeedReader.wgt', 'FeedReader')
driver.get(self.get_live_server_url() + "admin/")
driver.find_element_by_link_text("Catalogue resources").click()
driver.find_element_by_link_text("Calendar Viewer").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("Cliente Correo").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("FeedList").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("FeedReader").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("Log out").click()
| # -*- coding: utf-8 -*-
import time
from commons.test import WirecloudSeleniumTestCase
__test__ = False
class CatalogueSeleniumTests(WirecloudSeleniumTestCase):
__test__ = True
def test_add_gadget_to_catalog_wgt(self):
driver = self.driver
self.login()
self.change_main_view("marketplace")
time.sleep(3)
self.add_wgt_gadget_to_catalogue('Morfeo_Calendar_Viewer.wgt', 'Calendar Viewer')
self.add_wgt_gadget_to_catalogue('Morfeo_Cliente_Correo.wgt', 'Cliente Correo')
self.add_wgt_gadget_to_catalogue('Morfeo_FeedList.wgt', 'FeedList')
self.add_wgt_gadget_to_catalogue('Morfeo_FeedReader.wgt', 'FeedReader')
driver.get(self.get_live_server_url() + "admin/")
driver.find_element_by_link_text("Catalogue resources").click()
driver.find_element_by_link_text("Calendar Viewer").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("Cliente Correo").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("FeedList").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("FeedReader").click()
driver.find_element_by_link_text("Delete").click()
driver.find_element_by_xpath("//input[@value=\"Yes, I'm sure\"]").click()
driver.find_element_by_link_text("Log out").click()
| agpl-3.0 | Python |
2ef4362be90e2314b69a2ff17ccb5d25ef8905fd | Set default version for cloud databases. | rackerlabs/rackspace-sdk-plugin,briancurtin/rackspace-sdk-plugin | rackspace/database/database_service.py | rackspace/database/database_service.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
if not version:
version = "v1"
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack import service_filter
class DatabaseService(service_filter.ServiceFilter):
"""The database service."""
valid_versions = [service_filter.ValidVersion('v1', path='v1.0')]
def __init__(self, version=None):
"""Create a database service."""
super(DatabaseService, self).__init__(service_type="rax:database",
service_name="cloudDatabases",
version=version)
| apache-2.0 | Python |
2639f65a01952d9c819276f2d2222707db53c13a | Add daughter and son | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/radar/models/family_histories.py | radar/radar/models/family_histories.py | from collections import OrderedDict
from sqlalchemy import Column, Integer, ForeignKey, Boolean, String, Index
from sqlalchemy.dialects import postgresql
from sqlalchemy import orm
from radar.database import db
from radar.models.common import MetaModelMixin, uuid_pk_column, patient_id_column, patient_relationship
from radar.models.logs import log_changes
RELATIONSHIPS = OrderedDict([
(1, 'Mother'),
(2, 'Father'),
(3, 'Sister'),
(4, 'Brother'),
(5, 'Grandmother - Maternal'),
(6, 'Grandmother - Paternal'),
(15, 'Grandfather - Maternal'),
(16, 'Grandfather - Paternal'),
(7, 'Aunt - Maternal'),
(8, 'Aunt - Paternal'),
(9, 'Uncle - Maternal'),
(10, 'Uncle - Paternal'),
(11, 'Cousin - Maternal'),
(12, 'Cousin - Paternal'),
(13, 'Half Sister'),
(14, 'Half Brother'),
(17, 'Daughter'),
(18, 'Son'),
])
@log_changes
class FamilyHistory(db.Model, MetaModelMixin):
__tablename__ = 'family_histories'
id = uuid_pk_column()
patient_id = patient_id_column()
patient = patient_relationship('family_histories')
group_id = Column(Integer, ForeignKey('groups.id'), nullable=False)
group = orm.relationship('Group')
parental_consanguinity = Column(Boolean)
family_history = Column(Boolean)
other_family_history = Column(String)
Index('family_histories_patient_idx', FamilyHistory.patient_id)
Index('family_histories_group_idx', FamilyHistory.group_id)
Index(
'family_histories_patient_group_idx',
FamilyHistory.patient_id,
FamilyHistory.group_id,
unique=True
)
@log_changes
class FamilyHistoryRelative(db.Model):
__tablename__ = 'family_history_relatives'
id = Column(Integer, primary_key=True)
family_history_id = Column(postgresql.UUID, ForeignKey('family_histories.id'), nullable=False)
family_history = orm.relationship('FamilyHistory', backref=orm.backref('relatives', cascade='all, delete-orphan', passive_deletes=True))
relationship = Column(Integer, nullable=False)
patient_id = Column(Integer, ForeignKey('patients.id', onupdate='CASCADE', ondelete='SET NULL'))
patient = orm.relationship('Patient')
Index('family_history_relatives_family_history_id_idx', FamilyHistoryRelative.family_history_id)
| from collections import OrderedDict
from sqlalchemy import Column, Integer, ForeignKey, Boolean, String, Index
from sqlalchemy.dialects import postgresql
from sqlalchemy import orm
from radar.database import db
from radar.models.common import MetaModelMixin, uuid_pk_column, patient_id_column, patient_relationship
from radar.models.logs import log_changes
RELATIONSHIPS = OrderedDict([
(1, 'Mother'),
(2, 'Father'),
(3, 'Sister'),
(4, 'Brother'),
(5, 'Grandmother - Maternal'),
(6, 'Grandmother - Paternal'),
(15, 'Grandfather - Maternal'),
(16, 'Grandfather - Paternal'),
(7, 'Aunt - Maternal'),
(8, 'Aunt - Paternal'),
(9, 'Uncle - Maternal'),
(10, 'Uncle - Paternal'),
(11, 'Cousin - Maternal'),
(12, 'Cousin - Paternal'),
(13, 'Half Sister'),
(14, 'Half Brother'),
])
@log_changes
class FamilyHistory(db.Model, MetaModelMixin):
__tablename__ = 'family_histories'
id = uuid_pk_column()
patient_id = patient_id_column()
patient = patient_relationship('family_histories')
group_id = Column(Integer, ForeignKey('groups.id'), nullable=False)
group = orm.relationship('Group')
parental_consanguinity = Column(Boolean)
family_history = Column(Boolean)
other_family_history = Column(String)
Index('family_histories_patient_idx', FamilyHistory.patient_id)
Index('family_histories_group_idx', FamilyHistory.group_id)
Index(
'family_histories_patient_group_idx',
FamilyHistory.patient_id,
FamilyHistory.group_id,
unique=True
)
@log_changes
class FamilyHistoryRelative(db.Model):
__tablename__ = 'family_history_relatives'
id = Column(Integer, primary_key=True)
family_history_id = Column(postgresql.UUID, ForeignKey('family_histories.id'), nullable=False)
family_history = orm.relationship('FamilyHistory', backref=orm.backref('relatives', cascade='all, delete-orphan', passive_deletes=True))
relationship = Column(Integer, nullable=False)
patient_id = Column(Integer, ForeignKey('patients.id', onupdate='CASCADE', ondelete='SET NULL'))
patient = orm.relationship('Patient')
Index('family_history_relatives_family_history_id_idx', FamilyHistoryRelative.family_history_id)
| agpl-3.0 | Python |
a307bddf490b6ad16739593d4ca51693b9a340fe | Fix custom template tag to work with django 1.8 | 18F/regulations-site,jeremiak/regulations-site,18F/regulations-site,18F/regulations-site,eregs/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,jeremiak/regulations-site,jeremiak/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,jeremiak/regulations-site | regulations/templatetags/in_context.py | regulations/templatetags/in_context.py | from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
new_context = context.new(new_context)
return self.nodelist.render(new_context)
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
| from django import template
register = template.Library()
class InContextNode(template.Node):
def __init__(self, nodelist, subcontext_names):
self.nodelist = nodelist
self.subcontext_names = subcontext_names
def render(self, context):
new_context = {}
for field in self.subcontext_names:
value = context.get(field, {})
if isinstance(value, dict):
new_context.update(context.get(field, {}))
else:
new_context[field] = value
return self.nodelist.render(template.Context(new_context))
@register.tag('begincontext')
def in_context(parser, token):
"""
Replaces the context (inside of this block) for easy (and safe) inclusion
of sub-content.
For example, if the context is {'name': 'Kitty', 'sub': {'size': 5}}
1: {{ name }} {{ size }}
{% begincontext sub %}
2: {{ name }} {{ size }}
{% endcontext %}
3: {{ name }} {{ size }}
Will print
1: Kitty
2: 5
3: Kitty
Arguments which are not dictionaries will 'cascade' into the inner
context.
"""
nodelist = parser.parse(('endcontext',))
parser.delete_first_token()
return InContextNode(nodelist, token.split_contents()[1:])
| cc0-1.0 | Python |
0ea4b24660ea77affe4264df3a09d93ad7f51420 | Set no_auth to get-bundle and signup config endpoint | SUNET/eduid-webapp,SUNET/eduid-webapp,SUNET/eduid-webapp | src/eduid_webapp/jsconfig/app.py | src/eduid_webapp/jsconfig/app.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 NORDUnet A/S
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of the NORDUnet nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from eduid_common.api.app import eduid_init_app_no_db
from eduid_common.authn.utils import no_authn_views
def jsconfig_init_app(name: str, config: dict):
"""
Create an instance of an eduid jsconfig data app.
First, it will load the configuration from jsconfig.settings.common
then any settings given in the `config` param.
Then, the app instance will be updated with common stuff by `eduid_init_app`,
all needed blueprints will be registered with it.
"""
app = eduid_init_app_no_db(name, config)
app.config.update(config)
from eduid_webapp.jsconfig.views import jsconfig_views
app.register_blueprint(jsconfig_views)
# Register view path that should not be authorized
no_auth_paths = [
'/get-bundle',
'/signup/config'
]
app = no_authn_views(app, no_auth_paths)
app.logger.info('Init {} app...'.format(name))
return app
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 NORDUnet A/S
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of the NORDUnet nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import absolute_import
from eduid_common.api.app import eduid_init_app_no_db
def jsconfig_init_app(name, config):
"""
Create an instance of an eduid jsconfig data app.
First, it will load the configuration from jsconfig.settings.common
then any settings given in the `config` param.
Then, the app instance will be updated with common stuff by `eduid_init_app`,
all needed blueprints will be registered with it.
:param name: The name of the instance, it will affect the configuration loaded.
:type name: str
:param config: any additional configuration settings. Specially useful
in test cases
:type config: dict
:return: the flask app
:rtype: flask.Flask
"""
app = eduid_init_app_no_db(name, config)
app.config.update(config)
from eduid_webapp.jsconfig.views import jsconfig_views
app.register_blueprint(jsconfig_views)
app.logger.info('Init {} app...'.format(name))
return app
| bsd-3-clause | Python |
803ee1755dca1d78d15c69e0f8254408b87bbf9b | Update to remove empty query code | candidate-selection-tutorial-sigir2017/candidate-selection-tutorial,candidate-selection-tutorial-sigir2017/candidate-selection-tutorial,candidate-selection-tutorial-sigir2017/candidate-selection-tutorial | assignments/assignment1/exercise/src/frontend/app.py | assignments/assignment1/exercise/src/frontend/app.py | import web
import pysolr
import json
from nltk.tokenize import word_tokenize
urls = (
'/', 'SimpleIndexSearchPage',
'/searchSimpleIndex', 'SearchSimpleIndex',
)
CATEGORY = {'b': 'Business', 'e': 'Entertainment', 't': 'Science and Technology', 'm': 'Health'}
render = web.template.render('templates/', base='layout')
SOLR_SIMPLEINDEX = pysolr.Solr('http://localhost:8983/solr/simpleindex')
def get_web_input(web_input):
draw = web_input['draw']
query = web_input['search[value]']
offset = web_input['start']
count = web_input['length']
return draw, query, offset, count
def search(query, offset, count, draw, solr_endpoint):
"""
This function is responsible for hitting the solr endpoint
and returning the results back.
"""
results = solr_endpoint.search(q=query, **{
'start': int(offset),
'rows': int(count)
})
print("Saw {0} result(s) for query {1}.".format(len(results), query))
formatted_hits = []
for hit in results.docs:
formatted_hits.append(
[hit['_news_title'], hit['_news_publisher'], CATEGORY[hit['_news_category'][0]], hit['_news_url']])
response = {'draw': draw,
'recordsFiltered': results.hits,
'data': formatted_hits}
web.header('Content-Type', 'application/json')
return json.dumps(response)
class SimpleIndexSearchPage:
def GET(self):
return render.simpleIndexSearchPage()
class SearchSimpleIndex:
def GET(self):
draw, query, offset, count = get_web_input(web_input=web.input())
# TODO: Write code for handling the empty query (no keywords)
# TODO: Write code for tokenizing the search query and creating must clauses for each token
return None
if __name__ == "__main__":
app = web.application(urls, globals())
app.run()
| import web
import pysolr
import json
from nltk.tokenize import word_tokenize
urls = (
'/', 'SimpleIndexSearchPage',
'/searchSimpleIndex', 'SearchSimpleIndex',
)
CATEGORY = {'b': 'Business', 'e': 'Entertainment', 't': 'Science and Technology', 'm': 'Health'}
render = web.template.render('templates/', base='layout')
SOLR_SIMPLEINDEX = pysolr.Solr('http://localhost:8983/solr/simpleindex')
def get_web_input(web_input):
draw = web_input['draw']
query = web_input['search[value]']
if len(query) == 0:
query = '*:*'
offset = web_input['start']
count = web_input['length']
return draw, query, offset, count
def search(query, offset, count, draw, solr_endpoint):
"""
This function is responsible for hitting the solr endpoint
and returning the results back.
"""
results = solr_endpoint.search(q=query, **{
'start': int(offset),
'rows': int(count)
})
print("Saw {0} result(s) for query {1}.".format(len(results), query))
formatted_hits = []
for hit in results.docs:
formatted_hits.append(
[hit['_news_title'], hit['_news_publisher'], CATEGORY[hit['_news_category'][0]], hit['_news_url']])
response = {'draw': draw,
'recordsFiltered': results.hits,
'data': formatted_hits}
web.header('Content-Type', 'application/json')
return json.dumps(response)
class SimpleIndexSearchPage:
def GET(self):
return render.simpleIndexSearchPage()
class SearchSimpleIndex:
def GET(self):
draw, query, offset, count = get_web_input(web_input=web.input())
# TODO: Write code for handling the empty query (no keywords)
# TODO: Write code for tokenizing the search query and creating must clauses for each token
return None
if __name__ == "__main__":
app = web.application(urls, globals())
app.run() | apache-2.0 | Python |
6e6c7697c078fc0b2c121d191a54942256415015 | Bump to 0.0.13-dev | ocefpaf/pyaxiom,axiom-data-science/pyaxiom,axiom-data-science/pyaxiom,ocefpaf/pyaxiom | pyaxiom/__init__.py | pyaxiom/__init__.py | __version__ = "0.0.13-dev"
# Package level logger
import logging
try:
# Python >= 2.7
from logging import NullHandler
except ImportError:
# Python < 2.7
class NullHandler(logging.Handler):
def emit(self, record):
pass
logger = logging.getLogger("pyaxiom")
logger.addHandler(logging.NullHandler())
| __version__ = "0.0.12"
# Package level logger
import logging
try:
# Python >= 2.7
from logging import NullHandler
except ImportError:
# Python < 2.7
class NullHandler(logging.Handler):
def emit(self, record):
pass
logger = logging.getLogger("pyaxiom")
logger.addHandler(logging.NullHandler())
| mit | Python |
80a58961d0f77b4603cbc606fe96fdea603a7c19 | Add django Admin Models | UmSenhorQualquer/pyforms | pyforms/__init__.py | pyforms/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from pyforms.utils.settings_manager import conf
conf += 'pyforms.settings'
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = '3.0.0'
__maintainer__ = ["Ricardo Ribeiro", "Carlos Mão de Ferro"]
__email__ = ["ricardojvr@gmail.com", "cajomferro@gmail.com"]
__status__ = "Production"
if conf.PYFORMS_MODE in ['GUI', 'GUI-OPENCSP']:
from pyforms.gui import controls
from pyforms.gui.basewidget import BaseWidget
from pyforms.gui.appmanager import start_app
elif conf.PYFORMS_MODE in ['TERMINAL']:
from pyforms.terminal import controls
from pyforms.terminal.basewidget import BaseWidget
from pyforms.terminal.appmanager import start_app
elif conf.PYFORMS_MODE in ['WEB']:
from pyforms_web.web import Controls
from pyforms_web.web.BaseWidget import BaseWidget
from pyforms_web.web.appmanager import start_app
from pyforms_web.web.django_pyforms.model_admin import ModelAdmin
from pyforms_web.web.django_pyforms.model_admin import ViewFormAdmin
from pyforms_web.web.django_pyforms.model_admin import EditFormAdmin
| #!/usr/bin/python
# -*- coding: utf-8 -*-
from pyforms.utils.settings_manager import conf
conf += 'pyforms.settings'
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = '3.0.0'
__maintainer__ = ["Ricardo Ribeiro", "Carlos Mão de Ferro"]
__email__ = ["ricardojvr@gmail.com", "cajomferro@gmail.com"]
__status__ = "Production"
if conf.PYFORMS_MODE in ['GUI', 'GUI-OPENCSP']:
from pyforms.gui import controls
from pyforms.gui.basewidget import BaseWidget
from pyforms.gui.appmanager import start_app
elif conf.PYFORMS_MODE in ['TERMINAL']:
from pyforms.terminal import controls
from pyforms.terminal.basewidget import BaseWidget
from pyforms.terminal.appmanager import start_app
elif conf.PYFORMS_MODE in ['WEB']:
from pyforms_web.web import Controls
from pyforms_web.web.BaseWidget import BaseWidget
from pyforms_web.web.appmanager import start_app
| mit | Python |
10f0b888762415a548f7b579aa766991061947ec | BUMP 0.10.2 support subclasses of dict in c encoder | develf/mongo-python-driver,jameslittle/mongo-python-driver,aherlihy/mongo-python-driver,mongodb/mongo-python-driver,marcosleonefilho/hoop-pymongo,WingGao/mongo-python-driver,inspectlabs/mongo-python-driver,rychipman/mongo-python-driver,ShaneHarvey/mongo-python-driver,jbenet/mongo-python-driver,ShaneHarvey/mongo-python-driver,jbenet/mongo-python-driver,llvtt/mongo-python-driver,felixonmars/mongo-python-driver,jameslittle/mongo-python-driver,develf/mongo-python-driver,mongodb/mongo-python-driver,mongodb/mongo-python-driver,ameily/mongo-python-driver,marcosleonefilho/hoop-pymongo,ramnes/mongo-python-driver,gormanb/mongo-python-driver,aherlihy/mongo-python-driver,ultrabug/mongo-python-driver,ramnes/mongo-python-driver,pigate/mongo-python-driver,WingGao/mongo-python-driver,bq-xiao/mongo-python-driver,macdiesel/mongo-python-driver,llvtt/mongo-python-driver,felixonmars/mongo-python-driver,mher/pymongo,bq-xiao/mongo-python-driver,inspectlabs/mongo-python-driver,gormanb/mongo-python-driver,mher/pymongo,reedobrien/mongo-python-driver,marcosleonefilho/hoop-pymongo,ramnes/mongo-python-driver,brianwrf/mongo-python-driver,rychipman/mongo-python-driver,ameily/mongo-python-driver,ShaneHarvey/mongo-python-driver,brianwrf/mongo-python-driver,aherlihy/mongo-python-driver,jbenet/mongo-python-driver,pigate/mongo-python-driver,reedobrien/mongo-python-driver,macdiesel/mongo-python-driver,ultrabug/mongo-python-driver | pymongo/__init__.py | pymongo/__init__.py | # Copyright 2009 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Mongo driver for Python."""
import types
from pymongo.connection import Connection as PyMongo_Connection
from pymongo.son import SON
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
OFF = 0
"""Turn off database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version = "0.10.2"
"""Current version of PyMongo."""
Connection = PyMongo_Connection
"""Alias for pymongo.connection.Connection."""
def _index_list(key_or_list, direction):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
return key_or_list
def _index_document(index_list):
"""Helper to generate an index specifying document.
Takes a list of (key, direction) pairs.
"""
if not isinstance(index_list, types.ListType):
raise TypeError("if no direction is specified, key_or_list must be an"
"instance of list")
if not len(index_list):
raise ValueError("key_or_list must not be the empty list")
index = SON()
for (key, value) in index_list:
if not isinstance(key, types.StringTypes):
raise TypeError("first item in each key pair must be a string")
if not isinstance(value, types.IntType):
raise TypeError("second item in each key pair must be ASCENDING or"
"DESCENDING")
index[key] = value
return index
| # Copyright 2009 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Mongo driver for Python."""
import types
from pymongo.connection import Connection as PyMongo_Connection
from pymongo.son import SON
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
OFF = 0
"""Turn off database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version = "0.10.1"
"""Current version of PyMongo."""
Connection = PyMongo_Connection
"""Alias for pymongo.connection.Connection."""
def _index_list(key_or_list, direction):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
return key_or_list
def _index_document(index_list):
"""Helper to generate an index specifying document.
Takes a list of (key, direction) pairs.
"""
if not isinstance(index_list, types.ListType):
raise TypeError("if no direction is specified, key_or_list must be an"
"instance of list")
if not len(index_list):
raise ValueError("key_or_list must not be the empty list")
index = SON()
for (key, value) in index_list:
if not isinstance(key, types.StringTypes):
raise TypeError("first item in each key pair must be a string")
if not isinstance(value, types.IntType):
raise TypeError("second item in each key pair must be ASCENDING or"
"DESCENDING")
index[key] = value
return index
| apache-2.0 | Python |
d107ad5c8a25cda1887609bdeb8e0e6da083253a | BUMP 0.9.7 --no_ext option, version number and a bug fix | rychipman/mongo-python-driver,mongodb/mongo-python-driver,bq-xiao/mongo-python-driver,inspectlabs/mongo-python-driver,ramnes/mongo-python-driver,mher/pymongo,ameily/mongo-python-driver,llvtt/mongo-python-driver,rychipman/mongo-python-driver,WingGao/mongo-python-driver,brianwrf/mongo-python-driver,develf/mongo-python-driver,macdiesel/mongo-python-driver,develf/mongo-python-driver,jbenet/mongo-python-driver,ShaneHarvey/mongo-python-driver,jbenet/mongo-python-driver,pigate/mongo-python-driver,mongodb/mongo-python-driver,jameslittle/mongo-python-driver,gormanb/mongo-python-driver,aherlihy/mongo-python-driver,pigate/mongo-python-driver,macdiesel/mongo-python-driver,ShaneHarvey/mongo-python-driver,marcosleonefilho/hoop-pymongo,marcosleonefilho/hoop-pymongo,mongodb/mongo-python-driver,jameslittle/mongo-python-driver,felixonmars/mongo-python-driver,ultrabug/mongo-python-driver,reedobrien/mongo-python-driver,gormanb/mongo-python-driver,reedobrien/mongo-python-driver,ameily/mongo-python-driver,jbenet/mongo-python-driver,ShaneHarvey/mongo-python-driver,WingGao/mongo-python-driver,inspectlabs/mongo-python-driver,mher/pymongo,bq-xiao/mongo-python-driver,llvtt/mongo-python-driver,ultrabug/mongo-python-driver,aherlihy/mongo-python-driver,felixonmars/mongo-python-driver,ramnes/mongo-python-driver,ramnes/mongo-python-driver,marcosleonefilho/hoop-pymongo,brianwrf/mongo-python-driver,aherlihy/mongo-python-driver | pymongo/__init__.py | pymongo/__init__.py | # Copyright 2009 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Mongo driver for Python."""
import types
from pymongo.son import SON
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
OFF = 0
"""Turn off database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version = "0.9.7"
"""Current version of PyMongo."""
def _index_list(key_or_list, direction):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
return key_or_list
def _index_document(index_list):
"""Helper to generate an index specifying document.
Takes a list of (key, direction) pairs.
"""
if not isinstance(index_list, types.ListType):
raise TypeError("if no direction is specified, key_or_list must be an"
"instance of list")
if not len(index_list):
raise ValueError("key_or_list must not be the empty list")
index = SON()
for (key, value) in index_list:
if not isinstance(key, types.StringTypes):
raise TypeError("first item in each key pair must be a string")
if not isinstance(value, types.IntType):
raise TypeError("second item in each key pair must be ASCENDING or"
"DESCENDING")
index[key] = value
return index
| # Copyright 2009 10gen, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Mongo driver for Python."""
import types
from pymongo.son import SON
ASCENDING = 1
"""Ascending sort order."""
DESCENDING = -1
"""Descending sort order."""
OFF = 0
"""Turn off database profiling."""
SLOW_ONLY = 1
"""Only profile slow operations."""
ALL = 2
"""Profile all operations."""
version = "0.9.6"
"""Current version of PyMongo."""
def _index_list(key_or_list, direction):
"""Helper to generate a list of (key, direction) pairs.
Takes such a list, or a single key and direction.
"""
if direction is not None:
return [(key_or_list, direction)]
else:
return key_or_list
def _index_document(index_list):
"""Helper to generate an index specifying document.
Takes a list of (key, direction) pairs.
"""
if not isinstance(index_list, types.ListType):
raise TypeError("if no direction is specified, key_or_list must be an"
"instance of list")
if not len(index_list):
raise ValueError("key_or_list must not be the empty list")
index = SON()
for (key, value) in index_list:
if not isinstance(key, types.StringTypes):
raise TypeError("first item in each key pair must be a string")
if not isinstance(value, types.IntType):
raise TypeError("second item in each key pair must be ASCENDING or"
"DESCENDING")
index[key] = value
return index
| apache-2.0 | Python |
92e3e3d5e0a31bf5e7d210b98f1328105e4afa68 | Add a short explanation to clean-bot | HiccupinGminor/tidbits,HiccupinGminor/tidbits | python/clean-bot.py | python/clean-bot.py | #!/usr/bin/python
import math
# A naive solution to HackerRank's botclean challenge: https://www.hackerrank.com/challenges/botclean
def findAllDirt(row_position, col_position, board):
dirt_locations = {}
i = 0
for row in board:
j = 0
for col in row:
if col == "d":
#Calculate the dirt's "score"
score = abs(row_position - i) + abs(col_position - j)
dirt_locations[i,j] = score
j += 1
i += 1
return dirt_locations
def findNearestDirt(posr, posc, board):
return min(findAllDirt(posr, posc, board))
def nextMove(posr, posc, board):
nearest_dirt = findNearestDirt(posr, posc, board)
dirt_row = nearest_dirt[0]
dirt_col = nearest_dirt[1]
#If on top of the dirt
if (dirt_row - posr == 0) and (dirt_col - posc == 0):
print "CLEAN"
elif dirt_row - posr != 0:
if(dirt_row - posr > 0):
print "DOWN"
else:
print "UP"
else:
if(dirt_col - posc > 0):
print "RIGHT"
else:
print "LEFT"
if __name__ == "__main__":
pos = [int(i) for i in raw_input().strip().split()]
board = [[j for j in raw_input().strip()] for i in range(5)]
nextMove(pos[0], pos[1], board)
| #!/usr/bin/python
import math
def findAllDirt(row_position, col_position, board):
dirt_locations = {}
i = 0
for row in board:
j = 0
for col in row:
if col == "d":
#Calculate the dirt's "score"
score = abs(row_position - i) + abs(col_position - j)
dirt_locations[i,j] = score
j += 1
i += 1
return dirt_locations
def findNearestDirt(posr, posc, board):
return min(findAllDirt(posr, posc, board))
def nextMove(posr, posc, board):
nearest_dirt = findNearestDirt(posr, posc, board)
dirt_row = nearest_dirt[0]
dirt_col = nearest_dirt[1]
#If on top of the dirt
if (dirt_row - posr == 0) and (dirt_col - posc == 0):
print "CLEAN"
elif dirt_row - posr != 0:
if(dirt_row - posr > 0):
print "DOWN"
else:
print "UP"
else:
if(dirt_col - posc > 0):
print "RIGHT"
else:
print "LEFT"
if __name__ == "__main__":
pos = [int(i) for i in raw_input().strip().split()]
board = [[j for j in raw_input().strip()] for i in range(5)]
nextMove(pos[0], pos[1], board) | mit | Python |
b69c7ea7ee82d1d7ddec7ddafdb8f4cf62c5d738 | Bump Version: 0.20.1 → 0.20.2 | akaszynski/vtkInterface | pyvista/_version.py | pyvista/_version.py | """ version info for pyvista """
# major, minor, patch
version_info = 0, 20, 2
# Nice string for the version
__version__ = '.'.join(map(str, version_info))
| """ version info for pyvista """
# major, minor, patch
version_info = 0, 20, 1
# Nice string for the version
__version__ = '.'.join(map(str, version_info))
| mit | Python |
f03fc1ab982dff47c101a64b6acc47b8400620e1 | Change how URIs are built with regard to port | gmr/queries,gmr/queries | queries/__init__.py | queries/__init__.py | """
Queries: PostgreSQL database access simplified
Queries is an opinionated wrapper for interfacing with PostgreSQL that offers
caching of connections and support for PyPy via psycopg2ct.
The core `queries.Queries` class will automatically register support for UUIDs,
Unicode and Unicode arrays.
"""
__version__ = '1.2.1'
version = __version__
import logging
import platform
# Import PyPy compatibility
PYPY = False
target = platform.python_implementation()
if target == 'PyPy':
from psycopg2ct import compat
compat.register()
PYPY = True
# Add a Null logging handler to prevent logging output when un-configured
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('queries').addHandler(NullHandler())
# Defaults
DEFAULT_URI = 'postgresql://localhost:5432'
# Mappings to queries classes and methods
from queries.results import Results
from queries.session import Session
try:
from queries.tornado_session import TornadoSession
except ImportError:
TornadoSession = None
def uri(host='localhost', port=5432, dbname='postgres', user='postgres',
password=None):
"""Return a PostgreSQL connection URI for the specified values.
:param str host: Host to connect to
:param int port: Port to connect on
:param str dbname: The database name
:param str user: User to connect as
:param str password: The password to use, None for no password
:return str: The PostgreSQL connection URI
"""
if port:
host = '%s:%s' % (host, port)
if password:
return 'postgresql://%s:%s@%s/%s' % (user, password, host, dbname)
return 'postgresql://%s@%s/%s' % (user, host, dbname)
# For ease of access to different cursor types
from psycopg2.extras import DictCursor
from psycopg2.extras import NamedTupleCursor
from psycopg2.extras import RealDictCursor
from psycopg2.extras import LoggingCursor
from psycopg2.extras import MinTimeLoggingCursor
# Expose exceptions so clients do not need to import psycopg2 too
from psycopg2 import DataError
from psycopg2 import DatabaseError
from psycopg2 import IntegrityError
from psycopg2 import InterfaceError
from psycopg2 import InternalError
from psycopg2 import NotSupportedError
from psycopg2 import OperationalError
from psycopg2 import ProgrammingError
from psycopg2.extensions import QueryCanceledError
from psycopg2.extensions import TransactionRollbackError
| """
Queries: PostgreSQL database access simplified
Queries is an opinionated wrapper for interfacing with PostgreSQL that offers
caching of connections and support for PyPy via psycopg2ct.
The core `queries.Queries` class will automatically register support for UUIDs,
Unicode and Unicode arrays.
"""
__version__ = '1.2.1'
version = __version__
import logging
import platform
# Import PyPy compatibility
PYPY = False
target = platform.python_implementation()
if target == 'PyPy':
from psycopg2ct import compat
compat.register()
PYPY = True
# Add a Null logging handler to prevent logging output when un-configured
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
"""Python 2.6 does not have a NullHandler"""
def emit(self, record):
"""Emit a record
:param record record: The record to emit
"""
pass
logging.getLogger('queries').addHandler(NullHandler())
# Defaults
DEFAULT_URI = 'postgresql://localhost:5432'
# Mappings to queries classes and methods
from queries.results import Results
from queries.session import Session
try:
from queries.tornado_session import TornadoSession
except ImportError:
TornadoSession = None
def uri(host='localhost', port=5432, dbname='postgres', user='postgres',
password=None):
"""Return a PostgreSQL connection URI for the specified values.
:param str host: Host to connect to
:param int port: Port to connect on
:param str dbname: The database name
:param str user: User to connect as
:param str password: The password to use, None for no password
:return str: The PostgreSQL connection URI
"""
if password:
return 'postgresql://%s:%s@%s:%i/%s' % (user, password, host, port, dbname)
return 'postgresql://%s@%s:%i/%s' % (user, host, port, dbname)
# For ease of access to different cursor types
from psycopg2.extras import DictCursor
from psycopg2.extras import NamedTupleCursor
from psycopg2.extras import RealDictCursor
from psycopg2.extras import LoggingCursor
from psycopg2.extras import MinTimeLoggingCursor
# Expose exceptions so clients do not need to import psycopg2 too
from psycopg2 import DataError
from psycopg2 import DatabaseError
from psycopg2 import IntegrityError
from psycopg2 import InterfaceError
from psycopg2 import InternalError
from psycopg2 import NotSupportedError
from psycopg2 import OperationalError
from psycopg2 import ProgrammingError
from psycopg2.extensions import QueryCanceledError
from psycopg2.extensions import TransactionRollbackError
| bsd-3-clause | Python |
7bb3ddd218478b77c4d02831e1a32511bdd414fc | add todo for troubling mark bug | RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software,RoboJackets/robocup-software | rj_gameplay/rj_gameplay/role/marker.py | rj_gameplay/rj_gameplay/role/marker.py | import stp
from rj_msgs.msg import RobotIntent
# TODO: mark skill has not been updated, how is it working here (see basicDefense)?
from rj_gameplay.skill import mark
class MarkerRole(stp.role.Role):
"""Role to produce marking behavior"""
def __init__(self, robot: stp.rc.Robot, target_robot: stp.rc.Robot) -> None:
super().__init__(robot)
self.mark_skill = None
self.target_robot = target_robot
def tick(
self, world_state: stp.rc.WorldState, target_robot: stp.rc.Robot
) -> RobotIntent:
if target_robot is not None:
self.target_robot = target_robot
if self.mark_skill is None or target_robot is not None:
self.mark_skill = mark.Mark(
robot=self.robot, target_robot=self.target_robot
)
intent = self.mark_skill.tick(world_state)
return intent
def is_done(self, world_state: stp.rc.WorldState) -> bool:
return self._state == "done"
| import stp
from rj_gameplay.skill import mark
from rj_msgs.msg import RobotIntent
class MarkerRole(stp.role.Role):
"""Role to produce marking behavior"""
def __init__(self, robot: stp.rc.Robot, target_robot: stp.rc.Robot) -> None:
super().__init__(robot)
self.mark_skill = None
self.target_robot = target_robot
def tick(
self, world_state: stp.rc.WorldState, target_robot: stp.rc.Robot
) -> RobotIntent:
if target_robot is not None:
self.target_robot = target_robot
if self.mark_skill is None or target_robot is not None:
self.mark_skill = mark.Mark(
robot=self.robot, target_robot=self.target_robot
)
intent = self.mark_skill.tick(world_state)
return intent
def is_done(self, world_state: stp.rc.WorldState) -> bool:
return self._state == "done"
| apache-2.0 | Python |
7117a0e177409a0a7ba0363770219c6f774eadbd | Address issue https://github.com/globaleaks/GlobaLeaks/issues/1059 | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks | backend/globaleaks/utils/structures.py | backend/globaleaks/utils/structures.py | # -*- coding: UTF-8
# structures
# **********
#
# This file contains the complex structures stored in Storm table
# in order to checks integrity between exclusive options, provide defaults,
# supports extensions (without changing DB format)
from globaleaks import LANGUAGES_SUPPORTED_CODES
from globaleaks.models import Model
from globaleaks.utils.utility import log
from globaleaks.settings import GLSetting
from globaleaks.rest.errors import InvalidInputFormat, SubmissionFailFields
from globaleaks.utils.utility import uuid4
# Localized strings utility management
class Rosetta:
"""
This Class can manage all the localized strings inside
one Storm object. AKA: manage three language on a single
stone. Hell fucking yeah, History!
"""
def __init__(self, attrs):
self._localized_strings = {}
self._localized_attrs = attrs
def acquire_storm_object(self, obj):
for attr in self._localized_attrs:
self._localized_strings[attr] = getattr(obj, attr)
def acquire_multilang_dict(self, obj):
for attr in self._localized_attrs:
self._localized_strings[attr] = obj[attr]
def singlelang_to_multilang_dict(self, obj, language):
ret = {}
for attr in self._localized_attrs:
ret[attr] = {}
ret[attr][language] = obj[attr]
return ret
def dump_localized_attr(self, attr, language):
default_language = GLSetting.memory_copy.default_language
if attr not in self._localized_strings:
return "!! Missing value for '%s'" % attr
translated_dict = self._localized_strings[attr]
if translated_dict.has_key(language):
return translated_dict[language]
elif translated_dict.has_key(default_language):
return translated_dict[default_language]
else:
return ""
def fill_localized_keys(dictionary, attrs, language):
mo = Rosetta(attrs)
multilang_dict = mo.singlelang_to_multilang_dict(dictionary, language)
for attr in attrs:
dictionary[attr] = multilang_dict[attr]
return dictionary
def get_localized_values(dictionary, obj, attrs, language):
mo = Rosetta(attrs)
if isinstance(obj, dict):
mo.acquire_multilang_dict(obj)
elif isinstance(obj, Model):
mo.acquire_storm_object(obj)
for attr in attrs:
dictionary[attr] = mo.dump_localized_attr(attr, language)
return dictionary
| # -*- coding: UTF-8
# structures
# **********
#
# This file contains the complex structures stored in Storm table
# in order to checks integrity between exclusive options, provide defaults,
# supports extensions (without changing DB format)
from globaleaks import LANGUAGES_SUPPORTED_CODES
from globaleaks.models import Model
from globaleaks.utils.utility import log
from globaleaks.settings import GLSetting
from globaleaks.rest.errors import InvalidInputFormat, SubmissionFailFields
from globaleaks.utils.utility import uuid4
# Localized strings utility management
class Rosetta:
"""
This Class can manage all the localized strings inside
one Storm object. AKA: manage three language on a single
stone. Hell fucking yeah, History!
"""
def __init__(self, attrs):
self._localized_strings = {}
self._localized_attrs = attrs
def acquire_storm_object(self, obj):
for attr in self._localized_attrs:
self._localized_strings[attr] = getattr(obj, attr)
def acquire_multilang_dict(self, obj):
for attr in self._localized_attrs:
self._localized_strings[attr] = obj[attr]
def singlelang_to_multilang_dict(self, obj, language):
ret = {}
for attr in self._localized_attrs:
ret[attr] = {}
ret[attr][language] = obj[attr]
return ret
def dump_localized_attr(self, attr, language):
default_language = GLSetting.memory_copy.default_language
if attr not in self._localized_strings:
return "!! Missing value for '%s'" % attr
translated_dict = self._localized_strings[attr]
if translated_dict.has_key(language):
return translated_dict[language]
elif translated_dict.has_key(default_language):
return "*_Translate in '%s' [%s]" % (language, translated_dict[default_language])
else:
return "# Missing translation for '%s' in '%s'" % \
(attr, language)
def fill_localized_keys(dictionary, attrs, language):
mo = Rosetta(attrs)
multilang_dict = mo.singlelang_to_multilang_dict(dictionary, language)
for attr in attrs:
dictionary[attr] = multilang_dict[attr]
return dictionary
def get_localized_values(dictionary, obj, attrs, language):
mo = Rosetta(attrs)
if isinstance(obj, dict):
mo.acquire_multilang_dict(obj)
elif isinstance(obj, Model):
mo.acquire_storm_object(obj)
for attr in attrs:
dictionary[attr] = mo.dump_localized_attr(attr, language)
return dictionary
| agpl-3.0 | Python |
c3addf83867efcf2941d6c13aa042099cadbedaa | test email is sent | ava-project/ava-website,ava-project/ava-website,ava-project/ava-website | website/user/tests/registration.py | website/user/tests/registration.py | from django.core import mail
from django.test import TestCase, Client
from django.contrib.auth.models import User
from ..forms import RegisterForm
class RegisterTest(TestCase):
def setUp(self):
User.objects.create_user('username', 'email@email.fr', 'password')
self.client = Client()
def test_correct_registration(self):
form_data = {
'username': 'correct',
'email': 'correct@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertTrue(form.is_valid())
response = self.client.post('/user/register', form_data)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
def test_empty_username(self):
form_data = {
'username': '',
'email': 'empty_username@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_empty_email(self):
form_data = {
'username': 'empty_email',
'email': '',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_empty_password(self):
# empty username
form_data = {
'username': 'empty_password',
'email': 'empty_password@email.fr',
'password': '',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_duplicate_email(self):
form_data = {
'username': 'different',
'email': 'email@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_duplicate_username(self):
form_data = {
'username': 'username',
'email': 'different@different.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
| from django.test import TestCase, Client
from django.contrib.auth.models import User
from ..forms import RegisterForm
class RegisterTest(TestCase):
def setUp(self):
User.objects.create_user('username', 'email@email.fr', 'password')
self.client = Client()
def test_correct_registration(self):
form_data = {
'username': 'correct',
'email': 'correct@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertTrue(form.is_valid())
response = self.client.post('/user/register', form_data)
self.assertEqual(response.status_code, 302)
def test_empty_username(self):
form_data = {
'username': '',
'email': 'empty_username@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_empty_email(self):
form_data = {
'username': 'empty_email',
'email': '',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_empty_password(self):
# empty username
form_data = {
'username': 'empty_password',
'email': 'empty_password@email.fr',
'password': '',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_duplicate_email(self):
form_data = {
'username': 'different',
'email': 'email@email.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
def test_duplicate_username(self):
form_data = {
'username': 'username',
'email': 'different@different.fr',
'password': 'test',
}
form = RegisterForm(data=form_data)
self.assertFalse(form.is_valid())
| mit | Python |
4a6f76857a626dd756675a4fe1dd3660cf63d8b7 | Move module time to main() | bowen0701/algorithms_data_structures | alg_fibonacci.py | alg_fibonacci.py | """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import print_function
def fibonacci(n):
"""Get nth number of Fibonacci series by recursion."""
if n == 0:
return 0
elif n == 1 or n == 2:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
def main():
import time
n = 13
print('{}th number of Fibonacci series: {}'
.format(n, fibonacci(n)))
if __name__ == '__main__':
main()
| """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import print_function
import time
def fibonacci(n):
"""Get nth number of Fibonacci series by recursion."""
if n == 0:
return 0
elif n == 1 or n == 2:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
def main():
n = 13
print('{}th number of Fibonacci series: {}'
.format(n, fibonacci(n)))
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
20043f0bbf06fabd4862744123650bad46ccf13d | fix priority reference. | alexsilva/django-xadmin,alexsilva/django-xadmin,alexsilva/django-xadmin,alexsilva/django-xadmin | xadmin/templatetags/xadmin_tags.py | xadmin/templatetags/xadmin_tags.py | import re
from django import template
from django.template import Library
from django.utils import six
from django.utils.html import escape
from django.utils.safestring import mark_safe
from xadmin.util import static, vendor as util_vendor
register = Library()
@register.simple_tag(takes_context=True)
def username_field(context):
"""Get the userame based on the variable [USERNAME_FIELD]"""
return getattr(context['original'], context['original'].USERNAME_FIELD)
@register.simple_tag(takes_context=True)
def view_block(context, block_name, *args, **kwargs):
if 'admin_view' not in context:
return ""
admin_view = context['admin_view']
nodes = []
method_name = 'block_%s' % block_name.replace('-', '_')
block_funcs = []
for view in [admin_view] + admin_view.plugins:
block_func = getattr(view, method_name, None)
if block_func and callable(block_func):
block_funcs.append((getattr(block_func, "priority", 10), block_func))
for _, block_func in sorted(block_funcs, key=lambda x: x[0],
reverse=True):
result = block_func(context, nodes, *args, **kwargs)
if result and isinstance(result, str):
nodes.append(result)
if nodes:
return mark_safe(''.join(nodes))
else:
return ""
@register.filter
def admin_urlname(value, arg):
return 'xadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
static = register.simple_tag(static)
@register.filter
def xslugify(value):
return re.sub('\W+', "_", value, re.I)
@register.simple_tag(takes_context=True)
def vendor(context, *tags):
return util_vendor(*tags).render()
class BlockcaptureNode(template.Node):
"""https://chriskief.com/2013/11/06/conditional-output-of-a-django-block/"""
def __init__(self, nodelist, varname):
self.nodelist = nodelist
self.varname = varname
def render(self, context):
output = self.nodelist.render(context)
context[self.varname] = escape(output)
return ''
@register.tag(name='blockcapture')
def do_blockcapture(parser, token):
try:
tag_name, args = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("'blockcapture' node requires a variable name.")
nodelist = parser.parse(('endblockcapture',))
parser.delete_first_token()
return BlockcaptureNode(nodelist, args)
| import re
from django import template
from django.template import Library
from django.utils import six
from django.utils.html import escape
from django.utils.safestring import mark_safe
from xadmin.util import static, vendor as util_vendor
register = Library()
@register.simple_tag(takes_context=True)
def username_field(context):
"""Get the userame based on the variable [USERNAME_FIELD]"""
return getattr(context['original'], context['original'].USERNAME_FIELD)
@register.simple_tag(takes_context=True)
def view_block(context, block_name, *args, **kwargs):
if 'admin_view' not in context:
return ""
admin_view = context['admin_view']
nodes = []
method_name = 'block_%s' % block_name.replace('-', '_')
block_funcs = []
for view in [admin_view] + admin_view.plugins:
block_func = getattr(view, method_name, None)
if block_func and callable(block_func):
block_funcs.append((getattr(view, "priority", 10), block_func))
for _, block_func in sorted(block_funcs, key=lambda x: x[0],
reverse=True):
result = block_func(context, nodes, *args, **kwargs)
if result and isinstance(result, str):
nodes.append(result)
if nodes:
return mark_safe(''.join(nodes))
else:
return ""
@register.filter
def admin_urlname(value, arg):
return 'xadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
static = register.simple_tag(static)
@register.filter
def xslugify(value):
return re.sub('\W+', "_", value, re.I)
@register.simple_tag(takes_context=True)
def vendor(context, *tags):
return util_vendor(*tags).render()
class BlockcaptureNode(template.Node):
"""https://chriskief.com/2013/11/06/conditional-output-of-a-django-block/"""
def __init__(self, nodelist, varname):
self.nodelist = nodelist
self.varname = varname
def render(self, context):
output = self.nodelist.render(context)
context[self.varname] = escape(output)
return ''
@register.tag(name='blockcapture')
def do_blockcapture(parser, token):
try:
tag_name, args = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("'blockcapture' node requires a variable name.")
nodelist = parser.parse(('endblockcapture',))
parser.delete_first_token()
return BlockcaptureNode(nodelist, args)
| bsd-3-clause | Python |
76a0bae50e51c39d5ccdaa63309cacabda219ddc | Add test for more special tabs | meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser | chrome/test/functional/special_tabs.py | chrome/test/functional/special_tabs.py | #!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pyauto_functional # Must be imported before pyauto
import pyauto
class SpecialTabsTest(pyauto.PyUITest):
"""TestCase for Special Tabs like about:version, chrome://history, etc."""
special_accelerator_tabs = {
pyauto.IDC_SHOW_HISTORY: 'History',
pyauto.IDC_MANAGE_EXTENSIONS: 'Extensions',
pyauto.IDC_SHOW_DOWNLOADS: 'Downloads',
}
special_url_tabs = {
'about:': 'About Version',
'about:appcache-internals': 'AppCache Internals',
'about:credits': 'Credits',
'about:dns': 'About DNS',
'about:histograms': 'About Histograms',
'about:net-internals': 'Network internals',
'about:plugins': 'Plug-ins',
'about:sync': 'About Sync',
'about:version': 'About Version',
'chrome://downloads': 'Downloads',
'chrome://extensions': 'Extensions',
'chrome://history': 'History',
'chrome://net-internals': 'Network internals',
'chrome://newtab': 'New Tab',
}
def testSpecialAccleratorTabs(self):
"""Test special tabs created by acclerators like IDC_SHOW_HISTORY,
IDC_SHOW_DOWNLOADS."""
for accel, title in self.special_accelerator_tabs.iteritems():
self.RunCommand(accel)
self.assertEqual(title, self.GetActiveTabTitle())
def testSpecialURLTabs(self):
"""Test special tabs created by URLs like chrome://downloads,
chrome://extensions, chrome://history, etc."""
for url, title in self.special_url_tabs.iteritems():
self.NavigateToURL(url)
self.assertEqual(title, self.GetActiveTabTitle())
if __name__ == '__main__':
pyauto_functional.Main()
| #!/usr/bin/python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pyauto_functional # Must be imported before pyauto
import pyauto
class SpecialTabsTest(pyauto.PyUITest):
"""TestCase for Special Tabs like about:version, chrome://history, etc."""
special_accelerator_tabs = {
pyauto.IDC_SHOW_HISTORY: 'History',
pyauto.IDC_MANAGE_EXTENSIONS: 'Extensions',
pyauto.IDC_SHOW_DOWNLOADS: 'Downloads',
}
special_url_tabs = {
'about:': 'About Version',
'about:dns': 'About DNS',
'about:histograms': 'About Histograms',
'about:plugins': 'Plug-ins',
'about:sync': 'About Sync',
'about:version': 'About Version',
'chrome://downloads': 'Downloads',
'chrome://extensions': 'Extensions',
'chrome://history': 'History',
'chrome://net-internals': 'Network internals',
}
def testSpecialAccleratorTabs(self):
"""Test special tabs created by acclerators like IDC_SHOW_HISTORY,
IDC_SHOW_DOWNLOADS."""
for accel, title in self.special_accelerator_tabs.iteritems():
self.RunCommand(accel)
self.assertEqual(title, self.GetActiveTabTitle())
def testSpecialURLTabs(self):
"""Test special tabs created by URLs like chrome://downloads,
chrome://extensions, chrome://history, etc."""
for url, title in self.special_url_tabs.iteritems():
self.NavigateToURL(url)
self.assertEqual(title, self.GetActiveTabTitle())
if __name__ == '__main__':
pyauto_functional.Main()
| bsd-3-clause | Python |
cbbe17c702238af8259cff9d52d7bb3e1d8e47b7 | Update _unittests/ut_hackathon/test_crypt.py | sdpython/ensae_projects,sdpython/ensae_projects,sdpython/ensae_projects | _unittests/ut_hackathon/test_crypt.py | _unittests/ut_hackathon/test_crypt.py | """
@brief test log(time=8s)
"""
import sys
import os
import warnings
import unittest
from pyquickhelper.pycode import skipif_travis
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.ensae_projects.hackathon.crypt_helper import set_password, get_password
class TestCrypt(unittest.TestCase):
def test_json_iter(self):
try:
set_password('ppwwdd', 'ep_ex')
except RuntimeError as e:
if "keyrings.alt" in str(e):
warnings.warn("No recommended backend was available.")
return
else:
raise e
pwd = get_password('ep_ex')
self.assertEqual('ppwwdd', pwd)
if __name__ == "__main__":
unittest.main()
| """
@brief test log(time=8s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG
try:
import src
except ImportError:
path = os.path.normpath(
os.path.abspath(
os.path.join(
os.path.split(__file__)[0],
"..",
"..")))
if path not in sys.path:
sys.path.append(path)
import src
from src.ensae_projects.hackathon.crypt_helper import set_password, get_password
class TestCrypt(unittest.TestCase):
def test_json_iter(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
set_password('ppwwdd', 'ep_ex')
pwd = get_password('ep_ex')
self.assertEqual('ppwwdd', pwd)
if __name__ == "__main__":
unittest.main()
| mit | Python |
595ae47653a747c304522beb76c045b14ba38e87 | Fix test_fields | khchine5/django-shop,jrief/django-shop,jrief/django-shop,awesto/django-shop,divio/django-shop,awesto/django-shop,nimbis/django-shop,nimbis/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop,awesto/django-shop,divio/django-shop,jrief/django-shop,nimbis/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop | example/tests/test_fields.py | example/tests/test_fields.py | # -*- coding: utf-8
from __future__ import unicode_literals
from django.db import models
from django.test import TestCase
from shop.models.defaults.cart_item import CartItem
from shop.models.fields import JSONField
class JsonModel(models.Model):
class Meta:
app_label = 'JsonModel'
json = JSONField()
default_json = JSONField(default={"check": 12})
class JSONFieldTest(TestCase):
"""JSONField Tests"""
fixtures = ['myshop-polymorphic.json']
def setUp(self):
super(JSONFieldTest, self).setUp()
self.sample = CartItem.objects.get(id=8)
self.assertIsNotNone(self.sample)
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
extra = {"product_code":"1121"}
self.assertEqual(self.sample.extra, extra)
| # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = Customer.objects.create(extra=json_obj)
new_obj = Customer.objects.get(id=obj.id)
self.assertEqual(new_obj.extra, json_obj)
| bsd-3-clause | Python |
843d8e8811ab197fdbb416a7cc7d045af6f1e4bf | Fix project list resources | cgwire/zou | zou/app/blueprints/crud/project.py | zou/app/blueprints/crud/project.py | from zou.app.models.project import Project
from zou.app.services import user_service, projects_service
from zou.app.utils import permissions
from .base import BaseModelResource, BaseModelsResource
class ProjectsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, Project)
def add_project_permission_filter(self, query):
if permissions.has_manager_permissions():
return query
else:
return query.filter(user_service.build_related_projects_filter())
def check_read_permissions(self):
return True
class ProjectResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, Project)
def check_read_permissions(self, project):
user_service.check_project_access(project["id"])
def update_data(self, data):
open_status = projects_service.get_or_create_open_status()
data["open_status_id"] = open_status["id"]
return data
| from zou.app.models.project import Project
from zou.app.services import user_service, projects_service
from .base import BaseModelResource, BaseModelsResource
class ProjectsResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, Project)
def add_project_permission_filter(self, query):
return query.filter(user_service.build_related_projects_filter())
def check_read_permissions(self):
True
class ProjectResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, Project)
def check_read_permissions(self, project):
user_service.check_project_access(project["id"])
def update_data(self, data):
open_status = projects_service.get_or_create_open_status()
data["open_status_id"] = open_status["id"]
return data
| agpl-3.0 | Python |
3f7e9f1392b3cc2f915d332ee224578e71dcebc3 | Add __version__ | vahtras/amy,vahtras/amy,shapiromatron/amy,wking/swc-amy,wking/swc-amy,pbanaszkiewicz/amy,shapiromatron/amy,pbanaszkiewicz/amy,swcarpentry/amy,shapiromatron/amy,vahtras/amy,swcarpentry/amy,wking/swc-amy,wking/swc-amy,swcarpentry/amy,pbanaszkiewicz/amy | workshops/__init__.py | workshops/__init__.py | __version__ = '0.2.0'
| mit | Python | |
f7a1e75bd11388ee5053e0f4e66dd95a760adc3c | Update openFile.py | Armando115Tapia/proyecto-2do-Bimestre-BI | Procesamiento/ProcesarTweets/openFile.py | Procesamiento/ProcesarTweets/openFile.py |
#Código escrito en lenguaje python que permite leer el archivo .txt creado por printInFile
#con el objetivo de extraer el campo value que contiene el texto de cada tweet recolectado
import json
import sys
from pprint import pprint
import urllib2
# encoding:utf-8
valor = '"value":"'
#Ejemplo de directorio Proyecto Pycharm:
#with open('/root/PycharmProjects/ImprimirTweets/tweets.txt') as f:
with open('') as f:
for line in f:
line.split(valor)
#print(line.split("value"))
#result = line.split('value:')[-1]
result = line.split(valor)[-1]
#print(result)
#Ejemplo de directorio Proyecto Pycharm:
#f1=open('/root/PycharmProjects/ProcesarTweets/tweetsValue.txt','a')
f1=open('','a')
f1.write(result)
f1.close()
| import json
import sys
from pprint import pprint
import urllib2
# encoding:utf-8
valor = '"value":"'
with open('/root/PycharmProjects/ImprimirTweets/tweetsUIO.txt') as f:
for line in f:
line.split(valor)
#print(line.split("value"))
#result = line.split('value:')[-1]
result = line.split(valor)[-1]
#print(result)
f1=open('/root/PycharmProjects/ProcesarTweets/tweetsValue.txt','a')
f1.write(result)
f1.close() | mit | Python |
abd709f9defd7375abdc3300b2ad6cb5e5d2a30a | Fix a bug in readv(). Close #2447. (#2453) | angr/angr,angr/angr,angr/angr | angr/procedures/linux_kernel/iovec.py | angr/procedures/linux_kernel/iovec.py | import angr
from ..posix.read import read
from ..posix.write import write
from ...sim_type import register_types, parse_types
register_types(parse_types("""
struct iovec {
void *iov_base; /* Starting address */
size_t iov_len; /* Number of bytes to transfer */
};
"""))
class readv(angr.SimProcedure):
def run(self, fd, iovec, iovcnt):
if iovec.symbolic or iovcnt.symbolic:
raise angr.errors.SimPosixError("Can't handle symbolic arguments to readv")
iovcnt = self.state.solver.eval(iovcnt)
res = 0
for element in self.state.mem[iovec].struct.iovec.array(iovcnt).resolved:
tmpres = self.inline_call(read, fd, element.iov_base, element.iov_len).ret_expr
if self.state.solver.is_true(self.state.solver.SLT(tmpres, 0)):
return tmpres
return res
class writev(angr.SimProcedure):
def run(self, fd, iovec, iovcnt):
if iovec.symbolic or iovcnt.symbolic:
raise angr.errors.SimPosixError("Can't handle symbolic arguments to writev")
iovcnt = self.state.solver.eval(iovcnt)
res = 0
for element in self.state.mem[iovec].struct.iovec.array(iovcnt).resolved:
tmpres = self.inline_call(write, fd, element.iov_base, element.iov_len).ret_expr
if self.state.solver.is_true(self.state.solver.SLT(tmpres, 0)):
return tmpres
return res
| import angr
from ..posix.read import read
from ..posix.write import write
from ...sim_type import register_types, parse_types
register_types(parse_types("""
struct iovec {
void *iov_base; /* Starting address */
size_t iov_len; /* Number of bytes to transfer */
};
"""))
class readv(angr.SimProcedure):
def run(self, fd, iovec, iovcnt):
if iovec.symbolic or iovcnt.symbolic:
raise angr.errors.SimPosixError("Can't handle symbolic arguments to readv")
iovcnt = self.state.solver.eval(iovcnt)
res = 0
for element in self.state.mem[iovec].struct.iovec.array(iovcnt).resolved:
tmpres = self.inline_call(read, fd, element.iov_base, element.iov_len)
if self.state.solver.is_true(self.state.solver.SLT(tmpres, 0)):
return tmpres
return res
class writev(angr.SimProcedure):
def run(self, fd, iovec, iovcnt):
if iovec.symbolic or iovcnt.symbolic:
raise angr.errors.SimPosixError("Can't handle symbolic arguments to writev")
iovcnt = self.state.solver.eval(iovcnt)
res = 0
for element in self.state.mem[iovec].struct.iovec.array(iovcnt).resolved:
tmpres = self.inline_call(write, fd, element.iov_base, element.iov_len).ret_expr
if self.state.solver.is_true(self.state.solver.SLT(tmpres, 0)):
return tmpres
return res
| bsd-2-clause | Python |
dc2254457602ca40f79d20d9eb97e64941ab7709 | use SI units in units example | mgk/urwid_timed_progress | examples/reasonable_units.py | examples/reasonable_units.py | #!/usr/bin/env python
from time import sleep
import urwid as uw
from urwid_timed_progress import TimedProgressBar
# Demo of bar adjusting to reasonable units
# The demo simulates progress at different rates to show
# the progress bar changing units as needed.
if __name__ == '__main__':
palette = [
('normal', 'white', 'black', 'standout'),
('complete', 'white', 'dark magenta'),
]
# Using SI units: https://en.wikipedia.org/wiki/Kilobyte
units = [
('bytes', 1),
('kB', 1000),
('MB', 1000000)
]
bar = TimedProgressBar('normal', 'complete', done=5e7, units=units)
status = uw.Text('simulation that demonstrates auto selecting best units')
footer = uw.Text('q to exit, r to run simulation')
progress = uw.Frame(uw.ListBox([bar, uw.Divider(bottom=5), status]),
footer=footer)
def run():
bar.reset()
# start with slow rate of progress
status.set_text('starting out, running slowly')
for i in range(5):
bar.add_progress(1)
loop.draw_screen()
sleep(.5)
for i in range(50):
bar.add_progress(200)
loop.draw_screen()
sleep(.1)
# run fast until done
status.set_text('running fast until done')
while bar.current < bar.done:
bar.add_progress(100000)
loop.draw_screen()
sleep(.01)
status.set_text('done')
loop.draw_screen()
def keypress(key):
if key == 'q':
raise uw.ExitMainLoop()
elif key == 'r':
run()
loop = uw.MainLoop(progress, palette, unhandled_input=keypress)
loop.run()
| #!/usr/bin/env python
from time import sleep
import urwid as uw
from urwid_timed_progress import TimedProgressBar
# Demo of bar adjusting to reasonable units
# The demo simulates progress at different rates to show
# the progress bar changing units as needed.
if __name__ == '__main__':
palette = [
('normal', 'white', 'black', 'standout'),
('complete', 'white', 'dark magenta'),
]
units = [
('B', 1),
('KB', 1000),
('MB', 1000000)
]
bar = TimedProgressBar('normal', 'complete', done=5e7, units=units)
status = uw.Text('simulation that demonstrates auto selecting best units')
footer = uw.Text('q to exit, r to run simulation')
progress = uw.Frame(uw.ListBox([bar, uw.Divider(bottom=5), status]),
footer=footer)
def run():
bar.reset()
# start with slow rate of progress
status.set_text('starting out, running slowly')
for i in range(5):
bar.add_progress(1)
loop.draw_screen()
sleep(.5)
for i in range(50):
bar.add_progress(200)
loop.draw_screen()
sleep(.1)
# run fast until done
status.set_text('running fast until done')
while bar.current < bar.done:
bar.add_progress(100000)
loop.draw_screen()
sleep(.01)
status.set_text('done')
loop.draw_screen()
def keypress(key):
if key == 'q':
raise uw.ExitMainLoop()
elif key == 'r':
run()
loop = uw.MainLoop(progress, palette, unhandled_input=keypress)
loop.run()
| mit | Python |
0ea72bc6c1470960294bbc92ee1e1e9b606699c9 | Update fb_login.py | umangahuja1/Python | Automation/fb_login.py | Automation/fb_login.py | from selenium import webdriver
from getpass import getpass
usr = input('Enter your username or email id: ')
pwd = getpass('Enter your password : ')
driver = webdriver.Chrome()
driver.get('https://www.facebook.com/')
username_box = driver.find_element_by_id('email')
username_box.send_keys(usr)
password_box = driver.find_element_by_id('pass')
password_box.send_keys(pwd)
login_btn = driver.find_element_by_id('u_0_2')
login_btn.submit()
| '''
This script is created to perform facebook login from terminal
'''
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.common.exceptions import NoSuchElementException
usr=input('Enter Email Id:')
pwd=input('Enter Password:')
driver = webdriver.Chrome()
driver.get('https://www.facebook.com/')
print ("Opened facebook...")
sleep(1)
a = driver.find_element_by_id('email')
a.send_keys(usr)
print ("Email Id entered...")
sleep(1)
b = driver.find_element_by_id('pass')
b.send_keys(pwd)
print ("Password entered...")
c = driver.find_element_by_id('loginbutton')
c.click()
print ("Done...")
sleep(10)
driver.quit()
print("Game Over...")
| apache-2.0 | Python |
169a14df3a5e145310f9d73b2531f62e4a02bd5a | Fix zip on clang-release-builder. | eunchong/build,eunchong/build,eunchong/build,eunchong/build | scripts/slave/android/archive_build.py | scripts/slave/android/archive_build.py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A tool to archive an Android build.
This script is used for Debug and Release builds.
When this is run, the current directory (cwd) should be the outer build
directory (e.g., <clankium>/src).
For a list of command-line options, call this script with '--help'.
"""
import optparse
import os
import subprocess
import sys
def archive_build(target='Debug', name='archive.zip', location='out',
files=None, ignore_subfolder_names=False):
out_dir = 'out'
target_dir = os.path.join(out_dir, target)
zip_file = os.path.join(location, name)
expanded_files = []
if files:
for f in files:
expanded_files.append(os.path.join(target_dir, f))
else:
expanded_files = [target_dir]
saved_dir = os.getcwd()
os.chdir(os.path.dirname(os.path.join(saved_dir, out_dir)))
zip_args = '-yr1'
if ignore_subfolder_names:
zip_args += 'j'
command = ['zip', zip_args, zip_file]
command.extend(expanded_files)
subprocess.call(' '.join(command), shell=True)
os.chdir(saved_dir)
def main(argv):
option_parser = optparse.OptionParser()
option_parser.add_option('--target', default='Debug',
help='build target to archive (Debug or Release)')
option_parser.add_option('--name', default='archive.zip',
help='name of archive')
option_parser.add_option('--location', default='out',
help='location to store archive in')
option_parser.add_option('--files',
help='list of files to include - can be file paths '
'or globs')
option_parser.add_option('--ignore-subfolder-names',
dest='ignore_subfolder_names',
action='store_true', default=False,
help='archive files without folder structure')
options, args = option_parser.parse_args()
if args:
raise Exception('Unknown arguments: %s' % args)
if options.files:
options.files = options.files.split(',')
return archive_build(target=options.target, name=options.name,
location=options.location, files=options.files,
ignore_subfolder_names=options.ignore_subfolder_names)
if '__main__' == __name__:
sys.exit(main(None))
| #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A tool to archive an Android build.
This script is used for Debug and Release builds.
When this is run, the current directory (cwd) should be the outer build
directory (e.g., <clankium>/src).
For a list of command-line options, call this script with '--help'.
"""
import optparse
import os
import subprocess
import sys
def archive_build(target='Debug', name='archive.zip', location='out',
files=None, ignore_subfolder_names=False):
out_dir = 'out'
target_dir = os.path.join(out_dir, target)
zip_file = os.path.join(location, name)
files = []
if files:
for f in files:
files.append(os.path.join(target_dir, f))
else:
files = [target_dir]
saved_dir = os.getcwd()
os.chdir(os.path.dirname(os.path.join(saved_dir, out_dir)))
zip_args = '-yr1'
if ignore_subfolder_names:
zip_args += 'j'
command = ['zip', zip_args, zip_file]
command.extend(files)
subprocess.call(' '.join(command), shell=True)
os.chdir(saved_dir)
def main(argv):
option_parser = optparse.OptionParser()
option_parser.add_option('--target', default='Debug',
help='build target to archive (Debug or Release)')
option_parser.add_option('--name', default='archive.zip',
help='name of archive')
option_parser.add_option('--location', default='out',
help='location to store archive in')
option_parser.add_option('--files',
help='list of files to include - can be file paths '
'or globs')
option_parser.add_option('--ignore-subfolder-names',
dest='ignore_subfolder_names',
action='store_true', default=False,
help='archive files without folder structure')
options, args = option_parser.parse_args()
if args:
raise Exception('Unknown arguments: %s' % args)
if options.files:
options.files = options.files.split(',')
return archive_build(target=options.target, name=options.name,
location=options.location, files=options.files,
ignore_subfolder_names=options.ignore_subfolder_names)
if '__main__' == __name__:
sys.exit(main(None))
| bsd-3-clause | Python |
782574b58e96eadcb3f07dbdf91fdf3ecc88b98e | Fix naming issue in host state | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/states/host.py | salt/states/host.py | '''
Manage the state of the hosts file
'''
def present(name, ip):
'''
Ensures that the named host is present with the given ip
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if __salt__['hosts.has_pair'](ip, name):
ret['changes'] = 'Already Present'
ret['result'] = True
return ret
if __salt__['hosts.add_host'](ip, name):
ret['changes'] = {'host': name}
ret['result'] = True
ret['comment'] = 'Added host ' + name
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set host'
return ret
def absent(name, ip):
'''
Ensure that the the named host is absent
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if not __salt__['hosts.has_pair'](ip, name):
ret['changes'] = 'Already Absent'
ret['result'] = True
return ret
if __salt__['hosts.rm_host'](ip, name):
ret['changes'] = {'host': name}
ret['result'] = True
ret['comment'] = 'Removed host ' + name
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to remove host'
return ret
| '''
Manage the state of the hosts file
'''
def present(name, ip):
'''
Ensures that the named host is present with the given ip
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if __salt__['hosts.has_pair'](ip, name):
ret['changes'] = 'Already Present'
ret['result'] = True
return ret
if __salt__['hosts.add_host'](ip, name):
ret['changes'] = {'host': name}
ret['result'] = True
ret['comment'] = 'Added host ' + name
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to set host'
return ret
def absent(name, ip):
'''
Ensure that the the named host is absent
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if not __salt__['hosts.has_pair'](ip, name):
ret['changes'] = 'Already Absent'
ret['result'] = True
return ret
if __salt__['hosts.rm_host'](ip, name):
ret['changes'] = {'host': name}
ret['result'] = True
ret['comment'] = 'Removed host ' + host
return ret
else:
ret['result'] = False
ret['comment'] = 'Failed to remove host'
return ret
| apache-2.0 | Python |
c300192f08edfecf843e00a8076cff97010e5fd5 | Add some docstring. | fyabc/MiniGames | Shift_pygame/Shift_pygame/utils/basic.py | Shift_pygame/Shift_pygame/utils/basic.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
import re
import sys
__author__ = 'fyabc'
def error(msg, *args, **kwargs):
print(msg, *args, **kwargs, file=sys.stderr)
def sign(x):
return 1 if x > 0 else (-1 if x < 0 else 1)
_comment_pattern = re.compile(r'#.*?\n')
def strip_line(line):
"""Remove comments (Start with '#') from the line."""
return _comment_pattern.sub('', line).strip()
def lget(l, i, default=None):
"""Get the i-th element of the list l. If not exist, return the default value."""
try:
return l[i]
except IndexError:
return default
| #! /usr/bin/python
# -*- coding: utf-8 -*-
import re
import sys
__author__ = 'fyabc'
def error(msg, *args, **kwargs):
print(msg, *args, **kwargs, file=sys.stderr)
def sign(x):
return 1 if x > 0 else (-1 if x < 0 else 1)
_comment_pattern = re.compile(r'#.*?\n')
def strip_line(line):
return _comment_pattern.sub('', line).strip()
def lget(l, i, default=None):
try:
return l[i]
except IndexError:
return default
| mit | Python |
876cbb786929f3eb18500faa960a3970bc252e2b | allow authproxy requests with double slash in URL (/tiles//layername) | omniscale/gbi-server,omniscale/gbi-server,omniscale/gbi-server | app/gbi_server/authproxy/blueprint.py | app/gbi_server/authproxy/blueprint.py | # This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Blueprint, request, session, abort
from gbi_server.extensions import tileproxy
from gbi_server.extensions import couchdbproxy
authproxy = Blueprint("authproxy", __name__)
for code in [401, 403, 404, 405]:
@authproxy.errorhandler(code)
def on_error(error):
return error
@authproxy.route('/authproxy/<string:user_token>/couchdb/', build_only=True)
@authproxy.route('/authproxy/<string:user_token>/couchdb/<path:url>', methods=['GET', 'POST', 'PUT', 'DELETE'])
def couchdb_proxy(url, user_token):
return couchdbproxy.on_proxy(request, user_token=user_token, url=url)
@authproxy.route('/authproxy/couchdb/<path:url>', methods=['GET'])
def couchdb_proxy_file(url):
user_token = session.get('authproxy_token')
if user_token is None:
abort(401)
url += '/file'
return couchdbproxy.on_proxy(request, user_token=user_token, url=url)
@authproxy.route('/authproxy/tiles/', build_only=True)
@authproxy.route('/authproxy/tiles/<string:layer>/<path:url>', methods=['GET', 'POST'])
@authproxy.route('/authproxy/<string:user_token>/tiles/', build_only=True)
@authproxy.route('/authproxy/<string:user_token>/tiles/<string:layer>/<path:url>', methods=['GET', 'POST'])
@authproxy.route('/authproxy/<string:user_token>/tiles//<string:layer>/<path:url>', methods=['GET', 'POST'])
def tile_proxy(layer, url, user_token=None):
if user_token is None:
user_token = session.get('authproxy_token')
if user_token is None:
abort(401)
return tileproxy.on_proxy(request, user_token=user_token, layer=layer, url=url)
| # This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Blueprint, request, session, abort
from gbi_server.extensions import tileproxy
from gbi_server.extensions import couchdbproxy
authproxy = Blueprint("authproxy", __name__)
for code in [401, 403, 404, 405]:
@authproxy.errorhandler(code)
def on_error(error):
return error
@authproxy.route('/authproxy/<string:user_token>/couchdb/', build_only=True)
@authproxy.route('/authproxy/<string:user_token>/couchdb/<path:url>', methods=['GET', 'POST', 'PUT', 'DELETE'])
def couchdb_proxy(url, user_token):
return couchdbproxy.on_proxy(request, user_token=user_token, url=url)
@authproxy.route('/authproxy/couchdb/<path:url>', methods=['GET'])
def couchdb_proxy_file(url):
user_token = session.get('authproxy_token')
if user_token is None:
abort(401)
url += '/file'
return couchdbproxy.on_proxy(request, user_token=user_token, url=url)
@authproxy.route('/authproxy/tiles/', build_only=True)
@authproxy.route('/authproxy/tiles/<string:layer>/<path:url>', methods=['GET', 'POST'])
@authproxy.route('/authproxy/<string:user_token>/tiles/', build_only=True)
@authproxy.route('/authproxy/<string:user_token>/tiles/<string:layer>/<path:url>', methods=['GET', 'POST'])
def tile_proxy(layer, url, user_token=None):
if user_token is None:
user_token = session.get('authproxy_token')
if user_token is None:
abort(401)
return tileproxy.on_proxy(request, user_token=user_token, layer=layer, url=url)
| apache-2.0 | Python |
df95f72566d0b01be27342f0da42980d3d8a1b60 | Remove unused DefaultRouter in retina_api.urls | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | app/grandchallenge/retina_api/urls.py | app/grandchallenge/retina_api/urls.py | from django.urls import path, include
from rest_framework.routers import DefaultRouter, SimpleRouter
from grandchallenge.retina_api import views
from django.views.decorators.cache import cache_page
from django.conf import settings
app_name = "retina_api"
annotation_router = SimpleRouter()
annotation_router.register(
"singlepolygonannotation",
views.SinglePolygonViewSet,
basename="singlepolygonannotation",
)
annotation_router.register(
"polygonannotationset",
views.PolygonAnnotationSetViewSet,
basename="polygonannotationset",
)
urlpatterns = [
path("archives/", views.ArchiveView.as_view(), name="archives-api-view"),
path(
"image/<str:image_type>/<str:patient_identifier>/<str:study_identifier>/<str:image_identifier>/<str:image_modality>/",
cache_page(settings.RETINA_IMAGE_CACHE_TIME)(
views.ImageView.as_view()
),
name="image-api-view",
),
path(
"data/<str:data_type>/<int:user_id>/<str:archive_identifier>/<str:patient_identifier>/",
views.DataView.as_view(),
name="data-api-view",
),
path(
"annotation/polygon/<int:user_id>/<uuid:image_id>/",
views.PolygonListView.as_view(),
name="annotation-api-view",
),
path("annotation/<int:user_id>/", include(annotation_router.urls)),
]
| from django.urls import path, include
from rest_framework.routers import DefaultRouter, SimpleRouter
from grandchallenge.retina_api import views
from django.views.decorators.cache import cache_page
from django.conf import settings
app_name = "retina_api"
router = DefaultRouter()
annotation_router = SimpleRouter()
annotation_router.register(
"singlepolygonannotation",
views.SinglePolygonViewSet,
basename="singlepolygonannotation",
)
annotation_router.register(
"polygonannotationset",
views.PolygonAnnotationSetViewSet,
basename="polygonannotationset",
)
urlpatterns = [
path("", include(router.urls)),
path("archives/", views.ArchiveView.as_view(), name="archives-api-view"),
path(
"image/<str:image_type>/<str:patient_identifier>/<str:study_identifier>/<str:image_identifier>/<str:image_modality>/",
cache_page(settings.RETINA_IMAGE_CACHE_TIME)(
views.ImageView.as_view()
),
name="image-api-view",
),
path(
"data/<str:data_type>/<int:user_id>/<str:archive_identifier>/<str:patient_identifier>/",
views.DataView.as_view(),
name="data-api-view",
),
path(
"annotation/polygon/<int:user_id>/<uuid:image_id>/",
views.PolygonListView.as_view(),
name="annotation-api-view",
),
path("annotation/<int:user_id>/", include(annotation_router.urls)),
]
| apache-2.0 | Python |
c1e7ff32956d003800c113907309f56096d0be41 | set interval int on scheduler minute | jgabriellima/mining,chrisdamba/mining,mining/mining,mlgruby/mining,AndrzejR/mining,seagoat/mining,mining/mining,avelino/mining,avelino/mining,mlgruby/mining,AndrzejR/mining,chrisdamba/mining,jgabriellima/mining,mlgruby/mining,seagoat/mining | bin/scheduler.py | bin/scheduler.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from gevent import monkey
monkey.patch_all()
import time
import schedule
from bottle.ext.mongo import MongoPlugin
from settings import ADMIN_BUCKET_NAME, MONGO_URI
from bin.mining import run
def job(slug):
run(slug)
def rules(cube):
scheduler_type = cube.get('scheduler_type', 'minutes')
scheduler_interval = cube.get('scheduler_interval', 60)
if scheduler_type == 'minutes':
t = schedule.every(int(scheduler_interval)).minutes
elif scheduler_type == 'hour':
t = schedule.every().hour
elif scheduler_type == 'day':
t = schedule.every().day
else:
return None
t.do(job, slug=cube.get('slug'))
mongo = MongoPlugin(uri=MONGO_URI, db=ADMIN_BUCKET_NAME,
json_mongo=True).get_mongo()
register = []
for cube in mongo['cube'].find({'scheduler': True}):
rules(cube)
register.append(cube['slug'])
while True:
for cube in mongo['cube'].find({'scheduler': True}):
if cube['slug'] not in register:
rules(cube)
schedule.run_pending()
time.sleep(1)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from gevent import monkey
monkey.patch_all()
import time
import schedule
from bottle.ext.mongo import MongoPlugin
from settings import ADMIN_BUCKET_NAME, MONGO_URI
from bin.mining import run
def job(slug):
run(slug)
def rules(cube):
scheduler_type = cube.get('scheduler_type', 'minutes')
scheduler_interval = cube.get('scheduler_interval', 60)
if scheduler_type == 'minutes':
t = schedule.every(scheduler_interval).minutes
elif scheduler_type == 'hour':
t = schedule.every().hour
elif scheduler_type == 'day':
t = schedule.every().day
t.do(job, slug=cube.get('slug'))
mongo = MongoPlugin(uri=MONGO_URI, db=ADMIN_BUCKET_NAME,
json_mongo=True).get_mongo()
register = []
for cube in mongo['cube'].find({'scheduler': True}):
rules(cube)
register.append(cube['slug'])
while True:
for cube in mongo['cube'].find({'scheduler': True}):
if cube['slug'] not in register:
rules(cube)
schedule.run_pending()
time.sleep(1)
| mit | Python |
ea5fd30a583016b4dc858848e28168ada74deca3 | Check in fix for py3k and urlparse. | scls19fr/blaze,markflorisson/blaze-core,ChinaQuants/blaze,jdmcbr/blaze,nkhuyu/blaze,cpcloud/blaze,LiaoPan/blaze,FrancescAlted/blaze,cowlicks/blaze,FrancescAlted/blaze,mwiebe/blaze,markflorisson/blaze-core,mwiebe/blaze,ContinuumIO/blaze,FrancescAlted/blaze,aterrel/blaze,AbhiAgarwal/blaze,jdmcbr/blaze,aterrel/blaze,ChinaQuants/blaze,mwiebe/blaze,mrocklin/blaze,jcrist/blaze,AbhiAgarwal/blaze,mwiebe/blaze,nkhuyu/blaze,alexmojaki/blaze,AbhiAgarwal/blaze,markflorisson/blaze-core,cowlicks/blaze,dwillmer/blaze,cpcloud/blaze,mrocklin/blaze,xlhtc007/blaze,maxalbert/blaze,alexmojaki/blaze,aterrel/blaze,caseyclements/blaze,FrancescAlted/blaze,caseyclements/blaze,xlhtc007/blaze,dwillmer/blaze,ContinuumIO/blaze,markflorisson/blaze-core,AbhiAgarwal/blaze,scls19fr/blaze,maxalbert/blaze,jcrist/blaze,LiaoPan/blaze | blaze/py3help.py | blaze/py3help.py | import sys
import itertools
PY3 = sys.version_info[:2] >= (3,0)
if PY3:
def dict_iteritems(d):
return d.items().__iter__()
xrange = range
_inttypes = (int,)
_strtypes = (str,)
unicode = str
imap = map
basestring = str
import urllib.parse as urlparse
else:
import __builtin__
def dict_iteritems(d):
return d.iteritems()
xrange = __builtin__.xrange
unicode = __builtin__.unicode
basestring = __builtin__.basestring
_strtypes = (str, unicode)
_inttypes = (int, long)
imap = itertools.imap
import urlparse
| import sys
import itertools
PY3 = sys.version_info[:2] >= (3,0)
if PY3:
def dict_iteritems(d):
return d.items().__iter__()
xrange = range
_inttypes = (int,)
_strtypes = (str,)
unicode = str
imap = map
basestring = str
import urllib
urlparse = urllib.parse
else:
import __builtin__
def dict_iteritems(d):
return d.iteritems()
xrange = __builtin__.xrange
unicode = __builtin__.unicode
basestring = __builtin__.basestring
_strtypes = (str, unicode)
_inttypes = (int, long)
imap = itertools.imap
import urlparse
| bsd-3-clause | Python |
5d40bdeb1b95c051d0655595846acd91cff6160f | Bump version | Rohde-Schwarz-Cybersecurity/botan,randombit/botan,randombit/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,randombit/botan,randombit/botan,Rohde-Schwarz-Cybersecurity/botan,Rohde-Schwarz-Cybersecurity/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,webmaster128/botan,webmaster128/botan,randombit/botan | botan_version.py | botan_version.py |
release_major = 1
release_minor = 11
release_patch = 7
release_so_abi_rev = release_patch
# These are set by the distribution script
release_vc_rev = None
release_datestamp = 0
release_type = 'unreleased'
|
release_major = 1
release_minor = 11
release_patch = 6
release_so_abi_rev = release_patch
# These are set by the distribution script
release_vc_rev = None
release_datestamp = 0
release_type = 'unreleased'
| bsd-2-clause | Python |
7205fa3fca6b31467e35a4472aa900ec75db6caf | Solve No.158 by python | jonathanxqs/lintcode,jonathanxqs/lintcode | 158.py | 158.py | class Solution:
"""
@param s: The first string
@param b: The second string
@return true or false
"""
def anagram(self, s, t):
def hashLize(s):
dicts1= dict()
for j in s:
if j in dicts1.keys():
dicts1[j]+=1
else:
dicts1[j]=0
return dicts1
# write your code here
s1=hashLize(s)
t1=hashLize(t)
if s1 == t1:
return True
return False
| mit | Python | |
239dba89e8cb933fe8950412eeaea3ca4caa92bd | remove all instances | FredrikAugust/Aether-Nodes,FredrikAugust/Aether-Nodes,FredrikAugust/Aether-Nodes | src/app.py | src/app.py | __author__ = 'Fredrik A. Madsen-Malmo'
from flask import Flask, flash, render_template, g
import models
import forms
import os
DEBUG = True
PORT = port = int(os.environ.get('PORT', 33507))
HOST = '0.0.0.0'
app = Flask(__name__)
app.secret_key = os.environ.get('secret_key', 'error')
# Before and after request
@app.before_request
def before_request():
"""Connect to db before each req"""
g.db = models.DATABASE
g.db.connect()
@app.after_request
def after_request(response):
"""Close the db connection after each req"""
g.db.close()
return response
# Other functions
@app.route('/online/<ip>', methods=['POST', 'GET'])
def is_online(ip):
try:
target = models.Entry.get(models.Entry.id == ip)
except Exception:
return False
# 2 cycles and 0 bytes sent
result = os.popen('ping -c 2 -s 0 {}'.format(ip))
if not (result.contains('Unknown host') or result.conatins('timeout')):
target.update(
online=True
).execute()
return True
# Routes
@app.route('/', methods=['POST', 'GET'])
def index():
form = forms.EntryForm()
stream = models.Entry.select()
if form.validate_on_submit():
try:
models.Entry.create(
name=form.name.data,
ip=form.ip.data,
port=form.port.data,
online=is_online(form.ip.data))
except Exception:
pass
return render_template('index.html', form=form, stream=stream)
# Start app
if __name__ == '__main__':
models.initialize()
models.Entry.select().delete()
app.run(debug=DEBUG, port=PORT, host=HOST)
| __author__ = 'Fredrik A. Madsen-Malmo'
from flask import Flask, flash, render_template, g
import models
import forms
import os
DEBUG = True
PORT = port = int(os.environ.get('PORT', 33507))
HOST = '0.0.0.0'
app = Flask(__name__)
app.secret_key = os.environ.get('secret_key', 'error')
# Before and after request
@app.before_request
def before_request():
"""Connect to db before each req"""
g.db = models.DATABASE
g.db.connect()
@app.after_request
def after_request(response):
"""Close the db connection after each req"""
g.db.close()
return response
# Other functions
@app.route('/online/<ip>', methods=['POST', 'GET'])
def is_online(ip):
try:
target = models.Entry.get(models.Entry.id == ip)
except Exception:
return False
# 2 cycles and 0 bytes sent
result = os.popen('ping -c 2 -s 0 {}'.format(ip))
if not (result.contains('Unknown host') or result.conatins('timeout')):
target.update(
online=True
).execute()
return True
# Routes
@app.route('/', methods=['POST', 'GET'])
def index():
form = forms.EntryForm()
stream = models.Entry.select()
if form.validate_on_submit():
try:
models.Entry.create(
name=form.name.data,
ip=form.ip.data,
port=form.port.data,
online=is_online(form.ip.data))
except Exception:
pass
return render_template('index.html', form=form, stream=stream)
# Start app
if __name__ == '__main__':
models.initialize()
models.Entry.create(
name='fotoply',
ip='93.184.204.215',
port='7077')
app.run(debug=DEBUG, port=PORT, host=HOST)
| mit | Python |
7a0c87ae0cca03bb13f105ecbc385ab85ef80dd2 | Fix Alu again | bongtrop/SMC | alu.py | alu.py | import translator
#nand gate
def nand(a, b):
a = translator.com2s2bin(a)
b = translator.com2s2bin(b)
result = ""
for i in range(0,32):
if not (a[i]=='1' and b[i]=='1'):
result+='1'
else:
result+='0'
return translator.bin2com2s(result)
#adder
def add(a, b):
return a+b
#get zero bit
def equal(a, b):
return a==b
| import translator
#nand gate
def nand(a, b):
a = translator.com2s2bin(a)
b = translator.com2s2bin(b)
result = ""
for i in range(0,32):
if (not (a[i] and b[i])):
result+='1'
else:
result+='0'
return int(result, 2)
#adder
def add(a, b):
return a+b
#get zero bit
def equal(a, b):
return a==b
| bsd-3-clause | Python |
6304dee5624c3bfb6b481a2d4e7b486c742d71bd | remove the useless code to reduce the complexity | Microsoft/multiverso,you-n-g/multiverso,zhengsx/multiverso,you-n-g/multiverso,you-n-g/multiverso,zhengsx/multiverso,Microsoft/multiverso,zhengsx/multiverso,liming-vie/multiverso,liming-vie/multiverso,you-n-g/multiverso,Microsoft/multiverso,zhengsx/multiverso,liming-vie/multiverso,liming-vie/multiverso,Microsoft/multiverso | binding/python/multiverso/api.py | binding/python/multiverso/api.py | #!/usr/bin/env python
# coding:utf8
import ctypes
from utils import Loader
import numpy as np
mv_lib = Loader.get_lib()
def init(args=[]):
'''Initialize mutliverso.
This should be called only once before training at the beginning of the
whole project.
'''
n = len(args)
args_type = ctypes.c_char_p * n
mv_lib.MV_Init(ctypes.pointer(ctypes.c_int(n)), args_type(*[ctypes.c_char_p(arg) for arg in args]))
def shutdown():
'''Set a barrier for all workers to wait.
Workers will wait until all workers reach a specific barrier.
'''
mv_lib.MV_ShutDown()
def barrier():
'''Shutdown multiverso.
This should be called only once after finishing training at the end of the
whole project.
'''
mv_lib.MV_Barrier()
def workers_num():
'''Return the total number of workers.'''
return mv_lib.MV_NumWorkers()
def worker_id():
'''Return the id (zero-based index) for current worker.'''
return mv_lib.MV_WorkerId()
def server_id():
return mv_lib.MV_ServerId()
def is_master_worker():
''' If the worker is master worker
Some things only need one worker process, such as validation, outputing the
result, initializing the parameters and so on. So we mark the worker 0 as
the master worker to finish these things.
'''
return worker_id() == 0
| #!/usr/bin/env python
# coding:utf8
import ctypes
from utils import Loader
import numpy as np
mv_lib = Loader.get_lib()
WORKER_ID = None
def init(args=[]):
'''Initialize mutliverso.
This should be called only once before training at the beginning of the
whole project.
'''
n = len(args)
args_type = ctypes.c_char_p * n
mv_lib.MV_Init(ctypes.pointer(ctypes.c_int(n)), args_type(*[ctypes.c_char_p(arg) for arg in args]))
def shutdown():
'''Set a barrier for all workers to wait.
Workers will wait until all workers reach a specific barrier.
'''
mv_lib.MV_ShutDown()
def barrier():
'''Shutdown multiverso.
This should be called only once after finishing training at the end of the
whole project.
'''
mv_lib.MV_Barrier()
def workers_num():
'''Return the total number of workers.'''
return mv_lib.MV_NumWorkers()
def worker_id():
'''Return the id (zero-based index) for current worker.'''
global WORKER_ID
if WORKER_ID is None:
#
WORKER_ID = mv_lib.MV_WorkerId()
return WORKER_ID
def server_id():
return mv_lib.MV_ServerId()
def is_master_worker():
''' If the worker is master worker
Some things only need one worker process, such as validation, outputing the
result, initializing the parameters and so on. So we mark the worker 0 as
the master worker to finish these things.
'''
return worker_id() == 0
| mit | Python |
4795c58f3cfacb20e3b991fec7b78d59a8dcabff | Use SO_REUSEADDR to avoid errors on quick restart. | Timmenem/micropython,ChuckM/micropython,dxxb/micropython,MrSurly/micropython,AriZuu/micropython,SungEun-Steve-Kim/test-mp,mhoffma/micropython,ryannathans/micropython,hosaka/micropython,martinribelotta/micropython,MrSurly/micropython,kerneltask/micropython,hiway/micropython,hosaka/micropython,xyb/micropython,aethaniel/micropython,vriera/micropython,cnoviello/micropython,mianos/micropython,ahotam/micropython,pfalcon/micropython,cloudformdesign/micropython,AriZuu/micropython,KISSMonX/micropython,feilongfl/micropython,rubencabrera/micropython,hiway/micropython,ruffy91/micropython,xyb/micropython,turbinenreiter/micropython,dinau/micropython,ganshun666/micropython,alex-march/micropython,utopiaprince/micropython,TDAbboud/micropython,tralamazza/micropython,xuxiaoxin/micropython,dxxb/micropython,ericsnowcurrently/micropython,dmazzella/micropython,infinnovation/micropython,aitjcize/micropython,rubencabrera/micropython,kostyll/micropython,jlillest/micropython,heisewangluo/micropython,EcmaXp/micropython,lbattraw/micropython,cwyark/micropython,dhylands/micropython,supergis/micropython,slzatz/micropython,selste/micropython,deshipu/micropython,neilh10/micropython,MrSurly/micropython,redbear/micropython,trezor/micropython,henriknelson/micropython,mianos/micropython,emfcamp/micropython,mhoffma/micropython,aethaniel/micropython,Peetz0r/micropython-esp32,jmarcelino/pycom-micropython,bvernoux/micropython,micropython/micropython-esp32,infinnovation/micropython,suda/micropython,HenrikSolver/micropython,TDAbboud/micropython,SHA2017-badge/micropython-esp32,mianos/micropython,dhylands/micropython,ChuckM/micropython,tobbad/micropython,blazewicz/micropython,lowRISC/micropython,adafruit/micropython,cwyark/micropython,kerneltask/micropython,toolmacher/micropython,trezor/micropython,Peetz0r/micropython-esp32,skybird6672/micropython,galenhz/micropython,cnoviello/micropython,kostyll/micropython,dinau/micropython,redbear/micropython,infinnovation/micropython,lbattraw/micropython,aitjcize/micropython,orionrobots/micropython,adamkh/micropython,misterdanb/micropython,HenrikSolver/micropython,oopy/micropython,tuc-osg/micropython,noahwilliamsson/micropython,emfcamp/micropython,ernesto-g/micropython,TDAbboud/micropython,adafruit/micropython,paul-xxx/micropython,noahchense/micropython,matthewelse/micropython,mgyenik/micropython,PappaPeppar/micropython,mianos/micropython,danicampora/micropython,noahchense/micropython,feilongfl/micropython,MrSurly/micropython-esp32,vriera/micropython,pozetroninc/micropython,mhoffma/micropython,chrisdearman/micropython,ahotam/micropython,jimkmc/micropython,praemdonck/micropython,MrSurly/micropython,bvernoux/micropython,ryannathans/micropython,dxxb/micropython,dhylands/micropython,KISSMonX/micropython,cnoviello/micropython,feilongfl/micropython,ryannathans/micropython,hosaka/micropython,skybird6672/micropython,TDAbboud/micropython,henriknelson/micropython,puuu/micropython,micropython/micropython-esp32,tdautc19841202/micropython,micropython/micropython-esp32,tdautc19841202/micropython,xhat/micropython,selste/micropython,utopiaprince/micropython,alex-robbins/micropython,ruffy91/micropython,tralamazza/micropython,hiway/micropython,MrSurly/micropython-esp32,misterdanb/micropython,PappaPeppar/micropython,rubencabrera/micropython,feilongfl/micropython,toolmacher/micropython,AriZuu/micropython,Peetz0r/micropython-esp32,alex-robbins/micropython,lowRISC/micropython,MrSurly/micropython,xuxiaoxin/micropython,tuc-osg/micropython,adamkh/micropython,cloudformdesign/micropython,kerneltask/micropython,heisewangluo/micropython,micropython/micropython-esp32,ganshun666/micropython,kostyll/micropython,alex-march/micropython,toolmacher/micropython,praemdonck/micropython,stonegithubs/micropython,ruffy91/micropython,Timmenem/micropython,tobbad/micropython,noahwilliamsson/micropython,emfcamp/micropython,mpalomer/micropython,methoxid/micropystat,suda/micropython,cnoviello/micropython,ceramos/micropython,pozetroninc/micropython,misterdanb/micropython,chrisdearman/micropython,cwyark/micropython,adamkh/micropython,warner83/micropython,aitjcize/micropython,redbear/micropython,dinau/micropython,lowRISC/micropython,chrisdearman/micropython,swegener/micropython,lowRISC/micropython,pfalcon/micropython,henriknelson/micropython,noahchense/micropython,neilh10/micropython,tuc-osg/micropython,slzatz/micropython,puuu/micropython,noahwilliamsson/micropython,neilh10/micropython,danicampora/micropython,warner83/micropython,ericsnowcurrently/micropython,tdautc19841202/micropython,redbear/micropython,alex-march/micropython,EcmaXp/micropython,adafruit/circuitpython,EcmaXp/micropython,mhoffma/micropython,toolmacher/micropython,Peetz0r/micropython-esp32,omtinez/micropython,supergis/micropython,ryannathans/micropython,firstval/micropython,infinnovation/micropython,adafruit/circuitpython,skybird6672/micropython,henriknelson/micropython,KISSMonX/micropython,dmazzella/micropython,toolmacher/micropython,supergis/micropython,ernesto-g/micropython,ceramos/micropython,mpalomer/micropython,oopy/micropython,ahotam/micropython,blazewicz/micropython,noahchense/micropython,mpalomer/micropython,xyb/micropython,hiway/micropython,dxxb/micropython,ernesto-g/micropython,jimkmc/micropython,lbattraw/micropython,rubencabrera/micropython,alex-march/micropython,redbear/micropython,drrk/micropython,orionrobots/micropython,firstval/micropython,chrisdearman/micropython,deshipu/micropython,dhylands/micropython,aitjcize/micropython,praemdonck/micropython,henriknelson/micropython,martinribelotta/micropython,misterdanb/micropython,warner83/micropython,noahwilliamsson/micropython,mgyenik/micropython,vitiral/micropython,torwag/micropython,EcmaXp/micropython,hiway/micropython,ryannathans/micropython,supergis/micropython,stonegithubs/micropython,adafruit/micropython,xhat/micropython,suda/micropython,tuc-osg/micropython,puuu/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,lbattraw/micropython,mpalomer/micropython,skybird6672/micropython,cwyark/micropython,neilh10/micropython,xuxiaoxin/micropython,mianos/micropython,AriZuu/micropython,SungEun-Steve-Kim/test-mp,tobbad/micropython,xuxiaoxin/micropython,neilh10/micropython,tralamazza/micropython,stonegithubs/micropython,MrSurly/micropython-esp32,emfcamp/micropython,matthewelse/micropython,pramasoul/micropython,selste/micropython,omtinez/micropython,noahchense/micropython,heisewangluo/micropython,tdautc19841202/micropython,xhat/micropython,galenhz/micropython,xhat/micropython,ganshun666/micropython,KISSMonX/micropython,kerneltask/micropython,ahotam/micropython,MrSurly/micropython-esp32,oopy/micropython,dinau/micropython,swegener/micropython,adafruit/circuitpython,warner83/micropython,noahwilliamsson/micropython,adafruit/micropython,ruffy91/micropython,omtinez/micropython,trezor/micropython,selste/micropython,blmorris/micropython,pfalcon/micropython,ChuckM/micropython,paul-xxx/micropython,suda/micropython,orionrobots/micropython,ganshun666/micropython,dmazzella/micropython,praemdonck/micropython,tuc-osg/micropython,firstval/micropython,matthewelse/micropython,Vogtinator/micropython,swegener/micropython,xyb/micropython,orionrobots/micropython,ganshun666/micropython,chrisdearman/micropython,turbinenreiter/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,puuu/micropython,methoxid/micropystat,Vogtinator/micropython,tobbad/micropython,methoxid/micropystat,warner83/micropython,dmazzella/micropython,alex-robbins/micropython,vitiral/micropython,praemdonck/micropython,pozetroninc/micropython,tralamazza/micropython,jmarcelino/pycom-micropython,blazewicz/micropython,adafruit/circuitpython,deshipu/micropython,galenhz/micropython,matthewelse/micropython,PappaPeppar/micropython,adamkh/micropython,mpalomer/micropython,ahotam/micropython,firstval/micropython,cloudformdesign/micropython,misterdanb/micropython,HenrikSolver/micropython,pozetroninc/micropython,swegener/micropython,drrk/micropython,adamkh/micropython,paul-xxx/micropython,bvernoux/micropython,swegener/micropython,jlillest/micropython,cwyark/micropython,orionrobots/micropython,lbattraw/micropython,trezor/micropython,aethaniel/micropython,AriZuu/micropython,ericsnowcurrently/micropython,vitiral/micropython,jlillest/micropython,micropython/micropython-esp32,ceramos/micropython,deshipu/micropython,xhat/micropython,alex-march/micropython,PappaPeppar/micropython,ChuckM/micropython,supergis/micropython,danicampora/micropython,vitiral/micropython,trezor/micropython,TDAbboud/micropython,blmorris/micropython,hosaka/micropython,Timmenem/micropython,jlillest/micropython,galenhz/micropython,ruffy91/micropython,danicampora/micropython,mhoffma/micropython,adafruit/circuitpython,martinribelotta/micropython,jimkmc/micropython,jimkmc/micropython,slzatz/micropython,tdautc19841202/micropython,slzatz/micropython,pramasoul/micropython,aethaniel/micropython,stonegithubs/micropython,selste/micropython,blmorris/micropython,kostyll/micropython,danicampora/micropython,mgyenik/micropython,turbinenreiter/micropython,martinribelotta/micropython,jlillest/micropython,pramasoul/micropython,galenhz/micropython,jmarcelino/pycom-micropython,adafruit/circuitpython,jmarcelino/pycom-micropython,ceramos/micropython,methoxid/micropystat,infinnovation/micropython,ernesto-g/micropython,pozetroninc/micropython,HenrikSolver/micropython,HenrikSolver/micropython,utopiaprince/micropython,dinau/micropython,torwag/micropython,Vogtinator/micropython,blazewicz/micropython,alex-robbins/micropython,stonegithubs/micropython,ernesto-g/micropython,ceramos/micropython,Timmenem/micropython,PappaPeppar/micropython,heisewangluo/micropython,drrk/micropython,paul-xxx/micropython,MrSurly/micropython-esp32,jmarcelino/pycom-micropython,turbinenreiter/micropython,SungEun-Steve-Kim/test-mp,vitiral/micropython,adafruit/micropython,SungEun-Steve-Kim/test-mp,torwag/micropython,vriera/micropython,paul-xxx/micropython,tobbad/micropython,pfalcon/micropython,blmorris/micropython,torwag/micropython,vriera/micropython,rubencabrera/micropython,omtinez/micropython,Timmenem/micropython,xyb/micropython,Vogtinator/micropython,turbinenreiter/micropython,drrk/micropython,dhylands/micropython,bvernoux/micropython,vriera/micropython,hosaka/micropython,ChuckM/micropython,deshipu/micropython,pfalcon/micropython,pramasoul/micropython,jimkmc/micropython,blmorris/micropython,cloudformdesign/micropython,cloudformdesign/micropython,xuxiaoxin/micropython,Peetz0r/micropython-esp32,emfcamp/micropython,firstval/micropython,utopiaprince/micropython,suda/micropython,EcmaXp/micropython,matthewelse/micropython,lowRISC/micropython,cnoviello/micropython,mgyenik/micropython,slzatz/micropython,ericsnowcurrently/micropython,KISSMonX/micropython,Vogtinator/micropython,bvernoux/micropython,blazewicz/micropython,aethaniel/micropython,puuu/micropython,omtinez/micropython,utopiaprince/micropython,pramasoul/micropython,matthewelse/micropython,methoxid/micropystat,drrk/micropython,skybird6672/micropython,mgyenik/micropython,kostyll/micropython,kerneltask/micropython,dxxb/micropython,feilongfl/micropython,SungEun-Steve-Kim/test-mp,heisewangluo/micropython,SHA2017-badge/micropython-esp32,oopy/micropython,SHA2017-badge/micropython-esp32,ericsnowcurrently/micropython,martinribelotta/micropython,oopy/micropython | examples/unix/sock-server.py | examples/unix/sock-server.py | try:
import rawsocket as socket
except:
import socket
CONTENT = """\
HTTP/1.0 200 OK
Hello #{} from MicroPython!
"""
s = socket.socket()
ai = socket.getaddrinfo("127.0.0.1", 8080)
print("Bind address info:", ai)
addr = ai[0][4]
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(addr)
s.listen(5)
print("Listening, connect your browser to http://127.0.0.1:8080/")
counter = 0
while True:
res = s.accept()
client_s = res[0]
client_addr = res[1]
print("Client address:", client_addr)
print("Client socket:", client_s)
print("Request:")
if 0:
# MicroPython rawsocket module supports file interface directly
print(client_s.read(4096))
#print(client_s.readall())
client_s.write(CONTENT.format(counter))
else:
print(client_s.recv(4096))
client_s.send(bytes(CONTENT.format(counter), "ascii"))
client_s.close()
counter += 1
| try:
import rawsocket as socket
except:
import socket
CONTENT = """\
HTTP/1.0 200 OK
Hello #{} from MicroPython!
"""
s = socket.socket()
ai = socket.getaddrinfo("127.0.0.1", 8080)
print("Bind address info:", ai)
addr = ai[0][4]
s.bind(addr)
s.listen(5)
print("Listening, connect your browser to http://127.0.0.1:8080/")
counter = 0
while True:
res = s.accept()
client_s = res[0]
client_addr = res[1]
print("Client address:", client_addr)
print("Client socket:", client_s)
print("Request:")
if 0:
# MicroPython rawsocket module supports file interface directly
print(client_s.read(4096))
#print(client_s.readall())
client_s.write(CONTENT.format(counter))
else:
print(client_s.recv(4096))
client_s.send(bytes(CONTENT.format(counter), "ascii"))
client_s.close()
counter += 1
| mit | Python |
1dc28f250a0323acf456bf4f85731625b80670ac | Switch to webracer | p/google-proxy | app.py | app.py | import flask
import urllib
import re
import cgi
import xml.sax.saxutils
import webracer.session
app = flask.Flask(__name__)
#app.debug = True
def replace(match):
url = match.group(1)
url = xml.sax.saxutils.unescape(url)
url = url[:url.find('&')]
#url = url[:url.find('&')]
url = urllib.unquote(url)
#print url
url = xml.sax.saxutils.quoteattr(url)
return 'href=%s' % url
@app.route('/')
@app.route('/search')
def index():
args = {}
for key in ['q', 'start']:
value = flask.request.args.get(key)
if value is not None:
args[key] = value
query = urllib.urlencode(args)
if query:
url = 'http://www.google.com/search?%s' % query
else:
url = 'http://www.google.com/'
ua = webracer.session.Session(use_cookie_jar=False)
# http://curl.haxx.se/mail/curlpython-2007-07/0001.html
# curl insists on a str, not unicode, on python 2
#url = url.encode('utf8')
ua.get(url)
content = ua.response.body
content = re.sub(r'<script[^>]*>.*?</script>', '', content)
content = re.sub(r'href="/url\?q=([^"]+)"', replace, content)
content = re.sub(r'href="/interstitial\?url=([^"]+)"', replace, content)
# this causes forms to submit to google, not good
#content = content.replace(r'<head>', '<head><base href="http://www.google.com/">')
content = re.sub(r'url\(/(?!/)', 'url(http://www.google.com/', content)
return content
if __name__ == '__main__':
app.run(port=8080)
| import flask
import urllib
import re
import cgi
import xml.sax.saxutils
import curlfe
app = flask.Flask(__name__)
#app.debug = True
def replace(match):
url = match.group(1)
url = xml.sax.saxutils.unescape(url)
url = url[:url.find('&')]
#url = url[:url.find('&')]
url = urllib.unquote(url)
#print url
url = xml.sax.saxutils.quoteattr(url)
return 'href=%s' % url
@app.route('/')
@app.route('/search')
def index():
args = {}
for key in ['q', 'start']:
value = flask.request.args.get(key)
if value is not None:
args[key] = value
query = urllib.urlencode(args)
if query:
url = 'http://www.google.com/search?%s' % query
else:
url = 'http://www.google.com/'
fe = curlfe.CurlFe()
# http://curl.haxx.se/mail/curlpython-2007-07/0001.html
# curl insists on a str, not unicode, on python 2
url = url.encode('utf8')
content = fe.fetch(url)
content = re.sub(r'<script[^>]*>.*?</script>', '', content)
content = re.sub(r'href="/url\?q=([^"]+)"', replace, content)
content = re.sub(r'href="/interstitial\?url=([^"]+)"', replace, content)
# this causes forms to submit to google, not good
#content = content.replace(r'<head>', '<head><base href="http://www.google.com/">')
content = re.sub(r'url\(/(?!/)', 'url(http://www.google.com/', content)
return content
if __name__ == '__main__':
app.run(port=8080)
| bsd-2-clause | Python |
4b9f40dea7c8aaa45e174c7f4371cec543dcf26d | Make captcha required | freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net | app.py | app.py | #!/bin/env python
from flask import Flask, request, render_template, flash, redirect, url_for
from flask.ext.sqlalchemy import SQLAlchemy
from wtforms import Form, TextField, validators
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
app.config['SECRET_KEY'] = 'foobar'
db = SQLAlchemy(app)
class Request(db.Model):
id = db.Column(db.String(80), primary_key=True)
email = db.Column(db.String(120))
generation_date = db.Column(db.Date())
def __init__(self, id, email, generation_date=None):
self.id = id
self.email = email
self.generation_date = generation_date
def __repr__(self):
return "<Request {} - {} - {}>".format(
self.id,
self.email,
self.generation_date
)
class RequestForm(Form):
id = TextField(
'Id',
[
validators.Length(min=4, max=32),
validators.Required(),
validators.Regexp(
"[a-z]+[\-a-z]*",
message="Must be lowercase and can contain '-'."
)
]
)
email = TextField(
'Email',
[
validators.Email(), validators.Required(),
validators.EqualTo('email_confirm')
]
)
email_confirm = TextField('Confirm Email')
captcha = TextField(
'Capital of Germany (to make sure that you are human)',
validators=[
validators.AnyOf('Berlin', 'berlin'),
validators.Required()
]
)
@app.route('/', methods=['GET', 'POST'])
def index():
form = RequestForm(request.form)
if request.method == 'POST' and form.validate():
req = Request(form.id.data, form.email.data)
db.session.add(req)
db.session.commit()
flash('Thanks for registering')
return redirect(url_for('index'))
return render_template('index.html', form=form)
| #!/bin/env python
from flask import Flask, request, render_template, flash, redirect, url_for
from flask.ext.sqlalchemy import SQLAlchemy
from wtforms import Form, TextField, validators
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
app.config['SECRET_KEY'] = 'foobar'
db = SQLAlchemy(app)
class Request(db.Model):
id = db.Column(db.String(80), primary_key=True)
email = db.Column(db.String(120))
generation_date = db.Column(db.Date())
def __init__(self, id, email, generation_date=None):
self.id = id
self.email = email
self.generation_date = generation_date
def __repr__(self):
return "<Request {} - {} - {}>".format(
self.id,
self.email,
self.generation_date
)
class RequestForm(Form):
id = TextField(
'Id',
[
validators.Length(min=4, max=32),
validators.Required(),
validators.Regexp(
"[a-z]+[\-a-z]*",
message="Must be lowercase and can contain '-'."
)
]
)
email = TextField(
'Email',
[
validators.Email(), validators.Required(),
validators.EqualTo('email_confirm')
]
)
email_confirm = TextField('Confirm Email')
captcha = TextField(
'Capital of Germany (to make sure that you are human)',
validators=[validators.AnyOf('Berlin', 'berlin')]
)
@app.route('/', methods=['GET', 'POST'])
def index():
form = RequestForm(request.form)
if request.method == 'POST' and form.validate():
req = Request(form.id.data, form.email.data)
db.session.add(req)
db.session.commit()
flash('Thanks for registering')
return redirect(url_for('index'))
return render_template('index.html', form=form)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.