commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
4c2fe580aa549f64722ca15d37c1065f62fd885f | implement the view | TheBlackDude/mudu45,TheBlackDude/mudu45,TheBlackDude/mudu45 | mudu/views.py | mudu/views.py | from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .models import About, Contact
from .serializers import AboutSerializer, ContactSerializer
@api_view(['GET', 'POST'])
def api(request):
""" List my Info, or Contact Me """
if request.method == 'GET':
about = About.objects.all()
serializer = AboutSerializer(about, many=True)
return Response(serializer.data)
elif request.method == 'POST':
serializer = ContactSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | from django.shortcuts import render
# Create your views here.
| mit | Python |
58c59697039b48ef48f14cd9eb62205c2db91972 | Add test boilerplatte | ashwoods/cachefunk | tests/conftest.py | tests/conftest.py | import pytest
| mit | Python | |
64afd914e58f0abe52030d6ee4279fb0757b1ab6 | Use `prawtest_` prefix for environment test settings. | RGood/praw,nmtake/praw,darthkedrik/praw,13steinj/praw,leviroth/praw,gschizas/praw,praw-dev/praw,13steinj/praw,praw-dev/praw,leviroth/praw,RGood/praw,darthkedrik/praw,gschizas/praw,nmtake/praw | tests/conftest.py | tests/conftest.py | """Prepare py.test."""
import os
import time
from base64 import b64encode
import betamax
from betamax_serializers import pretty_json
# Prevent calls to sleep
def _sleep(*args):
raise Exception('Call to sleep')
time.sleep = _sleep
def b64_string(input_string):
"""Return a base64 encoded string (not bytes) from input_string."""
return b64encode(input_string.encode('utf-8')).decode('utf-8')
def env_default(key):
"""Return environment variable or placeholder string."""
return os.environ.get('prawtest_{}'.format(key),
'placeholder_{}'.format(key))
os.environ['praw_check_for_updates'] = 'False'
placeholders = {x: env_default(x) for x in
('client_id client_secret password '
'test_subreddit user_agent username').split()}
placeholders['basic_auth'] = b64_string(
'{}:{}'.format(placeholders['client_id'], placeholders['client_secret']))
betamax.Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/integration/cassettes'
config.default_cassette_options['serialize_with'] = 'prettyjson'
for key, value in placeholders.items():
config.define_cassette_placeholder('<{}>'.format(key.upper()), value)
def pytest_namespace():
"""Add attributes to pytest in all tests."""
return {'placeholders': placeholders}
| """Prepare py.test."""
import os
import time
from base64 import b64encode
import betamax
from betamax_serializers import pretty_json
# Prevent calls to sleep
def _sleep(*args):
raise Exception('Call to sleep')
time.sleep = _sleep
def b64_string(input_string):
"""Return a base64 encoded string (not bytes) from input_string."""
return b64encode(input_string.encode('utf-8')).decode('utf-8')
def env_default(key):
"""Return environment variable or placeholder string."""
return os.environ.get('praw_{}'.format(key), 'placeholder_{}'.format(key))
os.environ['praw_check_for_updates'] = 'False'
placeholders = {x: env_default(x) for x in
('client_id client_secret password '
'test_subreddit user_agent username').split()}
placeholders['basic_auth'] = b64_string(
'{}:{}'.format(placeholders['client_id'], placeholders['client_secret']))
betamax.Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/integration/cassettes'
config.default_cassette_options['serialize_with'] = 'prettyjson'
for key, value in placeholders.items():
config.define_cassette_placeholder('<{}>'.format(key.upper()), value)
def pytest_namespace():
"""Add attributes to pytest in all tests."""
return {'placeholders': placeholders}
| bsd-2-clause | Python |
2ad088102d5022027bcbd507ed784b3cc2d9fab6 | bump version to 0.9.4 | deadscivey/mockthink,scivey/mockthink | mockthink/version.py | mockthink/version.py | VERSION = '0.9.4'
| VERSION = '0.9.3'
| mit | Python |
ce92d99fd87e5935de954af75ae39481d04711ea | Change morse simulation environment to demo_wildfire | fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop | morse_sim/default.py | morse_sim/default.py | #! /usr/bin/env morseexec
""" Basic MORSE simulation scene for <morse_sim> environment
Feel free to edit this template as you like!
"""
import os
import os.path
import numpy as np
os.chdir(os.path.dirname(__file__))
from morse.builder import *
from morse_sim.builder.actuators.absolute_teleport import AbsoluteTeleport
# Add the MORSE mascott, MORSY.
# Out-the-box available robots are listed here:
# http://www.openrobots.org/morse/doc/stable/components_library.html
#
# 'morse add robot <name> morse_sim' can help you to build custom robots.
drone = RMax("drone")
# The list of the main methods to manipulate your components
# is here: http://www.openrobots.org/morse/doc/stable/user/builder_overview.html
drone.translate(0, 0, 3000)
drone.rotate(0.0, 0.0, 0)
# Add a motion controller
# Check here the other available actuators:
# http://www.openrobots.org/morse/doc/stable/components_library.html#actuators
#
# 'morse add actuator <name> morse_sim' can help you with the creation of a custom
# actuator.
motion = AbsoluteTeleport("teleport")
motion.add_service('socket')
drone.append(motion)
# Add a keyboard controller to move the robot with arrow keys.
keyboard = Keyboard()
drone.append(keyboard)
keyboard.properties(ControlType='Position')
keyboard.properties(Speed=25)
# Add a pose sensor that exports the current location and orientation
# of the robot in the world frame
# Check here the other available actuators:
# http://www.openrobots.org/morse/doc/stable/components_library.html#sensors
#
# 'morse add sensor <name> morse_sim' can help you with the creation of a custom
# sensor.
pose = Pose()
drone.append(pose)
# Append a camera
ircam = VideoCamera("ircam")
ircam.properties(cam_width=640, cam_height=480,
cam_far=3000, cam_fov=44)
ircam.frequency(0)
ircam._n = 0
ircam.add_service('socket')
ircam.add_stream('socket', 'morse.middleware.sockets.video_camera.Video8uPublisher')
drone.append(ircam)
# add pose sensor for the camera
ircam_pose = Pose("ircam_pose")
ircam_pose.frequency(1)
ircam_pose.add_service('socket')
drone.append(ircam_pose)
ircam.rotate(0, -np.pi/2, 0)
ircam_pose.rotate(np.pi, 0, -np.pi/2)
# To ease development and debugging, we add a socket interface to our robot.
#
# Check here: http://www.openrobots.org/morse/doc/stable/user/integration.html
# the other available interfaces (like ROS, YARP...)
drone.add_default_interface('socket')
# set 'fastmode' to True to switch to wireframe mode
#env = Environment('land-1/trees', fastmode=False)
workdir = os.getcwd()
env = Environment(os.path.join(workdir, 'environment', 'demo_wildfire.blend'), fastmode=False)
env.set_camera_location([0, 0, 100])
env.set_camera_rotation([0, 0, -np.pi])
| #! /usr/bin/env morseexec
""" Basic MORSE simulation scene for <morse_sim> environment
Feel free to edit this template as you like!
"""
import os
import os.path
import numpy as np
os.chdir(os.path.dirname(__file__))
from morse.builder import *
from morse_sim.builder.actuators.absolute_teleport import AbsoluteTeleport
# Add the MORSE mascott, MORSY.
# Out-the-box available robots are listed here:
# http://www.openrobots.org/morse/doc/stable/components_library.html
#
# 'morse add robot <name> morse_sim' can help you to build custom robots.
drone = RMax("drone")
# The list of the main methods to manipulate your components
# is here: http://www.openrobots.org/morse/doc/stable/user/builder_overview.html
drone.translate(0, 0, 3000)
drone.rotate(0.0, 0.0, 0)
# Add a motion controller
# Check here the other available actuators:
# http://www.openrobots.org/morse/doc/stable/components_library.html#actuators
#
# 'morse add actuator <name> morse_sim' can help you with the creation of a custom
# actuator.
motion = AbsoluteTeleport("teleport")
motion.add_service('socket')
drone.append(motion)
# Add a keyboard controller to move the robot with arrow keys.
keyboard = Keyboard()
drone.append(keyboard)
keyboard.properties(ControlType='Position')
keyboard.properties(Speed=25)
# Add a pose sensor that exports the current location and orientation
# of the robot in the world frame
# Check here the other available actuators:
# http://www.openrobots.org/morse/doc/stable/components_library.html#sensors
#
# 'morse add sensor <name> morse_sim' can help you with the creation of a custom
# sensor.
pose = Pose()
drone.append(pose)
# Append a camera
ircam = VideoCamera("ircam")
ircam.properties(cam_width=640, cam_height=480,
cam_far=3000, cam_fov=44)
ircam.frequency(0)
ircam._n = 0
ircam.add_service('socket')
ircam.add_stream('socket', 'morse.middleware.sockets.video_camera.Video8uPublisher')
drone.append(ircam)
# add pose sensor for the camera
ircam_pose = Pose("ircam_pose")
ircam_pose.frequency(1)
ircam_pose.add_service('socket')
drone.append(ircam_pose)
ircam.rotate(0, -np.pi/2, 0)
ircam_pose.rotate(np.pi, 0, -np.pi/2)
# To ease development and debugging, we add a socket interface to our robot.
#
# Check here: http://www.openrobots.org/morse/doc/stable/user/integration.html
# the other available interfaces (like ROS, YARP...)
drone.add_default_interface('socket')
# set 'fastmode' to True to switch to wireframe mode
#env = Environment('land-1/trees', fastmode=False)
workdir = os.getcwd()
env = Environment(os.path.join(workdir, 'environment', 'terrain_fire.blend'), fastmode=False)
env.set_camera_location([0, 0, 100])
env.set_camera_rotation([0, 0, -np.pi])
| bsd-2-clause | Python |
a3392a89b71e262783caaf4acdddbb2577103cee | Improve list displays in admin interface. | ast0815/mqtt-hub,ast0815/mqtt-hub | mqtt_logger/admin.py | mqtt_logger/admin.py | from django.contrib import admin
from models import *
class MessageAdmin(admin.ModelAdmin):
readonly_fields = ('subscription', 'time_recorded', 'topic', 'payload')
date_hierarchy = 'time_recorded'
list_display = ('time_recorded', 'subscription', 'topic', 'payload')
list_display_links = ('payload',)
list_filter = ('subscription', 'time_recorded', 'topic')
ordering = ('-time_recorded',)
admin.site.register(MQTTMessage, MessageAdmin)
class SubscriptionAdmin(admin.ModelAdmin):
list_display = ('server', 'topic', 'active')
list_editable = ('active',)
list_filter = ('server', 'active')
ordering = ('server', 'topic')
admin.site.register(MQTTSubscription, SubscriptionAdmin)
| from django.contrib import admin
from models import *
class MessageAdmin(admin.ModelAdmin):
readonly_fields = ['subscription', 'time_recorded', 'topic', 'payload']
admin.site.register(MQTTMessage, MessageAdmin)
admin.site.register(MQTTSubscription)
| mit | Python |
8b939af8909d424b2a6b73194b257724ecc8c349 | add more tests | jlaine/django-coconuts,jlaine/django-coconuts,jlaine/django-coconuts | coconuts/tests/test_exif.py | coconuts/tests/test_exif.py | # -*- coding: utf-8 -*-
#
# django-coconuts
# Copyright (c) 2008-2013, Jeremy Lainé
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from coconuts.views import format_rational
from coconuts.tests import BaseTest
class ExifOldPilTest(BaseTest):
fixtures = ['test_users.json']
def test_canon(self):
"""
IMG_8232.JPG
"""
# fnumber
self.assertEqual(format_rational((4, 1)), '4')
# exposure time
self.assertEqual(format_rational((1, 80)), '1/80')
def test_fujifilm(self):
"""
DSCF1900.JPG
"""
# fnumber
self.assertEqual(format_rational((560, 100)), '5.6')
# exposure time
self.assertEqual(format_rational((10, 1400)), '1/140')
class ExifNewPilTest(BaseTest):
fixtures = ['test_users.json']
def test_canon(self):
# fnumber
self.assertEqual(format_rational((4.0,)), '4.0')
# FIXME: exposure time!
self.assertEqual(format_rational((0.0125,)), '0.0')
def test_fujifilm(self):
"""
DSCF1900.JPG
"""
# fnumber
self.assertEqual(format_rational((5.6,)), '5.6')
# FIXME: exposure time!
self.assertEqual(format_rational((0.007142857142857143,)), '0.0')
| # -*- coding: utf-8 -*-
#
# django-coconuts
# Copyright (c) 2008-2013, Jeremy Lainé
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from coconuts.views import format_rational
from coconuts.tests import BaseTest
class ExifTest(BaseTest):
fixtures = ['test_users.json']
def test_old_pil(self):
# fnumber
self.assertEqual(format_rational((560, 100)), '5.6')
# exposure time
self.assertEqual(format_rational((10, 1400)), '1/140')
def test_new_pil(self):
# fnumber
self.assertEqual(format_rational((5.6,)), '5.6')
# FIXME: exposure time!
self.assertEqual(format_rational((0.007142857142857143,)), '0.0')
| bsd-2-clause | Python |
7d1dabd9a7120e0a24c54aa9c1f7ab9c13683dfa | Remove unused Keyword lexeme | pdarragh/Viper | viper/lexer/lexemes.py | viper/lexer/lexemes.py | INDENT_SIZE = 4
class Lexeme:
def __init__(self, text, repl_with_text=True):
self.text = text
self._repl_with_text = repl_with_text
def __repr__(self):
if self._repl_with_text:
return f'{type(self).__name__}({self.text})'
else:
return type(self).__name__
def __str__(self):
return self.text
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return self.text == other.text
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.text)
class Indent(Lexeme):
def __init__(self):
super().__init__(' ' * INDENT_SIZE, False)
class Dedent(Lexeme):
def __init__(self):
super().__init__('', False)
class NewLine(Lexeme):
def __init__(self):
super().__init__('\n', False)
class Period(Lexeme):
def __init__(self):
super().__init__('.', False)
class Comma(Lexeme):
def __init__(self):
super().__init__(',', False)
class OpenParen(Lexeme):
def __init__(self):
super().__init__('(', False)
class CloseParen(Lexeme):
def __init__(self):
super().__init__(')', False)
class Colon(Lexeme):
def __init__(self):
super().__init__(':', False)
class Arrow(Lexeme):
def __init__(self):
super().__init__('->', False)
class Number(Lexeme):
pass
class Name(Lexeme):
pass
class Class(Lexeme):
pass
class Operator(Lexeme):
pass
INDENT = Indent()
DEDENT = Dedent()
NEWLINE = NewLine()
PERIOD = Period()
COMMA = Comma()
OPEN_PAREN = OpenParen()
CLOSE_PAREN = CloseParen()
COLON = Colon()
ARROW = Arrow()
| INDENT_SIZE = 4
class Lexeme:
def __init__(self, text, repl_with_text=True):
self.text = text
self._repl_with_text = repl_with_text
def __repr__(self):
if self._repl_with_text:
return f'{type(self).__name__}({self.text})'
else:
return type(self).__name__
def __str__(self):
return self.text
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
return self.text == other.text
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.text)
class Indent(Lexeme):
def __init__(self):
super().__init__(' ' * INDENT_SIZE, False)
class Dedent(Lexeme):
def __init__(self):
super().__init__('', False)
class NewLine(Lexeme):
def __init__(self):
super().__init__('\n', False)
class Period(Lexeme):
def __init__(self):
super().__init__('.', False)
class Comma(Lexeme):
def __init__(self):
super().__init__(',', False)
class OpenParen(Lexeme):
def __init__(self):
super().__init__('(', False)
class CloseParen(Lexeme):
def __init__(self):
super().__init__(')', False)
class Colon(Lexeme):
def __init__(self):
super().__init__(':', False)
class Arrow(Lexeme):
def __init__(self):
super().__init__('->', False)
class Number(Lexeme):
pass
class Keyword(Lexeme):
pass
class Name(Lexeme):
pass
class Class(Lexeme):
pass
class Operator(Lexeme):
pass
INDENT = Indent()
DEDENT = Dedent()
NEWLINE = NewLine()
PERIOD = Period()
COMMA = Comma()
OPEN_PAREN = OpenParen()
CLOSE_PAREN = CloseParen()
COLON = Colon()
ARROW = Arrow()
| apache-2.0 | Python |
35aa5f3928a6d1b60e2412f0d075f7dfc4738b04 | Bump version because I can! | RealDolos/volaupload | volaupload/_version.py | volaupload/_version.py | """
Version information for volaupload
"""
__version__ = "0.9.4"
| """
Version information for volaupload
"""
__version__ = "0.9.3"
| mit | Python |
6b32ddf59db6b2bb1865d37e9e41c9b2308a7e69 | add version info | benoitc/offset | offset/__init__.py | offset/__init__.py | # -*- coding: utf-8 -
#
# This file is part of offset. See the NOTICE for more information.
version_info = (0, 1, 0)
__version__ = ".".join([str(v) for v in version_info])
# scheduler functions
from .core import go, run, gosched, maintask
# channel functions
from .core.chan import makechan, select, default
# exceptions
from .core.exc import PanicError
| # -*- coding: utf-8 -
#
# This file is part of offset. See the NOTICE for more information.
# scheduler functions
from .core import go, run, gosched, maintask
# channel functions
from .core.chan import makechan, select, default
# exceptions
from .core.exc import PanicError
| mit | Python |
197fbb2793ccba85a00711d39fcd3af4fe4bc66e | Correct precipitation probability | ericfourrier/raspberry-scripts,ericfourrier/raspberry-scripts | weather_pred/main.py | weather_pred/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-u
"""
Purpose : Get weather predition using https://developer.forecast.io/ and
the pip install python-forecastio python wrapper
Requirements
------------
* pip install python-forecastio
"""
import os
import forecastio
import datetime
import time
import RPi.GPIO as GPIO
import logging
import pprint
logger = logging.getLogger() # 'root' Logger
console = logging.StreamHandler() # logging to console
csv_handler = logging.FileHandler('logs.csv')
# csv
template_log = '%(asctime)s,%(levelname)s,%(processName)s,%(filename)s,%(lineno)s,%(message)s'
console.setFormatter(logging.Formatter(template_log))
csv_handler.setFormatter(logging.Formatter(template_log))
logger.addHandler(console) # prints to console.
logger.addHandler(csv_handler) # save to csv gile
logger.setLevel(logging.INFO) # DEBUG or above
api_key = os.environ.get("FORECAST_API_KEY")
paris_lat = 48.8534100
paris_lng = 2.3488000
API_RATE_LIMIT = 1000 # 1000 calls a day
# Be careful with timezone here
# current_time = datetime.datetime.now()
# print(current_time)
#
# forecast = forecastio.load_forecast(api_key, paris_lat, paris_lng, time=current_time)
#
# pred_now = forecast.currently().d
def get_weather_now(api_key=api_key, lat=paris_lat, long=paris_lng, verbose=True):
""" Returns the weather now for a certain latitude and longitude """
current_time = datetime.datetime.now()
forecast = forecastio.load_forecast(
api_key, paris_lat, paris_lng, time=current_time)
nb_remaining_calls = API_RATE_LIMIT - \
int(forecast.response.headers['x-forecast-api-calls'])
logger.info("Pulling data from the api forecast.io, remaining calls : {}".format(
nb_remaining_calls))
pred = forecast.currently().d
if verbose:
logger.info(pprint.pformat(pred))
return pred
# Set up GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
while True:
pred = get_weather_now()
# light up red light if it is raining
if pred['precipProbability'] > 0.7:
logger.info("It is raining ...")
GPIO.output(18, True)
else:
GPIO.output(18, False)
time.sleep(60 * 10)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-u
"""
Purpose : Get weather predition using https://developer.forecast.io/ and
the pip install python-forecastio python wrapper
Requirements
------------
* pip install python-forecastio
"""
import os
import forecastio
import datetime
import time
import RPi.GPIO as GPIO
import logging
import pprint
logger = logging.getLogger() # 'root' Logger
console = logging.StreamHandler() # logging to console
csv_handler = logging.FileHandler('logs.csv')
# csv
template_log = '%(asctime)s,%(levelname)s,%(processName)s,%(filename)s,%(lineno)s,%(message)s'
console.setFormatter(logging.Formatter(template_log))
csv_handler.setFormatter(logging.Formatter(template_log))
logger.addHandler(console) # prints to console.
logger.addHandler(csv_handler) # save to csv gile
logger.setLevel(logging.INFO) # DEBUG or above
api_key = os.environ.get("FORECAST_API_KEY")
paris_lat = 48.8534100
paris_lng = 2.3488000
API_RATE_LIMIT = 1000 # 1000 calls a day
# Be careful with timezone here
# current_time = datetime.datetime.now()
# print(current_time)
#
# forecast = forecastio.load_forecast(api_key, paris_lat, paris_lng, time=current_time)
#
# pred_now = forecast.currently().d
def get_weather_now(api_key=api_key, lat=paris_lat, long=paris_lng, verbose=True):
""" Returns the weather now for a certain latitude and longitude """
current_time = datetime.datetime.now()
forecast = forecastio.load_forecast(
api_key, paris_lat, paris_lng, time=current_time)
nb_remaining_calls = API_RATE_LIMIT - \
int(forecast.response.headers['x-forecast-api-calls'])
logger.info("Pulling data from the api forecast.io, remaining calls : {}".format(
nb_remaining_calls))
pred = forecast.currently().d
if verbose:
logger.info(pprint.pformat(pred))
return pred
# Set up GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(18, GPIO.OUT)
while True:
pred = get_weather_now()
# light up red light if it is raining
if pred['precipProbability'] > 70:
logger.info("It is raining ...")
GPIO.output(18, True)
else:
GPIO.output(18, False)
time.sleep(60 * 10)
| mit | Python |
242acb878fbfad2b70f9c64b360bdc8db7a9e340 | Clean up setup/teardown | tpflueger/CSCI4900 | tests/test_dependency_node.py | tests/test_dependency_node.py | from scripts import dependency_node
from tempfile import mkdtemp
import shutil
import os
import glob
class TestDependencyNode:
def setup_class(self):
self.node = dependency_node.DependencyNode('org.joda', 'joda-money', '0.11', '1')
os.chdir(os.path.abspath('./tests'))
self.tempDirectoryPath = mkdtemp(dir=".")
def teardown_class(self):
os.chdir(os.pardir)
shutil.rmtree(self.tempDirectoryPath)
def test_should_create_dependency_node(self):
assert 'org.joda' in self.node.groupId
assert 'joda-money' in self.node.artifactId
assert '0.11' in self.node.version
assert '1' in self.node.referenceId
def test_should_get_file(self):
self.node.get('jar', self.tempDirectoryPath)
os.chdir(self.tempDirectoryPath)
filePath = glob.glob('*.jar')[0]
assert filePath != None
| from scripts import dependency_node
from tempfile import mkdtemp
import shutil
import os
import glob
class TestDependencyNode:
def setup(self):
self.node = dependency_node.DependencyNode('org.joda', 'joda-money', '0.11', '1')
os.chdir(os.path.abspath('./tests'))
def teardown(self):
os.chdir(os.pardir)
def test_should_create_dependency_node(self):
assert 'org.joda' in self.node.groupId
assert 'joda-money' in self.node.artifactId
assert '0.11' in self.node.version
assert '1' in self.node.referenceId
def test_should_get_file(self):
self.tempDirectoryPath = mkdtemp(dir=".")
self.node.get('jar', self.tempDirectoryPath)
os.chdir(self.tempDirectoryPath)
filePath = glob.glob('*.jar')[0]
os.chdir(os.pardir)
shutil.rmtree(self.tempDirectoryPath)
assert filePath != None
| mit | Python |
e45971c740b8b723901b9c56bb56cb84f581834c | fix import. Dbref isn't in pymongo package anymore | goanpeca/mongokit,aquavitae/mongokit-py3,namlook/mongokit,wshcdr/mongokit | mongokit/__init__.py | mongokit/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2010, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = "0.5.13-dev"
from bson.dbref import DBRef
from cursor import Cursor
from operators import *
from schema_document import *
from generators import MongoDocumentCursor
from mongo_exceptions import *
from document import Document, ObjectId
from versioned_document import VersionedDocument
from database import Database
from collection import Collection
from connection import Connection
from master_slave_connection import MasterSlaveConnection
from pymongo import ASCENDING as INDEX_ASCENDING, DESCENDING as INDEX_DESCENDING
from migration import DocumentMigration
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2010, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__version__ = "0.5.13-dev"
from pymongo.dbref import DBRef
from cursor import Cursor
from operators import *
from schema_document import *
from generators import MongoDocumentCursor
from mongo_exceptions import *
from document import Document, ObjectId
from versioned_document import VersionedDocument
from database import Database
from collection import Collection
from connection import Connection
from master_slave_connection import MasterSlaveConnection
from pymongo import ASCENDING as INDEX_ASCENDING, DESCENDING as INDEX_DESCENDING
from migration import DocumentMigration
| bsd-3-clause | Python |
e87d68fb0f7094c38dc931c9062f13015a725980 | normalize the PK values | pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity | localtv/search_indexes.py | localtv/search_indexes.py | from haystack import indexes
from haystack import site
from localtv.models import Video, VIDEO_STATUS_ACTIVE
class VideoIndex(indexes.SearchIndex):
text = indexes.CharField(document=True, use_template=True)
feed = indexes.IntegerField(model_attr='feed__pk', null=True)
search = indexes.IntegerField(model_attr='search__pk', null=True)
user = indexes.IntegerField(model_attr='user__pk', null=True)
tags = indexes.MultiValueField()
categories = indexes.MultiValueField()
authors = indexes.MultiValueField()
playlists = indexes.MultiValueField()
def get_queryset(self):
"""
Custom queryset to only search approved videos.
"""
return Video.objects.filter(status=VIDEO_STATUS_ACTIVE)
def get_updated_field(self):
return 'when_modified'
def _prepare_field(self, video, field, attr='pk', normalize=int):
return [normalize(getattr(rel, attr))
for rel in getattr(video, field).all()]
def prepare_tags(self, video):
return self._prepare_field(video, 'tags', 'name', unicode)
def prepare_categories(self, video):
return self._prepare_field(video, 'categories')
def prepare_authors(self, video):
return self._prepare_field(video, 'authors')
def prepare_playlists(self, video):
return self._prepare_field(video, 'playlists')
site.register(Video, VideoIndex)
| from haystack import indexes
from haystack import site
from localtv.models import Video, VIDEO_STATUS_ACTIVE
class VideoIndex(indexes.SearchIndex):
    """Haystack index over approved (active) videos."""

    text = indexes.CharField(document=True, use_template=True)
    feed = indexes.IntegerField(model_attr='feed__pk', null=True)
    search = indexes.IntegerField(model_attr='search__pk', null=True)
    user = indexes.IntegerField(model_attr='user__pk', null=True)
    tags = indexes.MultiValueField()
    categories = indexes.MultiValueField()
    authors = indexes.MultiValueField()
    playlists = indexes.MultiValueField()

    def get_queryset(self):
        """Restrict indexing to videos with an active status."""
        return Video.objects.filter(status=VIDEO_STATUS_ACTIVE)

    def get_updated_field(self):
        """Name of the field used to detect modified videos."""
        return 'when_modified'

    def _prepare_field(self, video, field, attr='pk'):
        """Collect ``attr`` from every object in the ``field`` relation."""
        related_manager = getattr(video, field)
        return [getattr(obj, attr) for obj in related_manager.all()]

    def prepare_tags(self, video):
        return self._prepare_field(video, 'tags', 'name')

    def prepare_categories(self, video):
        return self._prepare_field(video, 'categories')

    def prepare_authors(self, video):
        return self._prepare_field(video, 'authors')

    def prepare_playlists(self, video):
        return self._prepare_field(video, 'playlists')
site.register(Video, VideoIndex)
| agpl-3.0 | Python |
1580b8dcc4413b43ff6263d3ac4581196b887f27 | Add custom action to re-send confirmation emails | gdetrez/fscons-ticketshop,gdetrez/fscons-ticketshop,gdetrez/fscons-ticketshop | ticketshop/ticketapp/admin.py | ticketshop/ticketapp/admin.py | from django.contrib import admin
from django.contrib import messages
from .models import TicketPurchase, Ticket, TicketType, Coupon
from .mails import send_confirmation_email
# ~~~ Ticket purchase ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class TicketInline(admin.TabularInline):
    """
    Inline table of purchased tickets, displayed on the admin detail
    page of a single TicketPurchase.
    """
    model = Ticket
    extra = 1  # one blank row for quickly attaching a new ticket
def mark_selected_paid(modeladmin, request, queryset):
    """Admin action: mark every selected purchase as paid.

    Delegates to TicketPurchase.mark_as_paid(), which also takes care
    of sending the confirmation email.
    """
    for selected in queryset:
        selected.mark_as_paid()
mark_selected_paid.short_description = "Mark selected as paid"
def resend_confirmation(modeladmin, request, queryset):
    """Admin action: re-send the confirmation email for paid purchases.

    Unpaid purchases are skipped with a warning message, since a
    confirmation only makes sense once payment has been recorded.
    """
    for purchase in queryset:
        if purchase.paid:
            send_confirmation_email(purchase)
        else:
            # Message fix: "Skiped" -> "Skipped", and "instead" moved out
            # of the quoted action name ("Mark selected as paid").
            messages.warning(
                request,
                "Skipped unpaid purchase: %s. "
                "Use \"Mark selected as paid\" instead" % purchase
            )
resend_confirmation.short_description = "Re-send confirmation email"
class TicketPurchaseAdmin(admin.ModelAdmin):
    """Admin for TicketPurchase: list columns, search, filters, the
    inline ticket table and the custom payment/confirmation actions."""
    list_display = ('name', 'email', 'number_of_tickets', 'price', 'invoice_id', 'paid')
    date_hierarchy = 'creation_date'  # drill-down navigation by purchase date
    inlines = [TicketInline]
    list_filter = ['paid','coupon', 'ticket__ticket_type']
    search_fields = ['name', 'email', 'additional_information', 'invoice_id']
    actions = [mark_selected_paid, resend_confirmation]
admin.site.register(TicketPurchase, TicketPurchaseAdmin)
admin.site.register(TicketType)
class CouponAdmin(admin.ModelAdmin):
    """Admin list view for discount coupons."""
    list_display = ('code', 'percentage', 'active')
admin.site.register(Coupon, CouponAdmin)
| from django.contrib import admin
from .models import TicketPurchase, Ticket, TicketType, Coupon
# ~~~ Ticket purchase ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class TicketInline(admin.TabularInline):
    """
    Inline table of purchased tickets, displayed on the admin detail
    page of a single TicketPurchase.
    """
    model = Ticket
    extra = 1  # one blank row for quickly attaching a new ticket
def mark_selected_paid(modeladmin, request, queryset):
    """Admin action: flag every selected purchase as paid.

    TicketPurchase.mark_as_paid() also sends the confirmation email.
    """
    for p in queryset:
        p.mark_as_paid()
mark_selected_paid.short_description = "Mark selected as paid"
class TicketPurchaseAdmin(admin.ModelAdmin):
    """Admin for TicketPurchase: list columns, search, filters, the
    inline ticket table and the mark-as-paid action."""
    list_display = ('name', 'email', 'number_of_tickets', 'price', 'invoice_id', 'paid')
    date_hierarchy = 'creation_date'  # drill-down navigation by purchase date
    inlines = [TicketInline]
    list_filter = ['paid','coupon', 'ticket__ticket_type']
    search_fields = ['name', 'email', 'additional_information', 'invoice_id']
    actions = [mark_selected_paid]
admin.site.register(TicketPurchase, TicketPurchaseAdmin)
admin.site.register(TicketType)
class CouponAdmin(admin.ModelAdmin):
    """Admin list view for discount coupons."""
    list_display = ('code', 'percentage', 'active')
admin.site.register(Coupon, CouponAdmin)
| mit | Python |
42d965e3c021c65c24fbe0ff6972bd678d851a28 | Remove duplicate datetime clause from OPALSerializer | khchine5/opal,khchine5/opal,khchine5/opal | opal/core/views.py | opal/core/views.py | """
Re-usable view components
"""
import functools
import json
import datetime
from django.utils.dateformat import format
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.core.serializers.json import DjangoJSONEncoder
from rest_framework import mixins, viewsets
from django.conf import settings
class LoginRequiredMixin(object):
    """CBV mixin: require an authenticated user for every HTTP method."""
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
class OpalSerializer(DjangoJSONEncoder):
    """JSON encoder that renders dates and datetimes with the project's
    DATETIME_FORMAT / DATE_FORMAT settings instead of ISO 8601.
    """
    def default(self, o):
        # datetime must be tested before date: datetime.datetime is a
        # subclass of datetime.date.
        if isinstance(o, datetime.datetime):
            return format(o, settings.DATETIME_FORMAT)
        elif isinstance(o, datetime.date):
            # Promote the date to a midnight datetime so Django's
            # format() can render it with DATE_FORMAT.
            return format(
                datetime.datetime.combine(
                    o, datetime.datetime.min.time()
                ), settings.DATE_FORMAT
            )
        # Bug fix: the superclass result was previously dropped, which
        # silently serialised Decimal/UUID/time values as null.
        # DjangoJSONEncoder.default() returns values for those types.
        return super(OpalSerializer, self).default(o)
def _get_request_data(request):
data = request.read()
return json.loads(data)
def _build_json_response(data, status_code=200):
    """Serialise *data* with OpalSerializer into an HttpResponse.

    Args:
        data: any structure OpalSerializer can encode.
        status_code: HTTP status for the response (default 200).
    """
    # Stale commented-out HTML-wrapping variant removed.
    response = HttpResponse()
    response['Content-Type'] = 'application/json'
    response.content = json.dumps(data, cls=OpalSerializer)
    response.status_code = status_code
    return response
def with_no_caching(view):
    """Decorator: mark the wrapped view's response as non-cacheable."""
    @functools.wraps(view)
    def wrapper(*args, **kwargs):
        resp = view(*args, **kwargs)
        resp['Cache-Control'] = 'no-cache'
        resp['Pragma'] = 'no-cache'
        resp['Expires'] = '-1'
        return resp
    return wrapper
class ModelViewSet(
        mixins.CreateModelMixin,
        mixins.RetrieveModelMixin,
        mixins.ListModelMixin,
        viewsets.GenericViewSet):
    """DRF viewset allowing create/retrieve/list but not update/destroy."""
    pass
| """
Re-usable view components
"""
import functools
import json
import datetime
from django.utils.dateformat import format
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.core.serializers.json import DjangoJSONEncoder
from rest_framework import mixins, viewsets
from django.conf import settings
class LoginRequiredMixin(object):
    """CBV mixin: require an authenticated user for every HTTP method."""
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
class OpalSerializer(DjangoJSONEncoder):
    """JSON encoder that renders dates and datetimes with the project's
    DATETIME_FORMAT / DATE_FORMAT settings instead of ISO 8601.
    """
    def default(self, o):
        # datetime must be tested before date: datetime.datetime is a
        # subclass of datetime.date.
        if isinstance(o, datetime.datetime):
            return format(o, settings.DATETIME_FORMAT)
        elif isinstance(o, datetime.date):
            return format(
                datetime.datetime.combine(
                    o, datetime.datetime.min.time()
                ), settings.DATE_FORMAT
            )
        # Dead-code fix: a second `isinstance(o, datetime.datetime)` branch
        # used to sit here; it was unreachable (shadowed by the first test)
        # and has been removed.
        # Bug fix: also return the superclass result - DjangoJSONEncoder
        # .default() produces values for Decimal/UUID/time, which were
        # previously dropped and serialised as null.
        return super(OpalSerializer, self).default(o)
def _get_request_data(request):
data = request.read()
return json.loads(data)
def _build_json_response(data, status_code=200):
    """Serialise *data* to JSON (via OpalSerializer) in an HttpResponse
    with the given status code (default 200)."""
    response = HttpResponse()
    response['Content-Type'] = 'application/json'
    response.content = json.dumps(data, cls=OpalSerializer)
    # response.content = '<html><body>'+json.dumps(data, cls=DjangoJSONEncoder)+'</body></html>'
    response.status_code = status_code
    return response
def with_no_caching(view):
    """Decorator that stamps no-cache headers onto the view's response."""
    @functools.wraps(view)
    def _decorated(*args, **kw):
        result = view(*args, **kw)
        for header, value in (('Cache-Control', 'no-cache'),
                              ('Pragma', 'no-cache'),
                              ('Expires', '-1')):
            result[header] = value
        return result
    return _decorated
class ModelViewSet(
        mixins.CreateModelMixin,
        mixins.RetrieveModelMixin,
        mixins.ListModelMixin,
        viewsets.GenericViewSet):
    """DRF viewset allowing create/retrieve/list but not update/destroy."""
    pass
| agpl-3.0 | Python |
d8e3c6b02d7fca6f3a1851fe6ae13428dac97e31 | use READ COMMITTED isolation level | opmuse/opmuse,opmuse/opmuse,opmuse/opmuse,opmuse/opmuse | opmuse/database.py | opmuse/database.py | import cherrypy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
Base = declarative_base()
def get_session():
    """Create a standalone SQLAlchemy session from the configured DB URL."""
    db_url = cherrypy.config['opmuse']['database.url']
    factory = sessionmaker(bind=create_engine(db_url))
    return factory()
class SqlAlchemyPlugin(cherrypy.process.plugins.SimplePlugin):
    """Bus plugin owning the SQLAlchemy engine lifecycle.

    Creates the engine (READ COMMITTED isolation) on bus start, answers
    'bind' messages by binding sessions to it, and disposes it on stop.
    """

    def __init__(self, bus):
        cherrypy.process.plugins.SimplePlugin.__init__(self, bus)
        self.engine = None
        self.bus.subscribe("bind", self.bind)

    def start(self):
        opmuse_config = cherrypy.config['opmuse']
        self.engine = create_engine(
            opmuse_config['database.url'],
            echo=opmuse_config['database.echo'],
            isolation_level="READ COMMITTED",
        )
        Base.metadata.create_all(self.engine)
    # TODO use decorator?
    start.priority = 10

    def bind(self, session):
        session.configure(bind=self.engine)

    def stop(self):
        self.engine.dispose()
        self.engine = None
class SqlAlchemyTool(cherrypy.Tool):
    """Per-request SQLAlchemy session management.

    Binds a scoped session onto ``cherrypy.request.database`` when a
    request starts and commits (or rolls back) it when the request ends.
    """
    def __init__(self):
        # Bind the session early in the request (priority 20).
        cherrypy.Tool.__init__(self, 'on_start_resource',
                               self.bind_session, priority=20)
        # One scoped (thread-local) session registry shared by requests.
        self.session = scoped_session(sessionmaker(autoflush=True,
                                                   autocommit=False))
    def _setup(self):
        cherrypy.Tool._setup(self)
        # Commit late in request teardown (priority 80).
        cherrypy.request.hooks.attach('on_end_request',
                                      self.commit_transaction,
                                      priority=80)
    def commit_transaction(self):
        cherrypy.request.database = None
        try:
            self.session.commit()
        except:
            # NOTE(review): bare except also catches BaseException; tolerable
            # here only because the exception is re-raised after rollback.
            self.session.rollback()
            raise
        finally:
            self.session.remove()
    def bind_session(self):
        cherrypy.engine.publish('bind', self.session)
        cherrypy.request.database = self.session
| import cherrypy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
Base = declarative_base()
def get_session():
    """Build a throwaway SQLAlchemy session bound to the configured URL."""
    engine = create_engine(cherrypy.config['opmuse']['database.url'])
    return sessionmaker(bind=engine)()
class SqlAlchemyPlugin(cherrypy.process.plugins.SimplePlugin):
    """CherryPy bus plugin that owns the SQLAlchemy engine lifecycle."""
    def __init__(self, bus):
        cherrypy.process.plugins.SimplePlugin.__init__(self, bus)
        self.engine = None
        # Answer 'bind' bus messages by binding sessions to our engine.
        self.bus.subscribe("bind", self.bind)
    def start(self):
        url = cherrypy.config['opmuse']['database.url']
        echo = cherrypy.config['opmuse']['database.echo']
        self.engine = create_engine(url, echo=echo)
        # Create any missing tables for the declarative models.
        Base.metadata.create_all(self.engine)
    # TODO use decorator?
    # Ordering hint for this listener on the bus 'start' channel.
    start.priority = 10
    def bind(self, session):
        session.configure(bind=self.engine)
    def stop(self):
        self.engine.dispose()
        self.engine = None
class SqlAlchemyTool(cherrypy.Tool):
    """Attach a scoped SQLAlchemy session to each CherryPy request and
    commit/rollback it when the request finishes."""
    def __init__(self):
        # Bind the session early in the request (priority 20).
        cherrypy.Tool.__init__(self, 'on_start_resource',
                               self.bind_session, priority=20)
        self.session = scoped_session(sessionmaker(autoflush=True,
                                                   autocommit=False))
    def _setup(self):
        cherrypy.Tool._setup(self)
        # Commit late in request teardown (priority 80).
        cherrypy.request.hooks.attach('on_end_request',
                                      self.commit_transaction,
                                      priority=80)
    def commit_transaction(self):
        cherrypy.request.database = None
        try:
            self.session.commit()
        except:
            # NOTE(review): bare except; acceptable only because the
            # exception is re-raised after the rollback.
            self.session.rollback()
            raise
        finally:
            self.session.remove()
    def bind_session(self):
        cherrypy.engine.publish('bind', self.session)
        cherrypy.request.database = self.session
| agpl-3.0 | Python |
f2241e27a77f7bdae908a4a5db2766389850fdf9 | bump to 0.12.1 | Hanaasagi/sorator | orator/__init__.py | orator/__init__.py | # -*- coding: utf-8 -*-
# Package version string.
__version__ = '0.12.1'
from .orm import Model, SoftDeletes, Collection, accessor, mutator, scope # noqa
from .database_manager import DatabaseManager # noqa
from .query.expression import QueryExpression # noqa
from .schema import Schema # noqa
from .pagination import Paginator, LengthAwarePaginator # noqa
| # -*- coding: utf-8 -*-
# Package version string.
__version__ = '0.12.0'
from .orm import Model, SoftDeletes, Collection, accessor, mutator, scope # noqa
from .database_manager import DatabaseManager # noqa
from .query.expression import QueryExpression # noqa
from .schema import Schema # noqa
from .pagination import Paginator, LengthAwarePaginator # noqa
| mit | Python |
8376dd781118681bfa30f6cdf1e6d7906d5c3abb | change invocation to drop users_list | royrapoport/destalinator,randsleadershipslack/destalinator,randsleadershipslack/destalinator,royrapoport/destalinator | tests/test_announcer.py | tests/test_announcer.py | import os
import time
import unittest
import mock
import announcer
from tests.test_destalinator import MockValidator
import tests.fixtures as fixtures
import tests.mocks as mocks
class AnnouncerAnnounceTest(unittest.TestCase):
    """Tests for Announcer.announce()."""

    def setUp(self):
        slacker_stub = mocks.mocked_slacker_object(channels_list=fixtures.channels)
        self.slackbot = mocks.mocked_slackbot_object()
        with mock.patch.dict(os.environ, {'DESTALINATOR_ACTIVATED': 'true'}):
            self.announcer = announcer.Announcer(
                slacker_injected=slacker_stub,
                slackbot_injected=self.slackbot,
            )

    def test_announce_posts_to_announce_channel(self):
        def mentions(channel):
            """Matcher: True when the announce message names *channel*."""
            return lambda message: channel['name'] in message

        self.announcer.announce()
        for channel in fixtures.channels:
            if channel['created'] <= int(time.time()) - 86400:
                continue
            expected = mock.call(
                self.announcer.config.announce_channel,
                MockValidator(mentions(channel)),
            )
            self.assertIn(expected, self.slackbot.say.mock_calls)
| import os
import time
import unittest
import mock
import announcer
from tests.test_destalinator import MockValidator
import tests.fixtures as fixtures
import tests.mocks as mocks
class AnnouncerAnnounceTest(unittest.TestCase):
    """Tests for Announcer.announce()."""
    def setUp(self):
        # Slack API stub seeded with canned channel and user fixtures.
        slacker_obj = mocks.mocked_slacker_object(channels_list=fixtures.channels, users_list=fixtures.users)
        self.slackbot = mocks.mocked_slackbot_object()
        # Announcer only acts when DESTALINATOR_ACTIVATED is set.
        with mock.patch.dict(os.environ, {'DESTALINATOR_ACTIVATED': 'true'}):
            self.announcer = announcer.Announcer(slacker_injected=slacker_obj, slackbot_injected=self.slackbot)
    def test_announce_posts_to_announce_channel(self):
        def channel_message_test(channel):
            """Ensure that an announce message contains the name of the channel being announced."""
            return lambda message: channel['name'] in message
        self.announcer.announce()
        for channel in fixtures.channels:
            # Only channels created within the last 24h are announced.
            if channel['created'] > int(time.time()) - 86400:
                self.assertIn(
                    mock.call(self.announcer.config.announce_channel, MockValidator(channel_message_test(channel))),
                    self.slackbot.say.mock_calls
                )
| apache-2.0 | Python |
0f68dc6b7b35aa22e08517a80cf5f4b672f88eca | Add tests for version upgrade | nevercast/home-assistant,jabesq/home-assistant,hmronline/home-assistant,Julian/home-assistant,tboyce1/home-assistant,caiuspb/home-assistant,robbiet480/home-assistant,titilambert/home-assistant,jnewland/home-assistant,ct-23/home-assistant,aoakeson/home-assistant,tboyce1/home-assistant,open-homeautomation/home-assistant,open-homeautomation/home-assistant,jaharkes/home-assistant,sdague/home-assistant,florianholzapfel/home-assistant,JshWright/home-assistant,ewandor/home-assistant,kennedyshead/home-assistant,home-assistant/home-assistant,mikaelboman/home-assistant,LinuxChristian/home-assistant,instantchow/home-assistant,rohitranjan1991/home-assistant,stefan-jonasson/home-assistant,alexmogavero/home-assistant,molobrakos/home-assistant,happyleavesaoc/home-assistant,rohitranjan1991/home-assistant,pschmitt/home-assistant,florianholzapfel/home-assistant,shaftoe/home-assistant,titilambert/home-assistant,auduny/home-assistant,jaharkes/home-assistant,mezz64/home-assistant,adrienbrault/home-assistant,coteyr/home-assistant,Duoxilian/home-assistant,philipbl/home-assistant,instantchow/home-assistant,FreekingDean/home-assistant,emilhetty/home-assistant,caiuspb/home-assistant,bdfoster/blumate,joopert/home-assistant,xifle/home-assistant,nevercast/home-assistant,betrisey/home-assistant,sffjunkie/home-assistant,mikaelboman/home-assistant,eagleamon/home-assistant,MungoRae/home-assistant,ct-23/home-assistant,sffjunkie/home-assistant,Duoxilian/home-assistant,balloob/home-assistant,GenericStudent/home-assistant,justyns/home-assistant,robjohnson189/home-assistant,morphis/home-assistant,balloob/home-assistant,hmronline/home-assistant,sander76/home-assistant,Smart-Torvy/torvy-home-assistant,shaftoe/home-assistant,LinuxChristian/home-assistant,Theb-1/home-assistant,coteyr/home-assistant,miniconfig/home-assistant,mKeRix/home-assistant,PetePriority/home-assistant,Cinntax/home-assistant,HydrelioxGitHub/home-assistant,jaharke
s/home-assistant,oandrew/home-assistant,betrisey/home-assistant,kyvinh/home-assistant,Smart-Torvy/torvy-home-assistant,miniconfig/home-assistant,qedi-r/home-assistant,nkgilley/home-assistant,Julian/home-assistant,stefan-jonasson/home-assistant,varunr047/homefile,GenericStudent/home-assistant,DavidLP/home-assistant,srcLurker/home-assistant,nnic/home-assistant,mKeRix/home-assistant,Julian/home-assistant,ma314smith/home-assistant,turbokongen/home-assistant,aequitas/home-assistant,emilhetty/home-assistant,jnewland/home-assistant,tinloaf/home-assistant,aronsky/home-assistant,alexmogavero/home-assistant,Zyell/home-assistant,tboyce021/home-assistant,toddeye/home-assistant,kennedyshead/home-assistant,hexxter/home-assistant,qedi-r/home-assistant,DavidLP/home-assistant,sffjunkie/home-assistant,happyleavesaoc/home-assistant,miniconfig/home-assistant,bdfoster/blumate,nugget/home-assistant,nkgilley/home-assistant,Danielhiversen/home-assistant,tinloaf/home-assistant,alexmogavero/home-assistant,hmronline/home-assistant,devdelay/home-assistant,MungoRae/home-assistant,Zyell/home-assistant,sfam/home-assistant,LinuxChristian/home-assistant,betrisey/home-assistant,adrienbrault/home-assistant,ct-23/home-assistant,srcLurker/home-assistant,persandstrom/home-assistant,badele/home-assistant,aoakeson/home-assistant,Danielhiversen/home-assistant,happyleavesaoc/home-assistant,morphis/home-assistant,shaftoe/home-assistant,philipbl/home-assistant,tboyce021/home-assistant,robbiet480/home-assistant,coteyr/home-assistant,ma314smith/home-assistant,sdague/home-assistant,Cinntax/home-assistant,sfam/home-assistant,jnewland/home-assistant,xifle/home-assistant,srcLurker/home-assistant,sffjunkie/home-assistant,mKeRix/home-assistant,Smart-Torvy/torvy-home-assistant,jabesq/home-assistant,hmronline/home-assistant,sander76/home-assistant,nnic/home-assistant,xifle/home-assistant,varunr047/homefile,LinuxChristian/home-assistant,fbradyirl/home-assistant,badele/home-assistant,Zac-HD/home-assistant,ma314smith/home
-assistant,Teagan42/home-assistant,deisi/home-assistant,hmronline/home-assistant,oandrew/home-assistant,devdelay/home-assistant,partofthething/home-assistant,Smart-Torvy/torvy-home-assistant,jawilson/home-assistant,nevercast/home-assistant,robjohnson189/home-assistant,happyleavesaoc/home-assistant,fbradyirl/home-assistant,philipbl/home-assistant,stefan-jonasson/home-assistant,devdelay/home-assistant,ewandor/home-assistant,DavidLP/home-assistant,mikaelboman/home-assistant,keerts/home-assistant,keerts/home-assistant,bdfoster/blumate,ct-23/home-assistant,jawilson/home-assistant,ma314smith/home-assistant,hexxter/home-assistant,jamespcole/home-assistant,LinuxChristian/home-assistant,tboyce1/home-assistant,leoc/home-assistant,auduny/home-assistant,aronsky/home-assistant,tinloaf/home-assistant,morphis/home-assistant,jamespcole/home-assistant,leoc/home-assistant,MungoRae/home-assistant,varunr047/homefile,emilhetty/home-assistant,eagleamon/home-assistant,xifle/home-assistant,Zyell/home-assistant,JshWright/home-assistant,Theb-1/home-assistant,justyns/home-assistant,FreekingDean/home-assistant,jabesq/home-assistant,emilhetty/home-assistant,open-homeautomation/home-assistant,balloob/home-assistant,soldag/home-assistant,MartinHjelmare/home-assistant,MungoRae/home-assistant,robjohnson189/home-assistant,joopert/home-assistant,persandstrom/home-assistant,luxus/home-assistant,varunr047/homefile,auduny/home-assistant,morphis/home-assistant,ct-23/home-assistant,mezz64/home-assistant,alexmogavero/home-assistant,deisi/home-assistant,MartinHjelmare/home-assistant,kyvinh/home-assistant,deisi/home-assistant,JshWright/home-assistant,justyns/home-assistant,instantchow/home-assistant,pschmitt/home-assistant,dmeulen/home-assistant,bdfoster/blumate,postlund/home-assistant,bdfoster/blumate,caiuspb/home-assistant,Zac-HD/home-assistant,oandrew/home-assistant,leoc/home-assistant,varunr047/homefile,florianholzapfel/home-assistant,mKeRix/home-assistant,tchellomello/home-assistant,tchellomello/home-as
sistant,leoc/home-assistant,Teagan42/home-assistant,srcLurker/home-assistant,sffjunkie/home-assistant,mikaelboman/home-assistant,devdelay/home-assistant,Zac-HD/home-assistant,leppa/home-assistant,stefan-jonasson/home-assistant,PetePriority/home-assistant,molobrakos/home-assistant,ewandor/home-assistant,emilhetty/home-assistant,deisi/home-assistant,w1ll1am23/home-assistant,dmeulen/home-assistant,oandrew/home-assistant,fbradyirl/home-assistant,betrisey/home-assistant,kyvinh/home-assistant,turbokongen/home-assistant,JshWright/home-assistant,MungoRae/home-assistant,eagleamon/home-assistant,PetePriority/home-assistant,open-homeautomation/home-assistant,lukas-hetzenecker/home-assistant,keerts/home-assistant,rohitranjan1991/home-assistant,HydrelioxGitHub/home-assistant,MartinHjelmare/home-assistant,badele/home-assistant,nugget/home-assistant,lukas-hetzenecker/home-assistant,florianholzapfel/home-assistant,soldag/home-assistant,postlund/home-assistant,miniconfig/home-assistant,mikaelboman/home-assistant,Theb-1/home-assistant,aequitas/home-assistant,kyvinh/home-assistant,aequitas/home-assistant,dmeulen/home-assistant,aoakeson/home-assistant,HydrelioxGitHub/home-assistant,molobrakos/home-assistant,Julian/home-assistant,shaftoe/home-assistant,home-assistant/home-assistant,toddeye/home-assistant,Duoxilian/home-assistant,partofthething/home-assistant,w1ll1am23/home-assistant,robjohnson189/home-assistant,Zac-HD/home-assistant,deisi/home-assistant,luxus/home-assistant,tboyce1/home-assistant,hexxter/home-assistant,nnic/home-assistant,leppa/home-assistant,nugget/home-assistant,dmeulen/home-assistant,Duoxilian/home-assistant,luxus/home-assistant,jamespcole/home-assistant,keerts/home-assistant,eagleamon/home-assistant,hexxter/home-assistant,sfam/home-assistant,jaharkes/home-assistant,persandstrom/home-assistant,philipbl/home-assistant | tests/test_bootstrap.py | tests/test_bootstrap.py | """
tests.test_bootstrap
~~~~~~~~~~~~~~~~~~~~
Tests bootstrap.
"""
# pylint: disable=too-many-public-methods,protected-access
import os
import tempfile
import unittest
from unittest import mock
from homeassistant import core, bootstrap
from homeassistant.const import __version__
import homeassistant.util.dt as dt_util
from tests.common import mock_detect_location_info
class TestBootstrap(unittest.TestCase):
    """ Test the bootstrap utils. """

    def setUp(self):
        # Remember the global default timezone; bootstrapping a config
        # may change it.
        self.orig_timezone = dt_util.DEFAULT_TIME_ZONE

    def tearDown(self):
        # Restore the timezone so tests stay independent.
        dt_util.DEFAULT_TIME_ZONE = self.orig_timezone

    def test_from_config_file(self):
        """Components listed in a YAML file are set up (plus 'group')."""
        components = ['browser', 'conversation', 'script']
        with tempfile.NamedTemporaryFile() as fp:
            for comp in components:
                fp.write('{}:\n'.format(comp).encode('utf-8'))
            fp.flush()
            # Stub location detection to keep the test offline.
            with mock.patch('homeassistant.util.location.detect_location_info',
                            mock_detect_location_info):
                hass = bootstrap.from_config_file(fp.name)
            components.append('group')  # always loaded by bootstrap
            self.assertEqual(sorted(components),
                             sorted(hass.config.components))

    def test_remove_lib_on_upgrade(self):
        """An HA version change clears out the config's lib directory."""
        with tempfile.TemporaryDirectory() as config_dir:
            version_path = os.path.join(config_dir, '.HA_VERSION')
            lib_dir = os.path.join(config_dir, 'lib')
            check_file = os.path.join(lib_dir, 'check')
            # Pretend the previous run used an older version.
            with open(version_path, 'wt') as outp:
                outp.write('0.7.0')
            os.mkdir(lib_dir)
            with open(check_file, 'w'):
                pass
            hass = core.HomeAssistant()
            hass.config.config_dir = config_dir
            self.assertTrue(os.path.isfile(check_file))
            bootstrap.process_ha_config_upgrade(hass)
            # Marker file gone -> lib dir was removed on upgrade.
            self.assertFalse(os.path.isfile(check_file))

    def test_not_remove_lib_if_not_upgrade(self):
        """Matching version: the lib directory is left untouched."""
        with tempfile.TemporaryDirectory() as config_dir:
            version_path = os.path.join(config_dir, '.HA_VERSION')
            lib_dir = os.path.join(config_dir, 'lib')
            check_file = os.path.join(lib_dir, 'check')
            # Record the current version -> no upgrade detected.
            with open(version_path, 'wt') as outp:
                outp.write(__version__)
            os.mkdir(lib_dir)
            with open(check_file, 'w'):
                pass
            hass = core.HomeAssistant()
            hass.config.config_dir = config_dir
            bootstrap.process_ha_config_upgrade(hass)
            self.assertTrue(os.path.isfile(check_file))
| """
tests.test_bootstrap
~~~~~~~~~~~~~~~~~~~~
Tests bootstrap.
"""
# pylint: disable=too-many-public-methods,protected-access
import tempfile
import unittest
from unittest import mock
from homeassistant import bootstrap
import homeassistant.util.dt as dt_util
from tests.common import mock_detect_location_info
class TestBootstrap(unittest.TestCase):
    """Exercise the homeassistant.bootstrap helpers."""

    def setUp(self):
        self._saved_tz = dt_util.DEFAULT_TIME_ZONE

    def tearDown(self):
        dt_util.DEFAULT_TIME_ZONE = self._saved_tz

    def test_from_config_file(self):
        wanted = ['browser', 'conversation', 'script']
        with tempfile.NamedTemporaryFile() as cfg:
            cfg.write(''.join('{}:\n'.format(c) for c in wanted).encode('utf-8'))
            cfg.flush()
            with mock.patch('homeassistant.util.location.detect_location_info',
                            mock_detect_location_info):
                hass = bootstrap.from_config_file(cfg.name)
        self.assertEqual(sorted(wanted + ['group']),
                         sorted(hass.config.components))
| mit | Python |
932edca06dd726d227db6c092b7f733f94d63f60 | Add correct datastore CSV download The URL for the production datastore is added following a fix made in https://github.com/IATI/IATI-Datastore/commit/30177c556ccacdc5fe6a2967b272ecef8aa9dee1 | IATI/IATI-Website-Tests | tests/test_datastore.py | tests/test_datastore.py | import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
    """Smoke tests for the IATI Datastore site and its access API."""
    requests_to_load = {
        'Datastore Homepage': {
            'url': 'http://datastore.iatistandard.org/'
        },
        'Datastore download: csv': {
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.csv'
        },
        'Datastore download: xml': {
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.xml'
        },
        'Datastore download: json': {
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.json'
        },
    }

    @pytest.mark.parametrize("target_request", ["Datastore Homepage"])
    def test_contains_links(self, target_request):
        """
        Test that each page contains links to the defined URLs.
        """
        loaded_request = self.loaded_request_from_test_name(target_request)
        result = utility.get_links_from_page(loaded_request)
        assert "http://iatiregistry.org/" in result

    @pytest.mark.parametrize("content_type", ["application/xml", "application/json", "text/csv"])
    def test_api_output_filetype(self, content_type):
        """
        Test that API calls return data in the expected filetypes.

        The test is conducted based upon the data returned in the request
        headers["content-type"]. For example, 'application/xml; charset=utf-8'
        """
        # Naming fix: local was previously misspelled 'file_extenstion'.
        file_extension = content_type.split("/")[1]
        loaded_request = self.loaded_request_from_test_name("Datastore download: {}".format(file_extension))
        result = loaded_request.headers["content-type"]
        assert result.startswith(content_type)
| import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
    """Smoke tests for the IATI Datastore site and its access API."""
    requests_to_load = {
        'Datastore Homepage': {
            'url': 'http://datastore.iatistandard.org/'
        },
        'Datastore download: csv': {
            # Consistency fix: this endpoint pointed at the dev host
            # (dev.datastore.iatistandard.org) while the xml/json
            # endpoints use the production host.
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.csv'
        },
        'Datastore download: xml': {
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.xml'
        },
        'Datastore download: json': {
            'url': 'http://datastore.iatistandard.org/api/1/access/activity.json'
        },
    }

    @pytest.mark.parametrize("target_request", ["Datastore Homepage"])
    def test_contains_links(self, target_request):
        """
        Test that each page contains links to the defined URLs.
        """
        loaded_request = self.loaded_request_from_test_name(target_request)
        result = utility.get_links_from_page(loaded_request)
        assert "http://iatiregistry.org/" in result

    @pytest.mark.parametrize("content_type", ["application/xml", "application/json", "text/csv"])
    def test_api_output_filetype(self, content_type):
        """
        Test that API calls return data in the expected filetypes.

        The test is conducted based upon the data returned in the request
        headers["content-type"]. For example, 'application/xml; charset=utf-8'
        """
        # Naming fix: local was previously misspelled 'file_extenstion'.
        file_extension = content_type.split("/")[1]
        loaded_request = self.loaded_request_from_test_name("Datastore download: {}".format(file_extension))
        result = loaded_request.headers["content-type"]
        assert result.startswith(content_type)
| mit | Python |
75f236f8fd0ba368197da3070002b60233a01d49 | Test routines to the BED writer added | gtamazian/Chromosomer | tests/test_track_bed.py | tests/test_track_bed.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
from chromosomer.track.bed import Writer
from itertools import izip
# Run relative to this file's directory so fixture paths such as
# 'data/bed/...' resolve regardless of the caller's working directory.
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
    """Tests for chromosomer.track.bed.Reader."""
    def setUp(self):
        # Known-good BED fixture shipped with the test data.
        self.__correct_file = os.path.join(
            'data', 'bed', 'correct.bed'
        )
        # silence the logging messages
        logging.disable(logging.ERROR)
    def test_records(self):
        """
        Check if the parser reads a file in the BED format in the
        correct way.
        """
        # test against the correct input file
        parser = Reader(self.__correct_file)
        for record in parser.records():
            self.assertIsInstance(record, BedRecord)
class TestBedWriter(unittest.TestCase):
    """Round-trip tests for chromosomer.track.bed.Writer."""
    def setUp(self):
        self.__input_file = os.path.join(
            'data', 'bed', 'correct.bed'
        )
        # Scratch output file; removed again in tearDown().
        self.__output_file = os.path.join(
            'data', 'bed', 'test.bed'
        )
        # silence the logging messages
        logging.disable(logging.ERROR)
    def tearDown(self):
        os.unlink(self.__output_file)
    def test_write(self):
        """
        Check if BED records are written in the correct way.
        """
        # Copy every record from the fixture through the Writer.
        bed_input = Reader(self.__input_file)
        with Writer(self.__output_file) as bed_output:
            for record in bed_input.records():
                bed_output.write(record)
        # check if the lines are identical
        # (izip: this module targets Python 2)
        with open(self.__input_file) as original_file, \
                open(self.__output_file) as written_file:
            for x, y in izip(original_file, written_file):
                self.assertEqual(x, y)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015 by Gaik Tamazian
# gaik (dot) tamazian (at) gmail (dot) com
import os
import logging
import unittest
from chromosomer.track.bed import BedRecord
from chromosomer.track.bed import Reader
# Run relative to this file's directory so fixture paths such as
# 'data/bed/...' resolve regardless of the caller's working directory.
path = os.path.dirname(__file__)
os.chdir(path)
class TestBedReader(unittest.TestCase):
    """Tests for chromosomer.track.bed.Reader."""
    def setUp(self):
        # Known-good BED fixture shipped with the test data.
        self.__correct_file = os.path.join(
            'data', 'bed', 'correct.bed'
        )
        # silence the logging messages
        logging.disable(logging.ERROR)
    def test_records(self):
        """
        Check if the parser reads a file in the BED format in the
        correct way.
        """
        # test against the correct input file
        parser = Reader(self.__correct_file)
        for record in parser.records():
            self.assertIsInstance(record, BedRecord)
# Defect fix: the suite previously ran at import time (module level),
# re-executing the tests whenever this module was imported. Guard it so
# the runner only fires when the file is executed directly.
if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(TestBedReader)
    unittest.TextTestRunner(verbosity=2).run(suite)
| mit | Python |
69f45f0f490d854335eed03198754fbb65fc9110 | Improve importation form wording | TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio | td_biblio/forms.py | td_biblio/forms.py | from operator import methodcaller
from django import forms
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
# DOI pattern: "10.<registrant>/<suffix>". Raw strings avoid Python's
# invalid-escape warnings for \S and \d (the string values are unchanged).
DOI_REGEX = r'''(10[.][0-9]{4,}(?:[.][0-9]+)*/(?:(?!["&'<>])\S)+)'''
doi_validator = RegexValidator(
    DOI_REGEX,
    _("One (or more) DOI is not valid"),
    'invalid'
)
# PMID pattern: an (optionally signed) integer, anchored to the whole string.
PMID_REGEX = r'^-?\d+\Z'
pmid_validator = RegexValidator(
    PMID_REGEX,
    _("One (or more) PMID is not valid"),
    'invalid'
)
def text_to_list(raw):
    """Split *raw* into a de-duplicated list of identifier strings.

    Items may be separated by commas, whitespace or newlines; empty
    fragments are dropped. Order of the returned list is unspecified.
    """
    items = set()
    for token in raw.split():
        for part in token.split(','):
            part = part.strip()
            if part:
                items.add(part)
    return list(items)
class EntryBatchImportForm(forms.Form):
pmids = forms.CharField(
label=_("PMID"),
widget=forms.Textarea(
attrs={
'placeholder': "ex: 26588162, 19569182"
}
),
help_text=_(
"Paste a list of PubMed Identifiers "
"(comma separated or one per line)"
),
required=False,
)
dois = forms.CharField(
label=_("DOI"),
widget=forms.Textarea(
attrs={
'placeholder': "ex: 10.1093/nar/gks419, 10.1093/nar/gkp323"
}
),
help_text=_(
"Paste a list of Digital Object Identifiers "
"(comma separated or one per line)"
),
required=False,
)
def clean_pmids(self):
"""Transform raw data in a PMID list"""
pmids = text_to_list(self.cleaned_data['pmids'])
for pmid in pmids:
pmid_validator(pmid)
return pmids
def clean_dois(self):
"""Transform raw data in a DOI list"""
dois = text_to_list(self.cleaned_data['dois'])
for doi in dois:
doi_validator(doi)
return dois
| from operator import methodcaller
from django import forms
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
DOI_REGEX = '(10[.][0-9]{4,}(?:[.][0-9]+)*/(?:(?!["&\'<>])\S)+)'
doi_validator = RegexValidator(
DOI_REGEX,
_("One (or more) DOI is not valid"),
'invalid'
)
PMID_REGEX = '^-?\d+\Z'
pmid_validator = RegexValidator(
PMID_REGEX,
_("One (or more) PMID is not valid"),
'invalid'
)
def text_to_list(raw):
"""Transform a raw text list to a python object list
Supported separators: coma, space and carriage return
"""
return list(set(
id.strip()
for r in map(methodcaller('split', ','), raw.split())
for id in r
if len(id)
))
class EntryBatchImportForm(forms.Form):
pmids = forms.CharField(
label=_("PubMed identifiers"),
widget=forms.Textarea(
attrs={
'placeholder': "ex: 26588162, 19569182"
}
),
help_text=_("Comma separated or one per line"),
required=False,
)
dois = forms.CharField(
label=_("Digital object identifiers (DOIs)"),
widget=forms.Textarea(
attrs={
'placeholder': "ex: 10.1093/nar/gks419, 10.1093/nar/gkp323"
}
),
help_text=_("Comma separated or one per line"),
required=False,
)
def clean_pmids(self):
"""Transform raw data in a PMID list"""
pmids = text_to_list(self.cleaned_data['pmids'])
for pmid in pmids:
pmid_validator(pmid)
return pmids
def clean_dois(self):
"""Transform raw data in a DOI list"""
dois = text_to_list(self.cleaned_data['dois'])
for doi in dois:
doi_validator(doi)
return dois
| mit | Python |
088a248a2276e46bcf58abc0873913680724534e | make the server visible to the internet | lex/gg-no-re,lex/gg-no-re,lex/gg-no-re,lex/gg-no-re | main.py | main.py | from flask import Flask
app = Flask(__name__)
@app.route('/')
def main():
return 'ok'
if __name__ == '__main__':
app.debug = True
app.run(host='0.0.0.0')
| from flask import Flask
app = Flask(__name__)
@app.route('/')
def main():
return 'ok'
if __name__ == '__main__':
app.run()
| bsd-3-clause | Python |
3a6b65125ddaaf97348d41082a70505ce99d8edc | Add route for setting max cache age to 0 | J216/band_name_generator,J216/band_name_generator,J216/band_name_generator | main.py | main.py | from flask import Flask, render_template
from os import listdir
from os.path import isfile, join
import twitter as tw
import image_overlay as ilay
import band_name as bn
app = Flask(__name__)
app.debug = True
names_made=0
page_info = {}
page_info['business_name'] = u"Band Name Generator"
page_info['desciption'] = u"Get your band name generated here."
page_info['about'] = u"We make the band name for real."
page_info['phone'] = u"(900) 985-2781"
page_info['phone_link'] = u"+1"
page_info['address'] = u"Saint Joseph, MO"
page_info['email'] = u"jaredhaer@gmail.com"
page_info['facebook'] = u"https://www.facebook.com/jared.haer"
page_info['twitter'] = u"https://twitter.com/jared216"
page_info['slides'] = [f for f in listdir('./static/images/band_names/') if isfile(join('./static/images/band_names/', f))]
@app.route("/")
def index():
return render_template("index.html",page_info=page_info)
@app.route("/band_name")
def bandName():
global names_made
page_info['band_name']=bn.getName()
bname=page_info['band_name']
p="./static/"
fn_out='images/band_names/'+str(names_made%12+1)+'.png'
print(ilay.makeImage(bname,fn_in='./bg.png',fn_out=p+fn_out))
page_info['band_image']=fn_out
names_made+=1
# page_info['tweet_status']=tw.tweetImage(bn.getTweet(bname),ilay.makeImage(bname))
page_info['slides'] = [f for f in listdir('./static/images/band_names/') if isfile(join('./static/images/band_names/', f))]
print(page_info['slides'])
return render_template("band_name.html", page_info=page_info)
@app.after_request
def add_header(response):
"""
Add headers to both force latest IE rendering engine or Chrome Frame,
and also to cache the rendered page for 10 minutes.
"""
response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
response.headers['Cache-Control'] = 'public, max-age=0'
return response
if __name__ == "__main__":
app.run(host="0.0.0.0",port=5004) | from flask import Flask, render_template
from os import listdir
from os.path import isfile, join
import twitter as tw
import image_overlay as ilay
import band_name as bn
app = Flask(__name__)
app.debug = True
names_made=0
page_info = {}
page_info['business_name'] = u"Band Name Generator"
page_info['desciption'] = u"Get your band name generated here."
page_info['about'] = u"We make the band name for real."
page_info['phone'] = u"(900) 985-2781"
page_info['phone_link'] = u"+1"
page_info['address'] = u"Saint Joseph, MO"
page_info['email'] = u"jaredhaer@gmail.com"
page_info['facebook'] = u"https://www.facebook.com/jared.haer"
page_info['twitter'] = u"https://twitter.com/jared216"
page_info['slides'] = [f for f in listdir('./static/images/band_names/') if isfile(join('./static/images/band_names/', f))]
@app.route("/")
def index():
return render_template("index.html",page_info=page_info)
@app.route("/band_name")
def bandName():
global names_made
page_info['band_name']=bn.getName()
bname=page_info['band_name']
p="./static/"
fn_out='images/band_names/'+str(names_made%12+1)+'.png'
print(ilay.makeImage(bname,fn_in='./bg.png',fn_out=p+fn_out))
page_info['band_image']=fn_out
names_made+=1
# page_info['tweet_status']=tw.tweetImage(bn.getTweet(bname),ilay.makeImage(bname))
page_info['slides'] = [f for f in listdir('./static/images/band_names/') if isfile(join('./static/images/band_names/', f))]
print(page_info['slides'])
return render_template("band_name.html", page_info=page_info)
if __name__ == "__main__":
app.run(host="0.0.0.0",port=5004) | mit | Python |
59ba6a03afbcb91dbca1f639febaf7eee3283109 | Add minor updates. | haoyueping/peer-grading-for-MOOCs | main.py | main.py | #!/usr/bin/python3
from time import time
import pandas as pd
from scipy.stats import kendalltau
from algorithms.EM import em
from algorithms.PageRank import page_rank
from algorithms.borda_ordering import borda_ordering
from algorithms.random_circle_removal import random_circle_removal
from utils.gradings import get_gradings
def experiment(n, k_list, repetition):
algos = [random_circle_removal, page_rank, em, borda_ordering]
algo_names = ['random_circle_removal', 'page_rank', 'em', 'borda_ordering']
file_name = './out/results_n_{}_k_{}.csv'.format(n, str(k_list))
myfile = open(file_name, 'w')
myfile.write('n,k,repetition,algorithm,time,distance,ranking\n')
for rep in range(repetition):
for k in k_list:
gradings = get_gradings(n, k)
for j in range(len(algos)):
start = time()
ranking = algos[j](gradings)
duration = time() - start
line = '{},{},{},{},{},{}'.format(n, k, rep, algo_names[j], duration,
kendalltau(list(range(1, n + 1)), ranking)[0])
myfile.write(line)
myfile.write(',"[')
for item in ranking[:-1]:
myfile.write('{},'.format(item))
myfile.write('{}]"\n'.format(ranking[-1]))
myfile.close()
if __name__ == '__main__':
repetition = 2
n = 10000
k_list = [6, 8]
experiment(n, k_list, repetition)
| #!/usr/bin/python3
from time import time
import pandas as pd
from scipy.stats import kendalltau
from algorithms.EM import em
from algorithms.PageRank import page_rank
from algorithms.borda_ordering import borda_ordering
from algorithms.random_circle_removal import random_circle_removal
from utils.gradings import get_gradings
def experiment(n, k_list, repetition):
algos = [random_circle_removal, page_rank, em, borda_ordering]
algo_names = ['random_circle_removal', 'page_rank', 'em', 'borda_ordering']
file_name = './out/results_n_{}_k_{}.csv'.format(n, str(k_list))
myfile = open(file_name, 'w')
myfile.write('n, k, rep, algo, time, distance, ranking\n')
for rep in range(repetition):
for k in k_list:
gradings = get_gradings(n, k)
for j in range(len(algos)):
start = time()
ranking = algos[j](gradings)
duration = time() - start
line = '{},{},{},{},{},{}'.format(n, k, rep, algo_names[j], duration,
kendalltau(list(range(1, n + 1)), ranking)[0])
myfile.write(line)
myfile.write(',"[')
for item in ranking[:-1]:
myfile.write('{},'.format(item))
myfile.write('{}]"\n'.format(ranking[-1]))
myfile.close()
if __name__ == '__main__':
repetition = 2
n = 10000
k_list = [6, 8]
experiment(n, k_list, repetition)
| mit | Python |
9e666e97b07d7c08e434791a061086010da6e6eb | Add ability to get the latest TwoHeadlines tweet | underyx/TheMajorNews | main.py | main.py | # -*- coding: utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def get_latest_tweet(token):
parameters = {'screen_name': 'TwoHeadlines',
'count': 1,
'trim_user': True}
headers = {'Authorization': 'Bearer ' + token}
r = requests.get('https://api.twitter.com/1.1/statuses/user_timeline.json',
params=parameters, headers=headers)
return r.json(encoding='utf8')[0]['text']
def main():
bearer_token = get_access_token()
latest_tweet = get_latest_tweet(bearer_token)
if __name__ == '__main__':
main()
| # -*- utf-8 -*-
import config
import requests
from base64 import b64encode
def get_access_token():
token = config.twitter_key + ':' + config.twitter_secret
h = {'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'Authorization': b'Basic ' + b64encode(bytes(token, 'utf8'))}
print()
r = requests.post('https://api.twitter.com/oauth2/token',
data=b'grant_type=client_credentials', headers=h)
assert r.json()['token_type'] == 'bearer'
return r.json()['access_token']
def main():
bearer_token = get_access_token()
if __name__ == '__main__':
main()
| mit | Python |
760a3280015a9bb9b6fdbbeda7ace5048708b892 | set default run to lunar_dqn | kengz/openai_lab,kengz/openai_gym,kengz/openai_lab,kengz/openai_lab,kengz/openai_gym,kengz/openai_gym | main.py | main.py | from rl.experiment import run
if __name__ == '__main__':
# run('dev_dqn', times=2, param_selection=True)
# run('dqn', times=2, param_selection=False)
run('lunar_dqn', times=3, param_selection=True)
# run('DevCartPole-v0_DQN_HighLowMemoryWithForgetting_BoltzmannPolicy_NoPreProcessor_2017-01-21_191023_e0', plot_only=True)
# run('lunar_dqn', times=3, param_selection=True, line_search=True)
# run('breakout_dqn', times=1, param_selection=True)
| from rl.experiment import run
if __name__ == '__main__':
run('dev_dqn', times=2, param_selection=True)
# run('dqn', times=2, param_selection=False)
# run('lunar_dqn', times=1, param_selection=False)
# run('DevCartPole-v0_DQN_HighLowMemoryWithForgetting_BoltzmannPolicy_NoPreProcessor_2017-01-21_191023_e0', plot_only=True)
# run('lunar_dqn', times=3, param_selection=True, line_search=True)
| mit | Python |
2dec070daca154b26b880147d29ba80af99dd5ee | Fix is_worker_running | UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine | main.py | main.py | from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporal solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
reserved = inspect.reserved()
if (len(active.items()) - 1 + len(scheduled.items()) - 1 + len(reserved.items()) - 1) > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
| from crawler import tasks
from crawler.db import db_mongodb as db
from time import sleep
from celery.app.control import Control
from crawler.celery import app
# Temporal solution
db.insert_url("http://www.inf.upol.cz")
def is_worker_running():
inspect = app.control.inspect()
active = inspect.active()
scheduled = inspect.scheduled()
# reserved = inspect.reserved()
print(len(active.items()))
print(len(scheduled.items()))
if len(active.items()) - 1 + len(scheduled.items()) - 1 > 0:
return True
else:
return False
while True:
url = db.random_unvisited_url()
if url is not None:
print("FEEDING QUEUE")
db.set_visited_url(url)
tasks.crawl_url_task.delay(url)
else:
if is_worker_running():
print("WORKER IS RUNNING - SLEEPING")
sleep(5)
else:
print("END")
break
| mit | Python |
7ef70ed9bd1a2e16ff046e8d805d88dab439e9d7 | Use ThreadPoolExecutor to send statistics in parallel | luigiberrettini/build-deploy-stats | main.py | main.py | #!/usr/bin/env python3
import asyncio
from concurrent.futures import ThreadPoolExecutor
from configuration.settings import Settings
from reporting.shellReporter import ShellReporter
from reporting.zabbixReporter import ZabbixReporter
from statsSend.teamCity.teamCityStatisticsSender import TeamCityStatisticsSender
from statsSend.jenkins.jenkinsStatisticsSender import JenkinsStatisticsSender
from statsSend.octopusDeploy.octopusDeployStatisticsSender import OctopusDeployStatisticsSender
from statsSend.urbanCodeDeploy.urbanCodeDeployStatisticsSender import UrbanCodeDeployStatisticsSender
class Main:
reporter_factories = {
(lambda x: not x.is_enabled('Zabbix')): (lambda x: ShellReporter()),
(lambda x: x.is_enabled('Zabbix')): (lambda x: ZabbixReporter(x.section('Zabbix')))
}
stats_sender_factories = {
(lambda x: x.is_enabled('TeamCity')): (lambda x, reporter: TeamCityStatisticsSender(x.section('TeamCity'), reporter)),
(lambda x: x.is_enabled('Jenkins')): (lambda x, reporter: JenkinsStatisticsSender(x.section('Jenkins'), reporter)),
(lambda x: x.is_enabled('OctopusDeploy')): (lambda x, reporter: OctopusDeployStatisticsSender(x.section('OctopusDeploy'), reporter)),
(lambda x: x.is_enabled('UrbanCodeDeploy')): (lambda x, reporter: UrbanCodeDeployStatisticsSender(x.section('UrbanCodeDeploy'), reporter))
}
def __init__(self):
self.settings = Settings()
self._create_stats_senders()
def send_stats(self):
with ThreadPoolExecutor(max_workers = len(self.statisticsSenders)) as executor:
for sender in self.statisticsSenders:
executor.submit(self._send_in_event_loop, sender)
def _create_stats_senders(self):
self.statisticsSenders = []
reporter = self._create_reporter()
for key, value in self.stats_sender_factories.items():
if key(self.settings):
self.statisticsSenders.append(value(self.settings, reporter))
def _create_reporter(self):
for key, value in self.reporter_factories.items():
if key(self.settings):
return value(self.settings)
def _send_in_event_loop(self, sender):
loop = asyncio.new_event_loop()
loop.run_until_complete(sender.send())
loop.close()
Main().send_stats() | #!/usr/bin/env python3
import asyncio
from configuration.settings import Settings
from reporting.shellReporter import ShellReporter
from reporting.zabbixReporter import ZabbixReporter
from statsSend.teamCity.teamCityStatisticsSender import TeamCityStatisticsSender
from statsSend.jenkins.jenkinsStatisticsSender import JenkinsStatisticsSender
from statsSend.octopusDeploy.octopusDeployStatisticsSender import OctopusDeployStatisticsSender
from statsSend.urbanCodeDeploy.urbanCodeDeployStatisticsSender import UrbanCodeDeployStatisticsSender
class Main:
reporter_factories = {
(lambda x: not x.is_enabled('Zabbix')): (lambda x: ShellReporter()),
(lambda x: x.is_enabled('Zabbix')): (lambda x: ZabbixReporter(x.section('Zabbix')))
}
stats_sender_factories = {
(lambda x: x.is_enabled('TeamCity')): (lambda x, reporter: TeamCityStatisticsSender(x.section('TeamCity'), reporter)),
(lambda x: x.is_enabled('Jenkins')): (lambda x, reporter: JenkinsStatisticsSender(x.section('Jenkins'), reporter)),
(lambda x: x.is_enabled('OctopusDeploy')): (lambda x, reporter: OctopusDeployStatisticsSender(x.section('OctopusDeploy'), reporter)),
(lambda x: x.is_enabled('UrbanCodeDeploy')): (lambda x, reporter: UrbanCodeDeployStatisticsSender(x.section('UrbanCodeDeploy'), reporter))
}
def __init__(self):
self.settings = Settings()
self._create_stats_senders()
def send_stats(self):
loop = asyncio.get_event_loop()
to_future_send = lambda sender: asyncio.ensure_future(sender.send())
future_send_list = list(map(to_future_send, self.statisticsSenders))
loop.run_until_complete(asyncio.gather(*future_send_list))
loop.close()
def _create_stats_senders(self):
self.statisticsSenders = []
reporter = self._create_reporter()
for key, value in self.stats_sender_factories.items():
if key(self.settings):
self.statisticsSenders.append(value(self.settings, reporter))
def _create_reporter(self):
for key, value in self.reporter_factories.items():
if key(self.settings):
return value(self.settings)
Main().send_stats() | mit | Python |
620182c6cad11907751146a5c66a19478aad0c18 | Update SVM.py | mahesh-9/ML,konemshad/ML | ml/GLM/SVM.py | ml/GLM/SVM.py | from math import log,e
from ..numc import *
from .lr import LR
from .LogisticRegr import LogisticRegression
""" trying to implement linear kernel using SVC """
class SVM():
def fit(self,X,Y):
LR.fit(self,X,Y)
return self
def hypo(self,it):
res= 1/(1+e**(-LR.hyp(self,it)))
return res
def cost1(self):
cos1 = -log(self.hypo(_))
return cos1
def cost2(self):
cos2 = -log(1-(self.hypo(_)))
return cos2
def cost(self):
s =0
for _ in range(self.m):
s+= (self.target[_])*self.cost1()+ (1-self.target[_])*self.cost2()
return s
def gd(self,rate=0.001,loops=100):
for k in range(loops):
for i in range(len(self.theta)):
ts=0
for j in range(self.m):
res = (self.hypo(it=j)-self.target[j])*self.feat[j][i]
ts+=res
self.theta[i]-=rate*(1/(self.m))*(ts)
t=self.cost()
def predict(self,x):
self.gd()
x = np.array(x)
return x.dot(self.theta[1:])+self.theta[0]
| from math import log,e
from ..numc import *
from .lr import LR
from .LogisticRegr import LogisticRegression
""" trying to implement linear kernel using SVC """
class SVM():
def fit(self,X,Y):
LR.fit(self,X,Y)
return self
def hypo(self,it):
res= 1/(1+e**(-LR.hyp(self,it)))
return res
def cost1(self):
cos1 = -log(self.hypo(_))
return cos1
def cost2(self):
cos2 = -log(1-(self.hypo(_)))
return cos2
def cost(self):
s =0
for _ in range(self.m):
s+= (self.target[_])*self.cost1()+ (1-self.target[_])*self.cost2()
return s
def gd(self,rate=0.001,loops=100):
for k in range(loops):
for i in range(len(self.theta)):
ts=0111
for j in range(self.m):
res = (self.hypo(it=j)-self.target[j])*self.feat[j][i]
ts+=res
self.theta[i]-=rate*(1/(self.m))*(ts)
t=self.cost()
def predict(self,x):
self.gd()
x = np.array(x)
return x.dot(self.theta[1:])+self.theta[0]
| mit | Python |
95d0ec44a7c2556577bebb9ef5afa3e9401a1c10 | add hll | Parsely/probably | pds/hll.py | pds/hll.py | import smhasher
import numpy as np
from hashfunctions import get_raw_hashfunctions
class HyperLogLog(object):
""" Basic Hyperloglog """
def __init__(self, error_rate):
b = int(np.ceil(np.log2((1.04 / error_rate) ** 2)))
self.precision = 64
self.alpha = self._get_alpha(b)
self.b = b
self.m = 1 << b
self.M = np.zeros(self.m, dtype=np.uint8)
self.bitcount_arr = [ 1L << i for i in range(self.precision - b + 1) ]
self.hashes = get_raw_hashfunctions()
@staticmethod
def _get_alpha(b):
if not (4 <= b <= 16):
raise ValueError("b=%d should be in range [4 : 16]" % b)
if b == 4:
return 0.673
if b == 5:
return 0.697
if b == 6:
return 0.709
return 0.7213 / (1.0 + 1.079 / (1 << b))
def _get_rho(w, arr):
""" Return the least signifiant bit
O(N) in the worst case
"""
lsb = 0
while not (w & arr[lsb]):
lsb += 1
return lsb+1
def add(self, uuid):
""" Adds a key to the HyperLogLog """
if uuid:
# Computing the hash
try:
x = smhasher.murmur3_x86_64(uuid)
except UnicodeEncodeError:
x = smhasher.murmur3_x86_64(uuid.encode('ascii', 'ignore'))
# Finding the register to update by using thef first b bits as an index
j = x & ((1 << self.b) - 1)
# Remove those b bits
w = x >> self.b
# Find the first 0 in the remaining bit pattern
self.M[j] = max(self.M[j], self._get_rho(w, self.bitcount_arr))
def __len__(self, M = None):
""" Returns the estimate of the cardinality """
return self.estimate()
def __or__(self, other_hll):
""" Perform a union with another HLL object. """
other_hll_M = other_hll.M
self.M = reduce(lambda x,y: np.maximum(x,y), [self.M,other_hll_M]).astype(np.int16)
return self
def estimate(self):
""" Returns the estimate of the cardinality """
E = self.alpha * float(self.m ** 2) / np.power(2.0, -self.M).sum()
if E <= 2.5 * self.m: # Small range correction
V = self.m - np.count_nonzero(self.M)
return int(self.m * np.log(self.m / float(V))) if V > 0 else int(E)
elif E <= float(1L << self.precision) / 30.0: #intermidiate range correction -> No correction
return int(E)
else:
return int(-(1L << self.precision) * np.log(1.0 - E / (1L << self.precision)))
if __name__ == "__main__":
hll = HyperLogLog(0.01)
for i in range(1000):
hll.add(str(i))
print len(hll)
| mit | Python | |
800a03a6e24136623fd5ef3a892630fcc045100e | adjust import for new conary module layout | fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary | conary/commit.py | conary/commit.py | #
# Copyright (c) 2004-2005 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any waranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import os
import tempfile
from conary import versions
from conary.lib import log
from conary.repository import changeset
from conary.repository import errors
from conary.repository import filecontainer
def doCommit(repos, changeSetFile, targetLabel):
try:
cs = changeset.ChangeSetFromFile(changeSetFile)
except filecontainer.BadContainer:
log.error("invalid changeset %s", changeSetFile)
return 1
if cs.isLocal():
if not targetLabel:
log.error("committing local changesets requires a targetLabel")
label = versions.Label(targetLabel)
cs.setTargetBranch(repos, label)
commitCs = cs.makeAbsolute(repos)
(fd, changeSetFile) = tempfile.mkstemp()
os.close(fd)
commitCs.writeToFile(changeSetFile)
try:
# hopefully the file hasn't changed underneath us since we
# did the check at the top of doCommit(). We should probably
# add commitChangeSet method that takes a fd.
try:
repos.commitChangeSetFile(changeSetFile)
except errors.CommitError, e:
print e
finally:
if targetLabel:
os.unlink(changeSetFile)
def doLocalCommit(db, changeSetFile):
cs = changeset.ChangeSetFromFile(changeSetFile)
if not cs.isLocal():
log.error("repository changesets must be applied with update instead")
db.commitChangeSet(cs, set(), isRollback = True, toStash = False)
| #
# Copyright (c) 2004-2005 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.opensource.org/licenses/cpl.php.
#
# This program is distributed in the hope that it will be useful, but
# without any waranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
from conary.repository import changeset
from conary.repository import errors
from repository import filecontainer
from conary.lib import log
import os
import tempfile
from conary import versions
def doCommit(repos, changeSetFile, targetLabel):
try:
cs = changeset.ChangeSetFromFile(changeSetFile)
except filecontainer.BadContainer:
log.error("invalid changeset %s", changeSetFile)
return 1
if cs.isLocal():
if not targetLabel:
log.error("committing local changesets requires a targetLabel")
label = versions.Label(targetLabel)
cs.setTargetBranch(repos, label)
commitCs = cs.makeAbsolute(repos)
(fd, changeSetFile) = tempfile.mkstemp()
os.close(fd)
commitCs.writeToFile(changeSetFile)
try:
# hopefully the file hasn't changed underneath us since we
# did the check at the top of doCommit(). We should probably
# add commitChangeSet method that takes a fd.
try:
repos.commitChangeSetFile(changeSetFile)
except errors.CommitError, e:
print e
finally:
if targetLabel:
os.unlink(changeSetFile)
def doLocalCommit(db, changeSetFile):
cs = changeset.ChangeSetFromFile(changeSetFile)
if not cs.isLocal():
log.error("repository changesets must be applied with update instead")
db.commitChangeSet(cs, set(), isRollback = True, toStash = False)
| apache-2.0 | Python |
211fadd16c8fb63c61c0e6378e2d9607d61f932c | Bump version to 8.0.0a2 | jberci/resolwe,genialis/resolwe,genialis/resolwe,jberci/resolwe | resolwe/__about__.py | resolwe/__about__.py | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '8.0.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '8.0.0a1'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| apache-2.0 | Python |
7e74eb64697b410f38e7039b9a26b6cdd36275c6 | add doctest | amygdalama/stardatetime | stardatetime/conversion.py | stardatetime/conversion.py | from datetime import date
def earth_date_to_star_date(earth_year, earth_month, earth_day):
"""Converts an Earth date to a star date.
>>> earth_date_to_star_date(2323, 1, 1)
0.0
>>> earth_date_to_star_date(2015, 1, 1)
-308000.0
>>> earth_date_to_star_date(2014, 12, 31)
-308002.7
"""
star_year = (earth_year - 2323) * 1000
first_date_of_year = date(year=earth_year, month=1, day=1)
earth_date = date(year=earth_year, month=earth_month, day=earth_day)
days_elapsed_in_year = earth_date - first_date_of_year
star_day = round(days_elapsed_in_year.days / 365.0 * 1000, 1)
return star_year + star_day
| from datetime import date
def earth_date_to_star_date(earth_year, earth_month, earth_day):
star_year = (earth_year - 2323) * 1000
first_date_of_year = date(year=earth_year, month=1, day=1)
earth_date = date(year=earth_year, month=earth_month, day=earth_day)
days_elapsed_in_year = earth_date - first_date_of_year
star_day = round(days_elapsed_in_year.days / 365.0 * 1000, 1)
return star_year + star_day
| mit | Python |
178736aa0d9c44fe7cb94cbbf1597b256e091773 | Fix error if no organisation is selected | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin | meinberlin/apps/newsletters/emails.py | meinberlin/apps/newsletters/emails.py | from email.mime.image import MIMEImage
from django.apps import apps
from django.conf import settings
from django.contrib import auth
from django.contrib.staticfiles import finders
from meinberlin.apps.contrib.emails import Email
Organisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
User = auth.get_user_model()
class NewsletterEmail(Email):
template_name = 'meinberlin_newsletters/emails/newsletter_email'
def dispatch(self, object, *args, **kwargs):
organisation_pk = kwargs.pop('organisation_pk', None)
organisation = None
if organisation_pk:
organisation = Organisation.objects.get(pk=organisation_pk)
kwargs['organisation'] = organisation
return super().dispatch(object, *args, **kwargs)
def get_reply_to(self):
return ['{} <{}>'.format(self.object.sender_name, self.object.sender)]
def get_languages(self, receiver):
return ['raw']
def get_receivers(self):
return User.objects.filter(id__in=self.kwargs['participant_ids'])
def get_attachments(self):
attachments = super().get_attachments()
organisation = self.kwargs['organisation']
if organisation and organisation.logo:
f = open(organisation.logo.path, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
attachments += [logo]
meinberlin_filename = finders.find('images/email_logo.png')
if meinberlin_filename:
f = open(meinberlin_filename, 'rb')
meinberlin_logo = MIMEImage(f.read())
meinberlin_logo.add_header(
'Content-ID', '<{}>'.format('meinberlin_logo'))
attachments += [meinberlin_logo]
return attachments
| from email.mime.image import MIMEImage
from django.apps import apps
from django.conf import settings
from django.contrib import auth
from django.contrib.staticfiles import finders
from meinberlin.apps.contrib.emails import Email
Organisation = apps.get_model(settings.A4_ORGANISATIONS_MODEL)
User = auth.get_user_model()
class NewsletterEmail(Email):
template_name = 'meinberlin_newsletters/emails/newsletter_email'
def dispatch(self, object, *args, **kwargs):
organisation_pk = kwargs.pop('organisation_pk', None)
organisation = None
if organisation_pk:
organisation = Organisation.objects.get(pk=organisation_pk)
kwargs['organisation'] = organisation
return super().dispatch(object, *args, **kwargs)
def get_reply_to(self):
return ['{} <{}>'.format(self.object.sender_name, self.object.sender)]
def get_languages(self, receiver):
return ['raw']
def get_receivers(self):
return User.objects.filter(id__in=self.kwargs['participant_ids'])
def get_attachments(self):
attachments = super().get_attachments()
organisation = self.kwargs['organisation']
if organisation.logo:
f = open(organisation.logo.path, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
attachments += [logo]
meinberlin_filename = finders.find('images/email_logo.png')
if meinberlin_filename:
f = open(meinberlin_filename, 'rb')
meinberlin_logo = MIMEImage(f.read())
meinberlin_logo.add_header(
'Content-ID', '<{}>'.format('meinberlin_logo'))
attachments += [meinberlin_logo]
return attachments
| agpl-3.0 | Python |
94625711fe2503dd8a2efbe367ec3d494810bf80 | Remove rogue print | mozata/menpo,menpo/menpo,menpo/menpo,grigorisg9gr/menpo,patricksnape/menpo,patricksnape/menpo,yuxiang-zhou/menpo,menpo/menpo,patricksnape/menpo,mozata/menpo,grigorisg9gr/menpo,mozata/menpo,mozata/menpo,yuxiang-zhou/menpo,grigorisg9gr/menpo,yuxiang-zhou/menpo | menpo/image/test/image_masked_test.py | menpo/image/test/image_masked_test.py | import numpy as np
from numpy.testing import assert_allclose
from menpo.shape import PointCloud
from menpo.image import MaskedImage, BooleanImage
def test_constrain_mask_to_landmarks():
img = MaskedImage.init_blank((10, 10))
img.landmarks['box'] = PointCloud(np.array([[0.0, 0.0], [5.0, 0.0],
[5.0, 5.0], [0.0, 5.0]]))
img.constrain_mask_to_landmarks(group='box')
example_mask = BooleanImage.init_blank((10, 10), fill=False)
example_mask.pixels[0, :6, :6] = True
assert(img.mask.n_true() == 36)
assert_allclose(img.mask.pixels, example_mask.pixels)
| import numpy as np
from numpy.testing import assert_allclose
from menpo.shape import PointCloud
from menpo.image import MaskedImage, BooleanImage
def test_constrain_mask_to_landmarks():
img = MaskedImage.init_blank((10, 10))
img.landmarks['box'] = PointCloud(np.array([[0.0, 0.0], [5.0, 0.0],
[5.0, 5.0], [0.0, 5.0]]))
img.constrain_mask_to_landmarks(group='box')
example_mask = BooleanImage.init_blank((10, 10), fill=False)
example_mask.pixels[0, :6, :6] = True
print img.mask, example_mask
assert(img.mask.n_true() == 36)
assert_allclose(img.mask.pixels, example_mask.pixels)
| bsd-3-clause | Python |
2810780806628074a803fe92cb31e56751ba08d7 | add long cache headers to the geojson resource | texas/tx_mixed_beverages,texas/tx_mixed_beverages,texas/tx_mixed_beverages,texas/tx_mixed_beverages | mixed_beverages/apps/lazy_geo/urls.py | mixed_beverages/apps/lazy_geo/urls.py | from django.conf.urls import url
from django.views.decorators.cache import cache_control
from . import views
ONE_DAY = 86400
ONE_WEEK = ONE_DAY * 7
urlpatterns = [
url(r'^data.geojson$',
cache_control(max_age=ONE_WEEK)(views.MarkerList.as_view()),
name='geo'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^data.geojson$', views.MarkerList.as_view(), name='geo'),
]
| apache-2.0 | Python |
c45bc3e57f0c0fce9990488d13766875e33787a9 | add ability to load students from DB | samitnuk/studentsdb,samitnuk/studentsdb,samitnuk/studentsdb | students/views/students.py | students/views/students.py | from django.shortcuts import render
from django.http import HttpResponse
from ..models import Student
def students_list(request):
students = Student.objects.all()
return render(request, 'students/students_list.html',
{'students': students})
def students_add(request):
return HttpResponse('<h1>Students Add Form</h1>')
def students_edit(request, sid):
return HttpResponse('<h1>Edit Student %s</h1>' % sid)
def students_delete(request, sid):
return HttpResponse('<h1>Delete Student %s</h1>' % sid) | from django.shortcuts import render
from django.http import HttpResponse
def students_list(request):
students = (
{'id': 1,
'first_name': 'Віталій',
'last_name': 'Подоба',
'ticket': 235,
'image': 'img/image1.jpg'},
{'id': 2,
'first_name': 'Андрій',
'last_name': 'Корост',
'ticket': 2435,
'image': 'img/image2.jpg'},
{'id': 3,
'first_name': 'Іван',
'last_name': 'Драган',
'ticket': 7235,
'image': 'img/image3.jpg'},
)
return render(request, 'students/students_list.html',
{'students': students})
def students_add(request):
return HttpResponse('<h1>Students Add Form</h1>')
def students_edit(request, sid):
return HttpResponse('<h1>Edit Student %s</h1>' % sid)
def students_delete(request, sid):
return HttpResponse('<h1>Delete Student %s</h1>' % sid) | mit | Python |
4bd88c81a0ca52d8dc9b4a228e3a74d8d51bf65e | Implement equality checking with default values | jshholland/dutch-bits | inflist.py | inflist.py | """
Provide services for automatically extending lists.
inflist provides a list subclass to allow for automagically extending lists.
InfList represents a list that allows extending indefinitely without any fuss.
Indexing a non-assigned thing works as expected; first, normal lookup is tried
like normal lists. If that fails, then a default value is returned, or
optionally an exception is raised.
Currently setting a value outside the currently existing list just appends, so,
for example,
>>> li = InfList()
>>> li[200] = 1
>>> print li
[1]
is expected behaviour (for now). I can't really see a use beyond simply setting
the next value, so this is a low priority "bug".
"""
class InfList(list):
"""Represent an infinite list."""
def __init__(self, *args, **kwds):
"""
Initialise the list.
Same constructor as stock Python list(), except with the addition of
of the default keyword argument.
The default argument sets the default value to get if an uninitialised
value is accessed. If it is None, then IndexError is raised instead.
It can be changed on an instance via setting of self.default.
"""
self.default = kwds.pop('default', 0)
super(InfList, self).__init__(*args, **kwds)
def __getitem__(self, index):
try:
return super(InfList, self).__getitem__(index)
except IndexError:
if self.default == None:
raise
return self.default
def __setitem__(self, index, value):
try:
super(InfList, self).__setitem__(index, value)
except IndexError:
self.append(value)
def __eq__(self, other):
"""
Override the default list comparison operation to a more "natural" one.
Two lists (infinite or not) are equal if an infinite list can be
extended to a list equal to the other. For example,
>>> li1, li2 = InfList(), range(3)
>>> li1 == li2
False
>>> li2 = [0] * 3
>>> li1 == li2
True
"""
if super(InfList, self).__eq__(other):
return True
else:
lself, lother = len(self), len(other)
if lself < lother and self[:lother] == other:
return True
else:
return False
def __getslice__(self, i, j):
if self.default == None:
return super(InfList, self).__getslice__(i, j)
else:
retval = InfList(default=self.default)
while i < j:
retval.append(self[i])
i += 1
return retval
| """
Provide services for automatically extending lists.
inflist provides a list subclass to allow for automagically extending lists.
InfList represents a list that allows extending indefinitely without any fuss.
Indexing a non-assigned thing works as expected; first, normal lookup is tried
like normal lists. If that fails, then a default value is returned, or
optionally an exception is raised.
Currently setting a value outside the currently existing list just appends, so,
for example,
>>> li = InfList()
>>> li[200] = 1
>>> print li
[1]
is expected behaviour (for now). I can't really see a use beyond simply setting
the next value, so this is a low priority "bug".
"""
class InfList(list):
"""Represent an infinite list."""
def __init__(self, *args, **kwds):
"""
Initialise the list.
Same constructor as stock Python list(), except with the addition of
of the default keyword argument.
The default argument sets the default value to get if an uninitialised
value is accessed. If it is None, then IndexError is raised instead.
It can be changed on an instance via setting of self.default.
"""
self.default = kwds.pop('default', 0)
super(InfList, self).__init__(*args, **kwds)
def __getitem__(self, index):
try:
return super(InfList, self).__getitem__(index)
except IndexError:
if self.default == None:
raise
return self.default
def __setitem__(self, index, value):
try:
super(InfList, self).__setitem__(index, value)
except IndexError:
self.append(value)
def __getslice__(self, i, j):
if self.default == None:
return super(InfList, self).__getslice__(i, j)
else:
retval = InfList(default=self.default)
while i < j:
retval.append(self[i])
i += 1
return retval
| bsd-3-clause | Python |
7f639d77caa3120745ab1a42c517d396664c17f9 | bump version to 0.4.33 | nickp60/riboSeed,nickp60/riboSeed,nickp60/riboSeed | riboSeed/_version.py | riboSeed/_version.py | __version__ = '0.4.33'
| __version__ = '0.4.32'
| mit | Python |
fd905b25b81419c3896d1177edd05709a5a78ad5 | Fix missing variable | vrs01/mopidy,liamw9534/mopidy,hkariti/mopidy,pacificIT/mopidy,priestd09/mopidy,jodal/mopidy,adamcik/mopidy,hkariti/mopidy,swak/mopidy,pacificIT/mopidy,woutervanwijk/mopidy,jcass77/mopidy,jcass77/mopidy,quartz55/mopidy,ali/mopidy,bacontext/mopidy,tkem/mopidy,vrs01/mopidy,bacontext/mopidy,bencevans/mopidy,vrs01/mopidy,abarisain/mopidy,mokieyue/mopidy,diandiankan/mopidy,liamw9534/mopidy,ZenithDK/mopidy,mokieyue/mopidy,dbrgn/mopidy,abarisain/mopidy,SuperStarPL/mopidy,quartz55/mopidy,rawdlite/mopidy,diandiankan/mopidy,rawdlite/mopidy,kingosticks/mopidy,hkariti/mopidy,swak/mopidy,dbrgn/mopidy,dbrgn/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,ZenithDK/mopidy,mopidy/mopidy,vrs01/mopidy,jmarsik/mopidy,kingosticks/mopidy,priestd09/mopidy,bencevans/mopidy,ali/mopidy,diandiankan/mopidy,adamcik/mopidy,dbrgn/mopidy,pacificIT/mopidy,ZenithDK/mopidy,hkariti/mopidy,jodal/mopidy,glogiotatidis/mopidy,ali/mopidy,bacontext/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,diandiankan/mopidy,jodal/mopidy,mopidy/mopidy,mokieyue/mopidy,jcass77/mopidy,bacontext/mopidy,bencevans/mopidy,bencevans/mopidy,jmarsik/mopidy,quartz55/mopidy,priestd09/mopidy,quartz55/mopidy,tkem/mopidy,mopidy/mopidy,adamcik/mopidy,SuperStarPL/mopidy,woutervanwijk/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,rawdlite/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,tkem/mopidy,rawdlite/mopidy,tkem/mopidy,swak/mopidy,swak/mopidy,SuperStarPL/mopidy,ali/mopidy | mopidy/backends/libspotify/library.py | mopidy/backends/libspotify/library.py | import logging
import multiprocessing
from spotify import Link, SpotifyError
from mopidy.backends.base import BaseLibraryController
from mopidy.backends.libspotify import ENCODING
from mopidy.backends.libspotify.translator import LibspotifyTranslator
logger = logging.getLogger('mopidy.backends.libspotify.library')
class LibspotifyLibraryController(BaseLibraryController):
def find_exact(self, **query):
return self.search(**query)
def lookup(self, uri):
try:
spotify_track = Link.from_string(uri).as_track()
# TODO Block until metadata_updated callback is called. Before that
# the track will be unloaded, unless it's already in the stored
# playlists.
return LibspotifyTranslator.to_mopidy_track(spotify_track)
except SpotifyError as e:
logger.warning(u'Failed to lookup: %s', uri, e)
return None
def refresh(self, uri=None):
pass # TODO
def search(self, **query):
spotify_query = []
for (field, values) in query.iteritems():
if not hasattr(values, '__iter__'):
values = [values]
for value in values:
if field == u'track':
field = u'title'
if field == u'any':
spotify_query.append(value)
else:
spotify_query.append(u'%s:"%s"' % (field, value))
spotify_query = u' '.join(spotify_query)
logger.debug(u'Spotify search query: %s' % spotify_query)
my_end, other_end = multiprocessing.Pipe()
self.backend.spotify.search(spotify_query.encode(ENCODING), other_end)
my_end.poll(None)
playlist = my_end.recv()
return playlist
| import logging
import multiprocessing
from spotify import Link, SpotifyError
from mopidy.backends.base import BaseLibraryController
from mopidy.backends.libspotify import ENCODING
from mopidy.backends.libspotify.translator import LibspotifyTranslator
logger = logging.getLogger('mopidy.backends.libspotify.library')
class LibspotifyLibraryController(BaseLibraryController):
def find_exact(self, **query):
return self.search(**query)
def lookup(self, uri):
try:
spotify_track = Link.from_string(uri).as_track()
# TODO Block until metadata_updated callback is called. Before that
# the track will be unloaded, unless it's already in the stored
# playlists.
return LibspotifyTranslator.to_mopidy_track(spotify_track)
except SpotifyError as e:
logger.warning(u'Failed to lookup: %s', track.uri, e)
return None
def refresh(self, uri=None):
pass # TODO
def search(self, **query):
spotify_query = []
for (field, values) in query.iteritems():
if not hasattr(values, '__iter__'):
values = [values]
for value in values:
if field == u'track':
field = u'title'
if field == u'any':
spotify_query.append(value)
else:
spotify_query.append(u'%s:"%s"' % (field, value))
spotify_query = u' '.join(spotify_query)
logger.debug(u'Spotify search query: %s' % spotify_query)
my_end, other_end = multiprocessing.Pipe()
self.backend.spotify.search(spotify_query.encode(ENCODING), other_end)
my_end.poll(None)
playlist = my_end.recv()
return playlist
| apache-2.0 | Python |
802811e43524a93d3adb138a8159fc47775dc2bb | Return all tracks in stored playlists upon empty search query | tkem/mopidy,ali/mopidy,rawdlite/mopidy,quartz55/mopidy,hkariti/mopidy,ali/mopidy,priestd09/mopidy,jcass77/mopidy,jmarsik/mopidy,mokieyue/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,woutervanwijk/mopidy,tkem/mopidy,abarisain/mopidy,dbrgn/mopidy,dbrgn/mopidy,pacificIT/mopidy,adamcik/mopidy,ZenithDK/mopidy,mokieyue/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,vrs01/mopidy,rawdlite/mopidy,pacificIT/mopidy,ZenithDK/mopidy,tkem/mopidy,jmarsik/mopidy,adamcik/mopidy,bencevans/mopidy,priestd09/mopidy,ali/mopidy,mopidy/mopidy,hkariti/mopidy,kingosticks/mopidy,bencevans/mopidy,bacontext/mopidy,jodal/mopidy,ZenithDK/mopidy,mopidy/mopidy,quartz55/mopidy,rawdlite/mopidy,jcass77/mopidy,vrs01/mopidy,kingosticks/mopidy,vrs01/mopidy,swak/mopidy,hkariti/mopidy,bacontext/mopidy,ZenithDK/mopidy,mopidy/mopidy,pacificIT/mopidy,ali/mopidy,woutervanwijk/mopidy,vrs01/mopidy,swak/mopidy,SuperStarPL/mopidy,quartz55/mopidy,dbrgn/mopidy,bencevans/mopidy,jcass77/mopidy,diandiankan/mopidy,bencevans/mopidy,dbrgn/mopidy,bacontext/mopidy,SuperStarPL/mopidy,quartz55/mopidy,tkem/mopidy,glogiotatidis/mopidy,swak/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,priestd09/mopidy,mokieyue/mopidy,kingosticks/mopidy,liamw9534/mopidy,jmarsik/mopidy,pacificIT/mopidy,bacontext/mopidy,rawdlite/mopidy,diandiankan/mopidy,adamcik/mopidy,liamw9534/mopidy,hkariti/mopidy,SuperStarPL/mopidy,jodal/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,jodal/mopidy,swak/mopidy,abarisain/mopidy | mopidy/backends/libspotify/library.py | mopidy/backends/libspotify/library.py | import logging
import multiprocessing
from spotify import Link
from mopidy.backends.base import BaseLibraryController
from mopidy.backends.libspotify import ENCODING
from mopidy.backends.libspotify.translator import LibspotifyTranslator
from mopidy.models import Playlist
logger = logging.getLogger('mopidy.backends.libspotify.library')
class LibspotifyLibraryController(BaseLibraryController):
def find_exact(self, **query):
return self.search(**query)
def lookup(self, uri):
spotify_track = Link.from_string(uri).as_track()
# TODO Block until metadata_updated callback is called. Before that the
# track will be unloaded, unless it's already in the stored playlists.
return LibspotifyTranslator.to_mopidy_track(spotify_track)
def refresh(self, uri=None):
pass # TODO
def search(self, **query):
if not query:
# Since we can't search for the entire Spotify library, we return
# all tracks in the stored playlists when the query is empty.
tracks = []
for playlist in self.backend.stored_playlists.playlists:
tracks += playlist.tracks
return Playlist(tracks=tracks)
spotify_query = []
for (field, values) in query.iteritems():
if not hasattr(values, '__iter__'):
values = [values]
for value in values:
if field == u'track':
field = u'title'
if field == u'any':
spotify_query.append(value)
else:
spotify_query.append(u'%s:"%s"' % (field, value))
spotify_query = u' '.join(spotify_query)
logger.debug(u'Spotify search query: %s' % spotify_query)
my_end, other_end = multiprocessing.Pipe()
self.backend.spotify.search(spotify_query.encode(ENCODING), other_end)
my_end.poll(None)
playlist = my_end.recv()
return playlist
| import logging
import multiprocessing
from spotify import Link
from mopidy.backends.base import BaseLibraryController
from mopidy.backends.libspotify import ENCODING
from mopidy.backends.libspotify.translator import LibspotifyTranslator
logger = logging.getLogger('mopidy.backends.libspotify.library')
class LibspotifyLibraryController(BaseLibraryController):
def find_exact(self, **query):
return self.search(**query)
def lookup(self, uri):
spotify_track = Link.from_string(uri).as_track()
# TODO Block until metadata_updated callback is called. Before that the
# track will be unloaded, unless it's already in the stored playlists.
return LibspotifyTranslator.to_mopidy_track(spotify_track)
def refresh(self, uri=None):
pass # TODO
def search(self, **query):
spotify_query = []
for (field, values) in query.iteritems():
if not hasattr(values, '__iter__'):
values = [values]
for value in values:
if field == u'track':
field = u'title'
if field == u'any':
spotify_query.append(value)
else:
spotify_query.append(u'%s:"%s"' % (field, value))
spotify_query = u' '.join(spotify_query)
logger.debug(u'Spotify search query: %s' % spotify_query)
my_end, other_end = multiprocessing.Pipe()
self.backend.spotify.search(spotify_query.encode(ENCODING), other_end)
my_end.poll(None)
playlist = my_end.recv()
return playlist
| apache-2.0 | Python |
c4522ef2659e0f50bd240f9a3407721bc8ad6177 | add default post and pages | bderstine/WebsiteMixer-App-Base,bderstine/WebsiteMixer-App-Base,bderstine/WebsiteMixer-App-Base | initial.py | initial.py | #!venv/bin/python
import getpass, sys, os, uuid
print("================================================================================")
#This will need to ask for values and then update and deploy template files with those values.
domain = raw_input('Enter the domain name that will be used (.com/.net/.org): ')
appname = raw_input('Enter the app name that will be used (one word, no special chars!): ')
print("================================================================================")
dbuser = raw_input('Enter MYSQL USERNAME to use: ')
dbpass = getpass.getpass('Enter MYSQL PASSWORD to use: ')
dbname = raw_input('Enter MYSQL DATABASE NAME to use: ')
print("================================================================================")
adminuser = raw_input('Enter ADMIN USERNAME (do not use "admin"!): ')
adminpw1 = getpass.getpass()
adminpw2 = getpass.getpass('Confirm Password: ')
if adminpw1 != adminpw2:
print 'Admin passwords do not match! Abort!'
sys.exit(0)
adminemail = raw_input('Enter admin EMAIL address: ')
print("================================================================================")
#api.wsgi.template -> api.wsgi, update [domain]
with open ("api.wsgi.template", "r") as myfile:
data=myfile.read().replace('[domain]', domain)
f = open('api.wsgi', 'w')
f.write(data)
f.close()
#config.py.template -> config.py, update [appname]
secretkey = str(uuid.uuid4())
with open ("config.py.template", "r") as myfile:
data=myfile.read().replace('[appname]', appname).replace('[domain]',domain).replace('[secretkey]',secretkey).replace('[dbuser]',dbuser).replace('[dbpass]',dbpass).replace('[dbname]',dbname)
f = open('config.py', 'w')
f.write(data)
f.close()
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db, models
import os.path
db.drop_all()
db.create_all()
#virtualhosts/template.com.conf -> [domain].com.conf, update [domain] and [appname]
with open ("virtualhosts/template.com.conf", "r") as myfile:
data=myfile.read().replace('[appname]', appname).replace('[domain]', domain)
f = open('virtualhosts/' + domain + '.conf', 'w')
f.write(data)
f.close()
# Create UPLOAD_FOLDER
directory = 'app/static/upload/'
if not os.path.exists(directory):
print('Creating upload dir: ' + directory)
os.makedirs(directory)
else:
print('Upload dir exists: ' + directory)
# Pre-load first user
u = models.User(adminuser,adminpw1,adminemail)
db.session.add(u)
# Pre-load initial settings
settings = {'siteName':appname, 'siteUrl':'http://'+domain, 'headerForeground':'ffffff', 'headerBackground':'cccccc', 'colorLinks':'cccccc', 'colorHover':'666666'}
for k,v in settings.iteritems():
a = models.Settings(k,v)
db.session.add(a)
db.session.commit()
# Pre-load initial post
a = Posts(adminuser,'Hello World!','/hello-world/','This is the first post! You can delete this post and add more vi /admin/!','','')
db.session.add(a)
db.session.commit()
# Pre-load initial pages
a = Pages('About','/about/','Yep, it\'s an about page!','','')
db.session.add(a)
db.session.commit()
a = Pages('Contact','/contact/','Yep, it\'s a contact page!','','')
db.session.add(a)
db.session.commit()
print("Setup is complete!")
| #!venv/bin/python
import getpass, sys, os, uuid
print("================================================================================")
#This will need to ask for values and then update and deploy template files with those values.
domain = raw_input('Enter the domain name that will be used (.com/.net/.org): ')
appname = raw_input('Enter the app name that will be used (one word, no special chars!): ')
print("================================================================================")
dbuser = raw_input('Enter MYSQL USERNAME to use: ')
dbpass = getpass.getpass('Enter MYSQL PASSWORD to use: ')
dbname = raw_input('Enter MYSQL DATABASE NAME to use: ')
print("================================================================================")
adminuser = raw_input('Enter ADMIN USERNAME (do not use "admin"!): ')
adminpw1 = getpass.getpass()
adminpw2 = getpass.getpass('Confirm Password: ')
if adminpw1 != adminpw2:
print 'Admin passwords do not match! Abort!'
sys.exit(0)
adminemail = raw_input('Enter admin EMAIL address: ')
print("================================================================================")
#api.wsgi.template -> api.wsgi, update [domain]
with open ("api.wsgi.template", "r") as myfile:
data=myfile.read().replace('[domain]', domain)
f = open('api.wsgi', 'w')
f.write(data)
f.close()
#config.py.template -> config.py, update [appname]
secretkey = str(uuid.uuid4())
with open ("config.py.template", "r") as myfile:
data=myfile.read().replace('[appname]', appname).replace('[domain]',domain).replace('[secretkey]',secretkey).replace('[dbuser]',dbuser).replace('[dbpass]',dbpass).replace('[dbname]',dbname)
f = open('config.py', 'w')
f.write(data)
f.close()
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
from app import db, models
import os.path
db.drop_all()
db.create_all()
#virtualhosts/template.com.conf -> [domain].com.conf, update [domain] and [appname]
with open ("virtualhosts/template.com.conf", "r") as myfile:
data=myfile.read().replace('[appname]', appname).replace('[domain]', domain)
f = open('virtualhosts/' + domain + '.conf', 'w')
f.write(data)
f.close()
# Create UPLOAD_FOLDER
directory = 'app/static/upload/'
if not os.path.exists(directory):
print('Creating upload dir: ' + directory)
os.makedirs(directory)
else:
print('Upload dir exists: ' + directory)
# Pre-load first user
u = models.User(adminuser,adminpw1,adminemail)
db.session.add(u)
# Pre-load initial settings
settings = {'siteName':appname, 'siteUrl':'http://'+domain, 'headerForeground':'ffffff', 'headerBackground':'cccccc', 'colorLinks':'cccccc', 'colorHover':'666666'}
for k,v in settings.iteritems():
a = models.Settings(k,v)
db.session.add(a)
db.session.commit()
print("Setup is complete!")
| mit | Python |
363979820d537b940d334296e9b5a9b23c05a24e | fix unicode error | CARocha/sitioreddes,CARocha/sitioreddes,CARocha/sitioreddes | multimedia/models.py | multimedia/models.py | #encoding: utf-8
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from sorl.thumbnail import ImageField
from sitioreddes.utils import get_file_path
from taggit.managers import TaggableManager
from django.contrib.auth.models import User
# Create your models here.
#
class Fotos(models.Model):
nombre = models.CharField(max_length=150)
imagen = ImageField(upload_to=get_file_path, blank=True, null=True)
tags_fotos = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'fotos/'
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Fotos"
class Audio(models.Model):
nombre = models.CharField(max_length=150)
audio = models.FileField(upload_to=get_file_path, null=True, blank=True)
tags_audio = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'audios/'
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Audios"
class Videos(models.Model):
nombre= models.CharField(max_length=200, null=True, blank=True)
url = models.URLField(null=True, blank=True)
tags_video = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Videos"
class Adjuntos(models.Model):
nombre = models.CharField(max_length=150)
archivo = models.FileField(upload_to=get_file_path, blank=True, null=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'adjuntos/'
def __unicode__(self):
return u'%s' % (self.nombre)
class Meta:
verbose_name_plural = "Adjuntos"
class Multimedia(models.Model):
titulo = models.CharField(max_length=250)
autor = models.ForeignKey(User, null=True, blank=True)
def __unicode__(self):
return u'%s' % (self.titulo) | #encoding: utf-8
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from sorl.thumbnail import ImageField
from sitioreddes.utils import get_file_path
from taggit.managers import TaggableManager
from django.contrib.auth.models import User
# Create your models here.
#
class Fotos(models.Model):
nombre = models.CharField(max_length=150)
imagen = ImageField(upload_to=get_file_path, blank=True, null=True)
tags_fotos = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'fotos/'
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Fotos"
class Audio(models.Model):
nombre = models.CharField(max_length=150)
audio = models.FileField(upload_to=get_file_path, null=True, blank=True)
tags_audio = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'audios/'
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Audios"
class Videos(models.Model):
nombre= models.CharField(max_length=200, null=True, blank=True)
url = models.URLField(null=True, blank=True)
tags_video = TaggableManager(blank=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Videos"
class Adjuntos(models.Model):
nombre = models.CharField(max_length=150)
archivo = models.FileField(upload_to=get_file_path, blank=True, null=True)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
fileDir = 'adjuntos/'
def __unicode__(self):
return self.nombre
class Meta:
verbose_name_plural = "Adjuntos"
class Multimedia(models.Model):
titulo = models.CharField(max_length=250)
autor = models.ForeignKey(User, null=True, blank=True)
def __unicode__(self):
return u'%s' % (self.titulo) | mit | Python |
5aa65dca88661220645df2ce598b83b04fce94d3 | Add some fields for display | kboard/kboard,kboard/kboard,hyesun03/k-board,hyesun03/k-board,guswnsxodlf/k-board,darjeeling/k-board,cjh5414/kboard,guswnsxodlf/k-board,cjh5414/kboard,hyesun03/k-board,kboard/kboard,guswnsxodlf/k-board,cjh5414/kboard | kboard/accounts/admin.py | kboard/accounts/admin.py | from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from .models import Account
class UserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = Account
fields = ('username', 'email', 'name')
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("패스워드가 동일하지 않습니다.")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField()
class Meta:
model = Account
fields = ('username', 'email', 'name', 'password')
def clean_password(self):
return self.initial["password"]
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
list_display = ('username', 'email', 'name', 'is_admin', 'is_superuser')
list_filter = ('is_admin',)
fieldsets = (
(None, {'fields': ('username', 'password')}),
('Personal info', {'fields': ('email', 'name', )}),
('Permissions', {'fields': ('is_admin', 'is_superuser', 'is_active')}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('username', 'email', 'name', 'password1', 'password2')}
),
)
search_fields = ('username',)
ordering = ('username',)
filter_horizontal = ()
admin.site.register(Account, UserAdmin)
admin.site.unregister(Group)
| from django import forms
from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from .models import Account
class UserCreationForm(forms.ModelForm):
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = Account
fields = ('username', 'email', 'name')
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("패스워드가 동일하지 않습니다.")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField()
class Meta:
model = Account
fields = ('username', 'email', 'name', 'password')
def clean_password(self):
return self.initial["password"]
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
list_display = ('username', 'email', 'name', 'is_admin')
list_filter = ('is_admin',)
fieldsets = (
(None, {'fields': ('username', 'password')}),
('Personal info', {'fields': ('email', 'name', )}),
('Permissions', {'fields': ('is_admin',)}),
)
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('username', 'email', 'name', 'password1', 'password2')}
),
)
search_fields = ('username',)
ordering = ('username',)
filter_horizontal = ()
admin.site.register(Account, UserAdmin)
admin.site.unregister(Group)
| mit | Python |
01c50698e1640edb4417c71ec7a58ad034d5fda4 | Rename invalid model exception | QuLogic/ocropy,QuLogic/ocropy,mittagessen/kraken,mittagessen/kraken,mittagessen/kraken,mittagessen/kraken | kraken/lib/exceptions.py | kraken/lib/exceptions.py | # -*- coding: utf-8 -*-
"""
kraken.lib.exceptions
~~~~~~~~~~~~~~~~~~~~~
All custom exceptions raised by kraken's modules and packages. Packages should
always define their exceptions here.
"""
class KrakenRecordException(Exception):
def __init__(self, message=None):
Exception.__init__(self, message)
class KrakenInvalidModelException(Exception):
def __init__(self, message=None):
Exception.__init__(self, message)
| # -*- coding: utf-8 -*-
"""
kraken.lib.exceptions
~~~~~~~~~~~~~~~~~~~~~
All custom exceptions raised by kraken's modules and packages. Packages should
always define their exceptions here.
"""
class KrakenRecordException(Exception):
def __init__(self, message=None):
Exception.__init__(self, message)
| apache-2.0 | Python |
9113ea5813dabd746c2344e076bc3827aa2ec117 | Add restore functions to numpy.dual | efiring/numpy-work,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,efiring/numpy-work,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC,efiring/numpy-work,chadnetzer/numpy-gaurdro,efiring/numpy-work,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,illume/numpy3k,teoliphant/numpy-refactor,teoliphant/numpy-refactor,illume/numpy3k,teoliphant/numpy-refactor,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,chadnetzer/numpy-gaurdro | numpy/dual.py | numpy/dual.py | # This module should be used for functions both in numpy and scipy if
# you want to use the numpy version if available but the scipy version
# otherwise.
# Usage --- from numpy.dual import fft, inv
__all__ = ['fft','ifft','fftn','ifftn','fft2','ifft2',
'inv','svd','solve','det','eig','eigvals','lstsq',
'pinv','cholesky','i0']
import numpy.linalg as linpkg
import numpy.dft as fftpkg
from numpy.lib import i0
import sys
fft = fftpkg.fft
ifft = fftpkg.ifft
fftn = fftpkg.fftn
ifftn = fftpkg.ifftn
fft2 = fftpkg.fft2
ifft2 = fftpkg.ifft2
inv = linpkg.inv
svd = linpkg.svd
solve = linpkg.solve
det = linpkg.det
eig = linpkg.eig
eigvals = linpkg.eigvals
lstsq = linpkg.lstsq
pinv = linpkg.pinv
cholesky = linpkg.cholesky
_restore_dict = {}
def register_func(name, func):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
f = sys._getframe(0).f_globals
_restore_dict[name] = f[name]
f[name] = func
def restore_func(name):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
try:
val = _restore_dict[name]
except KeyError:
return
else:
sys._getframe(0).f_globals[name] = val
def restore_all():
for name in _restore_dict.keys():
restore_func(name)
| # This module should be used for functions both in numpy and scipy if
# you want to use the numpy version if available but the scipy version
# otherwise.
# Usage --- from numpy.dual import fft, inv
__all__ = ['fft','ifft','fftn','ifftn','fft2','ifft2',
'inv','svd','solve','det','eig','eigvals','lstsq',
'pinv','cholesky','i0']
import numpy.linalg as linpkg
import numpy.dft as fftpkg
from numpy.lib import i0
import sys
fft = fftpkg.fft
ifft = fftpkg.ifft
fftn = fftpkg.fftn
ifftn = fftpkg.ifftn
fft2 = fftpkg.fft2
ifft2 = fftpkg.ifft2
inv = linpkg.inv
svd = linpkg.svd
solve = linpkg.solve
det = linpkg.det
eig = linpkg.eig
eigvals = linpkg.eigvals
lstsq = linpkg.lstsq
pinv = linpkg.pinv
cholesky = linpkg.cholesky
_restore_dict = {}
def register_func(name, func):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
f = sys._getframe(0).f_globals
_restore_dict[name] = f[name]
f[name] = func
def restore_func(name):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
try:
sys._getframe(0).f_globals[name] = _restore_dict[name]
except KeyError:
pass
| bsd-3-clause | Python |
ab823cb476c4b2244ab360e0ac9e95f26f06b972 | Bring back a needed import, and exclude it from flake checks | gentoo/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org | okupy/wsgi.py | okupy/wsgi.py | # vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
"""
WSGI config for okupy project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "okupy.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "okupy.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
# from http://projects.unbit.it/uwsgi/wiki/TipsAndTricks
# AUTHOR: Simone Federici
try:
# uwsgi module is only available when running from uwsgi
import uwsgi
except ImportError:
# we're probably running from django's built-in server
pass
else:
from uwsgidecorators import postfork, thread, timer
from django.utils import autoreload
# autodiscover SSH handlers
import okupy.accounts.ssh # noqa
from okupy.common.ssh import ssh_main
import Crypto.Random
postfork(thread(ssh_main))
@postfork
def reset_rng():
Crypto.Random.atfork()
@timer(5)
def change_code_gracefull_reload(sig):
if autoreload.code_changed():
uwsgi.reload()
| # vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
"""
WSGI config for okupy project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "okupy.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "okupy.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
# from http://projects.unbit.it/uwsgi/wiki/TipsAndTricks
# AUTHOR: Simone Federici
try:
# uwsgi module is only available when running from uwsgi
import uwsgi
except ImportError:
# we're probably running from django's built-in server
pass
else:
from uwsgidecorators import postfork, thread, timer
from django.utils import autoreload
# autodiscover SSH handlers
from okupy.common.ssh import ssh_main
import Crypto.Random
postfork(thread(ssh_main))
@postfork
def reset_rng():
Crypto.Random.atfork()
@timer(5)
def change_code_gracefull_reload(sig):
if autoreload.code_changed():
uwsgi.reload()
| agpl-3.0 | Python |
6fc1e3a51dbabe096478e78ceb6c3aaff95ead32 | test commit for website | opalmer/pyfarm,opalmer/pyfarm | trunk/pyfarm/client/master.py | trunk/pyfarm/client/master.py | # No shebang line, this module is meant to be imported
#
# This file is part of PyFarm.
# Copyright (C) 2008-2012 Oliver Palmer
#
# PyFarm is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyFarm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyFarm. If not, see <http://www.gnu.org/licenses/>.
'''module for dealing with storage and retrieval of the master'''
from pyfarm import prefs
PORT = prefs.get('network.ports.client')
def getmaster(options):
'''primary function for retrieving the master'''
# ensure both --master and --set-master are not being provided
if options.master is not None and options.set_master is not None:
raise ValueError("--set-master and --master cannot both be defined")
# end getmaster
#
## if either master or set_master are set then we should
## setup the local MASTER variable before we continue
#if options.master or options.set_master:
# port = prefs.get('network.ports.server')
# master = options.master or options.set_master
# MASTER = (master, port)
#
#if options.set_master:
# log.msg(
# "--set-master database calls not implemented",
# level="NOT_IMPLEMENTED",
# system="Client"
# )
#
#elif options.set_master is None and options.master is None:
# log.msg(
# "master from database not implemented",
# level="NOT_IMPLEMENTED",
# system="Client"
# ) | # No shebang line, this module is meant to be imported
#
# This file is part of PyFarm.
# Copyright (C) 2008-2012 Oliver Palmer
#
# PyFarm is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyFarm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyFarm. If not, see <http://www.gnu.org/licenses/>.
'''module for dealing with storage and retrieval of the master'''
from pyfarm import prefs
#PORT = prefs.get('')
def getmaster(options):
'''primary function for retrieving the master'''
print options
# end getmaster
## ensure both --master and --set-master are not being used
#if options.master is not None and options.set_master is not None:
# raise ValueError("--set-master and --master cannot both be defined")
#
## if either master or set_master are set then we should
## setup the local MASTER variable before we continue
#if options.master or options.set_master:
# port = prefs.get('network.ports.server')
# master = options.master or options.set_master
# MASTER = (master, port)
#
#if options.set_master:
# log.msg(
# "--set-master database calls not implemented",
# level="NOT_IMPLEMENTED",
# system="Client"
# )
#
#elif options.set_master is None and options.master is None:
# log.msg(
# "master from database not implemented",
# level="NOT_IMPLEMENTED",
# system="Client"
# ) | apache-2.0 | Python |
9ca655edd5b03fadcb72fd976df6d77f9af8d277 | change quickstart | elastic-event-components/e2c,elastic-event-components/e2c,elastic-event-components/e2c,elastic-event-components/e2c | examples/python/quick_start/app.py | examples/python/quick_start/app.py | from e2c import E2c
config = (
'.run -- action',
'action.out -- print')
e2c = E2c[str, str](config)
e2c.actor('action', lambda data, out: out(data))
e2c.actor('print', lambda data: print(data))
e2c.visualize()
e2c.run('hello')
| from e2c import E2c
config = (
'.run -- action',
'action.out -- print',
'action.out -- print')
def print_data(x: int, data):
print(x, data)
e2c = E2c[str, str](config)
e2c.actor('action', lambda data, out: out(data, {'a': 1}))
e2c.actor('print', print_data)
e2c.visualize()
e2c.run('hello')
| apache-2.0 | Python |
deba1c0bb985fe62000b4474744df9ee1170a8a1 | Use numpy testing | Astroua/TurbuStat,e-koch/TurbuStat | turbustat/tests/test_delvar.py | turbustat/tests/test_delvar.py | # Licensed under an MIT open source license - see LICENSE
'''
Test functions for Delta Variance
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import DeltaVariance, DeltaVariance_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testDelVar(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_DelVar_method(self):
self.tester = \
DeltaVariance(dataset1["integrated_intensity"][0],
dataset1["integrated_intensity"][1],
dataset1["integrated_intensity_error"][0])
self.tester.run()
npt.assert_allclose(self.tester.delta_var, computed_data['delvar_val'])
def test_DelVar_distance(self):
self.tester_dist = \
DeltaVariance_Distance(dataset1["integrated_intensity"],
dataset2["integrated_intensity"],
weights1=dataset1["integrated_intensity_error"][0],
weights2=dataset2["integrated_intensity_error"][0])
self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['delvar_distance'],
decimal=3)
| # Licensed under an MIT open source license - see LICENSE
'''
Test functions for Delta Variance
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import DeltaVariance, DeltaVariance_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testDelVar(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_DelVar_method(self):
self.tester = \
DeltaVariance(dataset1["integrated_intensity"][0],
dataset1["integrated_intensity"][1],
dataset1["integrated_intensity_error"][0])
self.tester.run()
assert np.allclose(self.tester.delta_var, computed_data['delvar_val'])
def test_DelVar_distance(self):
self.tester_dist = \
DeltaVariance_Distance(dataset1["integrated_intensity"],
dataset2["integrated_intensity"],
weights1=dataset1["integrated_intensity_error"][0],
weights2=dataset2["integrated_intensity_error"][0])
self.tester_dist.distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['delvar_distance'])
| mit | Python |
57084fd911c5cef48da11d64b71763d8f922c038 | Check final memory usage, not peak memory usage, in Repeat_Load test | Zorro666/renderdoc,TurtleRockStudios/renderdoc_public,Zorro666/renderdoc,Zorro666/renderdoc,TurtleRockStudios/renderdoc_public,baldurk/renderdoc,Zorro666/renderdoc,baldurk/renderdoc,baldurk/renderdoc,baldurk/renderdoc,baldurk/renderdoc,TurtleRockStudios/renderdoc_public,baldurk/renderdoc,Zorro666/renderdoc,TurtleRockStudios/renderdoc_public,TurtleRockStudios/renderdoc_public,TurtleRockStudios/renderdoc_public,Zorro666/renderdoc | util/test/tests/Repeat_Load.py | util/test/tests/Repeat_Load.py | import rdtest
import os
import renderdoc as rd
class Repeat_Load(rdtest.TestCase):
slow_test = True
def repeat_load(self, path):
memory_usage = memory_baseline = 0
for i in range(20):
rdtest.log.print("Loading for iteration {}".format(i+1))
try:
controller = rdtest.open_capture(path)
except RuntimeError as err:
rdtest.log.print("Skipping. Can't open {}: {}".format(path, err))
return
# Do nothing, just ensure it's loaded
memory_usage: int = rd.GetCurrentProcessMemoryUsage()
# We measure the baseline memory usage during the second peak to avoid any persistent caches etc that might
# not be full
if i == 1:
memory_baseline = memory_usage
controller.Shutdown()
pct_over = 'N/A'
if memory_baseline > 0:
pct_over = '{:.2f}%'.format((memory_usage / memory_baseline)*100)
rdtest.log.success("Succeeded iteration {}, memory usage was {} ({} of baseline)"
.format(i+1, memory_usage, pct_over))
pct_over = '{:.2f}%'.format((memory_usage / memory_baseline)*100)
msg = 'final memory usage was {}, {} compared to baseline {}'.format(memory_usage, pct_over, memory_baseline)
if memory_baseline * 1.25 < memory_usage:
raise rdtest.TestFailureException(msg)
else:
rdtest.log.success(msg)
def run(self):
dir_path = self.get_ref_path('', extra=True)
for file in os.scandir(dir_path):
rdtest.log.print('Repeat loading {}'.format(file.name))
self.repeat_load(file.path)
rdtest.log.success("Successfully repeat loaded {}".format(file.name))
rdtest.log.success("Repeat loaded all files")
| import rdtest
import os
import renderdoc as rd
class Repeat_Load(rdtest.TestCase):
slow_test = True
def repeat_load(self, path):
memory_peak = memory_baseline = 0
for i in range(20):
rdtest.log.print("Loading for iteration {}".format(i))
try:
controller = rdtest.open_capture(path)
except RuntimeError as err:
rdtest.log.print("Skipping. Can't open {}: {}".format(path, err))
return
# Do nothing, just ensure it's loaded
memory_usage: int = rd.GetCurrentProcessMemoryUsage()
# We measure the baseline memory usage during the second peak to avoid any persistent caches etc that might
# not be full
if i == 1:
memory_baseline = memory_usage
memory_peak = max(memory_peak, memory_usage)
controller.Shutdown()
pct_over = 'N/A'
if memory_baseline > 0:
pct_over = '{:.2f}%'.format((memory_usage / memory_baseline)*100)
rdtest.log.success("Succeeded iteration {}, memory usage was {} ({} of baseline)"
.format(i, memory_usage, pct_over))
pct_over = '{:.2f}%'.format((memory_peak / memory_baseline)*100)
msg = 'peak memory usage was {}, {} compared to baseline {}'.format(memory_peak, pct_over, memory_baseline)
if memory_baseline * 1.25 < memory_peak:
raise rdtest.TestFailureException(msg)
else:
rdtest.log.success(msg)
def run(self):
dir_path = self.get_ref_path('', extra=True)
for file in os.scandir(dir_path):
rdtest.log.print('Repeat loading {}'.format(file.name))
self.repeat_load(file.path)
rdtest.log.success("Successfully repeat loaded {}".format(file.name))
rdtest.log.success("Repeat loaded all files")
| mit | Python |
77f410d03d4d182fd7210626ba804767768e3583 | Update C_Salinity_Vertical_sections.py | Herpinemmanuel/Oceanography | Cas_1/Salinity/C_Salinity_Vertical_sections.py | Cas_1/Salinity/C_Salinity_Vertical_sections.py | import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
plt.ion()
dir1 = '/homedata/bderembl/runmit/test_southatlgyre'
ds1 = open_mdsdataset(dir1,iters='all',prefix=['S'])
Height = ds1.S.Z
print(Height)
nx = len(ds1.S.XC)/2
print(nx)
ny = len(ds1.S.YC)/2
print(ny)
nt = -1 #Last Iteration
# Vertical Section of Salinity
plt.figure(1)
ds1['S'].where(ds1.hFacC>0)[nt,:,210,:].plot()
plt.title('Case 1 : Salinity (t=-1 ; YC = 30S)')
plt.savefig('S_Salinity_Vertical_section_xz_cas1'+'.png')
plt.clf()
plt.figure(2)
ds1['S'].where(ds1.hFacC>0)[nt,:,:,560].plot()
plt.title('Case 1 : Salinity (t=-1 ; XC = 0E)')
plt.savefig('S_Salinity_Vertical_section_yz_cas1'+'.png')
plt.clf()
| import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
plt.ion()
dir1 = '/homedata/bderembl/runmit/test_southatlgyre'
ds1 = open_mdsdataset(dir1,iters='all',prefix=['S'])
Height = ds1.S.Z
print(Height)
nx = len(ds1.S.XC)/2
print(nx)
ny = len(ds1.S.YC)/2
print(ny)
nt = -1 #Last Iteration
# Vertical Section of Salinity
plt.figure(1)
ds1['S'].where(ds1.hFacC>0)[nt,:,100,:].plot()
plt.title('Case 1 : Salinity (t=-1 ; YC = 30S)')
plt.savefig('S_Salinity_Vertical_section_xz_cas1'+'.png')
plt.clf()
plt.figure(2)
ds1['S'].where(ds1.hFacC>0)[nt,:,:,280].plot()
plt.title('Case 1 : Salinity (t=-1 ; XC = 0E)')
plt.savefig('S_Salinity_Vertical_section_yz_cas1'+'.png')
plt.clf()
| mit | Python |
2f76a3ef654a56f1f881c266da96f4fa72703d3d | fix extending command regardless of platform | alfredodeza/ceph-doctor | ceph_medic/util/hosts.py | ceph_medic/util/hosts.py | import json
from ceph_medic import config, terminal
from remoto import connection, process
def _platform_options(platform):
try:
namespace = config.file.get_safe(platform, 'namespace', 'rook-ceph')
context = config.file.get_safe(platform, 'context', None)
except RuntimeError:
namespace = 'rook-ceph'
context = None
return {'namespace': namespace, 'context': context}
def container_platform(platform='openshift'):
"""
Connect to a container platform (kubernetes or openshift), retrieve all the
available pods that match the namespace (defaults to 'rook-ceph'), and
return a dictionary including them, regardless of state.
"""
local_conn = connection.get('local')()
options = _platform_options(platform)
context = options.get('context')
namespace = options.get('namespace')
executable = 'oc' if platform == 'openshift' else 'kubectl'
if context:
cmd = [executable, '--context', context]
else:
cmd = [executable]
cmd.extend(['--request-timeout=5', 'get', '-n', namespace, 'pods', '-o', 'json'])
out, err, code = process.check(local_conn, cmd)
if code:
terminal.error('Unable to retrieve the pods using command: %s' % ' '.join(cmd))
raise SystemExit('\n'.join(err))
pods = json.loads(''.join(out))
base_inventory = {
'rgws': [], 'mgrs': [], 'mdss': [], 'clients': [], 'osds': [], 'mons': []
}
label_map = {
'rook-ceph-mgr': 'mgrs',
'rook-ceph-mon': 'mons',
'rook-ceph-osd': 'osds',
'rook-ceph-mds': 'mdss',
'rook-ceph-rgw': 'rgws',
'rook-ceph-client': 'clients',
}
for item in pods.get('items', {}):
label_name = item['metadata'].get('labels', {}).get('app')
if not label_name:
continue
if label_name in label_map:
inventory_key = label_map[label_name]
base_inventory[inventory_key].append(
{'host': item['metadata']['name'], 'group': None}
)
for key, value in dict(base_inventory).items():
if not value:
base_inventory.pop(key)
return base_inventory
| import json
from ceph_medic import config, terminal
from remoto import connection, process
def _platform_options(platform):
namespace = config.file.get_safe(platform, 'namespace', 'rook-ceph')
context = config.file.get_safe(platform, 'context', None)
return {'namespace': namespace, 'context': context}
def container_platform(platform='openshift'):
"""
Connect to a container platform (kubernetes or openshift), retrieve all the
available pods that match the namespace (defaults to 'rook-ceph'), and
return a dictionary including them, regardless of state.
"""
local_conn = connection.get('local')()
options = _platform_options(platform)
context = options.get('context')
namespace = options.get('namespace')
executable = 'oc' if platform == 'openshift' else 'kubectl'
if context:
cmd = [executable, '--context', context]
else:
cmd = [executable]
cmd.extend(['--request-timeout=5', 'get', '-n', namespace, 'pods', '-o', 'json'])
out, err, code = process.check(local_conn, cmd)
if code:
terminal.error('Unable to retrieve the pods using command: %s' % ' '.join(cmd))
raise SystemExit('\n'.join(err))
pods = json.loads(''.join(out))
base_inventory = {
'rgws': [], 'mgrs': [], 'mdss': [], 'clients': [], 'osds': [], 'mons': []
}
label_map = {
'rook-ceph-mgr': 'mgrs',
'rook-ceph-mon': 'mons',
'rook-ceph-osd': 'osds',
'rook-ceph-mds': 'mdss',
'rook-ceph-rgw': 'rgws',
'rook-ceph-client': 'clients',
}
for item in pods['items']:
label_name = item['metadata'].get('labels', {}).get('app')
if not label_name:
continue
if label_name in label_map:
inventory_key = label_map[label_name]
base_inventory[inventory_key].append(
{'host': item['metadata']['name'], 'group': None}
)
for key, value in dict(base_inventory).items():
if not value:
base_inventory.pop(key)
return base_inventory
| mit | Python |
e90211ac7a17118a085ab2cfdf621569ee7eab5a | Fix import path | freeekanayaka/charm-test,freeekanayaka/charmfixture | charmfixtures/testing.py | charmfixtures/testing.py | from testtools import (
TestCase,
try_import,
)
from charmfixtures.filesystem import Filesystem
from charmfixtures.users import Users
from charmfixtures.groups import Groups
from charmfixtures.hooktools.fixture import HookTools
hookenv = try_import("charmhelpers.core.hookenv")
class CharmTest(TestCase):
def setUp(self):
super().setUp()
self.filesystem = self.useFixture(Filesystem())
self.users = self.useFixture(Users())
self.groups = self.useFixture(Groups())
self.hooktools = self.useFixture(HookTools())
# If charmhelpers is around, clear its config cache
hookenv and hookenv.cache.clear()
| from testtools import (
TestCase,
try_import,
)
from charmfixtures.filesystem import Filesystem
from charmfixtures.users import Users
from charmfixtures.groups import Groups
from charmfixtures.hooktools.fixture import HookTools
hookenv = try_import("from charmhelpers.core.hookenv")
class CharmTest(TestCase):
def setUp(self):
super().setUp()
self.filesystem = self.useFixture(Filesystem())
self.users = self.useFixture(Users())
self.groups = self.useFixture(Groups())
self.hooktools = self.useFixture(HookTools())
# If charmhelpers is around, clear its config cache
hookenv and hookenv.cache.clear()
| agpl-3.0 | Python |
cdd2e19aff6fcd86496c35bf39ac1f42d3139c31 | Fix test volume of interest to match data extents | keithroe/vtkoptix,sankhesh/VTK,candy7393/VTK,sumedhasingla/VTK,SimVascular/VTK,msmolens/VTK,sankhesh/VTK,demarle/VTK,gram526/VTK,sankhesh/VTK,candy7393/VTK,keithroe/vtkoptix,demarle/VTK,gram526/VTK,jmerkow/VTK,jmerkow/VTK,SimVascular/VTK,gram526/VTK,keithroe/vtkoptix,jmerkow/VTK,msmolens/VTK,SimVascular/VTK,candy7393/VTK,demarle/VTK,msmolens/VTK,keithroe/vtkoptix,candy7393/VTK,jmerkow/VTK,mspark93/VTK,demarle/VTK,mspark93/VTK,candy7393/VTK,SimVascular/VTK,sumedhasingla/VTK,candy7393/VTK,jmerkow/VTK,demarle/VTK,SimVascular/VTK,msmolens/VTK,jmerkow/VTK,sumedhasingla/VTK,mspark93/VTK,jmerkow/VTK,SimVascular/VTK,sumedhasingla/VTK,SimVascular/VTK,gram526/VTK,sankhesh/VTK,sumedhasingla/VTK,msmolens/VTK,keithroe/vtkoptix,sumedhasingla/VTK,gram526/VTK,mspark93/VTK,sankhesh/VTK,demarle/VTK,demarle/VTK,keithroe/vtkoptix,keithroe/vtkoptix,mspark93/VTK,candy7393/VTK,sankhesh/VTK,sankhesh/VTK,sankhesh/VTK,mspark93/VTK,demarle/VTK,candy7393/VTK,gram526/VTK,sumedhasingla/VTK,gram526/VTK,mspark93/VTK,msmolens/VTK,SimVascular/VTK,gram526/VTK,msmolens/VTK,sumedhasingla/VTK,jmerkow/VTK,msmolens/VTK,keithroe/vtkoptix,mspark93/VTK | Imaging/Core/Testing/Python/resampledTexture.py | Imaging/Core/Testing/Python/resampledTexture.py | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Demonstrate automatic resampling of textures (i.e., OpenGL only handles
# power of two texture maps. This examples exercise's vtk's automatic
# power of two resampling).
#
# get the interactor ui
# create pipeline
#
# generate texture map (not power of two)
v16 = vtk.vtkVolume16Reader()
v16.SetDataDimensions(64,64)
v16.GetOutput().SetOrigin(0.0,0.0,0.0)
v16.SetDataByteOrderToLittleEndian()
v16.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
v16.SetImageRange(1,93)
v16.SetDataSpacing(3.2,3.2,1.5)
extract = vtk.vtkExtractVOI()
extract.SetInputConnection(v16.GetOutputPort())
extract.SetVOI(32,32,0,63,0,92)
atext = vtk.vtkTexture()
atext.SetInputConnection(extract.GetOutputPort())
atext.InterpolateOn()
# gnerate plane to map texture on to
plane = vtk.vtkPlaneSource()
plane.SetXResolution(1)
plane.SetYResolution(1)
textureMapper = vtk.vtkPolyDataMapper()
textureMapper.SetInputConnection(plane.GetOutputPort())
textureActor = vtk.vtkActor()
textureActor.SetMapper(textureMapper)
textureActor.SetTexture(atext)
# Create the RenderWindow, Renderer
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(textureActor)
renWin.SetSize(250,250)
ren1.SetBackground(0.1,0.2,0.4)
iren.Initialize()
# prevent the tk window from showing up then start the event loop
# --- end of script --
| #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Demonstrate automatic resampling of textures (i.e., OpenGL only handles
# power of two texture maps. This examples exercise's vtk's automatic
# power of two resampling).
#
# get the interactor ui
# create pipeline
#
# generate texture map (not power of two)
v16 = vtk.vtkVolume16Reader()
v16.SetDataDimensions(64,64)
v16.GetOutput().SetOrigin(0.0,0.0,0.0)
v16.SetDataByteOrderToLittleEndian()
v16.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter")
v16.SetImageRange(1,93)
v16.SetDataSpacing(3.2,3.2,1.5)
extract = vtk.vtkExtractVOI()
extract.SetInputConnection(v16.GetOutputPort())
extract.SetVOI(32,32,0,63,0,93)
atext = vtk.vtkTexture()
atext.SetInputConnection(extract.GetOutputPort())
atext.InterpolateOn()
# gnerate plane to map texture on to
plane = vtk.vtkPlaneSource()
plane.SetXResolution(1)
plane.SetYResolution(1)
textureMapper = vtk.vtkPolyDataMapper()
textureMapper.SetInputConnection(plane.GetOutputPort())
textureActor = vtk.vtkActor()
textureActor.SetMapper(textureMapper)
textureActor.SetTexture(atext)
# Create the RenderWindow, Renderer
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer, set the background and size
#
ren1.AddActor(textureActor)
renWin.SetSize(250,250)
ren1.SetBackground(0.1,0.2,0.4)
iren.Initialize()
# prevent the tk window from showing up then start the event loop
# --- end of script --
| bsd-3-clause | Python |
7240c24933e712fe98c8cb06670e66f470205e4c | Add start and end options as well as days back for harvesting | zamattiac/SHARE,laurenbarker/SHARE,aaxelb/SHARE,laurenbarker/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,aaxelb/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,aaxelb/SHARE | share/management/commands/harvest.py | share/management/commands/harvest.py | import arrow
import datetime
from django.apps import apps
from django.core.management.base import BaseCommand
from django.conf import settings
from share.models import ShareUser
from share.tasks import HarvesterTask
from share.provider import ProviderAppConfig
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--all', action='store_true', help='Run all harvester')
parser.add_argument('harvester', nargs='*', type=str, help='The name of the harvester to run')
parser.add_argument('--async', action='store_true', help='Whether or not to use Celery')
parser.add_argument('--days-back', type=int, help='The number of days to go back, defaults to 1')
parser.add_argument('--start', type=str, help='The day to start harvesting, in the format YYYY-MM-DD')
parser.add_argument('--end', type=str, help='The day to end harvesting, in the format YYYY-MM-DD')
def handle(self, *args, **options):
user = ShareUser.objects.get(username=settings.APPLICATION_USERNAME)
task_kwargs = {}
if options['days_back'] and (options['start'] or options['end']):
self.stdout.write('Please choose days-back OR a start date with end date, not both')
return
if options['days_back']:
task_kwargs['end'] = (datetime.datetime.utcnow() + datetime.timedelta(days=-(options['days_back'] - 1))).isoformat() + 'Z'
task_kwargs['start'] = (datetime.datetime.utcnow() + datetime.timedelta(days=-options['days_back'])).isoformat() + 'Z'
else:
task_kwargs['start'] = arrow.get(options['start']) if options.get('start') else arrow.utcnow() - datetime.timedelta(days=int(options.get('days_back', 1)))
task_kwargs['end'] = arrow.get(options['end']) if options.get('end') else arrow.utcnow()
if not options['harvester'] and options['all']:
options['harvester'] = [x.label for x in apps.get_app_configs() if isinstance(x, ProviderAppConfig)]
for harvester in options['harvester']:
apps.get_app_config(harvester) # Die if the AppConfig can not be loaded
task_args = (harvester, user.id,)
if options['async']:
HarvesterTask().apply_async(task_args, task_kwargs)
self.stdout.write('Started job for harvester {}'.format(harvester))
else:
self.stdout.write('Running harvester for {}'.format(harvester))
HarvesterTask().apply(task_args, task_kwargs, throw=True)
| import datetime
from django.apps import apps
from django.core.management.base import BaseCommand
from django.conf import settings
from share.models import ShareUser
from share.tasks import HarvesterTask
from share.provider import ProviderAppConfig
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--all', action='store_true', help='Run all harvester')
parser.add_argument('harvester', nargs='*', type=str, help='The name of the harvester to run')
parser.add_argument('--async', action='store_true', help='Whether or not to use Celery')
parser.add_argument('--days-back', type=int, help='The number of days to go back')
def handle(self, *args, **options):
user = ShareUser.objects.get(username=settings.APPLICATION_USERNAME)
task_kwargs = {}
if options['days_back']:
task_kwargs['end'] = (datetime.datetime.utcnow() + datetime.timedelta(days=-(options['days_back'] - 1))).isoformat() + 'Z'
task_kwargs['start'] = (datetime.datetime.utcnow() + datetime.timedelta(days=-options['days_back'])).isoformat() + 'Z'
if not options['harvester'] and options['all']:
options['harvester'] = [x.label for x in apps.get_app_configs() if isinstance(x, ProviderAppConfig)]
for harvester in options['harvester']:
apps.get_app_config(harvester) # Die if the AppConfig can not be loaded
task_args = (harvester, user.id,)
if options['async']:
HarvesterTask().apply_async(task_args, task_kwargs)
self.stdout.write('Started job for harvester {}'.format(harvester))
else:
self.stdout.write('Running harvester for {}'.format(harvester))
HarvesterTask().apply(task_args, task_kwargs, throw=True)
| apache-2.0 | Python |
00ad8303d7b1dcdba9eebfd95ea4ef49662023a2 | Remove test_tvnamer from runtest [#85] | Collisionc/sickbeard_mp4_automator,Filechaser/sickbeard_mp4_automator,dpimenov/tvdb_api,phtagn/sickbeard_mp4_automator,Collisionc/sickbeard_mp4_automator,dbr/tvdb_api,Filechaser/sickbeard_mp4_automator,dpimenov/tvdb_api,phtagn/sickbeard_mp4_automator | tests/runtests.py | tests/runtests.py | #!/usr/bin/env python
#encoding:utf-8
#author:dbr/Ben
#project:tvdb_api
#repository:http://github.com/dbr/tvdb_api
#license:unlicense (http://unlicense.org/)
import sys
import unittest
import test_tvdb_api
def main():
    """Run the tvdb_api test suite; return 0 on success, 1 on failure."""
    # loadTestsFromModule already returns a suite; no need to wrap it
    # in another TestSuite.
    suite = unittest.TestLoader().loadTestsFromModule(test_tvdb_api)
    runner = unittest.TextTestRunner(verbosity=2)
    result = runner.run(suite)
    # Shell exit-code convention: 0 means every test passed.
    return 0 if result.wasSuccessful() else 1
if __name__ == '__main__':
sys.exit(
int(main())
)
| #!/usr/bin/env python
#encoding:utf-8
#author:dbr/Ben
#project:tvdb_api
#repository:http://github.com/dbr/tvdb_api
#license:unlicense (http://unlicense.org/)
import sys
import unittest
import test_tvdb_api
import test_tvnamer
def main():
suite = unittest.TestSuite([
unittest.TestLoader().loadTestsFromModule(test_tvnamer),
unittest.TestLoader().loadTestsFromModule(test_tvdb_api)
])
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(suite)
if result.wasSuccessful():
return 0
else:
return 1
if __name__ == '__main__':
sys.exit(
int(main())
)
| mit | Python |
c9da0810109479f32c746eaec8e8520448ea9ad1 | Bump version | Motiejus/django-webtopay,Motiejus/django-webtopay | webtopay/__init__.py | webtopay/__init__.py | __version__ = (1, 0, 1)
| __version__ = (1, 0, 0)
| mit | Python |
385938c0014c0dffcb8524ef99b374e15af99a34 | handle local imports more effectively; add initial stub XML classification document test | EsriOceans/btm | tests/testMain.py | tests/testMain.py | import os
import unittest
import numpy
import arcpy
from utils import *
# import our constants;
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
from config import *
# import our local directory so we can use the internal modules
import_paths = ['../Install/toolbox', '../Install']
addLocalPaths(import_paths)
class TestBpiScript(unittest.TestCase):
    """Tests for the BPI calculation script."""
    from scripts import bpi

    def testBpiImport(self, method=bpi):
        # assertRaises must receive the callable plus its arguments;
        # the previous code invoked method.main() eagerly, so any
        # ValueError escaped before assertRaises could catch it.
        self.assertRaises(ValueError, method.main, None)

    def testBpiRun(self):
        # TODO: exercise a real BPI run.
        pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
    """Placeholder tests for the standardize_bpi_grids script."""
    from scripts import standardize_bpi_grids

    def testStdImport(self, method=standardize_bpi_grids):
        # Binding the module as a default argument captures it at class
        # definition time; the body is not implemented yet.
        pass

    def testStdRun(self):
        # TODO: exercise a real standardization run.
        pass
class TestBtmDocument(unittest.TestCase):
    # XXX this won't automatically get the right thing... how can we fix it?
    import utils

    def testXMLDocumentExists(self):
        # NOTE(review): xml_doc is expected to come from the module-level
        # `from config import *` — confirm it is defined there.
        self.assertTrue(os.path.exists(xml_doc))
if __name__ == '__main__':
unittest.main()
| import os
import sys
import unittest
import numpy
import arcpy
# import our local directory so we can use the internal modules
local_path = os.path.dirname(__file__)
scripts_path = os.path.join(local_path, '..', 'Install/toolbox')
abs_path = os.path.abspath(scripts_path)
sys.path.insert(0, abs_path)
from scripts import *
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
class TestBpiScript(unittest.TestCase):
from scripts import bpi
def testBpiImport(self, method=bpi):
self.assertRaises(ValueError, method.main(), None)
def testBpiRun(self):
pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
from scripts import standardize_bpi_grids
def testStdImport(self, method=standardize_bpi_grids):
pass
def testStdRun(self):
pass
if __name__ == '__main__':
unittest.main()
| mpl-2.0 | Python |
83bf1a8b682c1c6595bfe82749aac1b1077dc07b | fix style | jendrikseipp/vulture,jendrikseipp/vulture | whitelists/stdlib.py | whitelists/stdlib.py | """
Vulture sometimes reports used code as unused. To avoid these
false-positives, you can write a Python file that explicitly uses the
code and pass it to vulture:
vulture myscript.py mydir mywhitelist.py
This file explicitly uses code from the Python standard library that is
often incorrectly detected as unused.
"""
import ast
import collections
# NodeVisitor methods are called implicitly.
class WhitelistNodeVisitor(ast.NodeVisitor):
    """Visitor on which any ``visit_*`` attribute access succeeds.

    ``__getattr__`` implicitly returns None; merely *accessing* the
    attributes listed below is enough to mark those methods as "used"
    for vulture.
    """
    def __getattr__(self, attr):
        pass
whitelist_node_visitor = WhitelistNodeVisitor()
# TODO: Add missing methods.
whitelist_node_visitor.visit_arg
whitelist_node_visitor.visit_alias
whitelist_node_visitor.visit_Assign
whitelist_node_visitor.visit_Attribute
whitelist_node_visitor.visit_ClassDef
whitelist_node_visitor.visit_comprehension
whitelist_node_visitor.visit_For
whitelist_node_visitor.visit_FunctionDef
whitelist_node_visitor.visit_Import
whitelist_node_visitor.visit_ImportFrom
whitelist_node_visitor.visit_Name
whitelist_node_visitor.visit_Str
# To free memory, the "default_factory" attribute can be set to None.
collections.defaultdict(list).default_factory = None
collections.defaultdict(list).default_factory
| """
Vulture sometimes reports used code as unused. To avoid these
false-positives, you can write a Python file that explicitly uses the
code and pass it to vulture:
vulture myscript.py mydir mywhitelist.py
This file explicitly uses code from the Python standard library that is
often incorrectly detected as unused.
"""
import ast
import collections
# NodeVisitor methods are called implicitly.
class WhitelistNodeVisitor(ast.NodeVisitor):
def __getattr__(self, attr):
pass
whitelist_node_visitor = WhitelistNodeVisitor()
# TODO: Add missing methods.
whitelist_node_visitor.visit_arg
whitelist_node_visitor.visit_alias
whitelist_node_visitor.visit_Assign
whitelist_node_visitor.visit_Attribute
whitelist_node_visitor.visit_ClassDef
whitelist_node_visitor.visit_comprehension
whitelist_node_visitor.visit_For
whitelist_node_visitor.visit_FunctionDef
whitelist_node_visitor.visit_Import
whitelist_node_visitor.visit_ImportFrom
whitelist_node_visitor.visit_Name
whitelist_node_visitor.visit_Str
# To free memory, the "default_factory" attribute can be set to None.
collections.defaultdict(list).default_factory = None
collections.defaultdict(list).default_factory
| mit | Python |
8fe19b02e565da38cd965174e21b80b229cae0a4 | test added. module must has version | oriontvv/pyaspeller | tests/test_cli.py | tests/test_cli.py | from __future__ import print_function
import unittest
import pyaspeller
class TestCLI(unittest.TestCase):
    """Smoke tests for the pyaspeller command-line entry point."""

    def setUp(self):
        print("setUp")
        # NOTE(review): main() is invoked with no arguments before every
        # test; presumably it must be safe to call bare — confirm.
        pyaspeller.main()

    def tearDown(self):
        print("tearDown")

    def test_pyaspeller_has_version(self):
        # The package must expose a __version__ attribute.
        self.assertTrue(hasattr(pyaspeller, '__version__'),
                        "Module pyaspeller must have version")

    def test_simple(self):
        self.assertTrue(2 * 2 == 4, "simple")

    def test_simple2(self):
        self.assertFalse(2 * 2 == 5, "simple2")
| from __future__ import print_function
import unittest
import pyaspeller
class TestCLI(unittest.TestCase):
def setUp(self):
print("setUp")
pyaspeller.main()
def tearDown(self):
print("tearDown")
def test_simple(self):
self.assertTrue(2 * 2 == 4, "simple")
def test_simple2(self):
self.assertFalse(2 * 2 == 5, "simple2")
| apache-2.0 | Python |
44cb01a4ed7f1fcae4cd76c367a479b389e59418 | Update test_toy.py | iminuit/probfit,iminuit/probfit | tests/test_toy.py | tests/test_toy.py | import numpy as np
import matplotlib
matplotlib.use('Agg', warn=False)
from probfit.nputil import mid
from probfit.pdf import crystalball, gaussian, doublecrystalball
from probfit.functor import Normalized
from probfit.toy import gen_toy
from probfit._libstat import compute_chi2
from probfit.nputil import vector_apply
from probfit.costfunc import BinnedLH
def _check_toy_chi2(pdf, gen_kwargs, pdf_args):
    """Generate a toy sample from *pdf* and check it matches the pdf shape.

    The sample is histogrammed and compared with the normalized pdf
    expectation; the reduced chi2 must be close to 1.
    """
    np.random.seed(0)  # deterministic toy sample
    bound = (-1, 2)
    ntoy = 100000
    toy = gen_toy(pdf, ntoy, bound=bound, quiet=False, **gen_kwargs)
    assert len(toy) == ntoy
    htoy, bins = np.histogram(toy, bins=1000, range=bound)
    npdf = Normalized(pdf, bound)

    def f(x):
        # Same parameters used for generation, passed positionally.
        return npdf(x, *pdf_args)

    expected = vector_apply(f, mid(bins)) * ntoy * (bins[1] - bins[0])
    htoy = htoy * 1.0
    err = np.sqrt(expected)
    chi2 = compute_chi2(htoy, expected, err)
    print(chi2, len(bins), chi2 / len(bins))
    assert (0.9 < (chi2 / len(bins)) < 1.1)


def test_gen_toy():
    """Toy generation from a (single) crystalball pdf."""
    _check_toy_chi2(crystalball,
                    dict(alpha=1., n=2., mean=1., sigma=0.3),
                    (1., 2., 1., 0.3))


def test_gen_toy3():
    """Toy generation from a double-sided crystalball pdf."""
    _check_toy_chi2(doublecrystalball,
                    dict(alpha=1., alpha2=1., n=2., n2=2., mean=1., sigma=0.3),
                    (1., 1., 2., 2., 1., 0.3))
def test_gen_toy2():
    """Toy data from a unit gaussian stays inside the bound and fits well."""
    pdf = gaussian
    np.random.seed(0)  # deterministic toy sample
    toy = gen_toy(pdf, 10000, (-5, 5), mean=0, sigma=1)
    binlh = BinnedLH(pdf, toy, bound=(-5, 5), bins=100)
    # Cost function evaluated at the true parameters (mean=0, sigma=1).
    lh = binlh(0., 1.)
    # Every generated point must respect the requested bound.
    for x in toy:
        assert (x < 5)
        assert (x >= -5)
    assert len(toy) == 10000
    # Per-bin value of the cost at the truth should stay below 1.
    assert lh / 100. < 1.
| import numpy as np
import matplotlib
matplotlib.use('Agg', warn=False)
from probfit.nputil import mid
from probfit.pdf import crystalball, gaussian
from probfit.functor import Normalized
from probfit.toy import gen_toy
from probfit._libstat import compute_chi2
from probfit.nputil import vector_apply
from probfit.costfunc import BinnedLH
def test_gen_toy():
np.random.seed(0)
bound = (-1, 2)
ntoy = 100000
toy = gen_toy(crystalball, ntoy, bound=bound,
alpha=1., n=2., mean=1., sigma=0.3, quiet=False)
assert len(toy) == ntoy
htoy, bins = np.histogram(toy, bins=1000, range=bound)
ncball = Normalized(crystalball, bound)
f = lambda x: ncball(x, 1., 2., 1., 0.3)
expected = vector_apply(f, mid(bins)) * ntoy * (bins[1] - bins[0])
htoy = htoy * 1.0
err = np.sqrt(expected)
chi2 = compute_chi2(htoy, expected, err)
print(chi2, len(bins), chi2 / len(bins))
assert (0.9 < (chi2 / len(bins)) < 1.1)
def test_gen_toy2():
pdf = gaussian
np.random.seed(0)
toy = gen_toy(pdf, 10000, (-5, 5), mean=0, sigma=1)
binlh = BinnedLH(pdf, toy, bound=(-5, 5), bins=100)
lh = binlh(0., 1.)
for x in toy:
assert (x < 5)
assert (x >= -5)
assert len(toy) == 10000
assert lh / 100. < 1.
| mit | Python |
03b2aff24014d8860e308486d7fc0256d3e903f3 | change item status | pbl-cloud/paas-manager,pbl-cloud/paas-manager,pbl-cloud/paas-manager | paas_manager/app/app.py | paas_manager/app/app.py | from flask import Flask, render_template, request
from werkzeug import secure_filename
app = Flask(__name__)
class Item:
    """An uploaded jar file tracked by the web UI."""

    def __init__(self, name, filename, status):
        # Use the conventional `self` for the instance argument; the
        # original used `item`, which works but confuses readers.
        self.name = name          # uploader's user name
        self.filename = filename  # original name of the uploaded file
        self.status = status      # processing state, e.g. 'waiting'
# In-memory upload registry shared by both views; lost on restart.
items = []

@app.route("/")
def view_index():
    # Landing page listing every known upload and its status.
    return render_template("index.html", items=items)

@app.route('/upload', methods=['GET', 'POST'])
def upload_file():
    # On POST, store the jar under /tmp/uploads and queue it as 'waiting';
    # both GET and POST fall through to re-render the index page.
    if request.method == 'POST':
        f = request.files['jarfile']
        # secure_filename strips path separators from the client-chosen name.
        # NOTE(review): assumes /tmp/uploads/ already exists — confirm.
        f.save('/tmp/uploads/' + secure_filename(f.filename))
        items.append( Item(request.form['username'], f.filename, 'waiting') )
    return render_template("index.html", items=items)

if __name__ == '__main__':
    app.run(debug=True)
| from flask import Flask, render_template, request
from werkzeug import secure_filename
app = Flask(__name__)
class Item:
def __init__(item, name, filename, status):
item.name = name
item.filename = filename
item.status = status
items = []
@app.route("/")
def view_index():
return render_template("index.html", items=items)
@app.route('/upload', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
f = request.files['jarfile']
f.save('/tmp/uploads/' + secure_filename(f.filename))
items.append( Item(request.form['username'], f.filename, 'uploaded') )
return render_template("index.html", items=items)
if __name__ == '__main__':
app.run(debug=True)
| mit | Python |
789ac1de1e94eda1224fb314ccad14c061c58ad4 | Create empty PactGroup if no arguments given | vmalloc/pact | pact/group.py | pact/group.py | from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
    """A pact aggregating several child pacts; it finishes only when
    every child pact has finished."""

    def __init__(self, pacts=None):
        self._pacts = list(pacts) if pacts is not None else []
        super(PactGroup, self).__init__()

    def __iadd__(self, other):
        # `group += pact` appends another pact to the group.
        self._pacts.append(other)
        return self

    def _is_finished(self):
        # Vacuously true for an empty group, matching all() semantics.
        return all(pact.finished() for pact in self._pacts)

    def _build_wait_predicate(self):
        return GroupWaitPredicate(self._pacts)

    def __str__(self):
        return ", ".join(str(pact) for pact in self._pacts)
| from .base import PactBase
from .utils import GroupWaitPredicate
class PactGroup(PactBase):
def __init__(self, pacts):
self._pacts = list(pacts)
super(PactGroup, self).__init__()
def __iadd__(self, other):
self._pacts.append(other)
return self
def _is_finished(self):
return all(p.finished() for p in self._pacts)
def _build_wait_predicate(self):
return GroupWaitPredicate(self._pacts)
def __str__(self):
return ", ".join(map(str, self._pacts))
| bsd-3-clause | Python |
198d399f0339b1a1b856721cabb3f4c66c203b92 | fix bug | myyyy/wechatserver | wechatclient/clinet.py | wechatclient/clinet.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
import os
import json
import tornado
import tornado.web
import tornado.ioloop
from tornado import gen
from tornado.gen import coroutine
from wechatpy import WeChatClient
from wechatpy.parser import parse_message
import json
from wechatpy.utils import check_signature
from wechatpy.exceptions import InvalidSignatureException
import sys
from storage.machine import Machine
from storage.robot import TuLingRobot
from wechatpy.replies import TextReply
from wechatpy import create_reply
reload(sys)
sys.setdefaultencoding("utf-8")
TOKEN = '123456'
APPID = 'wxecb5391ec8a58227'
SECRET = 'fa32576b9daa6fd020c0104e6092196a'
# OPENID = 'ozJS1syaqn5ztglMsr8ceH8o2zCQ'
class IndexHandler(tornado.web.RequestHandler):
    """Tornado handler for the WeChat server callback endpoint."""

    def get(self):
        # WeChat validates the endpoint by GETting with these query params.
        signature = self.get_argument('signature', '')
        timestamp = self.get_argument('timestamp', '')
        nonce = self.get_argument('nonce', '')
        client = WeChatClient(APPID, SECRET)
        print (client)
        try:
            check_signature(TOKEN, signature, timestamp, nonce)
        except InvalidSignatureException as e:
            self.write(str(e))

    def post(self):
        # Incoming user message, delivered as an XML request body.
        xml = self.request.body
        msg = parse_message(xml)
        # NOTE(review): `msg.content in 'status'` is a substring test the
        # wrong way around — it matches any substring of "status" (e.g.
        # "tat"), not messages containing "status".  Probably meant
        # `msg.content == 'status'`; confirm before changing.
        if msg.content in 'status':
            data = Machine().fast_data
            # NOTE(review): this TextReply is immediately overwritten by
            # create_reply on the next line — one of the two is redundant.
            reply = TextReply(content=data, message=msg)
            reply = create_reply(data, message=msg)
            _reply = reply.render()
            self.write(_reply)
        else:
            # Fall back to the TuLing chat bot for any other message.
            robot = TuLingRobot(msg.content)
            reply = TextReply(content=robot.reply, message=msg)
            _reply = reply.render()
            self.write(_reply)
if __name__ == '__main__':
app = tornado.web.Application(
handlers=[
(r'/wechat/', IndexHandler),
],
debug=True,
)
app.listen(1121)
print('server start on 127.0.0.1:1121')
tornado.ioloop.IOLoop.instance().start()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals, print_function
import os
import json
import tornado
import tornado.web
import tornado.ioloop
from tornado import gen
from tornado.gen import coroutine
from wechatpy import WeChatClient
from wechatpy.parser import parse_message
import json
from wechatpy.utils import check_signature
from wechatpy.exceptions import InvalidSignatureException
import sys
from storage.machine import Machine
from storage.robot import TuLingRobot
from wechatpy.replies import TextReply
from wechatpy import create_reply
reload(sys)
sys.setdefaultencoding("utf-8")
TOKEN = '123456'
APPID = 'wxecb5391ec8a58227'
SECRET = 'fa32576b9daa6fd020c0104e6092196a'
# OPENID = 'ozJS1syaqn5ztglMsr8ceH8o2zCQ'
class IndexHandler(tornado.web.RequestHandler):
def get(self):
signature = self.get_argument('signature', '')
timestamp = self.get_argument('timestamp', '')
nonce = self.get_argument('nonce', '')
client = WeChatClient(APPID, SECRET)
print (client)
try:
check_signature(TOKEN, signature, timestamp, nonce)
except InvalidSignatureException as e:
self.write(str(e))
def post(self):
xml = self.request.body
msg = parse_message(xml)
if msg.content in 'status':
data = Machine().fast_data
reply = TextReply(content=data, message=msg)
_reply = create_reply(data, message=msg)
_reply = reply.render()
self.write(_reply)
else:
robot = TuLingRobot(msg.content)
reply = TextReply(content=robot.reply, message=msg)
_reply = reply.render()
self.write(_reply)
if __name__ == '__main__':
app = tornado.web.Application(
handlers=[
(r'/wechat/', IndexHandler),
],
debug=True,
)
app.listen(1121)
print('server start on 127.0.0.1:1121')
tornado.ioloop.IOLoop.instance().start()
| mit | Python |
c1e1fc49a92b1c608293adad4810df1ff4f08f19 | fix import order | vicalloy/django-lb-workflow,vicalloy/django-lb-workflow,vicalloy/django-lb-workflow | lbworkflow/tests/urls.py | lbworkflow/tests/urls.py | from django.conf.urls import include
from django.conf.urls import url
urlpatterns = [
    # Mount all lbworkflow URLs under the /wf/ prefix.
    url(r'^wf/', include('lbworkflow.urls')),
]
| from django.conf.urls import url
from django.conf.urls import include
urlpatterns = [
url(r'^wf/', include('lbworkflow.urls')),
]
| mit | Python |
927bd523d7f42d9fc151b2cf223107f93846a77b | remove utf8 annotations | ResolveWang/WeiboSpider,ResolveWang/WeiboSpider | logger/log.py | logger/log.py | import os
import logging
import logging.config as log_conf
# Logs live in <package root>/logs, two levels above this file.
log_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), 'logs')
# makedirs(..., exist_ok=True) avoids the check-then-create race of
# exists()+mkdir() and also creates missing parent directories.
os.makedirs(log_dir, exist_ok=True)
log_path = os.path.join(log_dir, 'weibo.log')
log_config = {
'version': 1.0,
'formatters': {
'detail': {
'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
'datefmt': "%Y-%m-%d %H:%M:%S"
},
'simple': {
'format': '%(name)s - %(levelname)s - %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'detail'
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'maxBytes': 1024 * 1024 * 5,
'backupCount': 10,
'filename': log_path,
'level': 'INFO',
'formatter': 'detail',
'encoding': 'utf-8',
},
},
'loggers': {
'crawler': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
},
'parser': {
'handlers': ['file'],
'level': 'INFO',
},
'other': {
'handlers': ['console', 'file'],
'level': 'INFO',
},
'storage': {
'handlers': ['file'],
'level': 'INFO',
}
}
}
# Install the configuration, then expose one named logger per subsystem
# (handler wiring per the 'loggers' section of log_config above).
log_conf.dictConfig(log_config)
crawler = logging.getLogger('crawler')
parser = logging.getLogger('parser')
other = logging.getLogger('other')
storage = logging.getLogger('storage') | # coding:utf-8
import os
import logging
import logging.config as log_conf
log_dir = os.path.dirname(os.path.dirname(__file__))+'/logs'
if not os.path.exists(log_dir):
os.mkdir(log_dir)
log_path = os.path.join(log_dir, 'weibo.log')
log_config = {
'version': 1.0,
'formatters': {
'detail': {
'format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
'datefmt': "%Y-%m-%d %H:%M:%S"
},
'simple': {
'format': '%(name)s - %(levelname)s - %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'detail'
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'maxBytes': 1024 * 1024 * 5,
'backupCount': 10,
'filename': log_path,
'level': 'INFO',
'formatter': 'detail',
'encoding': 'utf-8',
},
},
'loggers': {
'crawler': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
},
'parser': {
'handlers': ['file'],
'level': 'INFO',
},
'other': {
'handlers': ['console', 'file'],
'level': 'INFO',
},
'storage': {
'handlers': ['file'],
'level': 'INFO',
}
}
}
log_conf.dictConfig(log_config)
crawler = logging.getLogger('crawler')
parser = logging.getLogger('parser')
other = logging.getLogger('other')
storage = logging.getLogger('storage') | mit | Python |
7b40cb4f7e1e01d8b0e63c7310867cd1b7daf48e | update version | arcticfoxnv/slackminion,arcticfoxnv/slackminion | slackminion/plugins/core/__init__.py | slackminion/plugins/core/__init__.py | version = '0.9.11'
| version = '0.9.10'
| mit | Python |
df3a1860ec4debc432ad214a0b88e6461cb30879 | Fix octohatrack.py | glasnt/octohat,LABHR/octohatrack | octohatrack.py | octohatrack.py | import octohatrack
# Thin launcher: delegate straight to the package's CLI entry point.
octohatrack.main()
| import octohat
octohat.main()
| bsd-3-clause | Python |
d1ec190f1a4dc84db0540481f2489f1db8421799 | Enable specifying the password in `config.ini` | oemof/oemof.db | oemof_pg/db.py | oemof_pg/db.py | from configparser import NoOptionError as option, NoSectionError as section
from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
    """Open and return a SQLAlchemy connection to the configured postGIS DB.

    The password is looked up in the system keyring first; if absent, it
    falls back to the ``pw`` option of the ``[postGIS]`` config section.
    """
    pw = keyring.get_password(cfg.get("postGIS", "database"),
        cfg.get("postGIS", "username"))
    if pw is None:
        # Keyring had nothing; try the plain config file instead.
        try: pw = cfg.get("postGIS", "pw")
        except option:
            # NOTE(review): exit(-1) from a library function kills the
            # whole process; consider raising instead — confirm callers.
            print("Unable to find the database password in " +
                  "the oemof config or keyring." +
                  "\nExiting.")
            exit(-1)
        except section:
            print("Unable to find the 'postGIS' section in oemof's config." +
                  "\nExiting.")
            exit(-1)
    engine = create_engine(
        "postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
            user=cfg.get("postGIS", "username"),
            passwd=pw,
            host=cfg.get("postGIS", "host"),
            db=cfg.get("postGIS", "database"),
            port=int(cfg.get("postGIS", "port"))))
    return engine.connect()
| from sqlalchemy import create_engine
import keyring
from . import config as cfg
def connection():
engine = create_engine(
"postgresql+psycopg2://{user}:{passwd}@{host}:{port}/{db}".format(
user=cfg.get("postGIS", "username"),
passwd=keyring.get_password(
cfg.get("postGIS", "database"),
cfg.get("postGIS", "username")),
host=cfg.get("postGIS", "host"),
db=cfg.get("postGIS", "database"),
port=int(cfg.get("postGIS", "port"))))
return engine.connect()
| mit | Python |
2ae5c6cb0a82e15ee224b2772b9390c6caf764af | Remove unused code | stevecshanks/trello-next-actions | nextactions/board.py | nextactions/board.py | from nextactions.list import List
class Board:
    """Wrapper around a Trello board as returned by the API."""

    def __init__(self, trello, json):
        self._trello = trello
        self.id = json['id']
        self.name = json['name']

    def getLists(self):
        """Fetch every list on this board (without its cards)."""
        url = 'https://api.trello.com/1/boards/' + self.id + '/lists'
        payload = self._trello.get(url, {'cards': "none"})
        return [List(self._trello, entry) for entry in payload]

    def getListByName(self, name):
        """Return the first list whose name matches exactly, or None."""
        return next(
            (candidate for candidate in self.getLists()
             if candidate.name == name),
            None)
| from nextactions.list import List
class Board:
def __init__(self, trello, json):
self._trello = trello
self.id = json['id']
self.name = json['name']
# TODO remove me
self.nextActionList = []
def getLists(self):
json = self._trello.get(
'https://api.trello.com/1/boards/' + self.id + '/lists',
{'cards': "none"}
)
return [List(self._trello, j) for j in json]
def getListByName(self, name):
matches = [l for l in self.getLists() if l.name == name]
return matches[0] if len(matches) else None
| mit | Python |
d66c95a6441d9c872819916d80b70f15580580eb | create a command line interface | ioO/nommer | nommer_cli.py | nommer_cli.py | import argparse
from nommer import *
def main():
    """CLI entry point: parse the word-list argument and print the names."""
    parser = argparse.ArgumentParser()
    parser.add_argument("words", help="list of words separated by coma", type=str)
    args = parser.parse_args()
    if args.words:
        names = process(args.words)
        display(names)
def process(words):
    """Run the nommer pipeline on a comma-separated word string.

    NOTE(review): the helper functions below come from the module-level
    ``from nommer import *``; their exact semantics are not visible here.
    """
    word_list = words.split(',')
    list_index = get_list_index(word_list)
    index_range = find_index_range(list_index)
    list_integer = create_list_integer_range(index_range, len(list_index))
    index_string = create_possible_index_string(list_index, list_integer)
    possible_index = create_possible_index(index_string)
    combinations = create_combination(word_list, possible_index)
    names = create_name(combinations)
    return names
def display(names):
    """Print each generated name on its own line."""
    for candidate in names:
        print(candidate)
if __name__ == '__main__':
main()
| unlicense | Python | |
d50281dcaee29d9879ec88e517a723b5fafe6cdf | fix ics files for outlook 2003 | allink/woodstock | woodstock/views/ics.py | woodstock/views/ics.py | from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from woodstock.models import EventPart
from woodstock.views.decorators import registration_required
from woodstock import settings
import icalendar
import datetime
import random
import hashlib
def _event_part_ics(event_parts):
    """Build an HttpResponse serving *event_parts* as an iCalendar download."""
    cal = icalendar.Calendar()
    cal.add('prodid', '-//Woodstock//')
    cal.add('version', '2.0')
    # METHOD:REQUEST added for Outlook 2003 compatibility (per commit note).
    cal.add('method', 'REQUEST')
    for event_part in event_parts:
        event = icalendar.Event()
        event.add('summary', settings.ICS_EVENT_PART_NAME % {'event_name':event_part.event.translation.name, 'part_name':event_part.name})
        event.add('dtstart', event_part.date_start)
        event.add('dtend', event_part.date_end)
        event.add('dtstamp', datetime.datetime.now())
        # UID must be unique per event; the random md5 suffix avoids clashes.
        event['uid'] = '%s/%s/woodstock' % (event_part.id, hashlib.md5(str(random.random())).hexdigest()[:10])
        event.add('priority', 5)
        cal.add_component(event)
    response = HttpResponse(cal.as_string(),mimetype="text/calendar")
    response['Content-Disposition'] = 'attachment; filename=event.ics'
    return response
def event_part_view(request, event_part_id):
    """Serve one event part as a downloadable .ics calendar file."""
    event_part = get_object_or_404(EventPart, pk=event_part_id)
    # _event_part_ics already returns a complete HttpResponse with the
    # calendar content type and Content-Disposition header set; the
    # previous code wrapped that response object in a *second*
    # HttpResponse, which serialized the response object itself as the
    # body instead of the calendar text.
    return _event_part_ics([event_part])
def event_parts_email_view(request, participant, event):
    """
    This view is showed using the pennyblack proxy view.
    """
    event_parts = EventPart.objects.filter(event=event, attendances__participant=participant)
    # Return the helper's HttpResponse directly instead of wrapping it in
    # another HttpResponse (which corrupted the response body).
    return _event_part_ics(event_parts)
| from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from woodstock.models import EventPart
from woodstock.views.decorators import registration_required
from woodstock import settings
import icalendar
import datetime
import random
import hashlib
def _event_part_ics(event_parts):
cal = icalendar.Calendar()
cal.add('prodid', '-//Woodstock//')
cal.add('version', '2.0')
for event_part in event_parts:
event = icalendar.Event()
event.add('summary', settings.ICS_EVENT_PART_NAME % {'event_name':event_part.event.translation.name, 'part_name':event_part.name})
event.add('dtstart', event_part.date_start)
event.add('dtend', event_part.date_end)
event.add('dtstamp', datetime.datetime.now())
event['uid'] = '%s/%s/woodstock' % (event_part.id, hashlib.md5(str(random.random())).hexdigest()[:10])
event.add('priority', 5)
cal.add_component(event)
response = HttpResponse(cal.as_string(),mimetype="text/calendar")
response['Content-Disposition'] = 'attachment; filename=event.ics'
return response
def event_part_view(request, event_part_id):
event_part = get_object_or_404(EventPart, pk=event_part_id)
ics = _event_part_ics([event_part])
response = HttpResponse(ics,mimetype="text/calendar")
response['Content-Disposition'] = 'attachment; filename=event.ics'
return response
def event_parts_email_view(request, participant, event):
"""
This view is showed using the pennyblack proxy view.
"""
event_parts = EventPart.objects.filter(event=event, attendances__participant=participant)
ics = _event_part_ics(event_parts)
response = HttpResponse(ics,mimetype="text/calendar")
response['Content-Disposition'] = 'attachment; filename=event.ics'
return response
| bsd-3-clause | Python |
33dcaee26c1df5ce690eeb9c2104904bb49043a3 | create classifier for each request | mgp4/prague-transport-2017,mgp4/prague-transport-2017,mgp4/prague-transport-2017 | task2/task.py | task2/task.py | from sklearn import tree
def prepare_classifier(clf, data):
    """Fit *clf* on the labelled measurement records in *data*.

    Each record supplies the feature triple
    (noise-level, brake-distance, vibrations) and its 'type' label.
    """
    features = [
        (record['noise-level'], record['brake-distance'], record['vibrations'])
        for record in data
    ]
    labels = [record['type'] for record in data]
    clf.fit(features, labels)
def predict(clf, data):
    """Classify each sample; return a list of {'id', 'type'} dicts."""
    predictions = []
    for sample in data:
        triple = (sample['noise-level'],
                  sample['brake-distance'],
                  sample['vibrations'])
        # predict() takes a batch; we classify one sample at a time.
        label = clf.predict([triple])[0]
        predictions.append({'id': sample['id'], 'type': label})
    return predictions
def solve_task(data):
    """Train a decision tree on data['measurements'] and classify
    data['samples']; return None if either key is missing."""
    try:
        learning_set = data['measurements']
        testing_set = data['samples']
    except KeyError:
        # Dict subscripting raises KeyError, not IndexError, for a
        # missing key — the original except clause could never fire.
        return None  # or raise some custom exception?
    clf = tree.DecisionTreeClassifier()
    prepare_classifier(clf, learning_set)
    return predict(clf, testing_set)
| from sklearn import tree
clf = tree.DecisionTreeClassifier()
def prepare_classifier(data):
target = []
_input = []
for a in data:
target.append(a['type'])
_input.append((a['noise-level'], a['brake-distance'], a['vibrations']))
clf.fit(_input, target)
def predict(data):
result = []
for a in data:
_input = ((a['noise-level'], a['brake-distance'], a['vibrations']))
cls = clf.predict(_input)[0]
result.append({'id': a['id'], 'type': cls})
return result
def solve_task(data):
try:
learning_set = data['measurements']
testing_set = data['samples']
except IndexError:
return None # or raise some custom excpetion?
prepare_classifier(learning_set)
response = predict(testing_set)
return response
| mit | Python |
b73e7fb61b08c3322ba0c6796d87ec79f5451a93 | Update exceptions.py | Intelworks/cabby | taxii_client/exceptions.py | taxii_client/exceptions.py |
class ClientException(Exception):
pass
class UnsuccessfulStatusError(ClientException):
def __init__(self, taxii_status, *args, **kwargs):
super(UnsuccessfulStatusError, self).__init__(status_to_message(taxii_status), *args, **kwargs)
self.status = taxii_status.status_type
self.text = taxii_status.to_text()
self.raw = taxii_status
class AmbiguousServicesError(ClientException):
pass
class ServiceNotFoundError(ClientException):
pass
class NoURIProvidedError(ValueError):
pass
def status_to_message(status):
l = [status.status_type]
if status.status_detail:
l += [dict_to_pairs(status.status_detail)]
if status.extended_headers:
l += [dict_to_pairs(status.extended_headers)]
if status.message:
l += [status.message]
return "; ".join( l )
def dict_to_pairs(d):
pairs = []
for k, v in d.items():
pairs.append('%s=%s' % (k, v))
return ", ".join(pairs)
|
class ClientException(Exception):
pass
class UnsuccessfulStatusError(ClientException):
def __init__(self, taxii_status, *args, **kwargs):
super(UnsuccessfulStatusError, self).__init__(status_to_message(taxii_status), *args, **kwargs)
self.status = taxii_status.status_type
self.text = taxii_status.to_text()
self.raw = taxii_status
class AmbiguousServicesError(ClientException):
pass
class ServiceNotFoundError(ClientException):
pass
class NoURIProvidedError(ValueError):
pass
def status_to_message(status):
message = status.status_type
if status.status_detail:
message += '; %s;' % dict_to_pairs(status.status_detail)
if status.extended_headers:
message += '; %s;' % dict_to_pairs(status.extended_headers)
if status.message:
message += '; message=%s' % status.message
return message
def dict_to_pairs(d):
pairs = []
for k, v in d.items():
pairs.append('%s=%s' % (k, v))
return ", ".join(pairs)
| bsd-3-clause | Python |
836002d3d0957ea74e6d456afb5bdfd282377875 | use correct template names | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/utils/extend.py | salt/utils/extend.py | # -*- coding: utf-8 -*-
'''
SaltStack Extend
'''
from __future__ import absolute_import
from datetime import date
import tempfile
from shutil import copytree
import logging
log = logging.getLogger(__name__)
try:
from cookiecutter.main import cookiecutter as cookie
import cookiecutter.prompt as prompt
HAS_COOKIECUTTER = True
except ImportError as ie:
HAS_COOKIECUTTER = False
MODULE_OPTIONS = [
('module', 'Execution module'),
('state', 'State module'),
]
def run(extension=None, name=None, salt_dir='.', merge=False, temp_dir=None):
assert HAS_COOKIECUTTER, "Cookiecutter is not installed, please install using pip or " \
"from https://github.com/audreyr/cookiecutter"
if extension is None:
print('Choose which kind of extension you are developing for SaltStack')
extension_type = 'Extension type'
extension_type = prompt.read_user_choice(extension_type, MODULE_OPTIONS)[0]
else:
assert extension in list(zip(*MODULE_OPTIONS))[0], "Module extension option not valid"
extension_type = extension
if name is None:
print('Enter the short name for the module (e.g. mymodule)')
name = prompt.read_user_variable('Module name', '')
short_description = prompt.read_user_variable('Short description of the module', '')
template_dir = 'templates/{0}'.format(extension_type)
project_name = name
param_dict = {
"full_name": "",
"email": "",
"project_name": project_name,
"repo_name": project_name,
"project_short_description": short_description,
"release_date": date.today().strftime('%Y-%m-%d'),
"year": date.today().strftime('%Y'),
}
if temp_dir is None:
temp_dir = tempfile.mkdtemp()
cookie(template=template_dir,
no_input=True,
extra_context=param_dict,
output_dir=temp_dir)
if not merge:
print('New module stored in {0}'.format(temp_dir))
else:
copytree(temp_dir, salt_dir)
print('New module stored in {0}'.format(salt_dir))
if __name__ == '__main__':
run()
| # -*- coding: utf-8 -*-
'''
SaltStack Extend
'''
from __future__ import absolute_import
from datetime import date
import tempfile
from shutil import copytree
try:
import logging
log = logging.getLogger(__name__)
from cookiecutter.main import cookiecutter as cookie
import cookiecutter.prompt as prompt
HAS_COOKIECUTTER = True
except ImportError as ie:
HAS_COOKIECUTTER = False
MODULE_OPTIONS = [
('module', 'Execution module'),
('state', 'State module'),
]
def run(extension=None, name=None, salt_dir='.', merge=False, temp_dir=None):
assert HAS_COOKIECUTTER, "Cookiecutter is not installed, please install using pip"
if extension is None:
print('Choose which kind of extension you are developing for SaltStack')
extension_type = 'Extension type'
extension_type = prompt.read_user_choice(extension_type, MODULE_OPTIONS)
else:
assert extension in list(zip(MODULE_OPTIONS)[0]), "Module extension option not valid"
extension_type = extension
if name is None:
print('Enter the short name for the module (e.g. mymodule)')
name = prompt.read_user_variable('Module name', '')
short_description = prompt.read_user_variable('Short description of the module', '')
template_dir = 'templates/{0}'.format(extension_type[0])
project_name = name
param_dict = {
"full_name": "",
"email": "",
"project_name": project_name,
"repo_name": project_name,
"project_short_description": short_description,
"release_date": date.today().strftime('%Y-%m-%d'),
"year": date.today().strftime('%Y'),
}
if temp_dir is None:
temp_dir = tempfile.mkdtemp()
cookie(template=template_dir,
no_input=True,
extra_context=param_dict,
output_dir=temp_dir)
if not merge:
print('New module stored in {0}'.format(temp_dir))
else:
copytree(temp_dir, salt_dir)
print('New module stored in {0}'.format(salt_dir))
if __name__ == '__main__':
run()
| apache-2.0 | Python |
3f9685420f55fbfb910744a1d5d9c6aa0f29b78e | Set version to 0.5.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | saltcloud/version.py | saltcloud/version.py | __version_info__ = (0, 5, 0)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (0, 2)
__version__ = '.'.join(map(str, __version_info__))
| apache-2.0 | Python |
2a65ca70cb35983ce2b7d2f921e816780855731b | Test kifparse()/kifserialize() invariance. | pySUMO/pysumo,pySUMO/pysumo | test/lib/parser.py | test/lib/parser.py | """ The PyUnit test framework for the parser. """
import unittest
import subprocess
from tempfile import mkdtemp
from shutil import rmtree
from lib import parser
class wParseTestCase(unittest.TestCase):
def test0Tokenize(self):
line = '10495555 18 n 05 pusher 1 drug_peddler 0 peddler 1 drug_dealer 0 drug_trafficker 0 004 @ 10721470 n 0000 @ 09977660 n 0000 + 02302817 v 0301 + 02245555 v 0101 | an unlicensed dealer in illegal drugs &%Position+'
position = parser._wtokenize(line, parser.Pos.noun).pop()
assert position.sumo_concept == 'Position'
assert position.suffix == '+'
assert position.synset_offset == 2245555
assert position.lex_filenum == 18
assert position.ss_type == parser.SSType.noun
assert position.synset == {1: ('pusher', None, 1), 2: ('drug_peddler', None, 0), 3: ('peddler', None, 1), 4: ('drug_dealer', None, 0), 5: ('drug_trafficker', None, 0)}
assert position.ptr_list == [('@', parser.Pos.noun, 10721470, 0, 0), ('@', parser.Pos.noun, 9977660, 0, 0), ('+', parser.Pos.verb, 2302817, 3, 1), ('+', parser.Pos.verb, 2245555, 1, 1)]
assert position.frames is None
assert position.gloss == 'an unlicensed dealer in illegal drugs'
def test1Full(self):
parser.wparse('data')
wParseSuit = unittest.makeSuite(wParseTestCase, 'test')
class kifParseSerilizeTest(unittest.TestCase):
def test0ParseSerilize(self):
tempd = mkdtemp()
out1 = "/".join([tempd, "out1"])
out2 = "/".join([tempd, "out2"])
f = "data/Merge.kif"
o = parser.Ontology(f)
a = parser.kifparse(o)
o.path = out1
parser.kifserialize(a, o)
a = parser.kifparse(o)
o.path = out2
parser.kifserialize(a, o)
ret = subprocess.call(["diff", out1, out2])
rmtree(tempd)
assert ret == 0
kifParseSuit = unittest.makeSuite(kifParseSerilizeTest, 'test')
if __name__ == "__main__":
runner = unittest.TextTestRunner()
runner.run(wParseSuit)
runner.run(kifParseSuit)
| """ The PyUnit test framework for the parser. """
import unittest
from lib import parser
class wParseTestCase(unittest.TestCase):
def test0Tokenize(self):
line = '10495555 18 n 05 pusher 1 drug_peddler 0 peddler 1 drug_dealer 0 drug_trafficker 0 004 @ 10721470 n 0000 @ 09977660 n 0000 + 02302817 v 0301 + 02245555 v 0101 | an unlicensed dealer in illegal drugs &%Position+'
position = parser._wtokenize(line, parser.Pos.noun).pop()
assert position.sumo_concept == 'Position'
assert position.suffix == '+'
assert position.synset_offset == 2245555
assert position.lex_filenum == 18
assert position.ss_type == parser.SSType.noun
assert position.synset == {1: ('pusher', None, 1), 2: ('drug_peddler', None, 0), 3: ('peddler', None, 1), 4: ('drug_dealer', None, 0), 5: ('drug_trafficker', None, 0)}
assert position.ptr_list == [('@', parser.Pos.noun, 10721470, 0, 0), ('@', parser.Pos.noun, 9977660, 0, 0), ('+', parser.Pos.verb, 2302817, 3, 1), ('+', parser.Pos.verb, 2245555, 1, 1)]
assert position.frames is None
assert position.gloss == 'an unlicensed dealer in illegal drugs'
def test1Full(self):
parser.wparse('data')
wParseSuit = unittest.makeSuite(wParseTestCase, 'test')
if __name__ == "__main__":
runner = unittest.TextTestRunner()
runner.run(wParseSuit)
| bsd-2-clause | Python |
901a47adf6726d50c01ac743e9661c0caac2b555 | Check to ensure the excpetions return the text we expect. | golliher/dg-tickler-file | test_openfolder.py | test_openfolder.py | import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| mit | Python |
b8ae587da8b3cecf6b1f2fbe8899f0629e24448b | remove main mathod | rahulrrixe/libcloudCLI | libcloudcli/configure.py | libcloudcli/configure.py | import os
import re
import sys
import logging
import configparser
def readConfigure():
try:
root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
# currently config.ini is in example folder
config_path = os.path.join(root, 'examples/config.ini')
# TODO: There is a better way to log this message than print.
print "Reading secrets from %r" % config_path
parser = configparser.ConfigParser()
parser.read(config_path)
config_data = dict(parser.items("default"))
except Exception as e:
# TODO: There is a better way to log this message than print.
print 'Failed to load config.ini. Reason: %r' % str(e)
| import os
import re
import sys
import logging
import configparser
def main():
try:
root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
# currently config.ini is in example folder
config_path = os.path.join(root, 'examples/config.ini')
# TODO: There is a better way to log this message than print.
print "Reading secrets from %r" % config_path
parser = configparser.ConfigParser()
parser.read(config_path)
config_data = dict(parser.items("default"))
except Exception as e:
# TODO: There is a better way to log this message than print.
print 'Failed to load config.ini. Reason: %r' % str(e)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
4a4b1ec09d0c0757ff5dd5a91ab59406903f96fa | Fix api routes | alexandermendes/pybossa-analyst,LibCrowds/libcrowds-analyst,alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst | libcrowds_analyst/api.py | libcrowds_analyst/api.py | # -*- coding: utf8 -*-
import json
from rq import Queue
from redis import Redis
from flask import Blueprint, request, current_app, jsonify, abort
from libcrowds_analyst import analysis
BP = Blueprint('api', __name__)
QUEUE = Queue('libcrowds_analyst', connection=Redis())
MINUTE = 60
def analyse(func):
"""Analyse a webhook."""
payload = json.loads(request.json) or {}
if payload['event'] == 'task_completed':
payload['api_key'] = current_app.config['API_KEY']
payload['endpoint'] = current_app.config['ENDPOINT']
payload['doi'] = current_app.config['DOI']
QUEUE.enqueue_call(func=func, kwargs=payload, timeout=10*MINUTE)
response = jsonify({
"message": "Accepted",
"status": 202
})
response.status_code = 202
return response
abort(400)
@BP.route('convert-a-card', methods=['GET', 'POST'])
def convert_a_card():
"""Endpoint for Convert-a-Card webhooks."""
if request.method != 'POST':
abort(405)
return analyse(analysis.convert_a_card.analyse)
@BP.route('playbills/select', methods=['GET', 'POST'])
def playbills_mark():
"""Endpoint for In the Spotlight select task webhooks."""
if request.method != 'POST':
abort(405)
return analyse(analysis.playbills.analyse_selections)
| # -*- coding: utf8 -*-
import json
from rq import Queue
from redis import Redis
from flask import Blueprint, request, current_app, jsonify, abort
from libcrowds_analyst import analysis
BP = Blueprint('api', __name__)
QUEUE = Queue('libcrowds_analyst', connection=Redis())
MINUTE = 60
def analyse(func):
"""Analyse a webhook."""
payload = json.loads(request.json) or {}
if payload['event'] == 'task_completed':
payload['api_key'] = current_app.config['API_KEY']
payload['endpoint'] = current_app.config['ENDPOINT']
payload['doi'] = current_app.config['DOI']
QUEUE.enqueue_call(func=func, kwargs=payload, timeout=10*MINUTE)
response = jsonify({
"message": "Accepted",
"status": 202
})
response.status_code = 202
return response
abort(400)
@BP.route('/convert-a-card', methods=['GET', 'POST'])
def convert_a_card():
"""Endpoint for Convert-a-Card webhooks."""
if request.method != 'POST':
abort(405)
return analyse(analysis.convert_a_card.analyse)
@BP.route('/playbills/select', methods=['GET', 'POST'])
def playbills_mark():
"""Endpoint for In the Spotlight select task webhooks."""
if request.method != 'POST':
abort(405)
return analyse(analysis.playbills.analyse_selections)
| unknown | Python |
2d7dbe1a30edd2a915e8b1b60db8fe8dbf6402e0 | bump version | ingresse/message-queue-python | message_queue/__init__.py | message_queue/__init__.py | __version__ = '0.3.3'
from message_queue.logger import *
from message_queue.publisher import Publisher
from message_queue.message import Message
from message_queue.subscriber import Subscriber
from message_queue.adapters import *
| __version__ = '0.3.2'
from message_queue.logger import *
from message_queue.publisher import Publisher
from message_queue.message import Message
from message_queue.subscriber import Subscriber
from message_queue.adapters import *
| bsd-3-clause | Python |
abd85c1ade671ba991caacd73dad0dc77190c865 | Fix upload field character limit | cweems/sendcertified,cweems/sendcertified,cweems/sendcertified | main/forms.py | main/forms.py | from django import forms
from .models import MailOrder
from tinymce.widgets import TinyMCE
class GoogleAddressForm(forms.Form):
sender_street_number = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_route = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_locality = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_state = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_postal_code = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_street_number = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_route = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_locality = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_state = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_postal_code = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
class AddressForm(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'sender_name',
'sender_unit',
'sender_street_number',
'sender_route',
'sender_locality',
'sender_state',
'sender_postal_code',
'recipient_name',
'recipient_unit',
'recipient_street_number',
'recipient_route',
'recipient_locality',
'recipient_state',
'recipient_postal_code',
]
class DocumentEditor(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'letter',
]
class DocumentUploader(forms.Form):
pdf_letter_url = forms.CharField(max_length=800, required=False, widget=forms.HiddenInput())
class OrderEmail(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'email'
]
class Payment(forms.Form):
stripe_token = forms.CharField(label='Stripe token', max_length=100, widget=forms.HiddenInput(), required=False)
| from django import forms
from .models import MailOrder
from tinymce.widgets import TinyMCE
class GoogleAddressForm(forms.Form):
sender_street_number = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_route = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_locality = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_state = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
sender_postal_code = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_street_number = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_route = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_locality = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_state = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
recipient_postal_code = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
class AddressForm(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'sender_name',
'sender_unit',
'sender_street_number',
'sender_route',
'sender_locality',
'sender_state',
'sender_postal_code',
'recipient_name',
'recipient_unit',
'recipient_street_number',
'recipient_route',
'recipient_locality',
'recipient_state',
'recipient_postal_code',
]
class DocumentEditor(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'letter',
]
class DocumentUploader(forms.Form):
pdf_letter_url = forms.CharField(max_length=100, required=False, widget=forms.HiddenInput())
class OrderEmail(forms.ModelForm):
class Meta:
model = MailOrder
fields = [
'email'
]
class Payment(forms.Form):
stripe_token = forms.CharField(label='Stripe token', max_length=100, widget=forms.HiddenInput(), required=False)
| mit | Python |
a9b2b5acd3b629aeb7d6254f3a346619028fe9ba | Add sketch column | alanhdu/nycodex | nycodex/db/schema.py | nycodex/db/schema.py | import enum
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from .base import Base
from .utils import UpsertMixin
@enum.unique
class AssetType(enum.Enum):
calendar = "calendar"
chart = "chart"
datalens = "datalens"
dataset = "dataset"
file = "file"
filter = "filter"
href = "href"
map = "map"
visualization = "visualization"
@enum.unique
class DataType(enum.Enum):
calendar_date = "calendar_date"
checkbox = "checkbox"
date = "date"
email = "email"
html = "html"
location = "location"
money = "money"
multi_line = "multi_line"
multi_polygon = "multi_polygon"
number = "number"
percent = "percent"
phone = "phone"
point = "point"
text = "text"
url = "url"
class Dataset(Base, UpsertMixin):
__tablename__ = "dataset"
id = sa.Column(sa.CHAR(9), primary_key=True)
asset_type = sa.Column(postgresql.ENUM(AssetType), nullable=False)
created_at = sa.Column(sa.TIMESTAMP(timezone=True), nullable=False)
description = sa.Column(sa.TEXT, nullable=False)
is_official = sa.Column(sa.BOOLEAN, nullable=False)
name = sa.Column(sa.VARCHAR, nullable=False)
updated_at = sa.Column(sa.TIMESTAMP(timezone=True), nullable=False)
fields = sa.orm.relationship("Field", back_populates="dataset")
class Field(Base, UpsertMixin):
__tablename__ = "field"
dataset_id = sa.Column(
sa.CHAR(9), sa.ForeignKey(Dataset.id), primary_key=True
)
field_name = sa.Column(sa.TEXT, nullable=False, primary_key=True)
datatype = sa.Column(postgresql.ENUM(DataType), nullable=False)
description = sa.Column(sa.TEXT, nullable=True)
name = sa.Column(sa.TEXT, nullable=False)
dataset = sa.orm.relationship("Dataset", back_populates="fields")
class Sketch(Base, UpsertMixin):
__tablename__ = "sketch"
__table_args__ = (
sa.ForeignKeyConstraint(
["dataset_id", "field_name"],
["field.dataset_id", "field.field_name"],
),
)
dataset_id = sa.Column(
sa.CHAR(9), sa.ForeignKey(Dataset.id), primary_key=True
)
field_name = sa.Column(sa.TEXT, nullable=False, primary_key=True)
update_time = sa.Column(
sa.TIMESTAMP(timezone=False),
server_default=sa.func.now(),
onupdate=sa.func.now(),
nullable=False,
)
count = sa.Column(sa.Integer, nullable=False)
distinct_count = sa.Column(sa.Integer, nullable=False)
| import enum
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from .base import Base
from .utils import UpsertMixin
@enum.unique
class AssetType(enum.Enum):
calendar = "calendar"
chart = "chart"
datalens = "datalens"
dataset = "dataset"
file = "file"
filter = "filter"
href = "href"
map = "map"
visualization = "visualization"
@enum.unique
class DataType(enum.Enum):
calendar_date = "calendar_date"
checkbox = "checkbox"
date = "date"
email = "email"
html = "html"
location = "location"
money = "money"
multi_line = "multi_line"
multi_polygon = "multi_polygon"
number = "number"
percent = "percent"
phone = "phone"
point = "point"
text = "text"
url = "url"
class Dataset(Base, UpsertMixin):
__tablename__ = "dataset"
id = sa.Column(sa.CHAR(9), primary_key=True)
asset_type = sa.Column(postgresql.ENUM(AssetType), nullable=False)
created_at = sa.Column(sa.TIMESTAMP(timezone=True), nullable=False)
description = sa.Column(sa.TEXT, nullable=False)
is_official = sa.Column(sa.BOOLEAN, nullable=False)
name = sa.Column(sa.VARCHAR, nullable=False)
updated_at = sa.Column(sa.TIMESTAMP(timezone=True), nullable=False)
fields = sa.orm.relationship("Field", back_populates="dataset")
class Field(Base, UpsertMixin):
__tablename__ = "field"
dataset_id = sa.Column(
sa.CHAR(9), sa.ForeignKey(Dataset.id), primary_key=True
)
field_name = sa.Column(sa.TEXT, nullable=False, primary_key=True)
datatype = sa.Column(postgresql.ENUM(DataType), nullable=False)
description = sa.Column(sa.TEXT, nullable=True)
name = sa.Column(sa.TEXT, nullable=False)
dataset = sa.orm.relationship("Dataset", back_populates="fields")
| apache-2.0 | Python |
b13ecb2e802f832d028e246f5edc88ec36844da2 | Bump minor version | ForeverWintr/metafunctions | metafunctions/__init__.py | metafunctions/__init__.py | __version__ = '0.2.1'
| __version__ = '0.2.0'
| mit | Python |
ae8c3b4817dfd9d47275c8ddfe9240ae1825b4e2 | Fix argument | lightning-viz/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python | lightning/types/three.py | lightning/types/three.py | from lightning.types.base import Base
from lightning.types.decorators import viztype
from lightning.types.utils import vecs_to_points_three, add_property
from numpy import ndarray, asarray
from lightning.types.utils import array_to_im
@viztype
class Scatter3(Base):
_name = 'scatter-3'
_func = 'scatter3'
@staticmethod
def clean(x, y, z, color=None, group=None, alpha=None, size=None):
"""
Plot three-dimensional data as points.
.. image:: scatter3.png
Parameters
----------
x, y, z : array-like, each (n,)
Input data
color : array-like, optional, singleton or (n,3)
Single rgb value or array to set colors
group : array-like, optional, singleton or (n,)
Single integer or array to set colors via groups
size : array-like, optional, singleton or (n,)
Single size or array to set point sizes
alpha : array-like, optional, singleton or (n,)
Single alpha value or array to set fill and stroke opacity
"""
points = vecs_to_points_three(x, y, z)
outdict = {'points': points}
outdict = add_property(outdict, color, 'color')
outdict = add_property(outdict, group, 'group')
outdict = add_property(outdict, size, 'size')
outdict = add_property(outdict, alpha, 'alpha')
return outdict
@viztype
class Volume(Base):
_name = 'volume'
@staticmethod
def clean(imagedata):
"""
Display a collection of images as a three-dimensional volume.
.. image:: volume.png
Parameters
----------
imagedata : array-like, or list of array-like
Image or list of images as two dimensional (grayscale) or three dimensional (RGB) arrays.
"""
if isinstance(imagedata, ndarray):
imagedata = [imagedata]
outdict = [array_to_im(im) for im in imagedata]
return {'images': outdict}
| from lightning.types.base import Base
from lightning.types.decorators import viztype
from lightning.types.utils import vecs_to_points_three, add_property
from numpy import ndarray, asarray
from lightning.types.utils import array_to_im
@viztype
class Scatter3(Base):
_name = 'scatter-3'
_func = 'scatter3'
@staticmethod
def clean(x, y, z, color=None, label=None, alpha=None, size=None):
"""
Plot three-dimensional data as points.
.. image:: scatter3.png
Parameters
----------
x, y, z : array-like, each (n,)
Input data
color : array-like, optional, singleton or (n,3)
Single rgb value or array to set colors
label : array-like, optional, singleton or (n,)
Single integer or array to set colors via groups
size : array-like, optional, singleton or (n,)
Single size or array to set point sizes
alpha : array-like, optional, singleton or (n,)
Single alpha value or array to set fill and stroke opacity
"""
points = vecs_to_points_three(x, y, z)
outdict = {'points': points}
outdict = add_property(outdict, color, 'color')
outdict = add_property(outdict, label, 'label')
outdict = add_property(outdict, size, 'size')
outdict = add_property(outdict, alpha, 'alpha')
return outdict
@viztype
class Volume(Base):
_name = 'volume'
@staticmethod
def clean(imagedata):
"""
Display a collection of images as a three-dimensional volume.
.. image:: volume.png
Parameters
----------
imagedata : array-like, or list of array-like
Image or list of images as two dimensional (grayscale) or three dimensional (RGB) arrays.
"""
if isinstance(imagedata, ndarray):
imagedata = [imagedata]
outdict = [array_to_im(im) for im in imagedata]
return {'images': outdict}
| mit | Python |
224dec2f90350c7fd8266a8421a0df199084c7ce | Read site configuration file in wmt-script | csdms/wmt-exe,csdms/wmt-exe,csdms/wmt-exe,csdms/wmt-exe | wmtexe/cmd/script.py | wmtexe/cmd/script.py | """Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher, SbatchLauncher
from ..config import load_configuration
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
'sbatch': SbatchLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--config', default='wmt.cfg',
help='WMT site configuration file')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
config = load_configuration(args.config)
launch_dir = config.get('paths', 'launch_dir')
exec_dir = config.get('paths', 'exec_dir')
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url,
launch_dir=launch_dir,
exec_dir=exec_dir)
if args.run:
launcher.run()
else:
print(launcher.script().strip())
| """Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher, SbatchLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
'sbatch': SbatchLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script().strip())
| mit | Python |
8bbcd20e119a3241cfb022945bfbe59ebb0f6cf3 | Fix bad function call in memoization | MOLSSI-BSE/basis_set_exchange | basis_set_exchange/memo.py | basis_set_exchange/memo.py | '''
Class/decorator for memoizing BSE functionality
'''
import functools
import pickle
import inspect
# If set to True, memoization of some internal functions
# will be used. Generally safe to leave enabled - it
# won't use that much memory
memoize_enabled = True
def _make_key(args_spec, *args, **kwargs):
left_args = args_spec.args[len(args):]
num_defaults = len(args_spec.defaults or ())
defaults_names = args_spec.args[-num_defaults:]
if not set(left_args).symmetric_difference(kwargs).issubset(defaults_names):
# Return None to signal an issue with the argument list
return None
start = 0
key = []
for arg, arg_name in zip(args, args_spec.args):
key.append(arg)
if arg_name in defaults_names:
start += 1
for left_arg in left_args:
try:
key.append(kwargs[left_arg])
except KeyError:
key.append(args_spec.defaults[start])
# Increase index if we used a default, or if the argument was provided
if left_arg in defaults_names:
start += 1
return pickle.dumps(key)
class BSEMemoize:
def __init__(self, f):
self.__f = f
self.args_spec = inspect.getfullargspec(f)
self.__memo = {}
functools.update_wrapper(self, f)
def __call__(self, *args, **kwargs):
if not memoize_enabled:
return self.__f(*args, **kwargs)
arg_key = _make_key(self.args_spec, *args, **kwargs)
if arg_key is None:
# There was a problem with the arguments. Just call the
# function to trigger the error
return self.__f(*args, **kwargs)
if arg_key in self.__memo:
return pickle.loads(self.__memo[arg_key])
ret = self.__f(*args, **kwargs)
self.__memo[arg_key] = pickle.dumps(ret)
return ret
| '''
Class/decorator for memoizing BSE functionality
'''
import functools
import pickle
import inspect
# If set to True, memoization of some internal functions
# will be used. Generally safe to leave enabled - it
# won't use that much memory
memoize_enabled = True
def _make_key(args_spec, *args, **kwargs):
left_args = args_spec.args[len(args):]
num_defaults = len(args_spec.defaults or ())
defaults_names = args_spec.args[-num_defaults:]
if not set(left_args).symmetric_difference(kwargs).issubset(defaults_names):
# We got an error in the function call. Let's simply trigger it
func(*args, **kwargs)
start = 0
key = []
for arg, arg_name in zip(args, args_spec.args):
key.append(arg)
if arg_name in defaults_names:
start += 1
for left_arg in left_args:
try:
key.append(kwargs[left_arg])
except KeyError:
key.append(args_spec.defaults[start])
# Increase index if we used a default, or if the argument was provided
if left_arg in defaults_names:
start += 1
return pickle.dumps(key)
class BSEMemoize:
def __init__(self, f):
self.__f = f
self.args_spec = inspect.getfullargspec(f)
self.__memo = {}
functools.update_wrapper(self, f)
def __call__(self, *args, **kwargs):
if not memoize_enabled:
return self.__f(*args, **kwargs)
arg_key = _make_key(self.args_spec, *args, **kwargs)
if arg_key in self.__memo:
return pickle.loads(self.__memo[arg_key])
ret = self.__f(*args, **kwargs)
self.__memo[arg_key] = pickle.dumps(ret)
return ret
| bsd-3-clause | Python |
75f1f5891b0e5dbe5562dac7b488ed731dbab3fb | Add CAN_DETECT | sounak98/coala-bears,Vamshi99/coala-bears,SanketDG/coala-bears,LWJensen/coala-bears,chriscoyfish/coala-bears,damngamerz/coala-bears,refeed/coala-bears,coala/coala-bears,Shade5/coala-bears,yashtrivedi96/coala-bears,ku3o/coala-bears,refeed/coala-bears,vijeth-aradhya/coala-bears,ankit01ojha/coala-bears,naveentata/coala-bears,kaustubhhiware/coala-bears,LWJensen/coala-bears,seblat/coala-bears,damngamerz/coala-bears,kaustubhhiware/coala-bears,shreyans800755/coala-bears,horczech/coala-bears,vijeth-aradhya/coala-bears,madhukar01/coala-bears,meetmangukiya/coala-bears,gs0510/coala-bears,mr-karan/coala-bears,horczech/coala-bears,LWJensen/coala-bears,sounak98/coala-bears,arjunsinghy96/coala-bears,chriscoyfish/coala-bears,ankit01ojha/coala-bears,dosarudaniel/coala-bears,Vamshi99/coala-bears,coala-analyzer/coala-bears,kaustubhhiware/coala-bears,damngamerz/coala-bears,madhukar01/coala-bears,SanketDG/coala-bears,seblat/coala-bears,incorrectusername/coala-bears,ku3o/coala-bears,srisankethu/coala-bears,naveentata/coala-bears,vijeth-aradhya/coala-bears,dosarudaniel/coala-bears,gs0510/coala-bears,arjunsinghy96/coala-bears,coala/coala-bears,mr-karan/coala-bears,yash-nisar/coala-bears,seblat/coala-bears,srisankethu/coala-bears,Vamshi99/coala-bears,coala/coala-bears,yashtrivedi96/coala-bears,coala-analyzer/coala-bears,madhukar01/coala-bears,horczech/coala-bears,damngamerz/coala-bears,ankit01ojha/coala-bears,gs0510/coala-bears,yashtrivedi96/coala-bears,chriscoyfish/coala-bears,Shade5/coala-bears,coala/coala-bears,naveentata/coala-bears,incorrectusername/coala-bears,shreyans800755/coala-bears,incorrectusername/coala-bears,aptrishu/coala-bears,horczech/coala-bears,ku3o/coala-bears,arjunsinghy96/coala-bears,LWJensen/coala-bears,damngamerz/coala-bears,coala/coala-bears,seblat/coala-bears,refeed/coala-bears,coala-analyzer/coala-bears,srisankethu/coala-bears,naveentata/coala-bears,meetmangukiya/coala-bears,sounak98/coala-bears,meetmangu
kiya/coala-bears,horczech/coala-bears,srisankethu/coala-bears,coala-analyzer/coala-bears,Asnelchristian/coala-bears,ankit01ojha/coala-bears,naveentata/coala-bears,damngamerz/coala-bears,refeed/coala-bears,incorrectusername/coala-bears,dosarudaniel/coala-bears,srisankethu/coala-bears,vijeth-aradhya/coala-bears,SanketDG/coala-bears,chriscoyfish/coala-bears,yashtrivedi96/coala-bears,damngamerz/coala-bears,coala/coala-bears,SanketDG/coala-bears,yash-nisar/coala-bears,coala/coala-bears,refeed/coala-bears,ku3o/coala-bears,srisankethu/coala-bears,vijeth-aradhya/coala-bears,gs0510/coala-bears,madhukar01/coala-bears,kaustubhhiware/coala-bears,yashtrivedi96/coala-bears,damngamerz/coala-bears,refeed/coala-bears,meetmangukiya/coala-bears,coala-analyzer/coala-bears,horczech/coala-bears,naveentata/coala-bears,yash-nisar/coala-bears,meetmangukiya/coala-bears,aptrishu/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears,LWJensen/coala-bears,Asnelchristian/coala-bears,arjunsinghy96/coala-bears,coala/coala-bears,LWJensen/coala-bears,damngamerz/coala-bears,refeed/coala-bears,vijeth-aradhya/coala-bears,meetmangukiya/coala-bears,mr-karan/coala-bears,chriscoyfish/coala-bears,arjunsinghy96/coala-bears,kaustubhhiware/coala-bears,damngamerz/coala-bears,Asnelchristian/coala-bears,ankit01ojha/coala-bears,shreyans800755/coala-bears,aptrishu/coala-bears,naveentata/coala-bears,arjunsinghy96/coala-bears,Vamshi99/coala-bears,Vamshi99/coala-bears,coala-analyzer/coala-bears,LWJensen/coala-bears,arjunsinghy96/coala-bears,incorrectusername/coala-bears,yash-nisar/coala-bears,dosarudaniel/coala-bears,refeed/coala-bears,aptrishu/coala-bears,horczech/coala-bears,horczech/coala-bears,meetmangukiya/coala-bears,srisankethu/coala-bears,madhukar01/coala-bears,seblat/coala-bears,vijeth-aradhya/coala-bears,arjunsinghy96/coala-bears,gs0510/coala-bears,shreyans800755/coala-bears,Asnelchristian/coala-bears,ankit01ojha/coala-bears,coala/coala-bears,seblat/coala-bears,srisankethu/coala-bears,aptrishu/coala-bears,
horczech/coala-bears,Vamshi99/coala-bears,mr-karan/coala-bears,SanketDG/coala-bears,dosarudaniel/coala-bears,sounak98/coala-bears,sounak98/coala-bears,seblat/coala-bears,seblat/coala-bears,ankit01ojha/coala-bears,gs0510/coala-bears,mr-karan/coala-bears,madhukar01/coala-bears,ankit01ojha/coala-bears,shreyans800755/coala-bears,Shade5/coala-bears,madhukar01/coala-bears,vijeth-aradhya/coala-bears,Shade5/coala-bears,coala/coala-bears,Shade5/coala-bears,sounak98/coala-bears,horczech/coala-bears,Shade5/coala-bears,Asnelchristian/coala-bears,kaustubhhiware/coala-bears,dosarudaniel/coala-bears,madhukar01/coala-bears,chriscoyfish/coala-bears,shreyans800755/coala-bears,dosarudaniel/coala-bears,Asnelchristian/coala-bears,ku3o/coala-bears,ankit01ojha/coala-bears,yashtrivedi96/coala-bears,shreyans800755/coala-bears,refeed/coala-bears,yash-nisar/coala-bears,ankit01ojha/coala-bears,yash-nisar/coala-bears,coala-analyzer/coala-bears,chriscoyfish/coala-bears,shreyans800755/coala-bears,ku3o/coala-bears,Asnelchristian/coala-bears,madhukar01/coala-bears,Vamshi99/coala-bears,incorrectusername/coala-bears,yashtrivedi96/coala-bears,aptrishu/coala-bears,LWJensen/coala-bears,srisankethu/coala-bears,yashtrivedi96/coala-bears,shreyans800755/coala-bears,aptrishu/coala-bears,SanketDG/coala-bears,incorrectusername/coala-bears,naveentata/coala-bears,sounak98/coala-bears,SanketDG/coala-bears,yash-nisar/coala-bears,kaustubhhiware/coala-bears,horczech/coala-bears,yashtrivedi96/coala-bears,yash-nisar/coala-bears,coala/coala-bears,meetmangukiya/coala-bears,yash-nisar/coala-bears,Asnelchristian/coala-bears,kaustubhhiware/coala-bears,mr-karan/coala-bears,refeed/coala-bears,ku3o/coala-bears,Vamshi99/coala-bears,kaustubhhiware/coala-bears,incorrectusername/coala-bears,Shade5/coala-bears,aptrishu/coala-bears,gs0510/coala-bears,chriscoyfish/coala-bears,aptrishu/coala-bears,Shade5/coala-bears,Asnelchristian/coala-bears,LWJensen/coala-bears,yash-nisar/coala-bears,ku3o/coala-bears,incorrectusername/coala-bears,s
ounak98/coala-bears,gs0510/coala-bears,dosarudaniel/coala-bears,mr-karan/coala-bears,damngamerz/coala-bears,Vamshi99/coala-bears,Shade5/coala-bears,aptrishu/coala-bears,ku3o/coala-bears,sounak98/coala-bears,coala-analyzer/coala-bears,Vamshi99/coala-bears,SanketDG/coala-bears,naveentata/coala-bears,SanketDG/coala-bears,Vamshi99/coala-bears,aptrishu/coala-bears,coala-analyzer/coala-bears,vijeth-aradhya/coala-bears,gs0510/coala-bears,dosarudaniel/coala-bears,shreyans800755/coala-bears,srisankethu/coala-bears,srisankethu/coala-bears,ankit01ojha/coala-bears,yash-nisar/coala-bears,coala/coala-bears,shreyans800755/coala-bears,mr-karan/coala-bears,meetmangukiya/coala-bears | bears/rest/reSTLintBear.py | bears/rest/reSTLintBear.py | from restructuredtext_lint import lint
from coalib.bears.LocalBear import LocalBear
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class reSTLintBear(LocalBear):
LANGUAGES = {"reStructuredText"}
REQUIREMENTS = {PipRequirement('restructuredtext_lint', '0.14.*')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
CAN_DETECT = {'Formatting', 'Syntax'}
def run(self, filename, file):
"""
Lints reStructuredText.
"""
content = ''.join(file)
errors = lint(content)
for error in errors:
severity = {
1: RESULT_SEVERITY.INFO,
2: RESULT_SEVERITY.NORMAL,
3: RESULT_SEVERITY.MAJOR,
4: RESULT_SEVERITY.MAJOR}.get(error.level,
RESULT_SEVERITY.NORMAL)
yield Result.from_values(
self,
error.message,
file=filename,
line=error.line,
debug_msg=error.full_message,
severity=severity)
| from restructuredtext_lint import lint
from coalib.bears.LocalBear import LocalBear
from coalib.bears.requirements.PipRequirement import PipRequirement
from coalib.results.Result import Result
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class reSTLintBear(LocalBear):
LANGUAGES = {"reStructuredText"}
REQUIREMENTS = {PipRequirement('restructuredtext_lint', '0.14.*')}
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
LICENSE = 'AGPL-3.0'
def run(self, filename, file):
"""
Lints reStructuredText.
"""
content = ''.join(file)
errors = lint(content)
for error in errors:
severity = {
1: RESULT_SEVERITY.INFO,
2: RESULT_SEVERITY.NORMAL,
3: RESULT_SEVERITY.MAJOR,
4: RESULT_SEVERITY.MAJOR}.get(error.level,
RESULT_SEVERITY.NORMAL)
yield Result.from_values(
self,
error.message,
file=filename,
line=error.line,
debug_msg=error.full_message,
severity=severity)
| agpl-3.0 | Python |
b8638ab2befa55029f2aeb8a907acb1a94aba3a9 | Decrease sensitivity of dark ground checking. | legorovers/legoflask,legorovers/legoflask,legorovers/legoflask | app/rules.py | app/rules.py |
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 40) \
or (rule.trigger == 'light ground' and color >= 40):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
|
class Rule(object):
def __init__(self, trigger, actions):
self.trigger = trigger
print "trigger: %s" % trigger
self.code = []
time = 0
for a in actions:
print "action: %s" % a
if a == 'back':
action = ('reverse', 40)
elif a == 'stop':
action = (None, 0)
else: # forward, left, right, speak, light-*
action = (a, 40)
self.code.append(time)
self.code.append(action)
time += 0.5
print "code: %s" % self.code
class RuleEngine(object):
def __init__(self, control):
self.control = control
self.rules = []
def check(self, color, touch, direction):
for rule in self.rules:
if (rule.trigger == 'collision' and touch) \
or (rule.trigger == 'dark ground' and color < 10) \
or (rule.trigger == 'light ground' and color > 10):
self.control.program(*rule.code)
def activate(self, rules):
self.rules = rules
| bsd-2-clause | Python |
191ce55833aee3f9b313da78918bc3a9a4fe8f58 | Test #84 | borntyping/python-colorlog | colorlog/tests/conftest.py | colorlog/tests/conftest.py | """Fixtures that can be used in other tests."""
from __future__ import print_function
import inspect
import logging
import sys
import pytest
import colorlog
class TestingStreamHandler(logging.StreamHandler):
"""Raise errors to be caught by py.test instead of printing to stdout."""
def handleError(self, record):
_type, value, _traceback = sys.exc_info()
raise value
def assert_log_message(capsys, log_function, message, *args):
"""Call a log function and check the message has been output."""
log_function(message, *args)
out, err = capsys.readouterr()
# Print the output so that py.test shows it when a test fails
print(err, end="", file=sys.stderr)
# Assert the message send to the logger was output
assert message % args in err, "Log message not output to STDERR"
return err
@pytest.fixture()
def reset_loggers():
logging.root.handlers = list()
logging.root.setLevel(logging.DEBUG)
@pytest.fixture()
def test_logger(reset_loggers, capsys):
def function(logger, validator=None):
lines = [
assert_log_message(capsys, logger.debug, "a debug message %s", 1),
assert_log_message(capsys, logger.info, "an info message %s", 2),
assert_log_message(capsys, logger.warning, "a warning message %s", 3),
assert_log_message(capsys, logger.error, "an error message %s", 4),
assert_log_message(capsys, logger.critical, "a critical message %s", 5),
]
if validator is not None:
for line in lines:
valid = validator(line.strip())
assert valid, "{!r} did not validate".format(line.strip())
return lines
return function
@pytest.fixture()
def create_and_test_logger(test_logger):
def function(*args, **kwargs):
validator = kwargs.pop("validator", None)
formatter_cls = kwargs.pop("formatter_class", colorlog.ColoredFormatter)
formatter = formatter_cls(*args, **kwargs)
stream = TestingStreamHandler(stream=sys.stderr)
stream.setLevel(logging.DEBUG)
stream.setFormatter(formatter)
logger = logging.getLogger(inspect.stack()[1][3])
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
return test_logger(logger, validator)
return function
| """Fixtures that can be used in other tests."""
from __future__ import print_function
import inspect
import logging
import sys
import pytest
import colorlog
class TestingStreamHandler(logging.StreamHandler):
"""Raise errors to be caught by py.test instead of printing to stdout."""
def handleError(self, record):
_type, value, _traceback = sys.exc_info()
raise value
def assert_log_message(log_function, message, capsys):
"""Call a log function and check the message has been output."""
log_function(message)
out, err = capsys.readouterr()
# Print the output so that py.test shows it when a test fails
print(err, end="", file=sys.stderr)
# Assert the message send to the logger was output
assert message in err, "Log message not output to STDERR"
return err
@pytest.fixture()
def reset_loggers():
logging.root.handlers = list()
logging.root.setLevel(logging.DEBUG)
@pytest.fixture()
def test_logger(reset_loggers, capsys):
def function(logger, validator=None):
lines = [
assert_log_message(logger.debug, "a debug message", capsys),
assert_log_message(logger.info, "an info message", capsys),
assert_log_message(logger.warning, "a warning message", capsys),
assert_log_message(logger.error, "an error message", capsys),
assert_log_message(logger.critical, "a critical message", capsys),
]
if validator is not None:
for line in lines:
valid = validator(line.strip())
assert valid, "{!r} did not validate".format(line.strip())
return lines
return function
@pytest.fixture()
def create_and_test_logger(test_logger):
def function(*args, **kwargs):
validator = kwargs.pop("validator", None)
formatter_cls = kwargs.pop("formatter_class", colorlog.ColoredFormatter)
formatter = formatter_cls(*args, **kwargs)
stream = TestingStreamHandler(stream=sys.stderr)
stream.setLevel(logging.DEBUG)
stream.setFormatter(formatter)
logger = logging.getLogger(inspect.stack()[1][3])
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)
return test_logger(logger, validator)
return function
| mit | Python |
120822fe10326fad9e7b9e82a2ac9a6d4f09821b | Set version for next tag | tskisner/pytoast,tskisner/pytoast | toast/_version.py | toast/_version.py | __version__ = '2.01'
| __version__ = '2.0'
| bsd-2-clause | Python |
5ba54bcea283361bdd97cd35354a8490f2ee6c7b | Work around a docutils bug. | pydotorg/pypi,pydotorg/pypi,pydotorg/pypi,pydotorg/pypi | tools/demodata.py | tools/demodata.py | #!/usr/bin/python
import sys, os, urllib
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root)
# Work around http://sourceforge.net/p/docutils/bugs/214/
import docutils.utils
import admin, store, config
cfg = config.Config(root+'/config.ini')
st = store.Store(cfg)
# classifiers
for c in urllib.urlopen("http://pypi.python.org/pypi?%3Aaction=list_classifiers").read().splitlines():
admin.add_classifier(st, c)
# Demo data starts here
# an admin
otk = st.store_user('fred', 'fredpw', 'fred@python.test')
st.delete_otk(otk)
st.add_role('fred', 'Admin', None)
# an owner
otk = st.store_user('barney', 'barneypw', 'barney@python.test')
st.delete_otk(otk)
# package spam
st.set_user('barney', '127.0.0.1', True)
for version in ('0.8', '0.9', '1.0'):
st.store_package('spam', version, {
'author':'Barney Geroellheimer',
'author_email':'barney@python.test',
'homepage':'http://spam.python.test/',
'license':'GPL',
'summary':'The spam package',
'description': 'spam '*500,
'classifiers':["Development Status :: 6 - Mature",
"Programming Language :: Python :: 2"],
'_pypi_hidden':False
})
# package eggs
for version in ('0.1', '0.2', '0.3', '0.4'):
st.store_package('eggs', version, {
'author':'Barney Geroellheimer',
'author_email':'barney@python.test',
'homepage':'http://eggs.python.test/',
'license':'GPL',
'summary':'The eggs package',
'description':'Does anybody want to provide real data here?',
'classifiers':["Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3"],
'requires_dist':['spam'],
'_pypi_hidden':version!='0.4'
})
st.add_file('spam', '1.0', 'THIS IS SOME CONTENT', '1234', 'sdist',
'any', '', 'demo.txt', None)
st.commit()
| #!/usr/bin/python
import sys, os, urllib
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root)
import admin, store, config
cfg = config.Config(root+'/config.ini')
st = store.Store(cfg)
# classifiers
for c in urllib.urlopen("http://pypi.python.org/pypi?%3Aaction=list_classifiers").read().splitlines():
admin.add_classifier(st, c)
# Demo data starts here
# an admin
otk = st.store_user('fred', 'fredpw', 'fred@python.test')
st.delete_otk(otk)
st.add_role('fred', 'Admin', None)
# an owner
otk = st.store_user('barney', 'barneypw', 'barney@python.test')
st.delete_otk(otk)
# package spam
st.set_user('barney', '127.0.0.1', True)
for version in ('0.8', '0.9', '1.0'):
st.store_package('spam', version, {
'author':'Barney Geroellheimer',
'author_email':'barney@python.test',
'homepage':'http://spam.python.test/',
'license':'GPL',
'summary':'The spam package',
'description': 'spam '*500,
'classifiers':["Development Status :: 6 - Mature",
"Programming Language :: Python :: 2"],
'_pypi_hidden':False
})
# package eggs
for version in ('0.1', '0.2', '0.3', '0.4'):
st.store_package('eggs', version, {
'author':'Barney Geroellheimer',
'author_email':'barney@python.test',
'homepage':'http://eggs.python.test/',
'license':'GPL',
'summary':'The eggs package',
'description':'Does anybody want to provide real data here?',
'classifiers':["Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3"],
'requires_dist':['spam'],
'_pypi_hidden':version!='0.4'
})
st.add_file('spam', '1.0', 'THIS IS SOME CONTENT', '1234', 'sdist',
'any', '', 'demo.txt', None)
st.commit()
| bsd-3-clause | Python |
da48827efd87a60386316d835aa1d79aaf366da3 | fix name | xuhdev/nikola,damianavila/nikola,getnikola/nikola,immanetize/nikola,getnikola/nikola,Proteus-tech/nikola,okin/nikola,x1101/nikola,immanetize/nikola,s2hc-johan/nikola,JohnTroony/nikola,xuhdev/nikola,jjconti/nikola,andredias/nikola,jjconti/nikola,andredias/nikola,wcmckee/nikola,yamila-moreno/nikola,knowsuchagency/nikola,getnikola/nikola,TyberiusPrime/nikola,Proteus-tech/nikola,masayuko/nikola,damianavila/nikola,damianavila/nikola,techdragon/nikola,okin/nikola,lucacerone/nikola,TyberiusPrime/nikola,okin/nikola,yamila-moreno/nikola,berezovskyi/nikola,jjconti/nikola,wcmckee/nikola,atiro/nikola,TyberiusPrime/nikola,knowsuchagency/nikola,knowsuchagency/nikola,JohnTroony/nikola,Proteus-tech/nikola,getnikola/nikola,x1101/nikola,schettino72/nikola,techdragon/nikola,schettino72/nikola,berezovskyi/nikola,servalproject/nikola,pluser/nikola,lucacerone/nikola,JohnTroony/nikola,gwax/nikola,servalproject/nikola,xuhdev/nikola,andredias/nikola,s2hc-johan/nikola,schettino72/nikola,kotnik/nikola,x1101/nikola,yamila-moreno/nikola,okin/nikola,Proteus-tech/nikola,techdragon/nikola,atiro/nikola,kotnik/nikola,servalproject/nikola,pluser/nikola,pluser/nikola,xuhdev/nikola,atiro/nikola,gwax/nikola,masayuko/nikola,berezovskyi/nikola,gwax/nikola,wcmckee/nikola,immanetize/nikola,kotnik/nikola,masayuko/nikola,lucacerone/nikola,s2hc-johan/nikola | nikola/plugins/task_render_sources.py | nikola/plugins/task_render_sources.py | import os
from nikola.plugin_categories import Task
from nikola import utils
class Sources(Task):
"""Copy page sources into the output."""
name = "render_sources"
def gen_tasks(self):
"""Publish the page sources into the output.
Required keyword arguments:
translations
default_lang
post_pages
output_folder
"""
kw = {
"translations": self.site.config["TRANSLATIONS"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"default_lang": self.site.config["DEFAULT_LANG"],
}
self.site.scan_posts()
flag = False
for lang in kw["translations"]:
for post in self.site.timeline:
output_name = os.path.join(kw['output_folder'],
post.destination_path(lang, post.source_ext()))
source = post.source_path
if lang != kw["default_lang"]:
source_lang = source + '.' + lang
if os.path.exists(source_lang):
source = source_lang
yield {
'basename': 'render_sources',
'name': output_name.encode('utf8'),
'file_dep': [source],
'targets': [output_name],
'actions': [(utils.copy_file, (source, output_name))],
'clean': True,
'uptodate': [utils.config_changed(kw)],
}
if flag == False: # No page rendered, yield a dummy task
yield {
'basename': 'render_sources',
'name': 'None',
'uptodate': [True],
'actions': [],
}
| import os
from nikola.plugin_categories import Task
from nikola import utils
class Sources(Task):
"""Copy page sources into the output."""
name = "render_sources"
def gen_tasks(self):
"""Publish the page sources into the output.
Required keyword arguments:
translations
default_lang
post_pages
output_folder
"""
kw = {
"translations": self.site.config["TRANSLATIONS"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"default_lang": self.site.config["DEFAULT_LANG"],
}
self.site.scan_posts()
flag = False
for lang in kw["translations"]:
for post in self.site.timeline:
output_name = os.path.join(kw['output_folder'],
post.destination_path(lang, post.source_ext()))
source = post.source_path
if lang != kw["default_lang"]:
source_lang = source + '.' + lang
if os.path.exists(source_lang):
source = source_lang
yield {
'basename': 'render_sources',
'name': output_name.encode('utf8'),
'file_dep': [source],
'targets': [output_name],
'actions': [(utils.copy_file, (source, output_name))],
'clean': True,
'uptodate': [config_changed(kw)],
}
if flag == False: # No page rendered, yield a dummy task
yield {
'basename': 'render_sources',
'name': 'None',
'uptodate': [True],
'actions': [],
}
| mit | Python |
2d4063e936b3e739d03145891135fa0349e9a53d | fix spelling of `render_admin_panel` in the definition of the `IAdminPanelProvider` interface | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac | trac/admin/api.py | trac/admin/api.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2006 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
class IAdminPanelProvider(Interface):
"""Extension point interface for adding panels to the web-based
administration interface.
"""
def get_admin_panels(req):
"""Return a list of available admin pages.
The pages returned by this function must be a tuple of the form
`(category, category_label, page, page_label)`.
"""
def render_admin_panel(req, category, page, path_info):
"""Process a request for an admin panel.
This function should return a tuple of the form `(template, data)`,
where `template` is the name of the template to use and `data` is the
data to be passed to the template.
"""
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2006 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
class IAdminPanelProvider(Interface):
"""Extension point interface for adding panels to the web-based
administration interface.
"""
def get_admin_panels(req):
"""Return a list of available admin pages.
The pages returned by this function must be a tuple of the form
`(category, category_label, page, page_label)`.
"""
def render_admin_paneel(req, category, page, path_info):
"""Process a request for an admin panel.
This function should return a tuple of the form `(template, data)`,
where `template` is the name of the template to use and `data` is the
data to be passed to the template.
"""
| bsd-3-clause | Python |
790de15cc1a1eb680af319346ef8f90cfe8caffc | Add basic models. | GeneralMaximus/secondhand | tracker/models.py | tracker/models.py | from django.db import models
from django.contrib.auth.models import User
class Task(models.Model):
name = models.DateTimeField()
user = models.ForeignKey(User)
class WorkSession(models.Model):
task = models.ForeignKey('Task')
user = models.ForeignKey(User)
start_time = models.DateTimeField()
end_time = models.DateTimeField()
| from django.db import models
# Create your models here.
| mit | Python |
3214f9ab9ca35ca227377459eed77a9e97d8997a | Remove unused module | mjschultz/django-tracking2,bruth/django-tracking2,bruth/django-tracking2 | tracking/views.py | tracking/views.py | import logging
from datetime import timedelta
from django import forms
from django.shortcuts import render
from django.contrib.auth.decorators import permission_required
from django.utils.timezone import now
from tracking.models import Visitor, Pageview
from tracking.settings import TRACK_PAGEVIEWS
log = logging.getLogger(__file__)
# tracking wants to accept more formats than default, here they are
input_formats = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%Y-%m', # '2006-10'
'%Y', # '2006'
]
class DashboardForm(forms.Form):
start_time = forms.DateTimeField(
required=False, input_formats=input_formats)
end_time = forms.DateTimeField(
required=False, input_formats=input_formats)
@permission_required('tracking.view_visitor')
def dashboard(request):
"Counts, aggregations and more!"
end_time = now()
start_time = end_time - timedelta(days=1)
defaults = {'start_time': start_time, 'end_time': end_time}
form = DashboardForm(data=request.GET or defaults)
if form.is_valid():
start_time = form.cleaned_data['start_time']
end_time = form.cleaned_data['end_time']
# determine when tracking began
try:
track_start_time = Visitor.objects.earliest('start_time').start_time
except Visitor.DoesNotExist:
track_start_time = now()
# If the start_date is before tracking began, warn about incomplete data
warn_incomplete = (start_time < track_start_time)
# queries take `date` objects (for now)
start_date = start_time.date()
end_date = end_time.date()
user_stats = Visitor.objects.user_stats(start_date, end_date)
visitor_stats = Visitor.objects.stats(start_date, end_date)
if TRACK_PAGEVIEWS:
pageview_stats = Pageview.objects.stats(start_date, end_date)
else:
pageview_stats = None
context = {
'form': form,
'track_start_time': track_start_time,
'warn_incomplete': warn_incomplete,
'user_stats': user_stats,
'visitor_stats': visitor_stats,
'pageview_stats': pageview_stats,
}
return render(request, 'tracking/dashboard.html', context)
| import logging
import calendar
from datetime import timedelta
from django import forms
from django.shortcuts import render
from django.contrib.auth.decorators import permission_required
from django.utils.timezone import now
from tracking.models import Visitor, Pageview
from tracking.settings import TRACK_PAGEVIEWS
log = logging.getLogger(__file__)
# tracking wants to accept more formats than default, here they are
input_formats = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%Y-%m', # '2006-10'
'%Y', # '2006'
]
class DashboardForm(forms.Form):
start_time = forms.DateTimeField(
required=False, input_formats=input_formats)
end_time = forms.DateTimeField(
required=False, input_formats=input_formats)
@permission_required('tracking.view_visitor')
def dashboard(request):
"Counts, aggregations and more!"
end_time = now()
start_time = end_time - timedelta(days=1)
defaults = {'start_time': start_time, 'end_time': end_time}
form = DashboardForm(data=request.GET or defaults)
if form.is_valid():
start_time = form.cleaned_data['start_time']
end_time = form.cleaned_data['end_time']
# determine when tracking began
try:
track_start_time = Visitor.objects.earliest('start_time').start_time
except Visitor.DoesNotExist:
track_start_time = now()
# If the start_date is before tracking began, warn about incomplete data
warn_incomplete = (start_time < track_start_time)
# queries take `date` objects (for now)
start_date = start_time.date()
end_date = end_time.date()
user_stats = Visitor.objects.user_stats(start_date, end_date)
visitor_stats = Visitor.objects.stats(start_date, end_date)
if TRACK_PAGEVIEWS:
pageview_stats = Pageview.objects.stats(start_date, end_date)
else:
pageview_stats = None
context = {
'form': form,
'track_start_time': track_start_time,
'warn_incomplete': warn_incomplete,
'user_stats': user_stats,
'visitor_stats': visitor_stats,
'pageview_stats': pageview_stats,
}
return render(request, 'tracking/dashboard.html', context)
| bsd-2-clause | Python |
5a714bb3878890e5f4b36503585d361263103c7d | Remove dev only app | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/server/test_settings.py | nodeconductor/server/test_settings.py | # Django test settings for nodeconductor project.
from nodeconductor.server.base_settings import *
SECRET_KEY = 'test-key'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS += (
'kombu.transport.django', # Needed for broker backend
'djcelery', # Needed for result backend,
)
BROKER_URL = 'django://'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
NODECONDUCTOR.update({
'MONITORING': {
'ZABBIX': {
'server': "http://127.0.0.1:8888/zabbix",
'username': "admin",
'password': "zabbix",
'interface_parameters': {"ip": "0.0.0.0", "main": 1, "port": "10050", "type": 1, "useip": 1, "dns": ""},
'templateid': '10106',
'groupid': '8',
'default_service_parameters': {'algorithm': 1, 'showsla': 1, 'sortorder': 1, 'goodsla': 95},
}
},
})
| # Django test settings for nodeconductor project.
from nodeconductor.server.base_settings import *
SECRET_KEY = 'test-key'
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS += (
'kombu.transport.django', # Needed for broker backend
'djcelery', # Needed for result backend,
'test_without_migrations',
)
BROKER_URL = 'django://'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
NODECONDUCTOR.update({
'MONITORING': {
'ZABBIX': {
'server': "http://127.0.0.1:8888/zabbix",
'username': "admin",
'password': "zabbix",
'interface_parameters': {"ip": "0.0.0.0", "main": 1, "port": "10050", "type": 1, "useip": 1, "dns": ""},
'templateid': '10106',
'groupid': '8',
'default_service_parameters': {'algorithm': 1, 'showsla': 1, 'sortorder': 1, 'goodsla': 95},
}
},
})
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.