commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
17ac4f42fae21dcd0ebb18a63b50d949540488f2
|
Test for update on User Field
|
aligot/tests/test_user.py
|
aligot/tests/test_user.py
|
# coding: utf-8
from django.core.urlresolvers import reverse
from django.db.utils import IntegrityError
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from ..models import User
class TestUser(TestCase):
def test_create_with_same_email(self):
User.objects.create(username='user1', password='mypassword', email='email@email.com')
self.assertRaises(
IntegrityError,
User.objects.create,
username='user2', password='mypassword', email='email@email.com'
)
class TestUserApi(TestCase):
def setUp(self):
self.client = APIClient()
def test_create_without_params(self):
self.assertEquals(status.HTTP_400_BAD_REQUEST, self.client.post(reverse('user-create')).status_code)
self.assertEquals(0, User.objects.count())
def test_create(self):
"""
Create user & wait for 201 response.
"""
data = {
'username': 'test',
'password': 'test',
'email': 'test@mail.com'
}
response = self.client.post(reverse('user-create'), data)
self.assertEqual(status.HTTP_201_CREATED, response.status_code, response.content)
self.assertEqual(1, User.objects.count())
# Check the first
user = User.objects.all()[0]
self.assertEqual(user.username, data['username'], 'Username in DB don\'t match')
def test_retrieve(self):
"""
Retrieve user & wait for 200 response
"""
user = User.objects.create_user(
username='test',
password='test',
email='mail@mail.com'
)
self.client.force_authenticate(user=user)
self.assertEqual(1, User.objects.count(), 'ORM don\'t insert user in DB')
response = self.client.get(reverse('user-detail', args=[user.username]))
self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
def test_delete(self):
"""
Simple deletion of an user in DB
Wait for 204 response.
"""
user = User.objects.create_user(
username='test',
password='test',
email='mail@mail.com'
)
self.client.force_authenticate(user=user)
self.assertEqual(1, User.objects.count(), 'ORM don\'t insert user in DB')
response = self.client.delete(reverse('user-detail', args=[user.username]))
self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code, response.content)
|
Python
| 0
|
@@ -2549,28 +2549,850 @@
tus_code, response.content)%0A
+%0A def test_update(self):%0A %22%22%22%0A Test if a connected user can change is own mail.%0A %22%22%22%0A user = User.objects.create_user(%0A username='test',%0A password='test',%0A email='mail@mail.com'%0A )%0A self.assertEqual(1, User.objects.count(), 'ORM don%5C't insert user in DB')%0A%0A self.client.force_authenticate(user=user)%0A%0A data = %7B%0A 'username': 'test',%0A 'password': 'test',%0A 'email': 'changed_mail@mail.com',%0A %7D%0A%0A response = self.client.put(reverse('user-detail', args=%5Buser.username%5D), data)%0A self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)%0A%0A user = User.objects.get(username='test')%0A%0A self.assertEqual(user.email, data%5B'email'%5D, 'Update failed in DB')%0A
|
5c3af04057104e69531ae51e6dd755d0abbdc0be
|
move initialization code to init
|
yandextank/plugins/Influx/plugin.py
|
yandextank/plugins/Influx/plugin.py
|
# coding=utf-8
# TODO: make the next two lines unnecessary
# pylint: disable=line-too-long
# pylint: disable=missing-docstring
import logging
import sys
import datetime
from uuid import uuid4
from builtins import str
from influxdb import InfluxDBClient
from ...common.interfaces import AbstractPlugin, \
MonitoringDataListener, AggregateResultListener
from .decoder import Decoder
logger = logging.getLogger(__name__) # pylint: disable=C0103
def chop(data_list, chunk_size):
if sys.getsizeof(str(data_list)) <= chunk_size:
return [data_list]
elif len(data_list) == 1:
logger.warning("Too large piece of Telegraf data. Might experience upload problems.")
return [data_list]
else:
mid = len(data_list) / 2
return chop(data_list[:mid], chunk_size) + chop(data_list[mid:], chunk_size)
class Plugin(AbstractPlugin, AggregateResultListener,
MonitoringDataListener):
SECTION = 'influx'
def __init__(self, core, cfg, cfg_updater):
AbstractPlugin.__init__(self, core, cfg, cfg_updater)
self.client = None
self.decoder = None
def start_test(self):
self.start_time = datetime.datetime.now()
def end_test(self, retcode):
self.end_time = datetime.datetime.now() + datetime.timedelta(minutes=1)
return retcode
def configure(self):
'''Read configuration'''
self.tank_tag = self.get_option("tank_tag")
address = self.get_option("address")
port = self.get_option("port")
self.client = InfluxDBClient(
address, port, 'root', 'root', 'mydb')
grafana_root = self.get_option("grafana_root")
grafana_dashboard = self.get_option("grafana_dashboard")
uuid = str(uuid4())
logger.info(
"Grafana link: {grafana_root}"
"dashboard/db/{grafana_dashboard}?var-uuid={uuid}&from=-5m&to=now".format(
grafana_root=grafana_root,
grafana_dashboard=grafana_dashboard,
uuid=uuid,
)
)
self.decoder = Decoder(self.tank_tag, uuid)
def prepare_test(self):
self.core.job.subscribe_plugin(self)
def on_aggregated_data(self, data, stats):
if self.client:
points = self.decoder.decode_aggregate(data, stats)
self.client.write_points(points, 's')
def monitoring_data(self, data_list):
if self.client:
if len(data_list) > 0:
[
self._send_monitoring(chunk)
for chunk in chop(data_list, self.get_option("chunk_size"))
]
def _send_monitoring(self, data):
points = self.decoder.decode_monitoring(data)
self.client.write_points(points, 's')
|
Python
| 0.000005
|
@@ -1071,336 +1071,8 @@
er)%0A
- self.client = None%0A self.decoder = None%0A%0A def start_test(self):%0A self.start_time = datetime.datetime.now()%0A%0A def end_test(self, retcode):%0A self.end_time = datetime.datetime.now() + datetime.timedelta(minutes=1)%0A return retcode%0A%0A def configure(self):%0A '''Read configuration'''%0A
@@ -1791,16 +1791,230 @@
uuid)%0A%0A
+ def start_test(self):%0A self.start_time = datetime.datetime.now()%0A%0A def end_test(self, retcode):%0A self.end_time = datetime.datetime.now() + datetime.timedelta(minutes=1)%0A return retcode%0A%0A
def
|
ba7a3a51e34dc710ed30153a881ba86348fd8af2
|
Improve playback controls by stopping before playing new track. Fix a couple of bugs.
|
mopidy/backends/libspotify.py
|
mopidy/backends/libspotify.py
|
import datetime as dt
import logging
import threading
from spotify import Link
from spotify.manager import SpotifySessionManager
from spotify.alsahelper import AlsaController
from mopidy import config
from mopidy.backends import (BaseBackend, BaseCurrentPlaylistController,
BaseLibraryController, BasePlaybackController,
BaseStoredPlaylistsController)
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.libspotify')
ENCODING = 'utf-8'
class LibspotifyBackend(BaseBackend):
def __init__(self):
self.current_playlist = LibspotifyCurrentPlaylistController(
backend=self)
self.library = LibspotifyLibraryController(backend=self)
self.playback = LibspotifyPlaybackController(backend=self)
self.stored_playlists = LibspotifyStoredPlaylistsController(
backend=self)
self.uri_handlers = [u'spotify:', u'http://open.spotify.com/']
self.translate = LibspotifyTranslator()
self.spotify = self._connect()
self.stored_playlists.refresh()
def _connect(self):
logger.info(u'Connecting to Spotify')
spotify = LibspotifySessionManager(
config.SPOTIFY_USERNAME, config.SPOTIFY_PASSWORD, backend=self)
spotify.start()
return spotify
class LibspotifyCurrentPlaylistController(BaseCurrentPlaylistController):
pass
class LibspotifyLibraryController(BaseLibraryController):
def search(self, type, query):
return Playlist() # TODO
class LibspotifyPlaybackController(BasePlaybackController):
def _pause(self):
# TODO
return False
def _play(self, track):
self.backend.spotify.session.load(
Link.from_string(self._current_track.uri).as_track())
self.backend.spotify.session.play(1)
return True
def _resume(self):
# TODO
return False
def _stop(self):
self.spotify.session.play(0)
return True
class LibspotifyStoredPlaylistsController(BaseStoredPlaylistsController):
def refresh(self):
logger.info(u'Refreshing stored playlists')
playlists = []
for spotify_playlist in self.backend.spotify.playlists:
playlists.append(
self.backend.translate.to_mopidy_playlist(spotify_playlist))
self._playlists = playlists
logger.debug(u'Available playlists: %s',
u', '.join([u'<%s>' % p.name for p in self.playlists]))
class LibspotifyTranslator(object):
uri_to_id_map = {}
next_id = 0
def to_mopidy_id(self, spotify_uri):
if spotify_uri not in self.uri_to_id_map:
this_id = self.next_id
self.next_id += 1
self.uri_to_id_map[spotify_uri] = this_id
return self.uri_to_id_map[spotify_uri]
def to_mopidy_artist(self, spotify_artist):
if not spotify_artist.is_loaded():
return Artist(name=u'[loading...]')
return Artist(
uri=str(Link.from_artist(spotify_artist)),
name=spotify_artist.name().decode(ENCODING),
)
def to_mopidy_album(self, spotify_album):
if not spotify_album.is_loaded():
return Album(name=u'[loading...]')
# TODO pyspotify got much more data on albums than this
return Album(name=spotify_album.name().decode(ENCODING))
def to_mopidy_track(self, spotify_track):
if not spotify_track.is_loaded():
return Track(title=u'[loading...]')
uri = str(Link.from_track(spotify_track, 0))
return Track(
uri=uri,
title=spotify_track.name().decode(ENCODING),
artists=[self.to_mopidy_artist(a) for a in spotify_track.artists()],
album=self.to_mopidy_album(spotify_track.album()),
track_no=spotify_track.index(),
date=dt.date(spotify_track.album().year(), 1, 1),
length=spotify_track.duration(),
bitrate=320,
id=self.to_mopidy_id(uri),
)
def to_mopidy_playlist(self, spotify_playlist):
if not spotify_playlist.is_loaded():
return Playlist(name=u'[loading...]')
return Playlist(
uri=str(Link.from_playlist(spotify_playlist)),
name=spotify_playlist.name().decode(ENCODING),
tracks=[self.to_mopidy_track(t) for t in spotify_playlist],
)
class LibspotifySessionManager(SpotifySessionManager, threading.Thread):
def __init__(self, username, password, backend):
SpotifySessionManager.__init__(self, username, password)
threading.Thread.__init__(self)
self.backend = backend
self.audio = AlsaController()
self.playlists = []
def run(self):
self.connect()
def logged_in(self, session, error):
logger.info('Logged in')
self.session = session
try:
self.playlists = session.playlist_container()
logger.debug('Got playlist container')
except Exception, e:
logger.exception(e)
def logged_out(self, session):
logger.info('Logged out')
def metadata_updated(self, session):
logger.debug('Metadata updated')
# XXX This changes data "owned" by another thread, and leads to
# segmentation fault. We should use locking and messaging here.
self.backend.stored_playlists.refresh()
def connection_error(self, session, error):
logger.error('Connection error: %s', error)
def message_to_user(self, session, message):
logger.info(message)
def notify_main_thread(self, session):
logger.debug('Notify main thread')
def music_delivery(self, *args, **kwargs):
self.audio.music_delivery(*args, **kwargs)
def play_token_lost(self, session):
logger.debug('Play token lost')
def log_message(self, session, data):
logger.debug(data)
def end_of_track(self, session):
logger.debug('End of track')
|
Python
| 0
|
@@ -1593,85 +1593,343 @@
ef _
-pause(self):%0A # TODO%0A return False%0A%0A def _play(self, track):
+next(self, track):%0A if self.state == self.PLAYING:%0A self._stop()%0A self._play(track)%0A return True%0A%0A def _pause(self):%0A # TODO%0A return False%0A%0A def _play(self, track):%0A if self.state == self.PLAYING:%0A self._stop()%0A if track.uri is None:%0A return False
%0A
@@ -2001,22 +2001,8 @@
ing(
-self._current_
trac
@@ -2078,32 +2078,175 @@
return True%0A%0A
+ def _previous(self, track):%0A if self.state == self.PLAYING:%0A self._stop()%0A self._play(track)%0A return True%0A%0A
def _resume(
@@ -2319,24 +2319,32 @@
self.
+backend.
spotify.sess
|
42b1f3e7b7cc856b099796c7eb118ec2a6a96216
|
Optimize queries
|
lib/aquilon/worker/commands/compile.py
|
lib/aquilon/worker/commands/compile.py
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2013,2014 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq compile`."""
from sqlalchemy.orm import joinedload, subqueryload
from sqlalchemy.sql import and_
from aquilon.aqdb.model import (Personality, Host, Cluster, CompileableMixin,
ServiceInstance)
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.dbwrappers.branch import get_branch_and_author
from aquilon.worker.locks import CompileKey
from aquilon.worker.templates import Plenary, PlenaryCollection
from aquilon.worker.templates.domain import TemplateDomain
class CommandCompile(BrokerCommand):
required_parameters = []
requires_readonly = True
def render(self, session, logger, domain, sandbox,
pancinclude, pancexclude, pancdebug, cleandeps,
**arguments):
dbdomain, dbauthor = get_branch_and_author(session, domain=domain,
sandbox=sandbox, compel=True)
# Grab a shared lock on personalities and services used by the domain.
# Object templates (hosts, clusters) are protected by the domain lock.
plenaries = PlenaryCollection(logger=logger)
for cls_ in CompileableMixin.__subclasses__():
q = session.query(Personality)
q = q.join(cls_)
q = q.filter(and_(cls_.branch == dbdomain,
cls_.sandbox_author == dbauthor))
q = q.reset_joinpoint()
q = q.options(joinedload('paramholder'),
subqueryload('paramholder.parameters'))
for dbpers in q:
plenaries.append(Plenary.get_plenary(dbpers))
q1 = session.query(ServiceInstance)
q1 = q1.join(ServiceInstance.clients)
q1 = q1.filter(and_(Host.branch == dbdomain,
Host.sandbox_author == dbauthor))
q2 = session.query(ServiceInstance)
q2 = q2.join(ServiceInstance.cluster_clients)
q2 = q2.filter(and_(Cluster.branch == dbdomain,
Cluster.sandbox_author == dbauthor))
plenaries.extend(map(Plenary.get_plenary, q1.union(q2)))
if pancdebug:
pancinclude = r'.*'
pancexclude = r'components/spma/functions'
dom = TemplateDomain(dbdomain, dbauthor, logger=logger)
with CompileKey.merge([CompileKey(domain=dbdomain.name, logger=logger),
plenaries.get_key(exclusive=False)]):
dom.compile(session,
panc_debug_include=pancinclude,
panc_debug_exclude=pancexclude,
cleandeps=cleandeps,
locked=True)
return
|
Python
| 0.000002
|
@@ -2382,17 +2382,16 @@
q
-1
= sessi
@@ -2429,19 +2429,19 @@
q
-1
= q
-1.join
+.filter
(Ser
@@ -2456,25 +2456,29 @@
ance.clients
-)
+.any(
%0A q1
@@ -2478,23 +2478,12 @@
-q1 = q1.filter(
+
and_
@@ -2525,27 +2525,16 @@
-
-
Host.san
@@ -2550,32 +2550,59 @@
or == dbauthor))
+)%0A services = set(q)
%0A%0A q2 = s
@@ -2596,17 +2596,16 @@
q
-2
= sessi
@@ -2643,19 +2643,19 @@
q
-2
= q
-2.join
+.filter
(Ser
@@ -2686,33 +2686,26 @@
ents
-)%0A q2 = q2.filter(
+.any(%0A
and_
@@ -2750,27 +2750,16 @@
-
-
Cluster.
@@ -2786,16 +2786,44 @@
author))
+)%0A services.update(q)
%0A%0A
@@ -2845,12 +2845,8 @@
end(
-map(
Plen
@@ -2864,23 +2864,31 @@
nary
-, q1.union(q2))
+(si) for si in services
)%0A%0A
|
e6045c103fa813b60471b57af6a82e274d798afd
|
add interactive console for debugging
|
gateway.py
|
gateway.py
|
#!/usr/bin/env python
import logging
import tornado.options
import tornado.web
import tornado.websocket
import os.path
import obelisk
import json
import threading
# Install Tornado reactor loop into Twister
# http://www.tornadoweb.org/en/stable/twisted.html
from tornado.platform.twisted import TwistedIOLoop
from twisted.internet import reactor
TwistedIOLoop().install()
from tornado.options import define, options
import rest_handlers
import obelisk_handler
import jsonchan
import broadcast
import ticker
define("port", default=8888, help="run on the given port", type=int)
global ioloop
ioloop = tornado.ioloop.IOLoop.instance()
class GatewayApplication(tornado.web.Application):
def __init__(self, service):
settings = dict(debug=True)
client = obelisk.ObeliskOfLightClient(service)
self.obelisk_handler = obelisk_handler.ObeliskHandler(client)
self.brc_handler = broadcast.BroadcastHandler()
self.json_chan_handler = jsonchan.JsonChanHandler()
self.ticker_handler = ticker.TickerHandler()
handlers = [
# /block/<block hash>
(r"/block/([^/]*)(?:/)?", rest_handlers.BlockHeaderHandler),
# /block/<block hash>/transactions
(r"/block/([^/]*)/transactions(?:/)?",
rest_handlers.BlockTransactionsHandler),
# /tx/
(r"/tx(?:/)?", rest_handlers.TransactionPoolHandler),
# /tx/<txid>
(r"/tx/([^/]*)(?:/)?", rest_handlers.TransactionHandler),
# /address/<address>
(r"/address/([^/]*)(?:/)?", rest_handlers.AddressHistoryHandler),
# /height
(r"/height(?:/)?", rest_handlers.HeightHandler),
# /
(r"/", QuerySocketHandler)
]
tornado.web.Application.__init__(self, handlers, **settings)
class QuerySocketHandler(tornado.websocket.WebSocketHandler):
# Set of WebsocketHandler
listeners = set()
# Protects listeners
listen_lock = threading.Lock()
def initialize(self):
self._obelisk_handler = self.application.obelisk_handler
self._brc_handler = self.application.brc_handler
self._json_chan_handler = self.application.json_chan_handler
self._ticker_handler = self.application.ticker_handler
def open(self):
logging.info("OPEN")
with QuerySocketHandler.listen_lock:
self.listeners.add(self)
def on_close(self):
logging.info("CLOSE")
with QuerySocketHandler.listen_lock:
self.listeners.remove(self)
def _check_request(self, request):
return request.has_key("command") and request.has_key("id") and \
request.has_key("params") and type(request["params"]) == list
def on_message(self, message):
try:
request = json.loads(message)
except:
logging.error("Error decoding message: %s", message, exc_info=True)
logging.info("Request: %s", request)
# Check request is correctly formed.
if not self._check_request(request):
logging.error("Malformed request: %s", request, exc_info=True)
return
# Try different handlers until one accepts request and
# processes it.
if self._json_chan_handler.handle_request(self, request):
return
if self._obelisk_handler.handle_request(self, request):
return
if self._brc_handler.handle_request(self, request):
return
if self._ticker_handler.handle_request(self, request):
return
logging.warning("Unhandled command. Dropping request: %s",
request, exc_info=True)
def _send_response(self, response):
try:
self.write_message(json.dumps(response))
except tornado.websocket.WebSocketClosedError:
logging.warning("Dropping response to closed socket: %s",
response, exc_info=True)
def queue_response(self, response):
try:
# calling write_message or the socket is not thread safe
ioloop.add_callback(self._send_response, response)
except:
logging.error("Error adding callback", exc_info=True)
def main(service):
application = GatewayApplication(service)
tornado.autoreload.start(ioloop)
application.listen(8888)
reactor.run()
if __name__ == "__main__":
service = "tcp://127.0.0.1:9091"
main(service)
|
Python
| 0.000001
|
@@ -156,16 +156,28 @@
hreading
+%0Aimport code
%0A%0A# Inst
@@ -4253,16 +4253,338 @@
=True)%0A%0A
+class DebugConsole(threading.Thread):%0A%0A daemon = True%0A%0A def __init__(self, application):%0A self.application = application%0A super(DebugConsole, self).__init__()%0A self.start()%0A%0A def run(self):%0A console = code.InteractiveConsole()%0A code.interact(local=dict(globals(), **locals()))%0A%0A
def main
@@ -4706,16 +4706,62 @@
n(8888)%0A
+ debug_console = DebugConsole(application)%0A
reac
|
62cd48f8a0fc83261af5c4275a38102fc983d3ff
|
Increase failure tolerance for the Dashboard
|
tests/test_dashboard.py
|
tests/test_dashboard.py
|
from datetime import datetime, timedelta
from dateutil import parser as date_parser
import pytest
import pytz
from web_test_base import *
class TestIATIDashboard(WebTestBase):
requests_to_load = {
'Dashboard Homepage': {
'url': 'http://dashboard.iatistandard.org/'
}
}
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = utility.get_links_from_page(loaded_request)
assert "https://github.com/IATI/IATI-Dashboard/" in result
def test_recently_generated(self, loaded_request):
"""
Tests that the dashboard was generated in the past 2 days.
"""
max_delay = timedelta(days=2)
generation_time_xpath = '//*[@id="footer"]/div/p/em[1]'
data_time_xpath = '//*[@id="footer"]/div/p/em[2]'
generation_time_arr = utility.get_text_from_xpath(loaded_request, generation_time_xpath)
data_time_arr = utility.get_text_from_xpath(loaded_request, data_time_xpath)
generation_time = date_parser.parse(generation_time_arr[0])
data_time = date_parser.parse(data_time_arr[0])
now = datetime.now(pytz.utc)
assert len(generation_time_arr) == 1
assert len(data_time_arr) == 1
assert (now - max_delay) < generation_time
assert (now - max_delay) < data_time
|
Python
| 0
|
@@ -695,17 +695,17 @@
he past
-2
+7
days.%0A
@@ -750,17 +750,17 @@
ta(days=
-2
+7
)%0A
|
c076fb75d40b85b593bd569eaf7f6e13ab95cdd8
|
Replace Pykka internals misuse with proxies
|
mopidy/frontends/mpd/actor.py
|
mopidy/frontends/mpd/actor.py
|
import logging
import sys
import pykka
from mopidy import settings
from mopidy.core import CoreListener
from mopidy.frontends.mpd import session
from mopidy.utils import encoding, network, process
logger = logging.getLogger('mopidy.frontends.mpd')
class MpdFrontend(pykka.ThreadingActor, CoreListener):
"""
The MPD frontend.
**Dependencies:**
- None
**Settings:**
- :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
- :attr:`mopidy.settings.MPD_SERVER_PORT`
- :attr:`mopidy.settings.MPD_SERVER_PASSWORD`
"""
def __init__(self, core):
super(MpdFrontend, self).__init__()
hostname = network.format_hostname(settings.MPD_SERVER_HOSTNAME)
port = settings.MPD_SERVER_PORT
try:
network.Server(
hostname, port,
protocol=session.MpdSession, protocol_kwargs={'core': core},
max_connections=settings.MPD_SERVER_MAX_CONNECTIONS)
except IOError as error:
logger.error(
u'MPD server startup failed: %s',
encoding.locale_decode(error))
sys.exit(1)
logger.info(u'MPD server running at [%s]:%s', hostname, port)
def on_stop(self):
process.stop_actors_by_class(session.MpdSession)
def send_idle(self, subsystem):
# FIXME this should be updated once pykka supports non-blocking calls
# on proxies or some similar solution
pykka.ActorRegistry.broadcast({
'command': 'pykka_call',
'attr_path': ('on_idle',),
'args': [subsystem],
'kwargs': {},
}, target_class=session.MpdSession)
def playback_state_changed(self, old_state, new_state):
self.send_idle('player')
def playlist_changed(self):
self.send_idle('playlist')
def options_changed(self):
self.send_idle('options')
def volume_changed(self):
self.send_idle('mixer')
|
Python
| 0.000014
|
@@ -1329,200 +1329,107 @@
-# FIXME this should be updated once pykka supports non-blocking calls%0A # on proxies or some similar solution%0A pykka.ActorRegistry.broadcast(%7B%0A 'command': 'pykka_call',
+listeners = pykka.ActorRegistry.get_by_class(session.MpdSession)%0A for listener in listeners:
%0A
@@ -1441,136 +1441,54 @@
-'
+get
attr
-_path': ('on_idle',),%0A 'args': %5Bsubsystem%5D,%0A 'kwargs': %7B%7D,%0A %7D, target_class=session.MpdSession
+(listener.proxy(), 'on_idle')(subsystem
)%0A%0A
|
c68833f3c464720b676080705d2df4f7e37c4392
|
fix template render() expect Context and not dict
|
feincms/tests/applicationcontent_urls.py
|
feincms/tests/applicationcontent_urls.py
|
"""
This is a dummy module used to test the ApplicationContent
"""
from django import template
from django.conf.urls.defaults import *
from django.http import HttpResponse, HttpResponseRedirect
def module_root(request):
return 'module_root'
def args_test(request, kwarg1, kwarg2):
return HttpResponse(u'%s-%s' % (kwarg1, kwarg2))
def reverse_test(request):
t = template.Template('home:{% url ac_module_root %} args:{% url ac_args_test "xy" "zzy" %} base:{% url feincms.views.applicationcontent.handler "test" %}')
return t.render(template.Context())
def raises(request):
raise NotImplementedError, 'not really not implemented, but it is as good as anything for the test'
def fragment(request):
t = template.Template('{% load applicationcontent_tags %}{% fragment request "something" %}some things{% endfragment %}')
return t.render({'request': request})
def redirect(request):
return HttpResponseRedirect('../')
urlpatterns = patterns('',
url(r'^$', module_root, name='ac_module_root'),
url(r'^args_test/([^/]+)/([^/]+)/$', args_test, name='ac_args_test'),
url(r'^kwargs_test/(?P<kwarg2>[^/]+)/(?P<kwarg1>[^/]+)/$', args_test),
url(r'^reverse_test/$', reverse_test),
url(r'^raises/$', raises),
url(r'^fragment/$', fragment),
url(r'^redirect/$', redirect),
)
|
Python
| 0.000001
|
@@ -864,16 +864,33 @@
.render(
+template.Context(
%7B'reques
@@ -902,16 +902,17 @@
equest%7D)
+)
%0A%0A%0Adef r
|
2da6415008f42b0295772730428fb8dc1b3137d2
|
Add more tests for JST regression case
|
tests/test_ext_tasks.py
|
tests/test_ext_tasks.py
|
# -*- coding: utf-8 -*-
"""
Tests for discord.ext.tasks
"""
import asyncio
import datetime
import pytest
import sys
from discord import utils
from discord.ext import tasks
@pytest.mark.asyncio
async def test_explicit_initial_runs_tomorrow_single():
now = utils.utcnow()
if not ((0, 4) < (now.hour, now.minute) < (23, 59)):
await asyncio.sleep(5 * 60) # sleep for 5 minutes
now = utils.utcnow()
has_run = False
async def inner():
nonlocal has_run
has_run = True
time = utils.utcnow() - datetime.timedelta(minutes=1)
# a loop that should have an initial run tomorrow
loop = tasks.loop(time=datetime.time(hour=time.hour, minute=time.minute))(inner)
loop.start()
await asyncio.sleep(1)
try:
assert not has_run
finally:
loop.cancel()
@pytest.mark.asyncio
async def test_explicit_initial_runs_tomorrow_multi():
now = utils.utcnow()
if not ((0, 4) < (now.hour, now.minute) < (23, 59)):
await asyncio.sleep(5 * 60) # sleep for 5 minutes
now = utils.utcnow()
# multiple times that are in the past for today
times = []
for _ in range(3):
now -= datetime.timedelta(minutes=1)
times.append(datetime.time(hour=now.hour, minute=now.minute))
has_run = False
async def inner():
nonlocal has_run
has_run = True
# a loop that should have an initial run tomorrow
loop = tasks.loop(time=times)(inner)
loop.start()
await asyncio.sleep(1)
try:
assert not has_run
finally:
loop.cancel()
def test_task_regression_issue7659():
jst = datetime.timezone(datetime.timedelta(hours=9))
# 00:00, 03:00, 06:00, 09:00, 12:00, 15:00, 18:00, 21:00
times = [datetime.time(hour=h, tzinfo=jst) for h in range(0, 24, 3)]
@tasks.loop(time=times)
async def loop():
pass
before_midnight = datetime.datetime(2022, 3, 12, 23, 50, 59, tzinfo=jst)
after_midnight = before_midnight + datetime.timedelta(minutes=9, seconds=2)
expected_before_midnight = datetime.datetime(2022, 3, 13, 0, 0, 0, tzinfo=jst)
expected_after_midnight = datetime.datetime(2022, 3, 13, 3, 0, 0, tzinfo=jst)
assert loop._get_next_sleep_time(before_midnight) == expected_before_midnight
assert loop._get_next_sleep_time(after_midnight) == expected_after_midnight
@pytest.mark.skipif(sys.version_info < (3, 9), reason="zoneinfo requires 3.9")
def test_task_is_imaginary():
import zoneinfo
tz = zoneinfo.ZoneInfo('America/New_York')
# 2:30 AM was skipped
dt = datetime.datetime(2022, 3, 13, 2, 30, tzinfo=tz)
assert tasks.is_imaginary(dt)
now = utils.utcnow()
# UTC time is never imaginary or ambiguous
assert not tasks.is_imaginary(now)
@pytest.mark.skipif(sys.version_info < (3, 9), reason="zoneinfo requires 3.9")
def test_task_is_ambiguous():
import zoneinfo
tz = zoneinfo.ZoneInfo('America/New_York')
# 1:30 AM happened twice
dt = datetime.datetime(2022, 11, 6, 1, 30, tzinfo=tz)
assert tasks.is_ambiguous(dt)
now = utils.utcnow()
# UTC time is never imaginary or ambiguous
assert not tasks.is_imaginary(now)
@pytest.mark.skipif(sys.version_info < (3, 9), reason="zoneinfo requires 3.9")
@pytest.mark.parametrize(
('dt', 'key', 'expected'),
[
(datetime.datetime(2022, 11, 6, 1, 30), 'America/New_York', datetime.datetime(2022, 11, 6, 1, 30, fold=1)),
(datetime.datetime(2022, 3, 13, 2, 30), 'America/New_York', datetime.datetime(2022, 3, 13, 3, 30)),
(datetime.datetime(2022, 4, 8, 2, 30), 'America/New_York', datetime.datetime(2022, 4, 8, 2, 30)),
(datetime.datetime(2023, 1, 7, 12, 30), 'UTC', datetime.datetime(2023, 1, 7, 12, 30)),
],
)
def test_task_date_resolve(dt, key, expected):
import zoneinfo
tz = zoneinfo.ZoneInfo(key)
actual = tasks.resolve_datetime(dt.replace(tzinfo=tz))
expected = expected.replace(tzinfo=tz)
assert actual == expected
|
Python
| 0
|
@@ -2369,16 +2369,375 @@
dnight%0A%0A
+ today = datetime.date.today()%0A minute_before = %5Bdatetime.datetime.combine(today, time, tzinfo=jst) - datetime.timedelta(minutes=1) for time in times%5D%0A%0A for before, expected_time in zip(minute_before, times):%0A expected = datetime.datetime.combine(today, expected_time, tzinfo=jst)%0A assert loop._get_next_sleep_time(before) == expected%0A%0A
%0A@pytest
|
8b364a2b9eaff5e038d47a114746458de56b4ed5
|
fix apitype casing for underscore classes
|
saga/base.py
|
saga/base.py
|
import string
import saga.utils.logger
import saga.engine.engine
class SimpleBase (object) :
""" This is a very simple API base class which just initializes
the self._logger and self._engine members, but does not perform any further
initialization, nor any adaptor binding. This base is used for API classes
which are not backed by a (single) adaptor (session, task, etc).
"""
def __init__ (self) :
self._apitype = self._get_apitype ()
self._engine = saga.engine.engine.Engine ()
self._logger = saga.utils.logger.getLogger (self._apitype)
print self._engine
self._logger.debug ("[saga.Base] %s.__init__()" % self._apitype)
def get_session (self) :
"""
Returns the session which is managing the object instance. For objects
which do not accept a session handle on construction, this call returns
None.
The object's session is also available via the `session` property.
"""
return self._adaptor.get_session ()
session = property (get_session)
def _get_apitype (self) :
apitype = self.__module__ + '.' + self.__class__.__name__
name_parts = apitype.split ('.')
l = len(name_parts)
if len > 2 :
t1 = name_parts [l-1]
t2 = name_parts [l-2]
if t1 == string.capwords (t2) :
del name_parts[l-2]
apitype = string.join (name_parts, '.')
return apitype
class Base (SimpleBase) :
def __init__ (self, schema, adaptor, adaptor_state, *args, **kwargs) :
print "schema2: %s" % schema
SimpleBase.__init__ (self)
self._adaptor = adaptor
print "schema3: %s" % schema
self._adaptor = self._engine.bind_adaptor (self, self._apitype, schema, adaptor)
self._init_task = self._adaptor.init_instance (adaptor_state, *args, **kwargs)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Python
| 0.000001
|
@@ -1335,24 +1335,60 @@
parts %5Bl-2%5D%0A
+ t2 = t2.replace ('_', ' ')
%0A i
@@ -1390,15 +1390,11 @@
-if t1 =
+t2
= st
@@ -1411,16 +1411,75 @@
rds (t2)
+%0A t2 = t2.replace (' ', '')%0A%0A if t1 == t2
:%0A
|
1a830910b104476630bf5d1bc8a0eb8f757c1e20
|
Fix scope for self view in orgchart.
|
mozillians/dino_park/views.py
|
mozillians/dino_park/views.py
|
import requests
import urlparse
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import HttpResponseForbidden, JsonResponse
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from mozillians.common.decorators import allow_public
from mozillians.dino_park.utils import UserAccessLevel
@never_cache
@login_required
def main(request):
return render(request, 'dino_park/index.html', {})
@never_cache
def orgchart(request):
"""Internal routing to expose orgchart service."""
scope = UserAccessLevel.get_privacy(request)
if scope not in [UserAccessLevel.STAFF, UserAccessLevel.PRIVATE]:
return HttpResponseForbidden()
url_parts = urlparse.ParseResult(
scheme='http',
netloc=settings.DINO_PARK_ORGCHART_SVC,
path='/orgchart',
params='',
query='',
fragment=''
)
url = urlparse.urlunparse(url_parts)
resp = requests.get(url)
resp.raise_for_status()
return JsonResponse(resp.json(), safe=False)
@never_cache
def orgchart_get_by_username(request, path, username):
"""Internal routing to expose orgchart service by user_id."""
scope = UserAccessLevel.get_privacy(request)
if scope not in [UserAccessLevel.STAFF, UserAccessLevel.PRIVATE]:
return HttpResponseForbidden()
url_parts = urlparse.ParseResult(
scheme='http',
netloc=settings.DINO_PARK_ORGCHART_SVC,
path='/orgchart/{0}/{1}'.format(path, username),
params='',
query='',
fragment=''
)
url = urlparse.urlunparse(url_parts)
resp = requests.get(url)
resp.raise_for_status()
return JsonResponse(resp.json(), safe=False)
@never_cache
@allow_public
def search_simple(request, query):
"""Internal routing to expose simple search."""
scope = UserAccessLevel.get_privacy(request)
url_parts = urlparse.ParseResult(
scheme='http',
netloc=settings.DINO_PARK_SEARCH_SVC,
path='/search/simple/{}/{}'.format(scope, query),
params='',
query=request.GET.urlencode(),
fragment=''
)
url = urlparse.urlunparse(url_parts)
resp = requests.get(url)
resp.raise_for_status()
return JsonResponse(resp.json(), safe=False)
@never_cache
@allow_public
def search_get_profile(request, username, scope=None):
"""Internal routing to expose search by user ID."""
try:
user = User.objects.get(username=username)
except (User.DoesNotExist, User.MultipleObjectsReturned):
pass
else:
if user == request.user:
scope = UserAccessLevel.PRIVATE
if not scope:
scope = UserAccessLevel.get_privacy(request)
url_parts = urlparse.ParseResult(
scheme='http',
netloc=settings.DINO_PARK_SEARCH_SVC,
path='/search/get/{}/{}'.format(scope, username),
params='',
query='',
fragment=''
)
url = urlparse.urlunparse(url_parts)
resp = requests.get(url)
resp.raise_for_status()
return JsonResponse(resp.json(), safe=False)
|
Python
| 0
|
@@ -1255,24 +1255,268 @@
user_id.%22%22%22%0A
+ try:%0A user = User.objects.get(username=username)%0A except (User.DoesNotExist, User.MultipleObjectsReturned):%0A pass%0A else:%0A if user == request.user:%0A scope = UserAccessLevel.PRIVATE%0A else:%0A
scope =
@@ -1544,32 +1544,40 @@
rivacy(request)%0A
+
if scope not
@@ -1622,32 +1622,40 @@
Level.PRIVATE%5D:%0A
+
return H
@@ -1649,36 +1649,36 @@
return
-Http
+Json
Response
Forbidden()%0A
@@ -1661,34 +1661,27 @@
JsonResponse
-Forbidden(
+(%7B%7D
)%0A%0A url_p
|
b282b2df1aed13cf5ad42b656f9cc2910feb90c2
|
removed whitespace
|
python-wallaroo/wallaroo/client/node.py
|
python-wallaroo/wallaroo/client/node.py
|
# Copyright (c) 2013 Red Hat, Inc.
# Author: William Benton (willb@redhat.com)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .proxy import Proxy, proxied_attr
from .proxy import proxied_attr_get as pag, proxied_attr_set as pas, proxied_attr_getset as pags
from .arc_utils import arcmethod
import errors
from errors import not_implemented, fail
class node(Proxy):
name = property(pag("name"))
memberships = property(*pags("memberships"))
identity_group = property(pag("identity_group"))
provisioned = property(*pags("provisioned"))
modifyMemberships = arcmethod(pag("memberships"), pas("memberships"), heterogeneous=True, preserve_order=True)
def getConfig(self, **options):
if len(options) > 0:
not_implemented()
return self.cm.fetch_json_resource("/config/node/%s" % self.name)
def makeProvisioned(self):
self.provisioned = True
self.update()
def explain(self):
not_implemented()
def whatChanged(old, new):
not_implemented()
proxied_attr(node, "name")
proxied_attr(node, "memberships")
proxied_attr(node, "identity_group")
proxied_attr(node, "provisioned")
|
Python
| 0.999428
|
@@ -847,17 +847,16 @@
, fail%0A%0A
-%0A
class no
|
a8718082754246e32d0611655a4f6f487a34ddb7
|
Add missed copyright notice
|
tests/test_fileaware.py
|
tests/test_fileaware.py
|
import os
import tempfile
from contextlib import contextmanager
import environ
import pytest
@contextmanager
def make_temp_file(text):
with tempfile.NamedTemporaryFile("w", delete=False) as f:
f.write(text)
f.close()
try:
yield f.name
finally:
if os.path.exists(f.name):
os.unlink(f.name)
@pytest.fixture
def tmp_f():
with make_temp_file(text="fish") as f_name:
yield f_name
def test_mapping(tmp_f):
env = environ.FileAwareMapping(env={"ANIMAL_FILE": tmp_f})
assert env["ANIMAL"] == "fish"
def test_precidence(tmp_f):
env = environ.FileAwareMapping(
env={
"ANIMAL_FILE": tmp_f,
"ANIMAL": "cat",
}
)
assert env["ANIMAL"] == "fish"
def test_missing_file_raises_exception():
env = environ.FileAwareMapping(env={"ANIMAL_FILE": "non-existant-file"})
with pytest.raises(FileNotFoundError):
env["ANIMAL"]
def test_iter():
env = environ.FileAwareMapping(
env={
"ANIMAL_FILE": "some-file",
"VEGETABLE": "leek",
"VEGETABLE_FILE": "some-vegetable-file",
}
)
keys = set(env)
assert keys == {"ANIMAL_FILE", "ANIMAL", "VEGETABLE", "VEGETABLE_FILE"}
assert "ANIMAL" in keys
def test_len():
env = environ.FileAwareMapping(
env={
"ANIMAL_FILE": "some-file",
"VEGETABLE": "leek",
"VEGETABLE_FILE": "some-vegetable-file",
}
)
assert len(env) == 4
def test_cache(tmp_f):
env = environ.FileAwareMapping(env={"ANIMAL_FILE": tmp_f})
assert env["ANIMAL"] == "fish"
with open(tmp_f, "w") as f:
f.write("cat")
assert env["ANIMAL"] == "fish"
os.unlink(tmp_f)
assert not os.path.exists(env["ANIMAL_FILE"])
assert env["ANIMAL"] == "fish"
def test_no_cache(tmp_f):
env = environ.FileAwareMapping(
cache=False,
env={"ANIMAL_FILE": tmp_f},
)
assert env["ANIMAL"] == "fish"
with open(tmp_f, "w") as f:
f.write("cat")
assert env["ANIMAL"] == "cat"
os.unlink(tmp_f)
assert not os.path.exists(env["ANIMAL_FILE"])
with pytest.raises(FileNotFoundError):
assert env["ANIMAL"]
def test_setdefault(tmp_f):
env = environ.FileAwareMapping(env={"ANIMAL_FILE": tmp_f})
assert env.setdefault("FRUIT", "apple") == "apple"
assert env.setdefault("ANIMAL", "cat") == "fish"
assert env.env == {"ANIMAL_FILE": tmp_f, "FRUIT": "apple"}
class TestDelItem:
def test_del_key(self):
env = environ.FileAwareMapping(env={"FRUIT": "apple"})
del env["FRUIT"]
with pytest.raises(KeyError):
env["FRUIT"]
def test_del_key_with_file_key(self):
env = environ.FileAwareMapping(env={"ANIMAL_FILE": "some-file"})
del env["ANIMAL"]
with pytest.raises(KeyError):
env["ANIMAL"]
def test_del_shadowed_key_with_file_key(self):
env = environ.FileAwareMapping(
env={"ANIMAL_FILE": "some-file", "ANIMAL": "cat"}
)
del env["ANIMAL"]
with pytest.raises(KeyError):
env["ANIMAL"]
def test_del_file_key(self):
env = environ.FileAwareMapping(
env={
"ANIMAL_FILE": "some-file",
"ANIMAL": "fish",
}
)
del env["ANIMAL_FILE"]
assert env["ANIMAL"] == "fish"
def test_del_file_key_clears_cache(self, tmp_f):
env = environ.FileAwareMapping(
env={
"ANIMAL_FILE": tmp_f,
"ANIMAL": "cat",
}
)
assert env["ANIMAL"] == "fish"
del env["ANIMAL_FILE"]
assert env["ANIMAL"] == "cat"
class TestSetItem:
def test_set_key(self):
env = environ.FileAwareMapping(env={"FRUIT": "apple"})
env["FRUIT"] = "banana"
assert env["FRUIT"] == "banana"
def test_cant_override_key_with_file_key(self, tmp_f):
env = environ.FileAwareMapping(
env={
"FRUIT": "apple",
"FRUIT_FILE": tmp_f,
}
)
with open(tmp_f, "w") as f:
f.write("banana")
env["FRUIT"] = "cucumber"
assert env["FRUIT"] == "banana"
def test_set_file_key(self, tmp_f):
env = environ.FileAwareMapping(env={"ANIMAL": "cat"})
env["ANIMAL_FILE"] = tmp_f
assert env["ANIMAL"] == "fish"
def test_change_file_key_clears_cache(self, tmp_f):
env = environ.FileAwareMapping(env={"ANIMAL_FILE": tmp_f})
assert env["ANIMAL"] == "fish"
with make_temp_file(text="cat") as new_tmp_f:
env["ANIMAL_FILE"] = new_tmp_f
assert env["ANIMAL"] == "cat"
|
Python
| 0
|
@@ -1,12 +1,323 @@
+# This file is part of the django-environ.%0A#%0A# Copyright (c) 2021, Serghei Iakovlev %3Cegrep@protonmail.ch%3E%0A# Copyright (c) 2013-2021, Daniele Faraglia %3Cdaniele.faraglia@gmail.com%3E%0A#%0A# For the full copyright and license information, please view%0A# the LICENSE.txt file that was distributed with this source code.%0A%0A
import os%0Aim
|
2023f8ff3534b2bd70a4f31d247033b91dd76e79
|
Fix crash when urlrewrite is configured to allow infinite rewriting
|
flexget/plugins/urlrewrite_urlrewrite.py
|
flexget/plugins/urlrewrite_urlrewrite.py
|
from __future__ import unicode_literals, division, absolute_import
import re
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('urlrewrite')
class UrlRewrite(object):
"""
Generic configurable urlrewriter.
Example::
urlrewrite:
demonoid:
regexp: http://www\.demonoid\.com/files/details/
format: http://www.demonoid.com/files/download/HTTP/
"""
resolves = {}
# built-in resolves
# resolves = yaml.safe_load("""
# tvsubtitles:
# match: http://www.tvsubtitles.net/subtitle-
# replace: http://www.tvsubtitles.net/download-
# """
# )
schema = {
'type': 'object',
'additionalProperties': {
'type': 'object',
'properties': {
'regexp': {'type': 'string', 'format': 'regex'},
'format': {'type': 'string'}
},
'required': ['regexp', 'format'],
'additionalProperties': False
}
}
def on_task_start(self, task, config):
for name, rewrite_config in config.iteritems():
match = re.compile(rewrite_config['regexp'])
format = rewrite_config['format']
self.resolves[name] = {'regexp_compiled': match, 'format': format, 'regexp': rewrite_config['regexp']}
log.debug('Added rewrite %s' % name)
def url_rewritable(self, task, entry):
log.trace('running url_rewritable')
log.trace(self.resolves)
for name, config in self.resolves.iteritems():
regexp = config['regexp_compiled']
log.trace('testing %s' % config['regexp'])
if regexp.search(entry['url']):
return True
return False
def url_rewrite(self, task, entry):
for name, config in self.resolves.iteritems():
regexp = config['regexp_compiled']
format = config['format']
if regexp.search(entry['url']):
log.debug('Regexp resolving %s with %s' % (entry['url'], name))
# run the regexp
entry['url'] = regexp.sub(format, entry['url'])
if regexp.match(entry['url']):
entry.fail('urlrewriting')
task.purge()
from flexget.plugins.plugin_urlrewriting import UrlRewritingError
raise UrlRewritingError('Regexp %s result should NOT continue to match!' % name)
return
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewrite, 'urlrewrite', groups=['urlrewriter'], api_ver=2)
|
Python
| 0
|
@@ -144,16 +144,82 @@
rt event
+%0Afrom flexget.plugins.plugin_urlrewriting import UrlRewritingError
%0A%0Alog =
@@ -2327,127 +2327,8 @@
g')%0A
- task.purge()%0A from flexget.plugins.plugin_urlrewriting import UrlRewritingError%0A
|
7e7fef808da6a350722660844ba2b913d5b4daa7
|
Fix tests that fail after CET enters DST (#217)
|
tests/test_formatter.py
|
tests/test_formatter.py
|
from datetime import date, datetime, time, timedelta
import pytest
import pytz
from freezegun import freeze_time
from todoman.cli import cli
@pytest.mark.parametrize('interval', [
(65, 'in a minute'),
(-10800, '3 hours ago'),
])
@pytest.mark.parametrize('tz', ['CET', 'HST'])
def test_humanized_date(runner, create, interval, now_for_tz, tz):
seconds, expected = interval
due = now_for_tz(tz) + timedelta(seconds=seconds)
create(
'test.ics',
'SUMMARY:Hi human!\n'
'DUE;VALUE=DATE-TIME;TZID={}:{}\n'
.format(tz, due.strftime('%Y%m%dT%H%M%S'))
)
result = runner.invoke(cli, ['--humanize', 'list', '--all'])
assert not result.exception
assert expected in result.output
def test_format_priority(default_formatter):
assert default_formatter.format_priority(None) == 'none'
assert default_formatter.format_priority(0) == 'none'
assert default_formatter.format_priority(5) == 'medium'
for i in range(1, 5):
assert default_formatter.format_priority(i) == 'high'
for i in range(6, 10):
assert default_formatter.format_priority(i) == 'low'
def test_format_priority_compact(default_formatter):
assert default_formatter.format_priority_compact(None) == ''
assert default_formatter.format_priority_compact(0) == ''
assert default_formatter.format_priority_compact(5) == '!!'
for i in range(1, 5):
assert default_formatter.format_priority_compact(i) == '!!!'
for i in range(6, 10):
assert default_formatter.format_priority_compact(i) == '!'
def test_format_date(default_formatter):
assert default_formatter.format_datetime(date(2017, 3, 4)) == '2017-03-04'
def test_format_datetime(default_formatter):
assert default_formatter.format_datetime(datetime(2017, 3, 4, 17, 00)) == \
'2017-03-04 17:00'
def test_detailed_format(runner, todo_factory):
todo_factory(
description='Test detailed formatting\n'
'This includes multiline descriptions\n'
'Blah!',
location='Over the hills, and far away',
)
# TODO:use formatter instead of runner?
result = runner.invoke(cli, ['show', '1'])
expected = (
'1 [ ] YARR! @default\n\n'
'Description Test detailed formatting\n'
' This includes multiline descriptions\n'
' Blah!\n'
'Location Over the hills, and far away'
)
assert not result.exception
assert result.output.strip() == expected
def test_parse_time(default_formatter):
tz = pytz.timezone('CET')
parsed = default_formatter.parse_datetime('12:00')
expected = datetime.combine(
date.today(),
time(hour=12, minute=0),
).replace(tzinfo=tz)
assert parsed == expected
def test_parse_datetime(default_formatter):
tz = pytz.timezone('CET')
parsed = default_formatter.parse_datetime('2017-03-05')
assert parsed == datetime(2017, 3, 5).replace(tzinfo=tz)
parsed = default_formatter.parse_datetime('2017-03-05 12:00')
assert parsed == datetime(2017, 3, 5, 12).replace(tzinfo=tz)
# Notes. will round to the NEXT matching date, so we need to freeze time
# for this one:
with freeze_time('2017-03-04'):
parsed = default_formatter.parse_datetime(
'Mon Mar 6 22:50:52 -03 2017'
)
assert parsed == datetime(2017, 3, 6, 20, 17).replace(tzinfo=tz)
assert default_formatter.parse_datetime('') is None
assert default_formatter.parse_datetime(None) is None
def test_humanized_parse_datetime(humanized_formatter):
tz = pytz.timezone('CET')
humanized_formatter.now = datetime(2017, 3, 6, 22, 17).replace(tzinfo=tz)
dt = datetime(2017, 3, 6, 20, 17).replace(tzinfo=tz)
assert humanized_formatter.format_datetime(dt) == '2 hours ago'
assert humanized_formatter.format_datetime(None) == ''
def test_simple_action(default_formatter, todo_factory):
todo = todo_factory()
assert default_formatter.simple_action('Delete', todo) == \
'Delete "YARR!"'
def test_formatting_parsing_consitency(default_formatter):
tz = pytz.timezone('CET')
dt = datetime(2017, 3, 8, 21, 6).replace(tzinfo=tz)
formatted = default_formatter.format_datetime(dt)
assert default_formatter.parse_datetime(formatted) == dt
|
Python
| 0
|
@@ -281,16 +281,43 @@
'HST'%5D)%0A
+@freeze_time('2017-03-25')%0A
def test
|
e1d7d822c71779102ccb31106a71cce33970d75a
|
Address review comment: Better variable name.
|
flocker/control/test/test_persistence.py
|
flocker/control/test/test_persistence.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for ``flocker.control._persistence``.
"""
from uuid import uuid4
from eliot.testing import validate_logging, assertHasMessage, assertHasAction
from twisted.internet import reactor
from twisted.trial.unittest import TestCase, SynchronousTestCase
from twisted.python.filepath import FilePath
from pyrsistent import PRecord
from .._persistence import (
ConfigurationPersistenceService, wire_decode, wire_encode,
_LOG_SAVE, _LOG_STARTUP,
)
from .._model import (
Deployment, Application, DockerImage, Node, Dataset, Manifestation,
AttachedVolume, SERIALIZABLE_CLASSES)
DATASET = Dataset(dataset_id=unicode(uuid4()),
metadata={u"name": u"myapp"})
MANIFESTATION = Manifestation(dataset=DATASET, primary=True)
TEST_DEPLOYMENT = Deployment(
nodes=[Node(hostname=u'node1.example.com',
applications=[
Application(
name=u'myapp',
image=DockerImage.from_string(u'postgresql:7.6'),
volume=AttachedVolume(
manifestation=MANIFESTATION,
mountpoint=FilePath(b"/xxx/yyy"))
)],
manifestations={DATASET.dataset_id: MANIFESTATION})])
class ConfigurationPersistenceServiceTests(TestCase):
"""
Tests for ``ConfigurationPersistenceService``.
"""
def service(self, path, logger=None):
"""
Start a service, schedule its stop.
:param FilePath path: Where to store data.
:param logger: Optional eliot ``Logger`` to set before startup.
:return: Started ``ConfigurationPersistenceService``.
"""
service = ConfigurationPersistenceService(reactor, path)
if logger:
self.patch(service, "logger", logger)
service.startService()
self.addCleanup(service.stopService)
return service
def test_empty_on_start(self):
"""
If no configuration was previously saved, starting a service results
in an empty ``Deployment``.
"""
service = self.service(FilePath(self.mktemp()))
self.assertEqual(service.get(), Deployment(nodes=frozenset()))
def test_directory_is_created(self):
"""
If a directory does not exist in given path, it is created.
"""
path = FilePath(self.mktemp())
self.service(path)
self.assertTrue(path.isdir())
def test_file_is_created(self):
"""
If no configuration file exists in the given path, it is created.
"""
path = FilePath(self.mktemp())
self.service(path)
self.assertTrue(path.child(b"current_configuration.v1.json").exists())
@validate_logging(assertHasAction, _LOG_SAVE, True,
dict(configuration=TEST_DEPLOYMENT))
def test_save_then_get(self, logger):
"""
A configuration that was saved can subsequently retrieved.
"""
service = self.service(FilePath(self.mktemp()), logger)
d = service.save(TEST_DEPLOYMENT)
d.addCallback(lambda _: service.get())
d.addCallback(self.assertEqual, TEST_DEPLOYMENT)
return d
@validate_logging(assertHasMessage, _LOG_STARTUP,
dict(configuration=TEST_DEPLOYMENT))
def test_persist_across_restarts(self, logger):
"""
A configuration that was saved can be loaded from a new service.
"""
path = FilePath(self.mktemp())
service = ConfigurationPersistenceService(reactor, path)
service.startService()
d = service.save(TEST_DEPLOYMENT)
d.addCallback(lambda _: service.stopService())
def retrieve_in_new_service(_):
new_service = self.service(path, logger)
self.assertEqual(new_service.get(), TEST_DEPLOYMENT)
d.addCallback(retrieve_in_new_service)
return d
def test_register_for_callback(self):
"""
Callbacks can be registered that are called every time there is a
change saved.
"""
service = self.service(FilePath(self.mktemp()))
l = []
l2 = []
service.register(lambda: l.append(1))
d = service.save(TEST_DEPLOYMENT)
def saved(_):
service.register(lambda: l2.append(1))
return service.save(TEST_DEPLOYMENT)
d.addCallback(saved)
def saved_again(_):
self.assertEqual((l, l2), ([1, 1], [1]))
d.addCallback(saved_again)
return d
@validate_logging(
lambda test, logger:
test.assertEqual(len(logger.flush_tracebacks(ZeroDivisionError)), 1))
def test_register_for_callback_failure(self, logger):
"""
Failed callbacks don't prevent later callbacks from being called.
"""
service = self.service(FilePath(self.mktemp()), logger)
l = []
service.register(lambda: 1/0)
service.register(lambda: l.append(1))
d = service.save(TEST_DEPLOYMENT)
def saved(_):
self.assertEqual(l, [1])
d.addCallback(saved)
return d
class WireEncodeDecodeTests(SynchronousTestCase):
"""
Tests for ``wire_encode`` and ``wire_decode``.
"""
def test_encode_to_bytes(self):
"""
``wire_encode`` converts the given object to ``bytes``.
"""
self.assertIsInstance(wire_encode(TEST_DEPLOYMENT), bytes)
def test_roundtrip(self):
"""
``wire_decode`` returns object passed to ``wire_encode``.
"""
self.assertEqual(TEST_DEPLOYMENT,
wire_decode(wire_encode(TEST_DEPLOYMENT)))
def test_no_arbitrary_decoding(self):
"""
``wire_decode`` will not decode classes that are not in
``SERIALIZABLE_CLASSES``.
"""
class Temp(PRecord):
"""A class."""
SERIALIZABLE_CLASSES.append(Temp)
def cleanup():
if Temp in SERIALIZABLE_CLASSES:
SERIALIZABLE_CLASSES.remove(Temp)
self.addCleanup(cleanup)
data = wire_encode(Temp())
SERIALIZABLE_CLASSES.remove(Temp)
# Possibly future versions might throw exception, the key point is
# that the returned object is not a Temp instance.
self.assertFalse(isinstance(wire_decode(data), Temp))
|
Python
| 0
|
@@ -4211,33 +4211,41 @@
emp()))%0A
-l
+callbacks
= %5B%5D%0A l2
@@ -4242,17 +4242,25 @@
-l
+callbacks
2 = %5B%5D%0A
@@ -4283,33 +4283,41 @@
egister(lambda:
-l
+callbacks
.append(1))%0A
@@ -4414,17 +4414,25 @@
lambda:
-l
+callbacks
2.append
@@ -4577,12 +4577,28 @@
al((
-l, l
+callbacks, callbacks
2),
@@ -5023,17 +5023,25 @@
-l
+callbacks
= %5B%5D%0A
@@ -5109,17 +5109,25 @@
lambda:
-l
+callbacks
.append(
@@ -5224,17 +5224,25 @@
rtEqual(
-l
+callbacks
, %5B1%5D)%0A
|
bfc248e92afdc02a8b3089d7a2a5194b7f55c2fe
|
add in the split out setup_ufw_rules to the api and setupnode
|
woven/api.py
|
woven/api.py
|
#!/usr/bin/env python
"""
The full public woven api
"""
from fabric.state import env
from woven.decorators import run_once_per_node, run_once_per_version
from woven.deployment import deploy_files, mkdirs
from woven.deployment import upload_template
from woven.environment import check_settings, deployment_root, set_env, patch_project, get_project_version, server_state, set_server_state
from woven.environment import set_version_state, version_state, get_packages
from woven.project import deploy_static, deploy_media, deploy_project, deploy_db, deploy_templates
from woven.linux import add_user, install_package, port_is_open, skip_disable_root
from woven.linux import install_packages, post_install_package, post_setupnode, uninstall_packages
from woven.linux import upgrade_packages, setup_ufw, disable_root
from woven.linux import add_repositories, restrict_ssh, upload_ssh_key
from woven.linux import change_ssh_port, set_timezone, lsb_release, upload_etc
from woven.virtualenv import activate, active_version
from woven.virtualenv import mkvirtualenv, rmvirtualenv, pip_install_requirements
from woven.virtualenv import post_deploy
from woven.webservers import deploy_wsgi, deploy_webconf, start_webserver, stop_webserver, reload_webservers
from woven.webservers import webserver_list
def deploy(overwrite=False):
"""
deploy a versioned project on the host
"""
check_settings()
if overwrite:
rmvirtualenv()
deploy_funcs = [deploy_project,deploy_templates, deploy_static, deploy_media, deploy_webconf, deploy_wsgi]
if not patch_project() or overwrite:
deploy_funcs = [deploy_db,mkvirtualenv,pip_install_requirements] + deploy_funcs
for func in deploy_funcs: func()
def setupnode(overwrite=False):
"""
Install a baseline host. Can be run multiple times
"""
if not port_is_open():
if not skip_disable_root():
disable_root()
port_changed = change_ssh_port()
#avoid trying to take shortcuts if setupnode did not finish
#on previous execution
if server_state('setupnode-incomplete'): env.overwrite=True
else: set_server_state('setupnode-incomplete')
upload_ssh_key()
restrict_ssh()
add_repositories()
upgrade_packages()
setup_ufw()
uninstall_packages()
install_packages()
upload_etc()
post_install_package()
set_timezone()
set_server_state('setupnode-incomplete',delete=True)
#stop and start webservers - and reload nginx
for s in webserver_list():
stop_webserver(s)
start_webserver(s)
|
Python
| 0
|
@@ -796,16 +796,33 @@
tup_ufw,
+ setup_ufw_rules,
disable
@@ -2111,16 +2111,24 @@
plete'):
+%0A
env.ove
@@ -2389,16 +2389,33 @@
e()%0A
+setup_ufw_rules()
%0A set
|
bb74df460e0d1823048cbb3e09ce882e519167c5
|
make not empty strings hold in node.text in XML backend
|
anyconfig/backend/xml_.py
|
anyconfig/backend/xml_.py
|
#
# Copyright (C) 2011 - 2014 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
# pylint: disable=R0921
from anyconfig.globals import LOGGER as logging
import anyconfig.backend.base as Base
import anyconfig.compat as AC
SUPPORTED = True
try:
# First, try lxml which is compatible with elementtree and looks faster a
# lot. See also: http://getpython3.com/diveintopython3/xml.html
from lxml2 import etree
except ImportError:
try:
import xml.etree.ElementTree as etree
except ImportError:
try:
import elementtree.ElementTree as etree
except ImportError:
logging.warn("ElementTree module is not available. Disabled "
"XML support.")
SUPPORTED = False
if SUPPORTED:
def etree_getroot_fromstring(s):
"""
:param s: A XML string
:return: etree object gotten by parsing ``s``
"""
return etree.ElementTree(etree.fromstring(s)).getroot()
def etree_getroot_fromsrc(src):
"""
:param src: A file name/path or a file[-like] object or a URL
:return: etree object gotten by parsing ``s``
"""
return etree.parse(src).getroot()
else:
def _dummy_fun(*args, **kwargs):
logging.warn("Return None as XML module is not available: "
"args=%s, kwargs=%s", ','.join(args), str(kwargs))
return None
etree_getroot_fromstring = etree_getroot_fromsrc = _dummy_fun
def etree_to_container(root, container):
"""
Convert XML ElementTree to a collection of container objects.
:param root: etree root object or None
:param container: A nested dict like objects
"""
tree = container()
if root is None:
return tree
tree[root.tag] = container()
if root.attrib:
tree[root.tag]["attrs"] = container(AC.iteritems(root.attrib))
if root.text:
tree[root.tag]["text"] = root.text.strip()
if len(root): # It has children.
# FIXME: Configuration item cannot have both attributes and
# values (list) at the same time in current implementation:
tree[root.tag]["children"] = [etree_to_container(c, container) for c
in root]
return tree
class XmlConfigParser(Base.ConfigParser):
_type = "xml"
_extensions = ["xml"]
_supported = SUPPORTED
@classmethod
def loads(cls, config_content, **kwargs):
"""
:param config_content: Config file content
:param kwargs: optional keyword parameters to be sanitized :: dict
:return: cls.container() object holding config parameters
"""
root = etree_getroot_fromstring(config_content)
return etree_to_container(root, cls.container())
@classmethod
def load(cls, config_path, **kwargs):
"""
:param config_path: Config file path
:param kwargs: optional keyword parameters to be sanitized :: dict
:return: cls.container() object holding config parameters
"""
root = etree_getroot_fromsrc(config_path)
return etree_to_container(root, cls.container())
@classmethod
def dumps_impl(cls, data, **kwargs):
"""
:param data: Data to dump :: dict
:param kwargs: backend-specific optional keyword parameters :: dict
:return: string represents the configuration
"""
raise NotImplementedError("XML dumper not implemented yet!")
# vim:sw=4:ts=4:et:
|
Python
| 0.000001
|
@@ -1898,16 +1898,38 @@
oot.text
+ and root.text.strip()
:%0A
|
1a755a0512753111fedc229241c57d302f0115d4
|
Refactor test_importers
|
tests/test_importers.py
|
tests/test_importers.py
|
try:
from mock import Mock
except ImportError:
from unittest.mock import Mock
import yaml
from passpie.importers import find_importer
from passpie.importers.default import DefaultImporter
def test_find_importer_returns_first_match_default_importer(mocker):
mock_importer = Mock()
mock_importer2 = Mock()
mock_importer.match.return_value = False
mock_importer2.match.return_value = True
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.get_instances',
return_value=[mock_importer, mock_importer2])
importer = find_importer('mockpath')
assert importer is mock_importer2
def test_default_importer_match_passpie_exported_yaml(mocker):
dict_content = {'handler': 'passpie', 'version': 1.0}
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
return_value=dict_content)
result = DefaultImporter().match('filepath')
assert result is True
def test_default_importer_returns_false_when_missing_version_key(mocker):
dict_content = {'handler': 'passpie'}
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
return_value=dict_content)
result = DefaultImporter().match('filepath')
assert result is False
def test_default_importer_returns_false_when_missing_handler_key(mocker):
dict_content = {'version': 1.0}
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
return_value=dict_content)
result = DefaultImporter().match('filepath')
assert result is False
def test_default_importer_returns_false_when_version_keys_isnt_float(mocker):
dict_content = {'version': '1.0'}
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
return_value=dict_content)
result = DefaultImporter().match('filepath')
assert result is False
def test_default_importer_returns_loaded_credentials_from_yaml_file(mocker):
dict_content = {'credentials': {'name': 'foo', 'name': 'bar'}}
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
return_value=dict_content)
result = DefaultImporter().handle('filepath')
assert result is dict_content.get('credentials')
def test_default_importer_match_returns_false_when_bad_yaml(mocker):
mocker.patch('passpie.importers.default.os.path.isfile', return_value=True)
mocker.patch('passpie.importers.default.DefaultImporter._read_file')
mocker.patch('passpie.importers.default.yaml.load',
side_effect=[yaml.scanner.ScannerError])
result = DefaultImporter().match('filepath')
assert result is False
|
Python
| 0
|
@@ -167,16 +167,25 @@
.default
+_importer
import
@@ -452,32 +452,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -860,32 +860,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -949,32 +949,41 @@
mporters.default
+_importer
.DefaultImporter
@@ -1031,32 +1031,41 @@
mporters.default
+_importer
.yaml.load',%0A
@@ -1334,32 +1334,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -1423,32 +1423,41 @@
mporters.default
+_importer
.DefaultImporter
@@ -1505,32 +1505,41 @@
mporters.default
+_importer
.yaml.load',%0A
@@ -1803,32 +1803,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -1892,32 +1892,41 @@
mporters.default
+_importer
.DefaultImporter
@@ -1974,32 +1974,41 @@
mporters.default
+_importer
.yaml.load',%0A
@@ -2278,32 +2278,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -2367,32 +2367,41 @@
mporters.default
+_importer
.DefaultImporter
@@ -2449,32 +2449,41 @@
mporters.default
+_importer
.yaml.load',%0A
@@ -2781,32 +2781,41 @@
mporters.default
+_importer
.os.path.isfile'
@@ -2870,32 +2870,41 @@
mporters.default
+_importer
.DefaultImporter
@@ -2952,32 +2952,41 @@
mporters.default
+_importer
.yaml.load',%0A
@@ -3244,16 +3244,25 @@
.default
+_importer
.os.path
@@ -3333,16 +3333,25 @@
.default
+_importer
.Default
@@ -3415,16 +3415,25 @@
.default
+_importer
.yaml.lo
|
dbb153b4681a1fa73f93855e86d3657e4fff9bfb
|
remove self
|
tests/test_interface.py
|
tests/test_interface.py
|
''' Script to check the readout system interface (software + FPGA firmware).
A global register test is performed with pyBAR and a simulation of the FPGA + FE-I4.
'''
import unittest
import shutil
import mock
from Queue import Empty
import subprocess
import time
import os
from pybar.run_manager import RunManager
from pybar.fei4.register_utils import FEI4RegisterUtils
from pybar.scans.test_register import RegisterTest
def configure_pixel(self, same_mask_for_all_dc=False):
return
def send_commands(self, commands, repeat=1, wait_for_finish=True, concatenate=True, byte_padding=False, clear_memory=False, use_timeout=True):
# no timeout for simulation
use_timeout = False
# append some zeros since simulation is more slow
commands = commands.extend(self.register.get_commands("zeros", length=20))
return FEI4RegisterUtils.send_commands(self, commands=commands, repeat=repeat, wait_for_finish=wait_for_finish, concatenate=concatenate, byte_padding=byte_padding, clear_memory=clear_memory, use_timeout=use_timeout)
class TestInterface(unittest.TestCase):
@classmethod
def setUpClass(cls):
subprocess.call('unzip -o test_interface/sim_build.zip', shell=True)
subprocess.Popen(['make', '-f', '../firmware/mio/cosim/Makefile', 'sim_only'])
time.sleep(10) # some time for simulator to start
@classmethod
def tearDownClass(cls):
shutil.rmtree('test_interface/module_test', ignore_errors=True)
shutil.rmtree('./sim_build', ignore_errors=True)
try:
os.remove('./results.xml')
except OSError:
pass
# keep waveform file
# shutil.rmtree('./tb.vcd', ignore_errors=True)
@mock.patch('pybar.fei4.register_utils.FEI4RegisterUtils.configure_pixel', side_effect=lambda *args, **kwargs: configure_pixel(*args, **kwargs))
@mock.patch('pybar.fei4.register_utils.FEI4RegisterUtils.send_commands', side_effect=lambda *args, **kwargs: send_commands(*args, **kwargs))
def test_global_register(self, mock_send_commands, mock_configure_pixel):
run_manager = RunManager('test_interface/configuration.yaml')
run_manager.run_run(RegisterTest, run_conf={'test_pixel': False})
error_msg = 'Global register test failed. '
try:
error_msg += str(run_manager.current_run.err_queue.get(timeout=1)[1])
except Empty:
pass
ok = (run_manager.current_run._run_status == 'FINISHED')
self.assertTrue(ok, msg=error_msg)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestInterface)
unittest.TextTestRunner(verbosity=2).run(suite)
|
Python
| 0.000039
|
@@ -436,22 +436,16 @@
e_pixel(
-self,
same_mas
@@ -488,38 +488,32 @@
f send_commands(
-self,
commands, repeat
|
2c67dd081895d00ffb33e29d8750b3f80121dfe5
|
Change import
|
tests/test_judicious.py
|
tests/test_judicious.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `judicious` package."""
import pytest
from judicious import judicious
@pytest.fixture
def response():
"""Sample pytest fixture.
See more at: http://doc.pytest.org/en/latest/fixture.html
"""
# import requests
# return requests.get('https://github.com/audreyr/cookiecutter-pypackage')
def test_content(response):
"""Sample pytest test function with the pytest fixture as an argument."""
# from bs4 import BeautifulSoup
# assert 'GitHub' in BeautifulSoup(response.content).title.string
|
Python
| 0.000001
|
@@ -98,23 +98,8 @@
t%0A%0A%0A
-from judicious
impo
|
9a278ac9ea0c124cfd108f276bc5d74da6c5c50c
|
Update notebook test
|
tests/test_notebooks.py
|
tests/test_notebooks.py
|
# Copyright 2017 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
import sys
import time
import traceback
import nbformat
import pytest
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors.execute import CellExecutionError
from gpflow.test_util import session_context
# blacklisted notebooks should have a unique basename
BLACKLISTED_NOTEBOOKS = []
def _nbpath():
this_dir = os.path.dirname(__file__)
return os.path.join(this_dir, '../notebooks2')
def get_notebooks():
"""
Returns all notebooks in `_nbpath` that are not blacklisted.
"""
def notebook_blacklisted(nb):
blacklisted_notebooks_basename = map(os.path.basename, BLACKLISTED_NOTEBOOKS)
return os.path.basename(nb) in blacklisted_notebooks_basename
# recursively traverse the notebook directory in search for ipython notebooks
all_notebooks = glob.iglob(os.path.join(_nbpath(), '**', '*.ipynb'), recursive=True)
notebooks_to_test = [nb for nb in all_notebooks if not notebook_blacklisted(nb)]
return notebooks_to_test
def _preproc():
pythonkernel = 'python' + str(sys.version_info[0])
return ExecutePreprocessor(timeout=300, kernel_name=pythonkernel, interrupt_on_timeout=True)
def _exec_notebook(notebook_filename):
with open(notebook_filename) as notebook_file:
nb = nbformat.read(notebook_file, as_version=nbformat.current_nbformat)
try:
meta_data = {'path': os.path.dirname(notebook_filename)}
_preproc().preprocess(nb, {'metadata': meta_data})
except CellExecutionError as cell_error:
traceback.print_exc(file=sys.stdout)
msg = 'Error executing the notebook {0}. See above for error.\nCell error: {1}'
pytest.fail(msg.format(notebook_filename, str(cell_error)))
def _exec_notebook_ts(notebook_filename):
with session_context():
ts = time.time()
_exec_notebook(notebook_filename)
elapsed = time.time() - ts
print(notebook_filename, 'took {0} seconds.'.format(elapsed))
@pytest.mark.notebooks
@pytest.mark.parametrize('notebook_file', get_notebooks())
def test_notebook(notebook_file):
_exec_notebook_ts(notebook_file)
|
Python
| 0
|
@@ -610,20 +610,8 @@
sys%0A
-import time%0A
impo
@@ -778,54 +778,8 @@
or%0A%0A
-from gpflow.test_util import session_context%0A%0A
# bl
@@ -2346,202 +2346,40 @@
-with session_context():%0A ts = time.time()%0A _exec_notebook(notebook_filename)%0A elapsed = time.time() - ts%0A print(notebook_filename, 'took %7B0%7D seconds.'.format(elapsed)
+_exec_notebook(notebook_filename
)%0A%0A%0A
|
be412947c0fe30dd659298cd7641b5a701310f7d
|
add auth option
|
gitdata.py
|
gitdata.py
|
"""GitHub query CLI.
cli() --------------> Handle command-line arguments.
"""
import os
import click
from click.testing import CliRunner
#------------------------------------------------------------------------------
@click.group()
@click.version_option(version='1.0', prog_name='Photerino')
def cli():
"""\b
---------------
| ? | ? | ? | /// gitdata
---------------
| ? | ? | ? | Retrieve data via GitHub REST API.
---------------
"""
hexdump(filename=file, offset=offset, totbytes=nbytes)
#------------------------------------------------------------------------------
@cli.command()
def members():
click.echo('/// members subcommand')
#------------------------------------------------------------------------------
@cli.command()
def repos():
click.echo('/// repos subcommand')
# code to execute when running standalone: -------------------------------------
if __name__ == '__main__':
print('/// need to implement tests here')
|
Python
| 0.000004
|
@@ -217,21 +217,143 @@
---%0A
-@click.group(
+CONTEXT_SETTINGS = dict(help_option_names=%5B'-h', '--help'%5D)%0A@click.group(context_settings=CONTEXT_SETTINGS, options_metavar='%3Coptions%3E'
)%0A@c
@@ -602,61 +602,40 @@
-hexdump(filename=file, offset=offset, totbytes=nbytes
+click.echo('/// NOT IMPLEMENTED'
)%0A%0A#
@@ -724,24 +724,107 @@
i.command()%0A
+@click.option('-a', '--auth', default='', help='GitHub username', metavar='%3Cstr%3E')%0A
def members(
@@ -823,16 +823,20 @@
members(
+auth
):%0A c
@@ -967,16 +967,99 @@
mmand()%0A
+@click.option('-a', '--auth', default='', help='GitHub username', metavar='%3Cstr%3E')%0A
def repo
@@ -1060,16 +1060,20 @@
f repos(
+auth
):%0A c
|
c0b5b971c184894afb66cfc1c8d2eb97cfc17d92
|
Change test code to subclass UserString, not str.
|
tests/test_serialize.py
|
tests/test_serialize.py
|
"""Tests for toron._serialize module."""
import unittest
from collections import namedtuple, OrderedDict
from toron._serialize import get_primitive_repr
from toron._serialize import dumps
class TestGetPrimitiveRepr(unittest.TestCase):
def test_supported_types(self):
"""Check that all supported instance types get expected reprs."""
supported_instances = [
('abc', "'abc'"), # str
(b'xyz', "b'xyz'"), # bytes
(123, '123'), # int
(1.125, '1.125'), # float
(True, 'True'), # bool
(None, 'None'), # NoneType
((3+0j), '(3+0j)'), # complex
]
for obj, obj_repr in supported_instances:
with self.subTest(obj=obj):
self.assertEqual(get_primitive_repr(obj), obj_repr)
def test_unsupported_types(self):
"""Should return None for unsupported types (containers, etc.)"""
self.assertIsNone(get_primitive_repr(Ellipsis))
self.assertIsNone(get_primitive_repr([1, 2]))
self.assertIsNone(get_primitive_repr({'a': 1}))
def test_exact_type_matching(self):
"""Values that are a subclass of supported types should get None."""
class StrSubclass(str):
pass
instance_of_str_subclass = StrSubclass('abc')
self.assertIsNone(get_primitive_repr(instance_of_str_subclass))
def test_no_valid_literal_repr(self):
"""Values that don't have a literal representation must return
a None value even if the instance is of a supported type.
"""
self.assertIsNone(get_primitive_repr(float('nan')))
self.assertIsNone(get_primitive_repr(float('inf')))
class TestDumpS(unittest.TestCase):
def test_primitive_types(self):
self.assertEqual(dumps(1.125), '1.125')
self.assertEqual(dumps(b'abc'), "b'abc'")
def test_list_or_tuple(self):
self.assertEqual(dumps([4, 8, 2]), "[4, 8, 2]")
self.assertEqual(dumps((1, 'a', 2.25)), "(1, 'a', 2.25)")
msg = 'should not serialize nested containers'
with self.assertRaises(TypeError, msg=msg):
dumps([1, [2, 3]])
msg = 'should not serialize instances of subclasses'
with self.assertRaises(TypeError, msg=msg):
coord = namedtuple('coord', ['x', 'y'])
dumps(coord(1, 2))
def test_set(self):
msg = 'serialized form should always be in sorted order'
self.assertEqual(dumps({4, 8, 2}), "{2, 4, 8}", msg=msg)
msg = 'mixed types should sort without problems'
self.assertEqual(dumps({None, 2, 'a', 1.25}), "{'a', 1.25, 2, None}", msg=msg)
msg = 'should not serialize nested containers'
with self.assertRaises(TypeError, msg=msg):
dumps({4, (8, 2)})
msg = 'should not serialize instances of subclasses'
with self.assertRaises(TypeError, msg=msg):
dumps(frozenset([1, 2, 3]))
def test_dict(self):
msg = 'serialized form should always be in sorted order'
self.assertEqual(dumps({'b': 2, 'a': 1}), "{'a': 1, 'b': 2}", msg=msg)
msg = 'mixed types should sort without problems'
self.assertEqual(dumps({None: 2, 'a': 1.25}), "{'a': 1.25, None: 2}", msg=msg)
msg = 'should not serialize nested containers'
with self.assertRaises(TypeError, msg=msg):
dumps({4: (8, 2)})
msg = 'should not serialize non-primitive keys'
with self.assertRaises(TypeError, msg=msg):
dumps({(4, 8): 2})
msg = 'should not serialize instances of subclasses'
with self.assertRaises(TypeError, msg=msg):
dumps(OrderedDict([('b', 2), ('a', 1)]))
def test_unsupported_types(self):
with self.assertRaises(TypeError):
dumps(frozenset([1, 2, 3]))
with self.assertRaises(TypeError):
dumps(Ellipsis)
|
Python
| 0
|
@@ -98,16 +98,28 @@
eredDict
+, UserString
%0Afrom to
@@ -1263,19 +1263,26 @@
ubclass(
-str
+UserString
):%0A
|
049d734486627224b87cba72c575450515060c55
|
fix split
|
vlermv/_s3.py
|
vlermv/_s3.py
|
import tempfile
import boto
from ._abstract import AbstractVlermv
def split(x):
return x.split('/')
class S3Vlermv(AbstractVlermv):
def __init__(self, bucketname, *args, connect_s3 = boto.connect_s3, **kwargs):
super(S3Vlermv, self).__init__(**kwargs)
self.bucket = connect_s3().create_bucket(bucketname)
def __repr__(self):
return 'S3Vlermv(%s)' % repr(self.bucket.name)
def __setitem__(self, index, obj):
keyname = self.filename(index)
key = self.bucket.new_key(keyname)
with tempfile.NamedTemporaryFile('w+' + self._b()) as tmp:
self.serializer.dump(obj, tmp.file)
tmp.file.close()
key.set_contents_from_filename(tmp.name, replace = True)
def __contains__(self, keyname):
return self.bucket.get_key(keyname) != None
def __getitem__(self, keyname):
key = self.bucket.get_key(keyname)
if key:
with tempfile.NamedTemporaryFile('w+' + self._b()) as tmp:
key.get_contents_to_filename(tmp.name)
tmp.file.seek(0)
value = self.serializer.load(tmp.file)
return value
else:
raise KeyError(keyname)
def keys(self, **kwargs):
for k in self.bucket.list(**kwargs):
yield self.transformer.from_path(split(k.name))
def __delitem__(self, index):
super(S3Vlermv, self).__delitem__(index)
raise NotImplementedError
def __len__(self):
return sum(1 for _ in self.keys())
|
Python
| 0.000001
|
@@ -87,16 +87,22 @@
return
+tuple(
x.split(
@@ -105,16 +105,17 @@
lit('/')
+)
%0A%0Aclass
|
cb5fd2c5b7b34b2c0c080563c50527f6c690177e
|
Añade condición, solo guarda Voluntario=si
|
core/management/commands/importdata.py
|
core/management/commands/importdata.py
|
import argparse
from django.core.management.base import BaseCommand
from openpyxl import load_workbook
from core import models
class Command(BaseCommand):
help = 'Importa datos desde un archivo.'
def add_arguments(self, parser):
parser.add_argument('filename', type=argparse.FileType('rb'))
def print_row_details(self, row_number, data):
print('-- Línea {row_number}'.format(row_number=row_number))
for key, value in data.items():
print('Columna `{key}`: {value!r}'.format(key=key, value=value))
def process_worksheet(self, ws):
header = None
for row_number, row in enumerate(ws.rows, 1):
# Row 1 to 3 are empty
if row_number in [1, 2, 3]:
continue
# First row contains the headers
if not header:
header = [cell.value for cell in row]
print('Leída cabecera: ', header)
continue
# Regular row
values = [cell.value for cell in row]
if not any(values):
print('Fila {row_number} vacía, seguimos'.format(row_number=row_number))
continue
print('Leída fila: ', values)
data = dict(zip(header, values))
self.print_row_details(row_number, data)
# Create or update
person_data = {
'name': data['Nome'],
'surname': data['Apelidos'],
# 'role': data['Rol'],
# 'group': data['Grupo'],
'phone_number': data['Teléfono fixo'] or '',
'mobile_number': data['Teléfono movil'] or '',
'email': data['Email'] or '',
}
membership_data = {
'membership_fee': data['Cuota socio'] or 0,
'payment_status': data['Pago'] or '',
'membership_status': data['Estado'] or '',
}
person_membership_data = {
'id_card_status': data['DNI autorizado'] or '',
'ss_card_status': data['Tarjeta sanitaria'] or '',
'photo_status': data['Foto'] or '',
'dpa_status': data['LOPD'] or '',
}
volunteer_data = {
'volunteer': data['Voluntario'] or '',
}
# `card_status´
por_entregar = data['Carnet entregar'] == 'si'
entregado = data['Carnet entregado'] == 'si'
if entregado:
card_status = 'Entregado'
elif por_entregar:
card_status = 'Por entregar'
else:
card_status = 'Falta documentación'
person_membership_data['card_status'] = card_status
# Store on database
person, created = models.Person.objects.update_or_create(
id=data['IdUsuario'],
defaults=person_data,
)
action = 'Creada' if created else 'Actualizada'
msg = '{} persona con UID {}'.format(action, person.id)
self.stdout.write(self.style.SUCCESS(msg))
volunteer, created = models.Volunteer.objects.update_or_create(
person=person,
defaults=volunteer_data,
)
action = 'Creada' if created else 'Actualizada'
msg = '{} voluntario con UID {}'.format(action, volunteer.id)
self.stdout.write(self.style.SUCCESS(msg))
membership, created = models.Membership.objects.update_or_create(
id=data['UIDMembresia'],
defaults=membership_data,
)
action = 'Creada' if created else 'Actualizada'
msg = '{} membresía con UID {}'.format(action, membership.id)
self.stdout.write(self.style.SUCCESS(msg))
print('Membership defaults: ', person_membership_data)
person_membership, created = models.PersonMembership.objects.update_or_create(
person=person,
membership=membership,
defaults=person_membership_data,
)
action = 'Creada' if created else 'Actualizada'
msg = '{} membresía con ID {}'.format(action, person_membership.id)
self.stdout.write(self.style.SUCCESS(msg))
def handle(self, *args, **options):
fp = options['filename']
wb = load_workbook(fp, read_only=True)
ws = wb['usuarios']
self.process_worksheet(ws)
self.stdout.write(self.style.SUCCESS(
'Importación finalizada con éxito.'))
|
Python
| 0.000001
|
@@ -3133,32 +3133,119 @@
.SUCCESS(msg))%0A%0A
+ is_volunteer = data%5B'Voluntario'%5D == 'si'%0A if is_volunteer:%0A
volu
@@ -3312,32 +3312,36 @@
+
person=person,%0A
@@ -3331,32 +3331,36 @@
person=person,%0A
+
@@ -3388,34 +3388,42 @@
ta,%0A
-)%0A
+ )%0A
acti
@@ -3470,32 +3470,36 @@
da'%0A
+
+
msg = '%7B%7D volunt
@@ -3548,32 +3548,36 @@
id)%0A
+
self.stdout.writ
@@ -3596,32 +3596,72 @@
.SUCCESS(msg))%0A%0A
+ else:%0A pass%0A%0A
memb
|
be0d287b2b23b89e5bf121558fc5ee4ea97710c2
|
Fix generated file location for inclusion in sdist target when building with scons.
|
numpy/core/setupscons.py
|
numpy/core/setupscons.py
|
import os
import sys
import glob
from os.path import join, basename
from numpy.distutils import log
from numscons import get_scons_build_dir
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration,dot_join
from numpy.distutils.system_info import get_info, default_lib_dirs
config = Configuration('core',parent_package,top_path)
local_dir = config.local_path
header_dir = 'include/numpy' # this is relative to config.path_in_package
config.add_subpackage('code_generators')
# List of files to register to numpy.distutils
dot_blas_src = [join('blasdot', '_dotblas.c'),
join('blasdot', 'cblas.h')]
api_definition = [join('code_generators', 'array_api_order.txt'),
join('code_generators', 'multiarray_api_order.txt'),
join('code_generators', 'ufunc_api_order.txt')]
core_src = [join('src', basename(i)) for i in glob.glob(join(local_dir,
'src',
'*.c'))]
core_src += [join('src', basename(i)) for i in glob.glob(join(local_dir,
'src',
'*.src'))]
source_files = dot_blas_src + api_definition + core_src + \
[join(header_dir, 'numpyconfig.h.in')]
# Add generated files to distutils...
def add_config_header():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
target = join(scons_build_dir, local_dir, 'config.h')
incl_dir = os.path.dirname(target)
if incl_dir not in config.numpy_include_dirs:
config.numpy_include_dirs.append(incl_dir)
def add_numpyconfig_header():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
target = join(scons_build_dir, local_dir, 'include/numpy/numpyconfig.h')
incl_dir = os.path.dirname(target)
if incl_dir not in config.numpy_include_dirs:
config.numpy_include_dirs.append(incl_dir)
config.add_data_files((header_dir, target))
def add_array_api():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
h_file = join(scons_build_dir, local_dir, '__multiarray_api.h')
t_file = join(scons_build_dir, local_dir, 'multiarray_api.txt')
config.add_data_files((header_dir, h_file),
(header_dir, t_file))
def add_ufunc_api():
scons_build_dir = get_scons_build_dir()
# XXX: I really have to think about how to communicate path info
# between scons and distutils, and set the options at one single
# location.
h_file = join(scons_build_dir, local_dir, '__ufunc_api.h')
t_file = join(scons_build_dir, local_dir, 'ufunc_api.txt')
config.add_data_files((header_dir, h_file),
(header_dir, t_file))
def add_generated_files(*args, **kw):
add_config_header()
add_numpyconfig_header()
add_array_api()
add_ufunc_api()
config.add_sconscript('SConstruct',
post_hook = add_generated_files,
source_files = source_files)
config.add_data_files('include/numpy/*.h')
config.add_include_dirs('src')
config.numpy_include_dirs.extend(config.paths('include'))
# Don't install fenv unless we need them.
if sys.platform == 'cygwin':
config.add_data_dir('include/numpy/fenv')
config.add_data_dir('tests')
config.make_svn_version_py()
return config
if __name__=='__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)
|
Python
| 0
|
@@ -254,16 +254,86 @@
ot_join%0A
+ from numpy.distutils.command.scons import get_scons_pkg_build_dir%0A
from
@@ -395,16 +395,16 @@
ib_dirs%0A
-
%0A con
@@ -1848,38 +1848,46 @@
target = join(
+get_
scons_
+pkg_
build_dir, local
@@ -1879,27 +1879,29 @@
uild_dir
-, local_dir
+(config.name)
, 'confi
@@ -2321,38 +2321,46 @@
target = join(
+get_
scons_
+pkg_
build_dir, local
@@ -2352,28 +2352,53 @@
uild_dir
-, local_dir,
+(config.name), %0A
'includ
@@ -2879,38 +2879,46 @@
h_file = join(
+get_
scons_
+pkg_
build_dir, local
@@ -2906,35 +2906,37 @@
kg_build_dir
-, local_dir
+(config.name)
, '__multiar
@@ -2961,38 +2961,46 @@
t_file = join(
+get_
scons_
+pkg_
build_dir, local
@@ -2992,27 +2992,29 @@
uild_dir
-, local_dir
+(config.name)
, 'multi
@@ -3387,38 +3387,46 @@
h_file = join(
+get_
scons_
+pkg_
build_dir, local
@@ -3418,27 +3418,29 @@
uild_dir
-, local_dir
+(config.name)
, '__ufu
@@ -3472,22 +3472,30 @@
= join(
+get_
scons_
+pkg_
build_di
@@ -3499,19 +3499,21 @@
_dir
-, local_dir
+(config.name)
, 'u
|
394473b6d8fb9898a19bb7e3bd2141a59572adec
|
更新 modules users/apps.py, 修正 PEP8 警告
|
commonrepo/users/apps.py
|
commonrepo/users/apps.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.apps import AppConfig
from actstream import registry
class UsersAppConfig(AppConfig):
name = 'commonrepo.users'
def ready(self):
registry.register(self.get_model('User'))
import commonrepo.users.signals
|
Python
| 0
|
@@ -142,16 +142,17 @@
gistry%0A%0A
+%0A
class Us
|
265411a6b3ca799b1e20a5e63839a9dc71eaff5c
|
Remove old changes
|
libqtile/layout/zoomy.py
|
libqtile/layout/zoomy.py
|
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2011 Paul Colomiets
# Copyright (c) 2012 Craig Barnes
# Copyright (c) 2012, 2014 Tycho Andersen
# Copyright (c) 2013 Tao Sauvage
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 dmpayton
# Copyright (c) 2014 dequis
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import division
from .base import SingleWindow
class Zoomy(SingleWindow):
"""
A layout with single active windows, and few other previews at the
right
"""
defaults = [
("columnwidth", 150, "Width of the right column"),
("property_name", "ZOOM", "Property to set on zoomed window"),
("property_small", "0.1", "Property value to set on zoomed window"),
("property_big", "1.0", "Property value to set on normal window"),
("margin", 0, "Margin of the layout"),
]
def __init__(self, **config):
SingleWindow.__init__(self, **config)
self.add_defaults(Zoomy.defaults)
self.clients = []
self.focused = None
def _get_window(self):
return self.focused
def focus_first(self):
if self.clients:
return self.clients[0]
def focus_last(self):
if self.clients:
return self.clients[-1]
def focus_next(self, client):
if client not in self.clients:
return
idx = self.clients.index(client)
return self.clients[(idx + 1) % len(self.clients)]
def focus_previous(self, client):
if not self.clients:
return
idx = self.clients.index(client)
return self.clients[idx - 1]
def clone(self, group):
c = SingleWindow.clone(self, group)
c.clients = []
return c
def add(self, client):
self.clients.insert(0, client)
self.focus(client)
def remove(self, client):
if client not in self.clients:
return
if self.focused == client:
self.focused = self.focus_previous(client)
if self.focused == client:
self.focused = None
self.clients.remove(client)
return self.focused
def configure(self, client, screen):
left, right = screen.hsplit(screen.width - self.columnwidth)
if client is self.focused:
client.place(
left.x,
left.y,
left.width,
left.height,
0,
None,
margin=self.margin,
)
else:
h = right.width * left.height // left.width
client_index = self.clients.index(client)
focused_index = self.clients.index(self.focused)
offset = client_index - focused_index - 1
if offset < 0:
offset += len(self.clients)
if h * (len(self.clients) - 1) < right.height:
client.place(
right.x,
right.y + h * offset,
right.width,
h,
0,
None,
margin=self.margin,
)
else:
hh = (right.height - h) // (len(self.clients) - 1)
client.place(
right.x,
right.y + hh * offset,
right.width,
h,
0,
None,
margin=self.margin,
)
client.unhide()
def info(self):
d = SingleWindow.info(self)
d["clients"] = [x.name for x in self.clients]
return d
def focus(self, win):
if self.focused and self.property_name and self.focused.window.get_property(
self.property_name,
"UTF8_STRING"
) is not None:
self.focused.window.set_property(
self.property_name,
self.property_small,
"UTF8_STRING",
format=8
)
SingleWindow.focus(self, win)
if self.property_name:
self.focused = win
win.window.set_property(
self.property_name,
self.property_big,
"UTF8_STRING",
format=8
)
def cmd_next(self):
client = self.focus_next(self.focused) or self.focus_first()
self.group.focus(client, False)
cmd_down = cmd_next
def cmd_previous(self):
client = self.focus_previous(self.focused) or self.focus_last()
self.group.focus(client, False)
cmd_up = cmd_previous
def get_state(self):
d = SingleWindow.info(self)
d["clients"] = [x.window.wid for x in self.clients]
if self.focused is not None:
d["focused"] = self.focused.window.wid
return d
def restore_state(self, info, windowMap):
self.clients = [windowMap[x] for x in info["clients"]]
try:
self.focused = windowMap[info["focused"]]
except KeyError: # No window is current
pass
|
Python
| 0.000016
|
@@ -5677,475 +5677,4 @@
ous%0A
-%0A def get_state(self):%0A d = SingleWindow.info(self)%0A d%5B%22clients%22%5D = %5Bx.window.wid for x in self.clients%5D%0A if self.focused is not None:%0A d%5B%22focused%22%5D = self.focused.window.wid%0A return d%0A%0A def restore_state(self, info, windowMap):%0A%0A self.clients = %5BwindowMap%5Bx%5D for x in info%5B%22clients%22%5D%5D%0A try:%0A self.focused = windowMap%5Binfo%5B%22focused%22%5D%5D%0A except KeyError: # No window is current%0A pass%0A
|
999b8c29c7e3be974b29f69b8e007fcc77742e03
|
fix broken test case
|
tests/test_transcode.py
|
tests/test_transcode.py
|
import os
from nose.tools import eq_
from pyexcel_cli.transcode import transcode
from click.testing import CliRunner
def test_simple_option():
runner = CliRunner()
test_fixture = os.path.join("tests", "fixtures",
"transcode_simple.csv")
output = "test_simple_option.csv"
result = runner.invoke(transcode, ["--csv-lineterminator", "\r\n", test_fixture, output])
print(result.output)
eq_(result.exit_code, 0)
with open(output, 'r') as f:
content = f.read()
eq_(content, '1,2,3\n')
os.unlink(output)
def test_stdout_option():
runner = CliRunner()
test_fixture = os.path.join("tests", "fixtures",
"transcode_simple.csv")
result = runner.invoke(transcode, ["--output-file-type", "csv",
test_fixture, '-'])
eq_(result.exit_code, 0)
eq_(result.output, '1,2,3\n')
def test_stdin_option():
runner = CliRunner()
result = runner.invoke(transcode,
["--source-file-type", "csv",
"--output-file-type", "csv", '-', '-'],
input='1,2,3')
eq_(result.output, '1,2,3\n')
eq_(result.exit_code, 0)
def test_name_columns_by_row():
runner = CliRunner()
test_fixture = os.path.join("tests", "fixtures",
"transcode_headers.csv")
result = runner.invoke(transcode, ["--output-file-type", "json",
"--name-columns-by-row", "0",
test_fixture, '-'])
eq_(result.exit_code, 0)
expected = ('{"transcode_headers.csv": ' +
'[{"grade": 100, "name": "Adam"}, ' +
'{"grade": 100, "name": "Eve"}]}')
eq_(result.output, expected)
def test_multiple_sheet():
runner = CliRunner()
test_fixture = os.path.join("tests", "fixtures",
"multiple-sheets.xls")
result = runner.invoke(transcode, ["--output-file-type", "json",
"--name-columns-by-row", "0",
"--sheet-name", "Sheet 3",
test_fixture, '-'])
eq_(result.exit_code, 0)
expected = ('{"Sheet 3": [{"O": 3, "P": 2, "Q": 1}, ' +
'{"O": 4, "P": 3, "Q": 2}]}')
eq_(result.output, expected)
def test_multiple_sheet_by_index():
runner = CliRunner()
test_fixture = os.path.join("tests", "fixtures",
"multiple-sheets.xls")
result = runner.invoke(transcode, ["--output-file-type", "json",
"--name-columns-by-row", "0",
"--sheet-index", "2",
test_fixture, '-'])
eq_(result.exit_code, 0)
expected = ('{"Sheet 3": [{"O": 3, "P": 2, "Q": 1}, ' +
'{"O": 4, "P": 3, "Q": 2}]}')
eq_(result.output, expected)
|
Python
| 0.000008
|
@@ -379,10 +379,8 @@
, %22%5C
-r%5C
n%22,
|
dfa39db42cc5ce2c29da2ec0c388865ec7f41030
|
Add allow field to form
|
oauth2_provider/forms.py
|
oauth2_provider/forms.py
|
from django import forms
class AllowForm(forms.Form):
redirect_uri = forms.URLField(widget=forms.HiddenInput())
scopes = forms.CharField(required=False, widget=forms.HiddenInput())
client_id = forms.CharField(widget=forms.HiddenInput())
state = forms.CharField(required=False, widget=forms.HiddenInput())
response_type = forms.CharField(widget=forms.HiddenInput())
|
Python
| 0
|
@@ -49,16 +49,63 @@
.Form):%0A
+ allow = forms.BooleanField(required=False)%0A
redi
|
07b7291caa83ce1948e6687651645c0a964c25b5
|
Remove a space before colons
|
openquake/engine/settings.py
|
openquake/engine/settings.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""Django settings for OpenQuake."""
from openquake.engine.utils import config
# DEBUG = True
DB_SECTION = config.get_section('database')
INSTALLED_APPS = ('openquake.engine.db',)
def _db_cfg(db_name):
"""
Helper method to create db config items for the various roles and schemas.
:param db_name: The name of the database configuration. Configurations for
this name will be loaded from the site specific config file. If an item
doesn't exist in the config file, a default value will be used instead.
:returns: Configuration dict, structured like so::
{'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'openquake2',
'USER': 'openquake',
'PASSWORD': 'secret',
'HOST': 'localhost',
'PORT': '5432',
}
"""
return dict(
ENGINE='django.contrib.gis.db.backends.postgis',
NAME=DB_SECTION.get('name', 'openquake'),
USER=DB_SECTION.get('%s_user' % db_name, 'openquake'),
PASSWORD=DB_SECTION.get('%s_password' % db_name, ''),
HOST=DB_SECTION.get('host', 'localhost'),
PORT=DB_SECTION.get('port', '5432'),
)
_DB_NAMES = (
'admin',
'job_init',
)
DATABASES = dict((db, _db_cfg(db)) for db in _DB_NAMES)
DEFAULT_USER = 'admin'
# We need a 'default' database to make Django happy:
DATABASES['default'] = {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': DB_SECTION.get('name', 'openquake'),
'USER': DB_SECTION.get('%s_user' % DEFAULT_USER, 'oq_admin'),
'PASSWORD': DB_SECTION.get('%s_password' % DEFAULT_USER, 'openquake'),
'HOST' : DB_SECTION.get('host', 'localhost'),
'PORT' : DB_SECTION.get('port', '5432'),
}
DATABASE_ROUTERS = ['openquake.engine.db.routers.OQRouter']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
TIME_ZONE = 'Europe/Zurich'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'change-me-in-production'
USE_I18N = False
USE_L10N = False
try:
from local_settings import *
except ImportError:
pass
|
Python
| 0.999998
|
@@ -2380,17 +2380,16 @@
'HOST'
-
: DB_SEC
@@ -2429,17 +2429,16 @@
'PORT'
-
: DB_SEC
|
7480fa45fac507a9d3a14589d87a9f96beb1ddbb
|
fix mute button bug
|
app/controllers/mixers.py
|
app/controllers/mixers.py
|
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +--------------------------------------------------------------------------+
# | _____ __ ___ ______ __ |
# | / ___// /___ ______/ (_)___ / ____/___ ____ ____ ___ _____/ /_ |
# | \__ \/ __/ / / / __ / / __ \ / / / __ \/ __ \/ __ \/ _ \/ ___/ __/ |
# | ___/ / /_/ /_/ / /_/ / / /_/ / / /___/ /_/ / / / / / / / __/ /__/ /_ |
# |/____/\__/\__,_/\__,_/_/\____/ \____/\____/_/ /_/_/ /_/\___/\___/\__/ |
# |Copyright Sebastian Reimers 2013 studio-connect.de |
# |License: BSD-2-Clause (see LICENSE File) |
# +--------------------------------------------------------------------------+
from flask import Blueprint, render_template, url_for, redirect, flash
import alsaaudio
from app import tasks
from app.models.settings import Settings
import redis
mod = Blueprint('mixers', __name__, url_prefix='/mixers')
@mod.route('/')
def index(card=""):
settings = Settings.query.get(1)
if settings:
card = settings.device
playbacks = {}
captures = {}
devices = alsaaudio.cards()
try:
idx = devices.index(card)
except ValueError:
return render_template('mixers/device_error.html')
try:
mixers = alsaaudio.mixers(idx)
except:
devices = {"No ALSA Device detected"}
return render_template('mixer.html', devices=devices, volumes=volumes)
for i in range(len(mixers)):
mixer = alsaaudio.Mixer(mixers[i], cardindex=idx)
try:
mutes = mixer.getmute()
getrecs = mixer.getrec()
except alsaaudio.ALSAAudioError:
mutes = {}
getrecs = {}
if mixer.getvolume('playback'):
playbacks[mixers[i]] = {'mixer': i,
'levels': mixer.getvolume('playback'),
'mutes': mutes}
if mixer.getvolume('capture'):
captures[mixers[i]] = {'mixer': i,
'levels': mixer.getvolume('capture'),
'mutes': getrecs}
return render_template('mixers/index.html',
devices=devices,
playbacks=playbacks,
captures=captures,
card=card)
@mod.route('/volume/<card>/<mixeridx>/<channel>/<value>/<direction>')
def set_volume(card="", mixeridx=0, channel=0, value=50, direction='playback'):
#channel = alsaaudio.MIXER_CHANNEL_ALL
devices = alsaaudio.cards()
try:
idx = devices.index(card)
except ValueError:
idx = 0
mixers = alsaaudio.mixers(idx)
mixer = alsaaudio.Mixer(mixers[int(mixeridx)], cardindex=idx)
mixer.setvolume(int(value), int(channel), direction)
return ""
@mod.route('/mute/<direction>/<card>/<mixeridx>/<channel>/<value>')
def mute(direction="playback", card="", mixeridx=0, channel=0, value=0):
devices = alsaaudio.cards()
try:
idx = devices.index(card)
except ValueError:
idx = 0
mixers = alsaaudio.mixers(idx)
mixer = alsaaudio.Mixer(mixers[int(mixeridx)], cardindex=idx)
if direction == "playback":
mixer.setmute(int(value), int(channel))
else:
mixer.setrec(int(value), int(channel))
return ""
@mod.route('/play/')
def play():
flash("You should hear something...")
store = redis.Redis('127.0.0.1')
if store.get('lock_play_audio') != 'true':
tasks.play_audio.delay()
return redirect(url_for('mixers.index'))
|
Python
| 0.000002
|
@@ -1565,24 +1565,25 @@
dindex=idx)%0A
+%0A
try:
@@ -1623,45 +1623,8 @@
e()%0A
- getrecs = mixer.getrec()%0A
@@ -1678,24 +1678,116 @@
mutes = %7B%7D
+%0A%0A try:%0A getrecs = mixer.getrec()%0A except alsaaudio.ALSAAudioError:
%0A
@@ -1796,24 +1796,25 @@
etrecs = %7B%7D%0A
+%0A
if m
|
cbb6929007b20d5b95be6aafba3e0841144a16e7
|
Rename get_scheduled_report_ids keys helper and pull out guess_reporting_minute
|
corehq/apps/saved_reports/scheduled.py
|
corehq/apps/saved_reports/scheduled.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from calendar import monthrange
from datetime import datetime
from corehq.apps.saved_reports.models import ReportNotification
from corehq.util.soft_assert import soft_assert
from six.moves import range
_soft_assert = soft_assert(
to='{}@{}'.format('supportteam', 'dimagi.com'),
exponential_backoff=False,
)
def _keys(period, as_of):
minute = guess_reporting_minute(as_of)
if minute == 0:
# for legacy purposes, on the hour also include reports that didn't have a minute set
minutes = (None, minute)
else:
minutes = (minute,)
if period == 'daily':
for minute in minutes:
yield {
'startkey': [period, as_of.hour, minute],
'endkey': [period, as_of.hour, minute, {}],
}
elif period == 'weekly':
for minute in minutes:
yield {
'key': [period, as_of.hour, minute, as_of.weekday()],
}
else:
# monthly
for minute in minutes:
yield {
'key': [period, as_of.hour, minute, as_of.day]
}
if as_of.day == monthrange(as_of.year, as_of.month)[1]:
for day in range(as_of.day + 1, 32):
for minute in minutes:
yield {
'key': [period, as_of.hour, minute, day]
}
def get_scheduled_report_ids(period, as_of=None):
as_of = as_of or datetime.utcnow()
assert period in ('daily', 'weekly', 'monthly'), period
keys = _keys(period, as_of)
for key in keys:
for result in ReportNotification.view(
"reportconfig/all_notifications",
reduce=False,
include_docs=False,
**key
).all():
yield result['id']
def guess_reporting_minute(now=None):
"""
Tries to guess a report window based on the current time.
This is super sketchy - will choose a close time to the hour or 30 minute mark or
fail hard in the event the task is too far from those.
Only looks forwards in time.
"""
now = now or datetime.utcnow()
window = 5
for reporting_minute in [0, 15, 30, 45]:
if reporting_minute <= now.minute <= reporting_minute + window:
return reporting_minute
raise ValueError("Couldn't guess reporting minute for time: {}".format(now))
|
Python
| 0.000001
|
@@ -395,16 +395,43 @@
)%0A%0A%0Adef
+_make_all_notification_view
_keys(pe
@@ -445,53 +445,18 @@
s_of
-):%0A
+,
minute
- = guess_reporting_minute(as_of
)
+:
%0A
@@ -1594,15 +1594,86 @@
-keys =
+minute = guess_reporting_minute(as_of)%0A%0A keys = _make_all_notification_view
_key
@@ -1683,26 +1683,33 @@
eriod, as_of
+, minute
)
-%0A
%0A for key
|
15fb011fd9af04516254c04f6d89d728ac52d5d3
|
fix users tests
|
corehq/apps/users/tests/phone_users.py
|
corehq/apps/users/tests/phone_users.py
|
from django.test import TestCase
from corehq.apps.users.models import CouchUser, WebUser, CommCareUser
from dimagi.utils.couch import get_cached_property
class PhoneUsersTestCase(TestCase):
def setUp(self):
all_users = CouchUser.all()
for user in all_users:
user.delete()
self.username = 'username'
self.password = 'password'
self.domain = 'mockdomain'
self.couch_user = WebUser.create(self.domain, self.username, self.password)
self.couch_user.language = 'en'
self.couch_user.save()
def testPhoneUsersViewNoNumberSet(self):
phone_users_count = CouchUser.view("users/phone_users_by_domain",
key=self.domain).count()
self.assertEquals(phone_users_count, 0)
# def testPhoneUsersViewLastNumberAdded(self):
# self.couch_user.add_phone_number(123)
# self.couch_user.add_phone_number(456)
# self.couch_user.save()
# phone_user = CouchUser.view("users/phone_users_by_domain",
# startkey=[self.domain],
# endkey=[self.domain, {}],
# include_docs=True,
# ).one()
# self.assertEquals(phone_user['name'], self.username)
# self.assertEquals(phone_user['phone_number'], '456')
def testPhoneUsersViewDefaultNumber(self):
self.couch_user.add_phone_number(789)
self.couch_user.add_phone_number(101, default=True)
self.couch_user.add_phone_number(112)
self.couch_user.save()
phone_user = CouchUser.phone_users_by_domain(self.domain).one()
self.assertEquals(phone_user.username, self.username)
self.assertEquals(phone_user.default_phone_number, '101')
def testPhoneUsersChangeDefaultNumber(self):
self.couch_user.add_phone_number(789)
self.couch_user.add_phone_number(101, default=True)
self.couch_user.save()
self.assertEquals(self.couch_user.default_phone_number, '101')
self.couch_user.set_default_phone_number(789)
self.couch_user.save()
self.assertEquals(self.couch_user.default_phone_number, '789')
def testPhoneUsersViewLastCommCareUsername(self):
self.couch_user.delete()
phone_user_count = CouchUser.phone_users_by_domain(self.domain).count()
self.assertEquals(phone_user_count, 0)
couch_user = WebUser.create(self.domain, 'commcare_username_2', 'password')
couch_user.add_phone_number(123)
couch_user.save()
phone_user_count = CouchUser.phone_users_by_domain(self.domain).count()
self.assertEquals(phone_user_count, 1)
phone_user = CouchUser.phone_users_by_domain(self.domain).one()
self.assertEquals(phone_user.username, 'commcare_username_2')
def testWebUserImplementsMobileMixIn(self):
time_zone = self.couch_user.get_time_zone()
self.assertEquals(time_zone, 'UTC')
lang_code = self.couch_user.get_language_code()
self.assertEquals(lang_code, 'en')
def testDeletePhoneNumber(self):
self.couch_user.add_phone_number('+11231231234')
self.couch_user.save()
self.assertEquals(len(self.couch_user.phone_numbers), 1)
self.couch_user.delete_phone_number('+11231231234')
self.assertEquals(len(self.couch_user.phone_numbers), 0)
def test_get_cached_full_name(self):
testuser = CommCareUser.create('test-domain', 'testuser', 'test-pass')
FULL_NAME = "Test User"
testuser.set_full_name(FULL_NAME)
testuser.save()
cached_full_name = get_cached_property(CouchUser, testuser.get_id, 'full_name', expiry=7*24*60*60)
self.assertEqual(FULL_NAME, cached_full_name)
|
Python
| 0.000002
|
@@ -26,16 +26,61 @@
estCase%0A
+from corehq.apps.domain.models import Domain%0A
from cor
@@ -450,16 +450,56 @@
domain'%0A
+ Domain(name=self.domain).save()%0A
|
cc4aca4cef667bfa4eb51cfad677e63ec54e60a9
|
update searching fields
|
emgapi/viewsets.py
|
emgapi/viewsets.py
|
# -*- coding: utf-8 -*-
# Copyright 2017 EMBL - European Bioinformatics Institute
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from rest_framework import viewsets
from rest_framework import filters
from django_filters.rest_framework import DjangoFilterBackend
from . import serializers as emg_serializers
from . import filters as emg_filters
logger = logging.getLogger(__name__)
# Base classes
class BaseStudyGenericViewSet(viewsets.GenericViewSet):
serializer_class = emg_serializers.StudySerializer
filter_class = emg_filters.StudyFilter
filter_backends = (
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
)
ordering_fields = (
('study_id', 'accession'),
'study_name',
'last_update',
'samples_count',
)
ordering = ('-last_update',)
search_fields = (
'@study_name',
'@study_abstract',
'centre_name',
'project_id',
)
class BaseSampleGenericViewSet(viewsets.GenericViewSet):
serializer_class = emg_serializers.SampleSerializer
filter_class = emg_filters.SampleFilter
filter_backends = (
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
)
ordering_fields = (
'accession',
'sample_name',
'last_update',
)
ordering = ('-last_update',)
search_fields = (
'accession',
'primary_accession',
'@sample_name',
'@sample_desc',
'sample_alias',
'species',
'environment_feature',
'environment_biome',
'environment_feature',
'environment_material',
'@metadata__var_val_ucv',
)
class BaseRunGenericViewSet(viewsets.GenericViewSet):
serializer_class = emg_serializers.RunSerializer
filter_class = emg_filters.RunFilter
filter_backends = (
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
)
ordering_fields = (
'accession',
)
ordering = ('-accession',)
search_fields = (
'accession',
'secondary_accession',
'instrument_platform',
'instrument_model',
'@sample__metadata__var_val_ucv',
)
class BaseAnalysisGenericViewSet(viewsets.GenericViewSet):
serializer_class = emg_serializers.AnalysisSerializer
filter_class = emg_filters.AnalysisJobFilter
filter_backends = (
DjangoFilterBackend,
filters.OrderingFilter,
)
ordering_fields = (
('job_id', 'accession'),
'pipeline',
)
ordering = ('-pipeline',)
class BasePublicationGenericViewSet(viewsets.GenericViewSet):
serializer_class = emg_serializers.PublicationSerializer
filter_class = emg_filters.PublicationFilter
filter_backends = (
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
)
ordering_fields = (
'pubmed_id',
'published_year',
'studies_count',
)
ordering = ('-pubmed_id',)
search_fields = (
'@pub_title',
'@pub_abstract',
'pub_type',
'authors',
'doi',
'isbn',
)
|
Python
| 0
|
@@ -1440,41 +1440,92 @@
'
-centre_name',%0A 'project_id
+study_id',%0A 'secondary_accession',%0A 'project_id',%0A 'centre_name
',%0A
@@ -2759,24 +2759,68 @@
ent_model',%0A
+ 'experiment_type__experiment_type',%0A
'@sa
@@ -3071,32 +3071,62 @@
oFilterBackend,%0A
+ filters.SearchFilter,%0A
filters.
@@ -3233,38 +3233,416 @@
-)%0A ordering = ('-pipeline',
+ 'run__accession',%0A 'sample__accession',%0A 'pipeline__release_version',%0A 'experiment_type__experiment_type',%0A )%0A ordering = ('-pipeline',)%0A%0A search_fields = (%0A 'job_id',%0A 'instrument_platform',%0A 'instrument_model',%0A 'run__accession',%0A 'sample__accession',%0A 'pipeline__release_version',%0A 'experiment_type__experiment_type',%0A
)%0A%0A%0A
|
5acfd8072b3f1dff89b3bf7f1f01a15951d72fe1
|
Add newline to please the Sphinx
|
ooni/templates/scapyt.py
|
ooni/templates/scapyt.py
|
# -*- encoding: utf-8 -*-
#
# :authors: Arturo Filastò
# :licence: see LICENSE
import random
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.internet import protocol, defer, threads
from scapy.all import send, sr, IP, TCP, config
from ooni.reporter import createPacketReport
from ooni.nettest import NetTestCase
from ooni.utils import log
from ooni import config
from ooni.utils.txscapy import ScapySender, getDefaultIface, ScapyFactory
class BaseScapyTest(NetTestCase):
"""
The report of a test run with scapy looks like this:
report:
sent_packets: [{'raw_packet': BASE64Encoding of packet,
'summary': 'IP / TCP 192.168.2.66:ftp_data > 8.8.8.8:http S']
answered_packets: []
"""
name = "Base Scapy Test"
version = 0.1
requiresRoot = True
baseFlags = [
['ipsrc', 's',
'Does *not* check if IP src and ICMP IP citation matches when processing answers'],
['seqack', 'k',
'Check if TCP sequence number and ACK match in the ICMP citation when processing answers'],
['ipid', 'i',
'Check if the IPID matches when processing answers']
]
def _setUp(self):
if not config.scapyFactory:
log.debug("Scapy factoring not set, registering it.")
config.scapyFactory = ScapyFactory(config.advanced.interface)
self.report['answer_flags'] = []
if self.localOptions['ipsrc']:
config.checkIPsrc = 0
else:
self.report['answer_flags'].append('ipsrc')
config.checkIPsrc = 1
if self.localOptions['ipid']:
self.report['answer_flags'].append('ipid')
config.checkIPID = 1
else:
config.checkIPID = 0
# XXX we don't support strict matching
# since (from scapy's documentation), some stacks have a bug for which
# the bytes in the IPID are swapped.
# Perhaps in the future we will want to have more fine grained control
# over this.
if self.localOptions['seqack']:
self.report['answer_flags'].append('seqack')
config.check_TCPerror_seqack = 1
else:
config.check_TCPerror_seqack = 0
self.report['sent_packets'] = []
self.report['answered_packets'] = []
def finishedSendReceive(self, packets):
"""
This gets called when all packets have been sent and received.
"""
answered, unanswered = packets
for snd, rcv in answered:
log.debug("Writing report for scapy test")
sent_packet = snd
received_packet = rcv
if not config.privacy.includeip:
log.msg("Detected you would not like to include your ip in the report")
log.msg("Stripping source and destination IPs from the reports")
sent_packet.src = '127.0.0.1'
received_packet.dst = '127.0.0.1'
self.report['sent_packets'].append(sent_packet)
self.report['answered_packets'].append(received_packet)
return packets
def sr(self, packets, *arg, **kw):
"""
Wrapper around scapy.sendrecv.sr for sending and receiving of packets
at layer 3.
"""
scapySender = ScapySender()
config.scapyFactory.registerProtocol(scapySender)
log.debug("Using sending with hash %s" % scapySender.__hash__)
d = scapySender.startSending(packets)
d.addCallback(self.finishedSendReceive)
return d
def sr1(self, packets, *arg, **kw):
def done(packets):
"""
We do this so that the returned value is only the one packet that
we expected a response for, identical to the scapy implementation
of sr1.
"""
try:
return packets[0][0][1]
except IndexError:
log.err("Got no response...")
return packets
scapySender = ScapySender()
scapySender.expected_answers = 1
config.scapyFactory.registerProtocol(scapySender)
log.debug("Running sr1")
d = scapySender.startSending(packets)
log.debug("Started to send")
d.addCallback(self.finishedSendReceive)
d.addCallback(done)
return d
def send(self, packets, *arg, **kw):
"""
Wrapper around scapy.sendrecv.send for sending of packets at layer 3
"""
scapySender = ScapySender()
config.scapyFactory.registerProtocol(scapySender)
scapySender.sendPackets(packets)
scapySender.stopSending()
for packet in packets:
self.reportSentPacket(packet)
ScapyTest = BaseScapyTest
|
Python
| 0
|
@@ -766,17 +766,19 @@
:http S'
-%5D
+%7D%5D%0A
%0A
|
8882569d3605d7b0e44f0e05fa6a9c86e590a843
|
remove the reopen controller from the application.py
|
opal/core/application.py
|
opal/core/application.py
|
"""
Application helpers for Opal
"""
import inspect
import os
import itertools
from opal.core import plugins
class OpalApplication(object):
core_javascripts = {
'opal.upstream.deps': [
"js/lib/modernizr.js",
"js/lib/jquery-1.11.3/jquery-1.11.3.js",
"js/lib/d3/d3.js",
"js/lib/c3-0.4.10/c3.js",
"js/lib/bower_components/angular/angular.js",
"js/lib/bower_components/angular-route/angular-route.js",
"js/lib/bower_components/angular-resource/angular-resource.js",
"js/lib/bower_components/angular-cookies/angular-cookies.js",
"js/lib/angular-ui-utils-0.1.0/ui-utils.js",
"js/lib/ui-bootstrap-tpls-0.14.3.js",
"js/lib/utils/clipboard.js",
"bootstrap-3.1.0/js/bootstrap.js",
"js/lib/angulartics-0.17.2/angulartics.min.js",
"js/lib/angulartics-0.17.2/angulartics-ga.min.js",
"js/lib/bower_components/ment.io/dist/mentio.js",
"js/lib/bower_components/ment.io/dist/templates.js",
# "js/ui-select/dist/select.js",
"js/lib/bower_components/angular-ui-select/dist/select.js",
"js/lib/bower_components/ng-idle/angular-idle.js",
"js/lib/bower_components/angular-local-storage/dist/angular-local-storage.js", # noqa: E501
"js/lib/bower_components/ment.io/dist/templates.js",
"js/lib/bower_components/angular-growl-v2/build/angular-growl.js",
"js/lib/jquery-plugins/idle-timer.js",
"js/lib/jquery-plugins/jquery.stickytableheaders.js",
"js/lib/utils/underscore.js",
"js/lib/utils/showdown.js",
"js/lib/utils/moment.js",
"js/lib/ngprogress-lite/ngprogress-lite.js",
],
'opal.utils': [
"js/opal/utils.js",
"js/opal/opaldown.js",
"js/opal/directives.js",
"js/opal/filters.js",
],
'opal.services': [
"js/opal/services_module.js",
"js/opal/services/flow.js",
"js/opal/services/user_profile.js",
"js/opal/services/item.js",
"js/opal/services/http_interceptors.js",
"js/opal/services/episode.js",
"js/opal/services/patient.js",
"js/opal/services/episode_visibility.js",
"js/opal/services/episode_loader.js",
"js/opal/services/patient_summary.js",
"js/opal/services/record_loader.js",
"js/opal/services/extract_schema_loader.js",
"js/opal/services/schema.js",
"js/opal/services/patient_loader.js",
"js/opal/services/episode_resource.js",
"js/opal/services/record_editor.js",
"js/opal/services/copy_to_category.js",
"js/opal/services/patientlist_loader.js",
'js/opal/services/fields_translater.js',
'js/opal/services/referencedata.js',
'js/opal/services/metadata.js',
'js/opal/services/patient_consultation_record.js',
],
'opal.controllers': [
"js/opal/controllers_module.js",
"js/opal/controllers/patient_list_redirect.js",
"js/opal/controllers/patient_list.js",
"js/opal/controllers/patient_detail.js",
"js/opal/controllers/hospital_number.js",
"js/opal/controllers/add_episode.js",
"js/opal/controllers/reopen_episode.js",
"js/opal/controllers/edit_item.js",
"js/opal/controllers/edit_teams.js",
"js/opal/controllers/delete_item_confirmation.js",
"js/opal/controllers/account.js",
"js/opal/controllers/discharge.js",
"js/opal/controllers/undischarge.js",
"js/opal/controllers/copy_to_category.js",
"js/opal/controllers/keyboard_shortcuts.js",
"js/opal/controllers/patient_access_log.js"
]
}
javascripts = []
styles = []
actions = []
menuitems = []
default_episode_category = 'Inpatient'
opal_angular_exclude_tracking_qs = [
"/search",
"/extract",
]
@classmethod
def get_core_javascripts(klass, namespace):
"""
Return core javascripts for a given NAMESPACE
"""
return klass.core_javascripts[namespace]
@classmethod
def get_javascripts(klass):
"""
Return the javascripts for our application
"""
return klass.javascripts
@classmethod
def get_menu_items(klass, user=None):
"""
Default implementation of get_menu_items()
By default we just return the menuitems property of the application,
which is itself set to [] by default.
"""
return klass.menuitems
@classmethod
def get_styles(klass):
"""
Return the stylesheets for our application
"""
return klass.styles
@classmethod
def directory(cls):
"""
Return the filesystem path to the app directory
"""
return os.path.realpath(os.path.dirname(inspect.getfile(cls)))
def get_app():
"""
Return the current Opal Application
"""
return OpalApplication.__subclasses__()[0]
def get_all_components():
"""
All components of an Opal application - all plugins and the application.
"""
return itertools.chain(
plugins.OpalPlugin.list(), [get_app()]
)
|
Python
| 0
|
@@ -3446,61 +3446,8 @@
s%22,%0A
- %22js/opal/controllers/reopen_episode.js%22,%0A
|
b0ff934a2e20916f9e777874b795c9d0942a48e4
|
use app.cfg from its proper place
|
openarticlegauge/core.py
|
openarticlegauge/core.py
|
import os, requests, json, redis
from flask import Flask
from openarticlegauge import config, licenses
from flask.ext.login import LoginManager, current_user
login_manager = LoginManager()
def create_app():
app = Flask(__name__)
configure_app(app)
if app.config['INITIALISE_INDEX']: initialise_index(app)
prep_redis(app)
setup_error_email(app)
login_manager.setup_app(app)
return app
def configure_app(app):
app.config.from_object(config)
# parent directory
here = os.path.dirname(os.path.abspath( __file__ ))
config_path = os.path.join(os.path.dirname(here), 'app.cfg')
if os.path.exists(config_path):
app.config.from_pyfile(config_path)
def prep_redis(app):
# wipe the redis temp cache (not the non-temp one)
client = redis.StrictRedis(host=app.config['REDIS_CACHE_HOST'], port=app.config['REDIS_CACHE_PORT'], db=app.config['REDIS_CACHE_DB'])
client.flushdb()
def initialise_index(app):
mappings = app.config["MAPPINGS"]
i = str(app.config['ELASTIC_SEARCH_HOST']).rstrip('/')
i += '/' + app.config['ELASTIC_SEARCH_DB']
for key, mapping in mappings.iteritems():
im = i + '/' + key + '/_mapping'
exists = requests.get(im)
if exists.status_code != 200:
ri = requests.post(i)
r = requests.put(im, json.dumps(mapping))
print key, r.status_code
# put the currently available licences into the licence index
for l in licenses.LICENSES:
r = requests.post(i + '/license/' + l, json.dumps(licenses.LICENSES[l]))
def setup_error_email(app):
ADMINS = app.config.get('ADMINS', '')
if not app.debug and ADMINS:
import logging
from logging.handlers import SMTPHandler
mail_handler = SMTPHandler('127.0.0.1',
'server-error@no-reply.com',
ADMINS, 'error')
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
app = create_app()
|
Python
| 0.000001
|
@@ -599,16 +599,22 @@
e(here),
+ '..',
'app.cf
@@ -616,16 +616,92 @@
pp.cfg')
+ # this file will be in the package dir, app.cfg is at the root of the repo
%0A if
|
da1dbddaa47e087b19dbeb1b256b337a3e77ed73
|
Fix os not defined
|
gaphor/services/tests/test_properties.py
|
gaphor/services/tests/test_properties.py
|
import tempfile
from unittest import TestCase
from gaphor.services.properties import FileBackend, Properties
class MockEventManager(list):
def handle(self, event):
self.append(event)
class TestProperties(TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
backend = FileBackend(self.tmpdir)
self.events = MockEventManager()
self.properties = Properties(self.events, backend)
def shutDown(self):
self.properties.shutdown()
os.remove(os.path.join(self.tmpdir, FileBackend.RESOURCE_FILE))
os.rmdir(self.tmpdir)
def test_properties(self):
prop = self.properties
prop.set("test1", 2)
assert len(self.events) == 1, self.events
event = self.events[0]
assert "test1" == event.key
assert None is event.old_value
assert 2 == event.new_value
assert 2 == prop("test1")
prop.set("test1", 2)
assert len(self.events) == 1
prop.set("test1", "foo")
assert len(self.events) == 2
event = self.events[1]
assert "test1" == event.key
assert 2 == event.old_value
assert "foo" == event.new_value
assert "foo" == prop("test1")
assert 3 == prop("test2", 3)
assert 3 == prop("test2", 4)
|
Python
| 0
|
@@ -1,12 +1,22 @@
+import os%0A
import tempf
|
56344224efbf74aff392967a84307f6f4b5429e1
|
Fix job tracker in apache
|
encore/__init__.py
|
encore/__init__.py
|
from flask import request, Response, Flask, render_template, redirect, url_for
from .user_blueprint import user_area
from .admin_blueprint import admin_area
from .api_blueprint import api, ApiResult, ApiException
from .auth_blueprint import auth
from .notifier import get_notifier
from . import job_tracking
import os
import atexit
import subprocess
import markdown
def create_app(config=None):
app = ApiFlask(__name__)
app.url_map.strict_slashes = False
if isinstance(config, str):
app.config.from_pyfile(config)
elif isinstance(config, dict):
app.config.update(config)
elif config is None:
pass
else:
raise Exception("Unknown config type")
app.config["PROPAGATE_EXCEPTIONS"] = True
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = 60*5 # seconds
app.register_blueprint(user_area)
app.register_blueprint(admin_area, url_prefix="/admin")
app.register_blueprint(api, url_prefix="/api")
app.register_blueprint(auth)
from .auth_blueprint import login_manager
login_manager.init_app(app)
app.register_error_handler(404, handle_not_found)
from .sql_pool import register_db
register_db(app)
register_helpers(app)
register_info(app)
# prevent double init when in debug mode
if os.environ.get("WERKZEUG_RUN_MAIN") == "true":
launch_tracker(app)
return app
def handle_not_found(e):
return render_template("not_found.html"), 404
def register_helpers(app):
@app.route('/favicon.ico')
def favicon():
return app.send_static_file('favicon.ico')
@app.template_filter("markdown")
def render_markdown(text):
return markdown.markdown(text)
@app.context_processor
def template_helpers():
def guess_tab(path):
if path.startswith("/geno"):
return "geno"
elif path.startswith("/pheno"):
return "pheno"
elif path.startswith("/jobs") or path == "/":
return "job"
elif path.startswith("/geno"):
return "geno"
elif path.startswith("/collab"):
return "collab"
elif path.startswith("/help"):
return "help"
elif path.startswith("/me/api-token"):
return "api"
elif path.startswith("/admin/user"):
return "user"
elif path.startswith("/admin/phenos"):
return "pheno"
elif path.startswith("/admin/genos"):
return "geno"
elif path.startswith("/admin/counts"):
return "counts"
elif path.startswith("/admin"):
return "job"
else:
return ""
def get_navigation_links(path, user=None):
links = {"left": [], "right":[]}
if path.startswith("/admin"):
links["left"].append(("job", "Jobs", url_for("admin.get_admin_page")))
links["left"].append(("user", "Users", url_for("admin.get_admin_user_page")))
links["left"].append(("pheno", "Phenos", url_for("admin.get_admin_pheno_page")))
links["left"].append(("geno", "Genos", url_for("admin.get_admin_geno_page")))
links["left"].append(("counts", "Counts", url_for("admin.get_admin_counts_page")))
links["right"].append(("return","Return to App", url_for("user.index")))
else:
links["left"].append(("job", "Jobs", url_for("user.index")))
links["left"].append(("pheno", "Phenotypes", url_for("user.get_phenos")))
links["left"].append(("geno", "Genotypes", url_for("user.get_genos")))
links["left"].append(("collab", "Collaborate", url_for("user.get_collaborators")))
if (user is not None) and hasattr(user, "is_admin") and user.is_admin():
links["right"].append(("admin","Admin", url_for("admin.get_admin_page")))
links["right"].append(("api","API", url_for("user.get_api_token")))
links["right"].append(("help","Help", url_for("user.get_help")))
links["right"].append(("logout","Logout", url_for("auth.sign_out")))
return links
return dict(guess_tab = guess_tab,
get_navigation_links = get_navigation_links)
def register_info(app):
try:
# grab current GIT commit (if available) for debugging
git_hash = subprocess.check_output(
[app.config.get("GIT_BINARY","git"), "rev-parse", "HEAD"],
stderr=subprocess.STDOUT)
app.config["git-hash"] = git_hash.decode()
except:
pass
def launch_tracker(app):
job_tracker = job_tracking.Tracker(5*60.0, app)
job_tracker.start()
atexit.register(lambda:job_tracker.cancel())
class ApiFlask(Flask):
def __init__(self, *args, **kwds):
super(ApiFlask, self).__init__(*args, **kwds)
self.register_error_handler(ApiException, lambda err: err.to_result())
def make_response(self, rv):
if isinstance(rv, ApiResult):
return rv.to_response()
return Flask.make_response(self, rv)
|
Python
| 0.000003
|
@@ -1282,16 +1282,58 @@
%0A if
+not %22FLASK_RUN_FROM_CLI%22 in os.environ or
os.envir
|
11cd074f67668135d606f68dddb66c465ec01756
|
Add db index on field tag name
|
opps/core/tags/models.py
|
opps/core/tags/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from opps.core.models import Date, Slugged
class Tag(Date, Slugged):
name = models.CharField(_(u'Name'), max_length=255, unique=True)
def save(self, *args, **kwargs):
if not self.slug:
self.slug = slugify(self.name)
super(Tag, self).save(*args, **kwargs)
__unicode__ = lambda self: self.name
class Meta:
verbose_name = _(u'Tag')
verbose_name_plural = _(u'Tags')
class Tagged(models.Model):
tags = models.CharField(_(u'Tags'), max_length=4000, blank=True,
help_text=_(u'A comma-separated list of tags.'))
def save(self, *args, **kwargs):
if self.tags:
tags = set(self.tags.split(','))
for tag in tags:
Tag.objects.get_or_create(name=tag)
self.tags = ','.join(tags)
super(Tagged, self).save(*args, **kwargs)
def get_tags(self):
if self.tags:
tags = []
for tag in self.tags.aplit(','):
t, created = Tag.objects.get_or_create(name=tag)
tags.append(t)
return tags
class Meta:
abstract = True
|
Python
| 0
|
@@ -292,16 +292,59 @@
que=True
+,%0A db_index=True
)%0A%0A d
@@ -624,16 +624,59 @@
'Tags')%0A
+ unique_together = %5B'slug', 'name'%5D%0A
%0A%0Aclass
@@ -758,13 +758,67 @@
00,
-blank
+db_index=True,%0A blank=True, null
=Tru
|
caa1b8ffb1fa5fb5edbd7d1e9744c17aabb3d5bf
|
refactor logger, pythonist way
|
ensembl/service.py
|
ensembl/service.py
|
#
# Author : Manuel Bernal Llinares
# Project : trackhub-creator
# Timestamp : 28-06-2017 10:13
# ---
# © 2017 Manuel Bernal Llinares <mbdebian@gmail.com>
# All rights reserved.
#
"""
This module models an Ensembl service
"""
# App imports
import config_manager
from ensembl.models import SpeciesService
from exceptions import ConfigManagerException
from toolbox import rest
# Ensembl Service is going to be a Singleton, unique for the running session
__configuration_file = None
__service_instance = None
def set_configuration_file(config_file):
global __configuration_file
if __configuration_file is None:
__configuration_file = config_file
return __configuration_file
def get_service():
global __service_instance
if __service_instance is None:
__service_instance = Service(config_manager.read_config_from_file(__configuration_file), __configuration_file)
return __service_instance
# Ensembl Service configuration manager
class ConfigurationManager(config_manager.ConfigurationManager):
# Configuration Keys
_CONFIG_KEY_SERVICE = 'service'
_CONFIG_KEY_ENSEMBL_API = 'ensembl_api'
_CONFIG_KEY_SERVER = 'server'
def __init__(self, configuration_object, configuration_file):
super(ConfigurationManager, self).__init__(configuration_object, configuration_file)
self._logger = config_manager.get_app_config_manager()\
.get_logger_for("{}.{}".format(__name__, type(self).__name__))
def get_api_server(self):
self._logger.debug(
"get_api_server, from configuration object '{}'".format(self._get_configuration_object()))
try:
return self._get_configuration_object()[self._CONFIG_KEY_SERVICE][self._CONFIG_KEY_ENSEMBL_API][
self._CONFIG_KEY_SERVER]
except Exception as e:
raise ConfigManagerException(
"MISSING information about Ensembl '{}.{}.{}' API server in configuration file '{}'".format(
self._CONFIG_KEY_SERVICE,
self._CONFIG_KEY_ENSEMBL_API,
self._CONFIG_KEY_SERVER,
self._get_configuration_file()))
# Ensembl Service model
class Service:
def __init__(self, configuration_object, configuration_file):
self._logger = config_manager.get_app_config_manager()\
.get_logger_for("{}.{}".format(__name__, type(self).__name__))
self._get_logger().debug("Using configuration file '{}'".format(configuration_file))
self.__config_manager = ConfigurationManager(configuration_object, configuration_file)
# Ensembl Release Number
self.__release_number = None
# Ensembl Species Data
self.__species_data_service = None
def __request_release_number(self):
request_url = self._get_config_manager().get_api_server() + "/info/data/?"
current_release_data = rest.make_rest_request(request_url)
self._get_logger().debug("Request Release Number response from Ensembl - '{}'".format(current_release_data))
self._get_logger().info(
"This session is working with Ensembl Release {}".format(current_release_data['releases'][0]))
return current_release_data['releases'][0]
def __request_species_data(self):
request_url = self._get_config_manager().get_api_server() + "/info/species?"
self._get_logger().debug("Requesting Species Data to Ensembl, url '{}'".format(request_url))
return rest.make_rest_request(request_url)
def _get_config_manager(self):
return self.__config_manager
def _get_logger(self):
return self._logger
def get_release_number(self):
"""
Get current Ensembl Release Number
:return: current Ensembl Release Number
"""
if self.__release_number is None:
self.__release_number = self.__request_release_number()
return self.__release_number
def get_species_data_service(self):
if self.__species_data_service is None:
self.__species_data_service = SpeciesService(self.__request_species_data())
return self.__species_data_service
if __name__ == '__main__':
print("ERROR: This script is part of a pipeline collection and it is not meant to be run in stand alone mode")
|
Python
| 0
|
@@ -2431,36 +2431,30 @@
self._
-get_
logger
-()
.debug(%22Usin
@@ -2948,36 +2948,30 @@
self._
-get_
logger
-()
.debug(%22Requ
@@ -3063,28 +3063,22 @@
self._
-get_
logger
-()
.info(%0A
@@ -3376,20 +3376,14 @@
lf._
-get_
logger
-()
.deb
@@ -3582,64 +3582,8 @@
er%0A%0A
- def _get_logger(self):%0A return self._logger%0A%0A
|
706aa6ed7170709828258bca1b9a1dfe6e8fa77e
|
improve helper functions
|
SLHelper.py
|
SLHelper.py
|
# -*- coding: utf-8 -*-
import re
import tkinter
from tkinter import messagebox
def file_content(filename):
with open(filename, 'r') as fin:
content = fin.read()
return content
def write_file(filename, content):
with open(filename, 'wb') as fout:
fout.write(content.encode('utf-8'))
def date_sanitizer(datestring):
result = re.findall(r'(\d{4})[-./ ](\d{1,2})[-./ ](\d{1,2})', datestring)
return '%s-%02d-%02d' % (result[0][0], int(result[0][1]), int(result[0][2])) if result else datestring
def alert_messagebox(title, content):
root = tkinter.Tk()
root.withdraw()
messagebox.showinfo(title, content)
root.destroy()
|
Python
| 0.00001
|
@@ -136,16 +136,17 @@
name, 'r
+b
') as fi
@@ -197,16 +197,32 @@
content
+.decode('utf-8')
%0D%0A%0D%0Adef
@@ -558,16 +558,24 @@
testring
+.strip()
%0D%0A%0D%0Adef
|
f3843f61f9480b74f2e94a9fa3d5bd97549a2b8e
|
Use a LOG instead of the root logger
|
entropy/entropy.py
|
entropy/entropy.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import datetime
import json
import logging
import os
import sys
import threading
import time
import croniter
sys.path.insert(0, os.path.join(os.path.abspath(os.pardir)))
sys.path.insert(0, os.path.abspath(os.getcwd()))
import audit
import utils
GOOD_MOOD = 1
SCRIPT_REPO = os.path.dirname(__file__)
LOG_REPO = os.path.join(os.getcwd(), 'logs')
def validate_cfg(file):
#TODO(praneshp): can do better here
if GOOD_MOOD == 1:
return True
return False
def do_something(**kwargs):
# Put a message on the mq
audit.send_message(**kwargs)
def start_audit(**kwargs):
#TODO(praneshp): fix bug here, where thread wakes up 0.0003 seconds
#before it should, and then sleeps off and cannot wake up in time.
#We lose the message this way.
now = datetime.datetime.now()
schedule = kwargs['schedule']
cron = croniter.croniter(schedule, now)
next_iteration = cron.get_next(datetime.datetime)
while True:
now = datetime.datetime.now()
logging.warning(str(now) + str(next_iteration))
if now > next_iteration:
do_something(**kwargs['mq_args'])
next_iteration = cron.get_next(datetime.datetime)
else:
sleep_time = (next_iteration - now).total_seconds()
logging.warning('Will sleep for ' + str(sleep_time))
time.sleep(sleep_time)
def register_audit(args):
logging.warning('Registering audit script')
#first check if you have all inputs
if not (args.conf or args.script):
logging.warning('Need path to script and json')
sys.exit(1)
# Now validate cfg
conf_file = os.path.join(SCRIPT_REPO, args.conf)
validate_cfg(conf_file)
# Now pick out relevant info
# TODO(praneshp) eventually this must become a function call
with open(conf_file, 'r') as json_data:
data = json.load(json_data)
# stuff for the message queue
mq_args = {'mq_host': data['mq_host'],
'mq_port': data['mq_port'],
'mq_user': data['mq_user'],
'mq_password': data['mq_password']}
# general stuff for the audit module
kwargs = {'sshkey': utils.get_key_path(),
'name': data['name'],
'schedule': data['cron-freq'],
'mq_args': mq_args}
#Start a thread to run a cron job for this audit script
t = threading.Thread(name=kwargs['name'], target=start_audit,
kwargs=kwargs)
t.start()
t.join()
#TODO(praneshp): add this to a cfg file, to recover in case of failure
def register_repair(args):
logging.warning('Registering repair script')
def init():
logging.warning('Initializing')
#TODO(praneshp): come up with to start all registered reaction scripts
def parse():
parser = argparse.ArgumentParser(description='entropy')
subparsers = parser.add_subparsers(dest='command',
help='commands')
register_audit_parser = subparsers.add_parser('register-audit')
register_audit_parser.add_argument('-f', dest='script',
action='store', help='Audit script')
register_audit_parser.add_argument('-c', dest='conf', action='store',
help='Audit conf')
register_audit_parser.set_defaults(func=register_audit)
register_repair_parser =\
subparsers.add_parser('register-repair',
help='Register a repair script')
register_repair_parser.add_argument('-f', dest='filename', action='store',
help='Repair script location')
register_repair_parser.set_defaults(func=register_repair)
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
#TODO(praneshp): AMQP, json->yaml, reaction scripts(after amqp)
logging.basicConfig(filename=os.path.join(
LOG_REPO, 'entropy-' + str(time.time()) + '.log'))
init()
parse()
|
Python
| 0.999946
|
@@ -1023,16 +1023,50 @@
'logs')%0A
+LOG = logging.getLogger(__name__)%0A
%0A%0Adef va
@@ -1984,39 +1984,35 @@
s()%0A
-logging
+LOG
.warning('Will s
@@ -2100,39 +2100,35 @@
udit(args):%0A
-logging
+LOG
.warning('Regist
@@ -2228,39 +2228,35 @@
cript):%0A
-logging
+LOG
.warning('Need p
@@ -3340,31 +3340,27 @@
(args):%0A
-logging
+LOG
.warning('Re
@@ -3399,31 +3399,27 @@
init():%0A
-logging
+LOG
.warning('In
|
c1a76b78b7c567f22e76372ae22380c511e7a2f1
|
Complete docstring of parse_branches
|
readthedocs/vcs_support/backends/hg.py
|
readthedocs/vcs_support/backends/hg.py
|
# -*- coding: utf-8 -*-
"""Mercurial-related utilities."""
from __future__ import absolute_import
from readthedocs.projects.exceptions import RepositoryError
from readthedocs.vcs_support.base import BaseVCS, VCSVersion
class Backend(BaseVCS):
"""Mercurial VCS backend."""
supports_tags = True
supports_branches = True
fallback_branch = 'default'
def update(self):
super(Backend, self).update()
retcode = self.run('hg', 'status', record=False)[0]
if retcode == 0:
return self.pull()
return self.clone()
def pull(self):
(pull_retcode, _, _) = self.run('hg', 'pull')
if pull_retcode != 0:
raise RepositoryError
(update_retcode, stdout, stderr) = self.run('hg', 'update', '--clean')
if update_retcode != 0:
raise RepositoryError
return (update_retcode, stdout, stderr)
def clone(self):
self.make_clean_working_dir()
output = self.run('hg', 'clone', self.repo_url, '.')
if output[0] != 0:
raise RepositoryError
return output
@property
def branches(self):
retcode, stdout = self.run(
'hg', 'branches', '--quiet', record_as_success=True)[:2]
# error (or no tags found)
if retcode != 0:
return []
return self.parse_branches(stdout)
def parse_branches(self, data):
"""Stable / default"""
names = [name.lstrip() for name in data.splitlines()]
return [VCSVersion(self, name, name) for name in names if name]
@property
def tags(self):
retcode, stdout = self.run('hg', 'tags', record_as_success=True)[:2]
# error (or no tags found)
if retcode != 0:
return []
return self.parse_tags(stdout)
def parse_tags(self, data):
"""
Parses output of `hg tags`, eg:
tip 278:c4b2d21db51a
0.2.2 152:6b0364d98837
0.2.1 117:a14b7b6ffa03
0.1 50:30c2c6b3a055
maintenance release 1 10:f83c32fe8126
Into VCSVersion objects with the tag name as verbose_name and the
commit hash as identifier.
"""
vcs_tags = []
tag_lines = [line.strip() for line in data.splitlines()]
# starting from the rhs of each line, split a single value (changeset)
# off at whitespace; the tag name is the string to the left of that
tag_pairs = [line.rsplit(None, 1) for line in tag_lines]
for row in tag_pairs:
if len(row) != 2:
continue
name, commit = row
if name == 'tip':
continue
_, commit_hash = commit.split(':')
vcs_tags.append(VCSVersion(self, commit_hash, name))
return vcs_tags
@property
def commit(self):
_, stdout = self.run('hg', 'identify', '--id')[:2]
return stdout.strip()
def checkout(self, identifier=None):
super(Backend, self).checkout()
if not identifier:
identifier = 'tip'
retcode = self.run('hg', 'status', record=False)[0]
if retcode == 0:
self.run('hg', 'pull')
else:
self.clone()
return self.run('hg', 'update', '--clean', identifier)
|
Python
| 0.000002
|
@@ -1421,24 +1421,212 @@
%22%22%22
-Stable / default
+%0A Parses output of %60hg branches --quiet%60, eg:%0A%0A default%0A 0.2%0A 0.1%0A%0A Into VCSVersion objects with branch name as verbose_name and%0A identifier.%0A
%22%22%22%0A
|
3f2fb693203fc66aeab4c7d4178929829c65fbfb
|
Fix a broken try/except/else clause.
|
reviewboard/scmtools/tests/testcases.py
|
reviewboard/scmtools/tests/testcases.py
|
import os
import unittest
from errno import ECONNREFUSED
from tempfile import mkdtemp
from paramiko.ssh_exception import NoValidConnectionsError
from reviewboard.scmtools.core import HEAD
from reviewboard.scmtools.errors import SCMError, AuthenticationError
from reviewboard.scmtools.models import Repository
from reviewboard.site.models import LocalSite
from reviewboard.ssh.client import SSHClient
from reviewboard.ssh.tests import SSHTestCase
class SCMTestCase(SSHTestCase):
"""Base class for test suites for SCMTools."""
ssh_client = None
_can_test_ssh = None
def setUp(self):
super(SCMTestCase, self).setUp()
self.tool = None
def _check_can_test_ssh(self):
"""Check whether SSH-based tests can be run.
This will check if the user's SSH keys is authorized by the local
machine, for authentication. If so, SSH-based tests can be attempted.
If SSH-based tests cannot be run, the current test will be flagged
as skipped.
"""
if SCMTestCase._can_test_ssh is None:
SCMTestCase.ssh_client = SSHClient()
key = self.ssh_client.get_user_key()
SCMTestCase._can_test_ssh = \
key is not None and self.ssh_client.is_key_authorized(key)
if not SCMTestCase._can_test_ssh:
raise unittest.SkipTest(
"Cannot perform SSH access tests. The local user's SSH "
"public key must be in the %s file and SSH must be enabled."
% os.path.join(self.ssh_client.storage.get_ssh_dir(),
'authorized_keys'))
def _test_ssh(self, repo_path, filename=None):
"""Helper for testing an SSH connection to a local repository.
This will attempt to SSH into the local machine and connect to the
given repository, checking it for validity and optionally fetching
a file.
If this is unable to connect to the local machine, the test will be
flagged as skipped.
Args:
repo_path (unicode):
The repository path to check.
filename (unicode, optional):
The optional file in the repository to fetch.
"""
self._check_can_test_ssh()
repo = Repository(name='SSH Test', path=repo_path,
tool=self.repository.tool)
tool = repo.get_scmtool()
try:
tool.check_repository(repo_path)
except NoValidConnectionsError:
# This box likely isn't set up for this test.
SCMTestCase._can_test_ssh = False
raise unittest.SkipTest(
'Cannot perform SSH access tests. No local SSH service is '
'running.')
else:
raise
if filename:
self.assertIsNotNone(tool.get_file(filename, HEAD))
def _test_ssh_with_site(self, repo_path, filename=None):
"""Helper for testing an SSH connection and using a Local Site.
This will attempt to SSH into the local machine and connect to the
given repository, using an SSH key and repository based on a Local
Site. It will check the repository for validity and optionally fetch
a file.
If this is unable to connect to the local machine, the test will be
flagged as skipped.
Args:
repo_path (unicode):
The repository path to check.
filename (unicode, optional):
The optional file in the repository to fetch.
"""
self._check_can_test_ssh()
# Get the user's .ssh key, for use in the tests
user_key = self.ssh_client.get_user_key()
self.assertIsNotNone(user_key)
# Switch to a new SSH directory.
self.tempdir = mkdtemp(prefix='rb-tests-home-')
sshdir = os.path.join(self.tempdir, '.ssh')
self._set_home(self.tempdir)
self.assertEqual(sshdir, self.ssh_client.storage.get_ssh_dir())
self.assertFalse(os.path.exists(os.path.join(sshdir, 'id_rsa')))
self.assertFalse(os.path.exists(os.path.join(sshdir, 'id_dsa')))
self.assertIsNone(self.ssh_client.get_user_key())
tool_class = self.repository.tool
# Make sure we aren't using the old SSH key. We want auth errors.
repo = Repository(name='SSH Test', path=repo_path, tool=tool_class)
tool = repo.get_scmtool()
self.assertRaises(AuthenticationError,
lambda: tool.check_repository(repo_path))
if filename:
self.assertRaises(SCMError,
lambda: tool.get_file(filename, HEAD))
for local_site_name in ('site-1',):
local_site = LocalSite(name=local_site_name)
local_site.save()
repo = Repository(name='SSH Test', path=repo_path, tool=tool_class,
local_site=local_site)
tool = repo.get_scmtool()
ssh_client = SSHClient(namespace=local_site_name)
self.assertEqual(ssh_client.storage.get_ssh_dir(),
os.path.join(sshdir, local_site_name))
ssh_client.import_user_key(user_key)
self.assertEqual(ssh_client.get_user_key(), user_key)
# Make sure we can verify the repository and access files.
tool.check_repository(repo_path, local_site_name=local_site_name)
if filename:
self.assertIsNotNone(tool.get_file(filename, HEAD))
|
Python
| 0.002586
|
@@ -2760,40 +2760,8 @@
g.')
-%0A else:%0A raise
%0A%0A
|
67bb13412118b3dddbee09d8df04788c50084506
|
Fix use of bt_device in find_ruuvitags (#42)
|
ruuvitag_sensor/ruuvi.py
|
ruuvitag_sensor/ruuvi.py
|
import sys
import os
import time
import logging
from ruuvitag_sensor.decoder import get_decoder
log = logging.getLogger(__name__)
if not sys.platform.startswith('linux') or os.environ.get('CI') == 'True':
# Use BleCommunicationDummy also for CI as it can't use bluez
from ruuvitag_sensor.ble_communication import BleCommunicationDummy
ble = BleCommunicationDummy()
else:
from ruuvitag_sensor.ble_communication import BleCommunicationNix
ble = BleCommunicationNix()
class RunFlag(object):
"""
Wrapper for boolean run flag
Attributes:
running (bool): Defines if function should continue execution
"""
running = True
class RuuviTagSensor(object):
"""
RuuviTag communication and data convert functionality
"""
@staticmethod
def get_data(mac, bt_device=''):
raw = ble.get_data(mac, bt_device)
return RuuviTagSensor.convert_data(raw)
@staticmethod
def convert_data(raw):
"""
Validate that data is from RuuviTag and get correct data part.
Returns:
tuple (int, string): Data Format type and Sensor data
"""
# TODO: Check from raw data correct data format
# Now this returns 2 also for Data Format 4
data = RuuviTagSensor._get_data_format_2and4(raw)
if data is not None:
return (2, data)
data = RuuviTagSensor._get_data_format_3(raw)
if data is not None:
return (3, data)
return (None, None)
@staticmethod
def find_ruuvitags(bt_device=''):
"""
Find all RuuviTags. Function will print the mac and the state of the sensors when found.
Function will execute as long as it is stopped. Stop ecexution with Crtl+C.
Returns:
dict: MAC and state of found sensors
"""
log.info('Finding RuuviTags. Stop with Ctrl+C.')
datas = dict()
for new_data in RuuviTagSensor._get_ruuvitag_datas(bt_device):
if new_data[0] in datas:
continue
datas[new_data[0]] = new_data[1]
log.info(new_data[0])
log.info(new_data[1])
return datas
@staticmethod
def get_data_for_sensors(macs=[], search_duratio_sec=5, bt_device=''):
"""
Get lates data for sensors in the MAC's list.
Args:
macs (array): MAC addresses
search_duratio_sec (int): Search duration in seconds. Default 5
bt_device (string): Bluetooth device id
Returns:
dict: MAC and state of found sensors
"""
log.info('Get latest data for sensors. Stop with Ctrl+C.')
log.info('Stops automatically in %ss', search_duratio_sec)
log.info('MACs: %s', macs)
datas = dict()
for new_data in RuuviTagSensor._get_ruuvitag_datas(macs, search_duratio_sec, bt_device=bt_device):
datas[new_data[0]] = new_data[1]
return datas
@staticmethod
def get_datas(callback, macs=[], run_flag=RunFlag(), bt_device=''):
"""
Get data for all ruuvitag sensors or sensors in the MAC's list.
Args:
callback (func): callback funcion to be called when new data is received
macs (list): MAC addresses
run_flag (object): RunFlag object. Function executes while run_flag.running
bt_device (string): Bluetooth device id
"""
log.info('Get latest data for sensors. Stop with Ctrl+C.')
log.info('MACs: %s', macs)
for new_data in RuuviTagSensor._get_ruuvitag_datas(macs, None, run_flag, bt_device):
callback(new_data)
@staticmethod
def _get_ruuvitag_datas(macs=[], search_duratio_sec=None, run_flag=RunFlag(), bt_device=''):
"""
Get data from BluetoothCommunication and handle data encoding.
Args:
macs (list): MAC addresses. Default empty list
search_duratio_sec (int): Search duration in seconds. Default None
run_flag (object): RunFlag object. Function executes while run_flag.running. Default new RunFlag
bt_device (string): Bluetooth device id
Yields:
tuple: MAC and State of RuuviTag sensor data
"""
mac_blacklist = []
start_time = time.time()
data_iter = ble.get_datas(mac_blacklist, bt_device)
for ble_data in data_iter:
# Check duration
if search_duratio_sec and time.time() - start_time > search_duratio_sec:
data_iter.send(StopIteration)
break
# Check running flag
if not run_flag.running:
data_iter.send(StopIteration)
break
# Check MAC whitelist
if macs and not ble_data[0] in macs:
continue
(data_format, data) = RuuviTagSensor.convert_data(ble_data[1])
# Check that encoded data is valid RuuviTag data and it is sensor data
# If data is not valid RuuviTag data add MAC to blacklist
if data is not None:
state = get_decoder(data_format).decode_data(data)
if state is not None:
yield (ble_data[0], state)
else:
mac_blacklist.append(ble_data[0])
@staticmethod
def _get_data_format_2and4(raw):
"""
Validate that data is from RuuviTag and is Data Format 2 or 4. Convert hexadcimal data to string.
Encoded data part is after ruu.vi/# or r/
Returns:
string: Encoded sensor data
"""
try:
# TODO: Fix conversion so convered data will show https://ruu.vi/# and htts://r/
# Now it has e.g. [Non_ASCII characters]ruu.vi/#AjwYAMFc
base16_split = [raw[i:i + 2] for i in range(0, len(raw), 2)]
selected_hexs = filter(lambda x: int(x, 16) < 128, base16_split)
characters = [chr(int(c, 16)) for c in selected_hexs]
data = ''.join(characters)
# take only part after ruu.vi/# or r/
index = data.find('ruu.vi/#')
if index > -1:
return data[(index + 8):]
else:
index = data.find('r/')
if index > -1:
return data[(index + 2):]
return None
except:
return None
@staticmethod
def _get_data_format_3(raw):
"""
Validate that data is from RuuviTag and is Data Format 3
Returns:
string: Sensor data
"""
# Search of FF990403 (Manufacturer Specific Data (FF) / Ruuvi Innovations ltd (9904) / Format 3 (03))
try:
if "FF990403" not in raw:
return None
payload_start = raw.index("FF990403") + 6;
return raw[payload_start:]
except:
return None
|
Python
| 0.000005
|
@@ -1912,33 +1912,32 @@
datas = dict()%0A
-%0A
for new_
@@ -1980,32 +1980,42 @@
_datas(bt_device
+=bt_device
):%0A i
|
ca35d04d841f55f9703cc2b2d7be1f88d5184fd0
|
Return True when valid, PEP8
|
oscarapi/views/basket.py
|
oscarapi/views/basket.py
|
from django.utils.translation import ugettext_lazy as _
from oscar.apps.basket import signals
from oscar.core.loading import get_model, get_class
from rest_framework import status, generics, exceptions
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.views import APIView
from oscarapi import serializers, permissions
from oscarapi.basket.operations import (
apply_offers,
get_basket
)
from oscarapi.views.mixin import PutIsPatchMixin
from oscarapi.views.utils import BasketPermissionMixin
__all__ = ('BasketView', 'LineList', 'LineDetail', 'AddProductView',
'add_voucher', 'shipping_methods')
Basket = get_model('basket', 'Basket')
Line = get_model('basket', 'Line')
Repository = get_class('shipping.repository', 'Repository')
class BasketView(APIView):
"""
Api for retrieving a user's basket.
GET:
Retrieve your basket.
"""
def get(self, request, format=None):
basket = get_basket(request)
ser = serializers.BasketSerializer(basket,
context={'request': request})
return Response(ser.data)
class AddProductView(APIView):
"""
Add a certain quantity of a product to the basket.
POST(url, quantity)
{
"url": "http://testserver.org/oscarapi/products/209/",
"quantity": 6
}
If you've got some options to configure for the product to add to the
basket, you should pass the optional ``options`` property:
{
"url": "http://testserver.org/oscarapi/products/209/",
"quantity": 6,
"options": [{
"option": "http://testserver.org/oscarapi/options/1/",
"value": "some value"
}]
}
"""
def validate(self, basket, product, quantity):
availability = basket.strategy.fetch_for_product(
product).availability
# check if product is available at all
if not availability.is_available_to_buy:
return False, availability.message
# check if we can buy this quantity
allowed, message = availability.is_purchase_permitted(quantity)
if not allowed:
return False, message
# check if there is a limit on amount
allowed, message = basket.is_quantity_allowed(quantity)
if not allowed:
return False, quantity
def post(self, request, format=None):
p_ser = serializers.AddProductSerializer(
data=request.DATA, context={'request': request})
if p_ser.is_valid():
basket = get_basket(request)
product = p_ser.object['url']
quantity = p_ser.init_data.get('quantity')
options = p_ser.object.get('options', [])
basket_valid, message = self.validate(basket, product, quantity)
if not basket_valid:
return Response(
{'reason': message},
status=status.HTTP_406_NOT_ACCEPTABLE)
basket.add_product(product, quantity=quantity, options=options)
apply_offers(request, basket)
ser = serializers.BasketSerializer(
basket, context={'request': request})
return Response(ser.data)
return Response(
{'reason': p_ser.errors}, status=status.HTTP_406_NOT_ACCEPTABLE)
@api_view(('POST',))
def add_voucher(request, format=None):
"""
Add a voucher to the basket.
POST(vouchercode)
{
"vouchercode": "kjadjhgadjgh7667"
}
Will return 200 and the voucher as json if succesful.
If unsuccessful, will return 406 with the error.
"""
v_ser = serializers.VoucherAddSerializer(data=request.DATA,
context={'request': request})
if v_ser.is_valid():
basket = get_basket(request)
voucher = v_ser.object
basket.vouchers.add(voucher)
signals.voucher_addition.send(
sender=None, basket=basket, voucher=voucher)
# Recalculate discounts to see if the voucher gives any
apply_offers(request, basket)
discounts_after = basket.offer_applications
# Look for discounts from this new voucher
for discount in discounts_after:
if discount['voucher'] and discount['voucher'] == voucher:
break
else:
basket.vouchers.remove(voucher)
return Response(
{'reason': _(
"Your basket does not qualify for a voucher discount")},
status=status.HTTP_406_NOT_ACCEPTABLE)
ser = serializers.VoucherSerializer(
voucher, context={'request': request})
return Response(ser.data)
return Response(v_ser.errors, status=status.HTTP_406_NOT_ACCEPTABLE)
@api_view(('GET',))
def shipping_methods(request, format=None):
"""
Get the available shipping methods and their cost for this order.
GET:
A list of shipping method details and the prices.
"""
basket = get_basket(request)
shiping_methods = Repository().get_shipping_methods(
basket=request.basket, user=request.user,
request=request)
ser = serializers.ShippingMethodSerializer(
shiping_methods, many=True, context={'basket': basket})
return Response(ser.data)
class LineList(BasketPermissionMixin, generics.ListCreateAPIView):
"""
Api for adding lines to a basket.
Permission will be checked,
Regular users may only access their own basket,
staff users may access any basket.
GET:
A list of basket lines.
POST(basket, line_reference, product, stockrecord,
quantity, price_currency, price_excl_tax, price_incl_tax):
Add a line to the basket, example::
{
"basket": "http://127.0.0.1:8000/oscarapi/baskets/100/",
"line_reference": "234_345",
"product": "http://127.0.0.1:8000/oscarapi/products/209/",
"stockrecord":
"http://127.0.0.1:8000/oscarapi/stockrecords/100/",
"quantity": 3,
"price_currency": "EUR",
"price_excl_tax": "100.0",
"price_incl_tax": "121.0"
}
"""
queryset = Line.objects.all()
serializer_class = serializers.LineSerializer
def get(self, request, pk=None, format=None):
if pk is not None:
self.check_basket_permission(request, pk)
self.queryset = self.queryset.filter(basket__id=pk)
elif not request.user.is_staff:
self.permission_denied(request)
return super(LineList, self).get(request, format)
def post(self, request, pk=None, format=None):
data_basket = self.get_data_basket(request.DATA, format)
self.check_basket_permission(request, basket=data_basket)
if pk is not None:
url_basket = self.check_basket_permission(request, basket_pk=pk)
if url_basket != data_basket:
raise exceptions.NotAcceptable(
_('Target basket inconsistent %s != %s') % (
url_basket.pk, data_basket.pk
)
)
elif not request.user.is_staff:
self.permission_denied(request)
return super(LineList, self).post(request, format=format)
class LineDetail(PutIsPatchMixin, generics.RetrieveUpdateDestroyAPIView):
queryset = Line.objects.all()
serializer_class = serializers.LineSerializer
permission_classes = (permissions.IsAdminUserOrRequestContainsLine,)
|
Python
| 0.999999
|
@@ -1757,16 +1757,17 @@
%7D%0A
+
%22%22%22%0A
@@ -2393,16 +2393,42 @@
quantity
+%0A return True, None
%0A%0A de
@@ -3229,17 +3229,16 @@
basket,
-
context
|
9d65b613384b1d4781efd65588639ad68261e8d7
|
Remove unused import.
|
cryptography/hazmat/primitives/hmac.py
|
cryptography/hazmat/primitives/hmac.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import binascii
import six
from cryptography.hazmat.primitives import interfaces
@interfaces.register(interfaces.HashContext)
class HMAC(object):
    """Incremental HMAC computation that delegates all crypto work to a backend.

    The backend's ``hmacs`` interface provides create/update/copy/finalize
    operations on an opaque context object held in ``self._ctx``.
    """

    def __init__(self, key, algorithm, ctx=None, backend=None):
        super(HMAC, self).__init__()
        self.algorithm = algorithm
        if backend is None:
            # Imported lazily to avoid a circular import at module load time.
            from cryptography.hazmat.bindings import _default_backend
            backend = _default_backend
        self._backend = backend
        # Key is retained so copy() can construct an equivalent instance.
        self._key = key
        if ctx is None:
            self._ctx = self._backend.hmacs.create_ctx(key, self.algorithm)
        else:
            # Reuse an existing backend context (used by copy()).
            self._ctx = ctx

    def update(self, msg):
        """Feed *msg* (bytes) into the running HMAC."""
        if isinstance(msg, six.text_type):
            raise TypeError("Unicode-objects must be encoded before hashing")
        self._backend.hmacs.update_ctx(self._ctx, msg)

    def copy(self):
        """Return an independent HMAC sharing this one's current state."""
        return self.__class__(self._key, self.algorithm, backend=self._backend,
                              ctx=self._backend.hmacs.copy_ctx(self._ctx))

    def finalize(self):
        """Return the final digest bytes for everything fed via update()."""
        return self._backend.hmacs.finalize_ctx(self._ctx,
                                                self.algorithm.digest_size)
|
Python
| 0
|
@@ -609,25 +609,8 @@
on%0A%0A
-import binascii%0A%0A
impo
|
1d7c70de13c70ce44cd6338b43b2ddc4aa348ac9
|
Support requests for versioned metadata
|
website/addons/osfstorage/views.py
|
website/addons/osfstorage/views.py
|
from __future__ import unicode_literals
import httplib
import logging
from modularodm import Q
from modularodm.storage.base import KeyExistsException
from flask import request
from framework.auth import Auth
from framework.exceptions import HTTPError
from framework.auth.decorators import must_be_signed
from website.models import User
from website.project.decorators import (
must_not_be_registration, must_have_addon,
)
from website.util import rubeus
from website.project.model import has_anonymous_link
from website.files import models
from website.addons.osfstorage import utils
from website.addons.osfstorage import decorators
from website.addons.osfstorage import settings as osf_storage_settings
logger = logging.getLogger(__name__)
def osf_storage_root(node_settings, auth, **kwargs):
    """Build HGrid JSON for root node. Note: include node URLs for client-side
    URL creation for uploaded files.

    :param node_settings: osfstorage node settings (owner node is read from it)
    :param auth: Auth object; its permissions and user are embedded in the root
    :return: single-element list containing the addon root dict
    """
    node = node_settings.owner
    root = rubeus.build_addon_root(
        node_settings=node_settings,
        name='',
        permissions=auth,
        user=auth.user,
        nodeUrl=node.url,
        nodeApiUrl=node.api_url,
    )
    return [root]
def make_error(code, message_short=None, message_long=None):
    """Build an HTTPError for *code*, attaching any provided messages.

    Only truthy messages are included in the error's ``data`` payload.
    """
    candidates = {
        'message_short': message_short,
        'message_long': message_long,
    }
    data = {key: value for key, value in candidates.items() if value}
    return HTTPError(code, data=data)
@must_be_signed
@must_have_addon('osfstorage', 'node')
def osfstorage_update_metadata(node_addon, payload, **kwargs):
    """Update a FileVersion's metadata from a signed request payload.

    Expects ``payload['version']`` (version id) and ``payload['metadata']``;
    responds 400 if either is missing, 404 if the version does not exist.
    """
    try:
        version_id = payload['version']
        metadata = payload['metadata']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)

    version = models.FileVersion.load(version_id)

    if version is None:
        raise HTTPError(httplib.NOT_FOUND)

    version.update_metadata(metadata)

    return {'status': 'success'}
@must_be_signed
@decorators.autoload_filenode(must_be='file')
def osfstorage_get_revisions(file_node, node_addon, payload, **kwargs):
    """Serialize all versions of a file, newest first.

    Anonymity is derived from the ``view_only`` query parameter so that
    serialized revisions can omit contributor identities.
    """
    is_anon = has_anonymous_link(node_addon.owner, Auth(private_key=request.args.get('view_only')))

    # Return revisions in descending order; `index` preserves each version's
    # original (ascending) position despite the reversed iteration.
    return {
        'revisions': [
            utils.serialize_revision(node_addon.owner, file_node, version, index=len(file_node.versions) - idx - 1, anon=is_anon)
            for idx, version in enumerate(reversed(file_node.versions))
        ]
    }
@decorators.waterbutler_opt_hook
def osfstorage_copy_hook(source, destination, name=None, **kwargs):
    """Waterbutler hook: copy *source* under *destination*; respond 201."""
    return source.copy_under(destination, name=name).serialize(), httplib.CREATED
@decorators.waterbutler_opt_hook
def osfstorage_move_hook(source, destination, name=None, **kwargs):
    """Waterbutler hook: move *source* under *destination*; respond 200."""
    return source.move_under(destination, name=name).serialize(), httplib.OK
@must_be_signed
@decorators.autoload_filenode(default_root=True)
def osfstorage_get_lineage(file_node, node_addon, **kwargs):
    """Serialize the chain of ancestors from *file_node* up to the root."""
    #TODO Profile
    # NOTE(review): result is discarded — presumably warms the modular-odm
    # cache for the parent walk below; confirm before removing.
    list(models.OsfStorageFolder.find(Q('node', 'eq', node_addon.owner)))

    lineage = []

    # Walk parent pointers until the root (parent is None).
    while file_node:
        lineage.append(file_node.serialize())
        file_node = file_node.parent

    return {'data': lineage}
@must_be_signed
@decorators.autoload_filenode(default_root=True)
def osfstorage_get_metadata(file_node, **kwargs):
    """Return the full serialization of a file node (root when unspecified)."""
    return file_node.serialize(include_full=True)
@must_be_signed
@decorators.autoload_filenode(must_be='folder')
def osfstorage_get_children(file_node, **kwargs):
    """Serialize the direct children of a folder node."""
    return [
        child.serialize()
        for child in file_node.children
    ]
@must_be_signed
@must_not_be_registration
@decorators.autoload_filenode(must_be='folder')
def osfstorage_create_child(file_node, payload, node_addon, **kwargs):
    """Create a file or folder under *file_node* from a signed payload.

    For files, a new FileVersion is also created from the payload's
    ``settings``/``worker``/``metadata``/``hashes`` sections. Responds 201
    when the child was newly created, 200 when an existing file was reused,
    409 when a folder name collides, 400 on bad input.
    """
    parent = file_node  # Just for clarity

    name = payload.get('name')
    user = User.load(payload.get('user'))
    is_folder = payload.get('kind') == 'folder'

    # NOTE(review): if name is None but user is set, `'/' in name` raises
    # TypeError instead of returning 400 — confirm callers always send a name.
    if not (name or user) or '/' in name:
        raise HTTPError(httplib.BAD_REQUEST)

    try:
        if is_folder:
            created, file_node = True, parent.append_folder(name)
        else:
            created, file_node = True, parent.append_file(name)
    except KeyExistsException:
        # Name already taken: reuse the existing child of the matching kind.
        created, file_node = False, parent.find_child_by_name(name, kind=int(not is_folder))

    # Folders cannot be silently reused; report the conflict.
    if not created and is_folder:
        raise HTTPError(httplib.CONFLICT, data={
            'message': 'Cannot create folder "{name}" because a file or folder already exists at path "{path}"'.format(
                name=file_node.name,
                path=file_node.materialized_path,
            )
        })

    if not is_folder:
        try:
            # Merge settings + worker + object/service identifiers into the
            # version's location; metadata + hashes into its metadata.
            version = file_node.create_version(
                user,
                dict(payload['settings'], **dict(
                    payload['worker'], **{
                        'object': payload['metadata']['name'],
                        'service': payload['metadata']['provider'],
                    })
                ),
                dict(payload['metadata'], **payload['hashes'])
            )
            version_id = version._id
            archive_exists = version.archive is not None
        except KeyError:
            raise HTTPError(httplib.BAD_REQUEST)
    else:
        version_id = None
        archive_exists = False

    return {
        'status': 'success',
        'archive': not archive_exists,  # Should waterbutler also archive this file
        'data': file_node.serialize(),
        'version': version_id,
    }, httplib.CREATED if created else httplib.OK
@must_be_signed
@must_not_be_registration
@decorators.autoload_filenode()
def osfstorage_delete(file_node, payload, node_addon, **kwargs):
    """Delete a file node on behalf of the user named in the payload.

    Deleting the storage root is rejected with 400.
    """
    auth = Auth(User.load(payload['user']))
    #TODO Auth check?
    if not auth:
        raise HTTPError(httplib.BAD_REQUEST)

    if file_node == node_addon.get_root():
        raise HTTPError(httplib.BAD_REQUEST)

    file_node.delete()

    return {'status': 'success'}
@must_be_signed
@decorators.autoload_filenode(must_be='file')
def osfstorage_download(file_node, payload, node_addon, **kwargs):
    """Resolve a download request to a storage location for waterbutler.

    The ``version`` query arg is 1-based; it is converted to a 0-based index
    (0 maps to -1, i.e. the latest version via get_version).
    """
    try:
        version_id = int(request.args.get('version') or 0) - 1
    except ValueError:
        raise make_error(httplib.BAD_REQUEST, 'Version must be an int or not specified')

    version = file_node.get_version(version_id, required=True)

    # Renders are not counted as downloads in analytics.
    if request.args.get('mode') not in ('render', ):
        if version_id < 0:
            # Normalize negative index to the absolute version position.
            version_id = len(file_node.versions) + version_id
        utils.update_analytics(node_addon.owner, file_node._id, version_id)

    return {
        'data': {
            'name': file_node.name,
            'path': version.location_hash,
        },
        'settings': {
            osf_storage_settings.WATERBUTLER_RESOURCE: version.location[osf_storage_settings.WATERBUTLER_RESOURCE],
        },
    }
|
Python
| 0
|
@@ -3273,32 +3273,247 @@
ode, **kwargs):%0A
+ try:%0A # TODO This should change to version as its internal it can be changed anytime%0A version = int(request.args.get('revision'))%0A except ValueError: # If its not a number%0A version = -1%0A
return file_
@@ -3527,16 +3527,33 @@
rialize(
+version=version,
include_
|
229c427a935112975f9f3928d669fa94b34c47d5
|
Fix stupid bug
|
stash-space/python/store_space_usage.py
|
stash-space/python/store_space_usage.py
|
#!/usr/bin/env python
# Copyright 2015 University of Chicago
import sys
import argparse
import logging
import pytz
import os
import pwd
import grp
import datetime
import xattr
import elasticsearch
import elasticsearch.helpers
# Elasticsearch endpoints used for storing space-usage records, and the
# timezone applied to every timestamp written by this script.
ES_NODES = ['uct2-es-head.mwt2.org:9200', 'uct2-es-door.mwt2.org:9200']
TZ_NAME = "US/Central"
TIMEZONE = pytz.timezone(TZ_NAME)
def get_es_client():
    """ Instantiate DB client and pass connection back """
    # Generous retry/timeout settings: bulk indexing of many records can be slow.
    return elasticsearch.Elasticsearch(hosts=ES_NODES,
                                       retry_on_timeout=True,
                                       max_retries=10,
                                       timeout=300)
def create_record(dirpath, num_files, date, dir_size=None, index=None):
    """
    Query directory at dirpath for information and store information in ES

    :param dirpath: string storing path to file
    :parm num_files: number of files present in dirpath
    :param date: datetime.date with date to use
    :param dir_size: size of directory and contents in bytes
    :param index: ES index for records
    :return: dictionary with record if successful, empty dict otherwise
    """
    if not os.path.isdir(dirpath):
        return {}
    if not index:
        # Default to one index per month, e.g. "stash-space-2015-03".
        index = "stash-space-{0}-{1:0>2}".format(date.year, date.month)
    dir_info = os.stat(dirpath)
    uid = dir_info.st_uid
    gid = dir_info.st_gid
    # Localize the stat timestamps to the configured timezone.
    ctime = datetime.datetime.fromtimestamp(dir_info.st_ctime, TIMEZONE)
    mtime = datetime.datetime.fromtimestamp(dir_info.st_mtime, TIMEZONE)
    # Fall back to the raw numeric ids when no matching group/user exists.
    try:
        group = grp.getgrgid(gid).gr_name
    except KeyError:
        group = gid
    try:
        user = pwd.getpwuid(uid).pw_name
    except KeyError:
        user = uid
    record_fields = {'@timestamp': date.isoformat(),
                     'size': dir_size,
                     'num_files': num_files,
                     'ctime': ctime.isoformat(),
                     'mtime': mtime.isoformat(),
                     'user': user,
                     'group': group,
                     'path': dirpath}
    # Shape matches what elasticsearch.helpers.bulk expects for an index op.
    record = {'_index': index,
              '_source': record_fields,
              '_op_type': 'index',
              '_type': 'space_usage_record'}
    return record
def get_ceph_dir_size(root):
    """
    Get the space used by a directory and it's contents on a CephFS

    :param root: path to directory
    :return: number of bytes used by the directory and it's contents
    """
    # On CephFS, a directory's st_size reports the recursive size of its
    # contents, so a single stat call suffices.
    return os.stat(root, follow_symlinks=False).st_size
def get_dir_size(root, inodes):
    """Sum the sizes of the regular files directly inside *root*.

    Files whose inode is already in *inodes* are skipped (so hard links are
    only counted once across calls); newly counted inodes are added to the
    set. The directory's own ``st_size`` is included in the total.

    :param root: path to the directory
    :param inodes: mutable set of inode numbers already accounted for
    :return: number of bytes used by the directory and its files
    """
    accumulated = 0
    for name in os.listdir(root):
        child_path = os.path.join(root, name)
        try:
            child_stat = os.stat(child_path)
        except OSError:
            # Entry vanished or is unreadable; skip it.
            continue
        if child_stat.st_ino in inodes:
            continue
        is_plain_file = (os.path.isfile(child_path)
                         and not os.path.islink(child_path))
        if not is_plain_file:
            continue
        inodes.add(child_stat.st_ino)
        accumulated += child_stat.st_size
    return accumulated + os.stat(root).st_size
def _dir_usage(full_path):
    """Read Ceph recursive-usage xattrs for one directory.

    :param full_path: directory to inspect
    :return: dict with 'name', 'files' (recursive file count) and
             'size' (recursive bytes); zeros when the xattr is empty
    """
    dir_info = {'name': full_path}
    num_files = xattr.getxattr(full_path, 'ceph.dir.rfiles').strip()
    # xattr occasionally returns an empty entry; report zeros in that case.
    if num_files == '':
        dir_info['files'] = 0
        dir_info['size'] = 0
        return dir_info
    # Values carry a trailing NUL-like byte; strip it before use.
    dir_info['files'] = num_files[:-1]
    dir_info['size'] = int(xattr.getxattr(full_path, 'ceph.dir.rbytes')[:-1])
    return dir_info


def get_top_level_info(dirpath):
    """
    Temporary function to just get top level info on user and project directories

    :param dirpath: path to stash installation
    :return: a list with dictionaries containing user/project information
    """
    # Bug fixes vs. the previous version: the empty-xattr case now stops
    # instead of falling through to int('') (which raised ValueError) and
    # double-appending, and each entry gets a fresh dict (the project loop
    # previously reused and mutated a single dict for every entry).
    directories = []
    if not os.path.isdir(dirpath):
        return directories
    for subdir in ('user', 'project'):
        parent = os.path.join(dirpath, subdir)
        for entry in os.listdir(parent):
            full_path = os.path.join(parent, entry)
            if not os.path.isdir(full_path):
                continue
            directories.append(_dir_usage(full_path))
    return directories
def traverse_directory(dirpath, index=None, ceph_fs=False):
    """
    Traverse subdirectories and create a set of records
    with space usage

    :param dirpath: path to directory to
    :param index: the ES index to store records in
    :param ceph_fs: Whether the dirpath is a CephFS directory
    :return: Nothing
    """
    # Records are stamped with today's date at midnight, localized.
    current_date = TIMEZONE.localize(datetime.datetime.combine(datetime.date.today(),
                                                               datetime.time(0, 0, 0)))
    if not os.path.isdir(dirpath):
        return
    records = []
    if ceph_fs:
        # for ceph directories just get top-level user and project information
        # for now, this will get removed to get more detailed information on the new
        # Ceph filesystem
        directories = get_top_level_info(dirpath)
        for dir in directories:
            record = create_record(dir['name'],
                                   dir['files'],
                                   current_date,
                                   dir['size'],
                                   index)
            if record:
                records.append(record)
        save_records(records)
        return
    # NOTE(review): ceph_fs is always False past the early return above, so
    # topdown is effectively always False here — dead branch.
    if ceph_fs:
        topdown = True
    else:
        topdown = False
    for root, dirs, files in os.walk(dirpath, topdown=topdown):
        if not ceph_fs:
            # Track inodes across the walk so hard links count only once.
            inodes = set()
            size = get_dir_size(root, inodes)
        else:
            size = get_ceph_dir_size(root)
        record = create_record(root, len(files), current_date, size, index)
        if record:
            records.append(record)
    save_records(records)
def save_records(records=None):
    """
    Save a job record to ES

    :param records: iterable of bulk-action dicts (as built by create_record)
    """
    es_client = get_es_client()
    elasticsearch.helpers.bulk(es_client,
                               records,
                               stats_only=False)
if __name__ == "__main__":
    # CLI entry point: validate the target directory and kick off the traversal.
    parser = argparse.ArgumentParser(description='Get space usage on a given '
                                                 'filesystem and store in ES')
    parser.add_argument('--index', dest='index', default=None,
                        help='ES index to store records in')
    parser.add_argument('--is-ceph', dest='ceph_fs', default=False,
                        action='store_true',
                        help='Is the directory a CephFS directory?')
    parser.add_argument("directory", default=None,
                        help="Directory to examine")
    args = parser.parse_args(sys.argv[1:])
    if not os.path.isdir(args.directory):
        sys.stderr.write("{0} must be a directory\n".format(args.directory))
        sys.exit(1)
    traverse_directory(args.directory, args.index, args.ceph_fs)
|
Python
| 0.000016
|
@@ -4187,32 +4187,53 @@
ppend(dir_info)%0A
+ continue%0A
dir_info
@@ -4900,32 +4900,53 @@
ppend(dir_info)%0A
+ continue%0A
dir_info
|
38a2d86aed4ea1e94691993c5f49722f9a69ac8d
|
Remove Python < 3.6 version check
|
lisa/__init__.py
|
lisa/__init__.py
|
#! /usr/bin/env python3
import warnings
import os
import sys
from lisa.version import __version__
# Raise an exception when a deprecated API is used from within a lisa.*
# submodule. This ensures that we don't use any deprecated APIs internally, so
# they are only kept for external backward compatibility purposes.
# Raise an exception when a deprecated API is used from within a lisa.*
# submodule. This ensures that we don't use any deprecated APIs internally, so
# they are only kept for external backward compatibility purposes.
warnings.filterwarnings(
    action='error',
    category=DeprecationWarning,
    module=r'{}\..*'.format(__name__),
)

# When the deprecated APIs are used from __main__ (script or notebook), always
# show the warning
warnings.filterwarnings(
    action='always',
    category=DeprecationWarning,
    module=r'__main__',
)

# Prevent matplotlib from trying to connect to X11 server, for headless testing.
# Must be done before importing matplotlib.pyplot or pylab
try:
    import matplotlib
except ImportError:
    # matplotlib is optional; nothing to configure when it is absent.
    pass
else:
    if not os.getenv('DISPLAY'):
        matplotlib.use('Agg')

# Warn (once, via the filters above) when running on a pre-3.6 interpreter.
if sys.version_info < (3, 6):
    warnings.warn(
        'Python 3.6 will soon be required to run LISA, please upgrade from {} to any version higher than 3.6'.format(
            '.'.join(
                map(str, tuple(sys.version_info)[:3])
            ),
        ),
        DeprecationWarning,
    )

# vim :set tabstop=4 shiftwidth=4 textwidth=80 expandtab
|
Python
| 0
|
@@ -907,312 +907,8 @@
')%0A%0A
-if sys.version_info %3C (3, 6):%0A warnings.warn(%0A 'Python 3.6 will soon be required to run LISA, please upgrade from %7B%7D to any version higher than 3.6'.format(%0A '.'.join(%0A map(str, tuple(sys.version_info)%5B:3%5D)%0A ),%0A ),%0A DeprecationWarning,%0A )%0A%0A
# vi
|
13298bd49d9c72b8db6650fb0f8b316998b302f0
|
Add permissions on TodoList model.
|
safarido/todos/models.py
|
safarido/todos/models.py
|
from django.conf import settings
from django.db import models
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
class TimestampedModel(models.Model):
    """Abstract base model adding created/modified timestamps to subclasses."""

    # Set once when the row is first inserted.
    created_on = models.DateTimeField(_('created on'), auto_now_add=True)
    # Refreshed on every save.
    modified_on = models.DateTimeField(_('modified on'), auto_now=True)

    class Meta:
        abstract = True
class TodoList(TimestampedModel):
    """A titled, sluggable list of todos; lists may be nested via *parent*."""

    title = models.CharField(
        _('title'),
        max_length=50,
    )
    slug = models.SlugField()
    parent = models.ForeignKey(
        'self',
        blank=True,
        null=True,
        related_name='child',
    )

    def get_ancestors(self):
        """Return the chain of lists from the root down to (and including) self."""
        ancestors = []
        if self.parent:
            parent = self.parent
            while parent:
                ancestors.append(parent)
                parent = parent.parent
            # Collected child-to-root; reverse so the root comes first.
            ancestors.reverse()
        ancestors = ancestors + [self, ]
        return ancestors

    def save(self, **kwargs):
        """Persist the list, generating a slug from the title when missing."""
        # Bug fix: the model has no `name` field — slugify the `title`
        # (slugify(self.name) raised AttributeError on every unslugged save).
        if self.title and not self.slug:
            self.slug = slugify(self.title)
        super(TodoList, self).save(**kwargs)

    def __unicode__(self):
        return self.title

    class Meta:
        ordering = ('title', )
class Todo(TimestampedModel):
    """A single todo item belonging to a TodoList."""

    # Owning list; reverse accessor is list.todos.
    list = models.ForeignKey(
        TodoList,
        related_name='todos',
    )
    title = models.CharField(
        _('title'),
        max_length=200)
    description = models.TextField(
        _('description'),
    )
    # Users responsible for this item.
    assigned_to = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        verbose_name=_('users'),
        related_name='todos',
    )
    due_date = models.DateField(
        verbose_name=_('due date'),
        blank=True,
        null=True,
    )
    # Completion date; NULL means the item is still open.
    # NOTE(review): a DateField named "is done" — presumably doubles as a
    # boolean (done when set); confirm before renaming.
    is_done = models.DateField(
        verbose_name=_('is done'),
        blank=True,
        null=True,
    )

    class Meta:
        ordering = ('modified_on', 'created_on', )
|
Python
| 0
|
@@ -429,241 +429,8 @@
l):%0A
-# owner = models.ForeignKey(%0A# settings.AUTH_USER_MODEL%0A# )%0A# users = models.ManyToManyField(%0A# settings.AUTH_USER_MODEL,%0A# verbose_name=_('users'),%0A# related_name=_('todo_lists'),%0A# )%0A
@@ -1122,19 +1122,20 @@
fy(self.
-nam
+titl
e)%0A%0A
@@ -1277,16 +1277,138 @@
tle', )%0A
+ permissions = (%0A ('is_owner', 'Is Owner'),%0A ('view_todo_list', 'View Todo List'),%0A )%0A
%0A%0Aclass
|
73e69f86cbeb6bd883f0b66314b7c0b8caa349bb
|
Make euca-describe-instances show instance tags
|
euca2ools/utils.py
|
euca2ools/utils.py
|
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Neil Soman neil@eucalyptus.com
# Mitch Garnaat mgarnaat@eucalyptus.com
import os.path
import subprocess
import sys
from euca2ools import exceptions, __version__, __codename__
def check_prerequisite_command(command):
    """Verify *command* exists by running it; raise NotFoundError if missing.

    Any other OSError from launching the command is re-raised. (Python 2
    syntax: ``except OSError, e``.)
    """
    cmd = [command]
    try:
        # Output is captured and discarded; only launchability matters.
        output = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
    except OSError, e:
        error_string = '%s' % e
        if 'No such' in error_string:
            print >> sys.stderr, 'Command %s not found. Is it installed?' % command
            raise exceptions.NotFoundError
        else:
            raise OSError(e)
def parse_config(config, dict, keylist):
    """Source a shell config file and copy selected variables into *dict*.

    The file is sourced in bash and each name in *keylist* is printed
    NUL-separated; non-empty values are stored under the same key in *dict*.
    Raises ParseError when bash exits non-zero.

    NOTE(review): the parameter `dict` and local `str` shadow builtins;
    renaming the parameter would change the keyword-call interface, so both
    are left as-is.
    """
    fmt = ''
    str = ''
    for v in keylist:
        # Build '"${VAR}" ...' arguments and a matching '%s\0%s\0...' format.
        str = '%s "${%s}" ' % (str, v)
        fmt = fmt + '%s%s' % ('%s', '\\0')
    cmd = ['bash', '-ec', ". '%s' >/dev/null; printf '%s' %s"
           % (config, fmt, str)]
    handle = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    (stdout, stderr) = handle.communicate()
    if handle.returncode != 0:
        raise exceptions.ParseError('Parsing config file %s failed:\n\t%s'
                                    % (config, stderr))
    # printf leaves a trailing NUL, hence the final empty element is skipped.
    values = stdout.split("\0")
    for i in range(len(values) - 1):
        if values[i] != '':
            dict[keylist[i]] = values[i]
def print_instances(instances, nil=""):
    """Print one tab-separated INSTANCE line per boto instance.

    :param instances: iterable of boto instance objects
    :param nil: placeholder emitted for missing/None attributes
    """
    # I was not able to correctly identify fields with an 'xx' below the
    # descriptions at
    # http://docs.amazonwebservices.com/AWSEC2/latest/CommandLineReference/ApiReference-cmd-DescribeInstances.html
    # were not sufficiently detailed, even when coupled with some limited
    # experimentation
    #
    # Additionally, in order to get 'hypervisor', the api request version
    # in the make_ec2_connection method would need to be increased.
    members=( "id", "image_id", "public_dns_name", "private_dns_name",
        "state", "key_name", "ami_launch_index", "product_codes",
        "instance_type", "launch_time", "placement", "kernel",
        "ramdisk", "xx", "_monitoring", 'ip_address', 'private_ip_address',
        "vpc_id", "subnet_id", "root_device_type", "xx", "xx", "xx", "xx",
        "virtualizationType", "hypervisor", "xx", "_groupnames", "_groupids" )

    for instance in instances:
        # in old describe-instances, there was a check for 'if instance:'
        # I (smoser) have carried this over, but dont know how instance
        # could be false
        if not instance: continue
        items=[ ]
        for member in members:
            # boto's "monitoring" item is blank string
            if member == "_monitoring":
                if instance.monitored:
                    val = "monitoring-enabled"
                else:
                    val = "monitoring-disabled"
            elif member == "_groupids":
                val = [x.name for x in instance.groups]
            elif member == "_groupnames":
                val = [x.id for x in instance.groups]
            else:
                val = getattr(instance,member,nil)

            # product_codes is a list
            if val is None: val = nil
            if hasattr(val,'__iter__'):
                val = ','.join(val)
            items.append(val)
        print "INSTANCE\t%s" % '\t'.join(items)
def print_version_if_necessary():
    """
    If '--version' appears in sys.argv then print the version and exit
    successfully.

    This is a hackish workaround for a roboto limitation in boto 2.1.1.
    """
    if '--version' in sys.argv:
        print 'euca2ools %s (%s)' % (__version__, __codename__)
        # Also report the local Eucalyptus version when available.
        if os.path.isfile('/etc/eucalyptus/eucalyptus-version'):
            with open('/etc/eucalyptus/eucalyptus-version') as version_file:
                print 'eucalyptus %s' % version_file.readline().strip()
        sys.exit()
|
Python
| 0
|
@@ -4725,16 +4725,252 @@
n(items)
+%0A if hasattr(instance, 'tags') and isinstance(instance.tags, dict):%0A for tag in instance.tags:%0A print '%5Ct'.join(('TAG', 'instance', instance.id, tag,%0A instance.tags%5Btag%5D))
%0A%0Adef pr
|
b16a1987075bd72cb7d31cf3dc7e529ce8d0e102
|
fix loop
|
wisely_project/get_courses_file.py
|
wisely_project/get_courses_file.py
|
import sys
import os
import traceback
from django import db
sys.path.append('/root/wisely/wisely_project/')
os.environ['DJANGO_SETTINGS_MODULE'] = 'wisely_project.settings.production'
from django.db.models import F
from django.utils import timezone
from users.tasks import get_coursera_courses, get_edx_courses, get_udemy_courses
__author__ = 'tmehta'
from users.models import CourseraProfile, EdxProfile, UdemyProfile
# Endless polling loop: for each provider profile whose upstream account may
# have changed since our last sync (last_updated < user's last_login), fetch
# courses and stamp the profile. Exceptions are logged and the loop continues.
while True:
    try:
        # Keep Django's per-connection query log from growing unboundedly
        # (relevant when DEBUG is on).
        for connection in db.connections.all():
            if len(connection.queries) > 100:
                db.reset_queries()
        for user in CourseraProfile.objects.filter(last_updated__lt=F('user__last_login')):
            # Skip profiles without credentials or with known-bad logins.
            if user.username != '' and user.incorrect_login == False:
                print user.username
                print "Start coursera"
                get_coursera_courses(user)
                user.last_updated = timezone.now()
                user.save()
        for user in EdxProfile.objects.filter(last_updated__lt=F('user__last_login')):
            if user.email != '' and user.incorrect_login == False:
                print user.email
                print "Start edx"
                get_edx_courses(user)
                user.last_updated = timezone.now()
                user.save()
        for user in UdemyProfile.objects.filter(last_updated__lt=F('user__last_login')):
            if user.email != '' and user.incorrect_login == False:
                print user.email
                print "Start udemy"
                get_udemy_courses(user)
                user.last_updated = timezone.now()
                user.save()
    except Exception as e:
        # Best-effort daemon: report and keep polling.
        print traceback.format_exc()
|
Python
| 0.000002
|
@@ -210,17 +210,20 @@
import F
+, Q
%0A
-
from dja
@@ -665,54 +665,54 @@
n'))
-:%0A if user.username != '' and user.
+.filter(~Q(email='')).filter(%0A
inco
@@ -718,39 +718,33 @@
orrect_login
- ==
+=
False
+)
:%0A
-
@@ -767,36 +767,32 @@
ame%0A
-
-
print %22Start cou
@@ -798,20 +798,16 @@
ursera%22%0A
-
@@ -841,36 +841,32 @@
er)%0A
-
-
user.last_update
@@ -876,36 +876,32 @@
timezone.now()%0A
-
user
@@ -997,51 +997,54 @@
n'))
-:%0A if user.email != '' and user.
+.filter(~Q(email='')).filter(%0A
inco
@@ -1050,33 +1050,31 @@
orrect_login
- ==
+=
False
+)
:%0A
@@ -1067,36 +1067,32 @@
e):%0A
-
-
print user.email
@@ -1084,36 +1084,32 @@
rint user.email%0A
-
prin
@@ -1130,28 +1130,24 @@
-
-
get_edx_cour
@@ -1148,36 +1148,32 @@
x_courses(user)%0A
-
user
@@ -1195,36 +1195,32 @@
timezone.now()%0A
-
user
@@ -1318,51 +1318,54 @@
n'))
-:%0A if user.email != '' and user.
+.filter(~Q(email='')).filter(%0A
inco
@@ -1379,23 +1379,17 @@
ogin
- ==
+=
False
+)
:%0A
-
@@ -1417,36 +1417,32 @@
ail%0A
-
-
print %22Start ude
@@ -1445,20 +1445,16 @@
udemy%22%0A
-
@@ -1485,36 +1485,32 @@
er)%0A
-
-
user.last_update
@@ -1520,36 +1520,32 @@
timezone.now()%0A
-
user
|
f5f6b93d34318a1d6f65082df8482376e92960c9
|
Revamp XML parsing method
|
wm_metrics/analyse_commons_dump.py
|
wm_metrics/analyse_commons_dump.py
|
# -*- coding: utf-8 -*-
import xml.dom.minidom
import re
import datetime
def handle_node(node, tag_name):
    """Return the text content of the first *tag_name* element under *node*.

    Trailing whitespace is stripped. An empty string is returned when no
    such tag exists or when the first match has no child nodes.
    """
    matches = node.getElementsByTagName(tag_name)
    if matches.length == 0:
        return ""
    first_match = matches.item(0)
    if not first_match.hasChildNodes():
        return ""
    return first_match.childNodes.item(0).data.rstrip()
def timestamp_to_date(date):
    """Return a datetime object for the given MediaWiki timestamp.

    The timestamp is read positionally ('YYYY-MM-DDTHH:MM:SSZ'); only the
    fixed-width digit fields are used, so the separators are never inspected.
    """
    # (start, stop) slices of the six fixed-width fields.
    field_slices = ((0, 4), (5, 7), (8, 10), (11, 13), (14, 16), (17, 19))
    year, month, day, hour, minute, second = (
        int(date[start:stop]) for start, stop in field_slices
    )
    return datetime.datetime(year, month, day, hour, minute, second)
def parse_xml_dump(xml_dump):
    """Return a list of the edits in a Wikimedia Commons dump.

    :param xml_dump: filename or file object of a MediaWiki XML export
    :return: list of (username, datetime, page_title) tuples, one per revision
    """
    edits = []
    doc = xml.dom.minidom.parse(xml_dump)
    for mediawiki_node in doc.childNodes:
        if mediawiki_node.localName == u'mediawiki':
            for page_node in mediawiki_node.childNodes:
                for revision_node in page_node.childNodes:
                    # NOTE(review): the title is recomputed for every child
                    # node of the page, not once per page — harmless but
                    # wasteful; confirm before hoisting.
                    title = handle_node(page_node, u'title')
                    if revision_node.localName == u'revision':
                        username = handle_node(revision_node, u'username')
                        timestamp = handle_node(revision_node, u'timestamp')
                        edits.append((username, timestamp_to_date(timestamp), title))
    return edits
def get_categories_from_text(edit):
    """Return the category names contained in the given wikitext.

    Matches both ``[[Category:Name]]`` and ``[[Category:Name|sortkey]]``
    links, in order of appearance.

    :param edit: wikitext string to scan
    :return: list of category name strings
    """
    cat_pattern = r"\[\[Category:(?P<cat>.+?)(\|.*?)?\]\]"
    # findall yields (name, sortkey) tuples; keep only the names. A list
    # comprehension replaces map() so the result is an eagerly-evaluated,
    # re-iterable list on Python 3 as well (map() there is a one-shot iterator).
    return [match[0] for match in re.findall(cat_pattern, edit)]
def main():
    """Entry point placeholder; analysis is driven via the module functions."""
    pass


if __name__ == "__main__":
    main()
|
Python
| 0.998547
|
@@ -862,48 +862,33 @@
n a
-list of the edits in a Wikimedia Commons
+dictionary from the given
dum
@@ -901,18 +901,23 @@
-edits = %5B%5D
+collection = %7B%7D
%0A
@@ -1126,50 +1126,102 @@
-for revision_node in page_node.childNodes:
+if page_node.localName == u'page':%0A page_id = handle_node(page_node, u'id')
%0A
@@ -1237,16 +1237,21 @@
+page_
title =
@@ -1307,50 +1307,304 @@
-if revision_node.localName == u'revision':
+revisions = %5B%5D%0A revision_nodes = %5Bnode for node in page_node.childNodes%0A if node.localName == u'revision'%5D%0A for revision_node in revision_nodes:%0A timestamp = handle_node(revision_node, u'timestamp')
%0A
@@ -1695,33 +1695,77 @@
-timestamp
+if not username:%0A username
= handle_no
@@ -1780,32 +1780,25 @@
ion_node, u'
-timestam
+i
p')%0A
@@ -1817,32 +1817,20 @@
-edits.append((username,
+revision = (
time
@@ -1859,32 +1859,193 @@
p),
-title))%0A return edits
+handle_node(revision_node, u'text'), username)%0A revisions.append(revision)%0A collection%5Bpage_id%5D = (page_title, revisions)%0A return collection
%0A%0A%0Ad
|
a2c484afc3951a77a6684f9c7323672c6db691aa
|
Fix name of celery queue
|
genomic_neuralnet/common/celery_slave.py
|
genomic_neuralnet/common/celery_slave.py
|
from __future__ import print_function
import os
import sys
import time
import numpy as np
import redis
import pickle
from itertools import chain
from genomic_neuralnet.common.base_compare import try_predictor
from genomic_neuralnet.util.ec2_util import get_master_dns
from celery import Celery
import celery.app.control as ctrl
# Celery application wired to the cluster master's Redis instance, which
# serves as both broker and result backend.
name = 'parallel_predictors'
_host = get_master_dns(public=True)
backend = 'redis://{}/0'.format(_host)
broker = 'redis://{}/0'.format(_host)
app = Celery(name, backend=backend, broker=broker)
# Register try_predictor as a Celery task.
celery_try_predictor = app.task(try_predictor)
# NOTE(review): re-queue un-acked tasks after 900s — confirm this env var is
# read by the broker transport when set after app creation.
os.environ['BROKER_TRANSPORT_OPTIONS'] = "{'visibility_timeout': 900}"

# Local on-disk cache directory for task results.
_cache_dir = os.path.expanduser('~/work_cache')
if not os.path.isdir(_cache_dir):
    os.makedirs(_cache_dir)
def disk_cache(result, id_num):
    """Pickle *result* to the work-cache directory under '<id_num>_out.pkl'."""
    file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
    with open(file_path, 'wb') as f:
        pickle.dump(result, f)
def load_and_clear_cache(id_nums):
    """Remove the cached result files for the given ids.

    NOTE(review): despite the name, nothing is loaded or returned here —
    the files are only unlinked; confirm callers read them beforehand.
    """
    for id_num in id_nums:
        file_path = os.path.join(_cache_dir, '{}_out.pkl'.format(id_num))
        os.unlink(file_path)
def get_num_workers():
    """Return the total worker-pool concurrency across all Celery instances.

    Returns 0 when no workers respond to the stats inspection.
    """
    stats_dict = ctrl.Control(app).inspect().stats()
    if stats_dict is None:
        return 0
    else:
        num_workers = 0
        # Sum each responding instance's pool size. (iteritems is Python 2.)
        for instance, stats in stats_dict.iteritems():
            num_workers += stats['pool']['max-concurrency']
        return num_workers
def get_queue_length():
    """Return the number of tasks waiting in the Celery queue on Redis.

    Celery keeps pending tasks in a Redis list named after the queue. No
    custom routing is configured for ``app``, so tasks land on the default
    queue, whose key is 'celery' — the app *name* ('parallel_predictors')
    is not a queue key, so LLEN on it always returned 0.
    """
    conn = redis.StrictRedis(_host)
    return conn.llen('celery')
def main():
    """Run a Celery worker process in the foreground."""
    # Start the worker.
    app.worker_main(['--loglevel=DEBUG'])


if __name__ == '__main__':
    main()
|
Python
| 0.999924
|
@@ -1463,35 +1463,22 @@
n.llen('
-parallel_predictors
+celery
')%0A%0Adef
|
e8003dbb1e6a7efe60b02c65207c7202236b1adb
|
Update InputDialogCtrl.py
|
helenae/gui/widgets/InputDialogCtrl.py
|
helenae/gui/widgets/InputDialogCtrl.py
|
# -*- coding: utf-8 -*-
import wx
from validators.FileValidator import FileValidator
class InputDialog(wx.Dialog):
    """Modal text-input dialog with OK/Cancel buttons and a validated field.

    After the dialog closes, ``self.result`` holds the entered text, or
    None when the user cancelled.
    """

    def __init__(self, parent, id, title, ico_folder, validator):
        # Fixed-size (non-resizable) dialog.
        wx.Dialog.__init__(self, parent, id, title, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
        # Label text is Russian for "Directory name:".
        self.label = wx.StaticText(self, label="Имя каталога:", pos=(15, 20))
        self.field = wx.TextCtrl(self, value="", size=(150, 20), pos=(105, 15), validator=validator)
        # Button labels: "OK" and "Cancel" (Russian).
        self.button_ok = wx.Button(self, label="Ок", id=wx.ID_OK, pos=(75, 45))
        self.button_cancel = wx.Button(self, label="Отмена", id=wx.ID_CANCEL, pos=(167, 45))
        self.Bind(wx.EVT_BUTTON, self.onOK, id=wx.ID_OK)
        self.Bind(wx.EVT_BUTTON, self.onCancel, id=wx.ID_CANCEL)
        self.icon = wx.Icon(ico_folder + '/icons/app.ico', wx.BITMAP_TYPE_ICO)
        self.SetIcon(self.icon)
        size = (275, 80)
        self.SetSize(size)
        self.result = None

    def onOK(self, event):
        # Accept only input that passes the field's validator.
        if self.field.GetValidator().Validate(self.field):
            self.result = self.field.GetValue()
        self.Destroy()

    def onCancel(self, event):
        self.result = None
        self.Destroy()
if __name__ =='__main__':
    # Manual smoke test: show the dialog standalone. Title is Russian for
    # "Data input".
    app = wx.App(0)
    ico_folder = '..'
    frame = InputDialog(None, -1, 'Ввод данных', ico_folder, FileValidator())
    frame.Show()
    app.MainLoop()
|
Python
| 0.000001
|
@@ -31,59 +31,8 @@
wx%0A
-from validators.FileValidator import FileValidator%0A
%0A%0Acl
@@ -283,15 +283,15 @@
%D0%98%D0%BC%D1%8F
-%D0%BA%D0%B0%D1%82%D0%B0%D0%BB%D0%BE%D0%B3
+%D1%8D%D0%BB%D0%B5%D0%BC%D0%B5%D0%BD%D1%82
%D0%B0:%22,
@@ -1161,16 +1161,71 @@
ain__':%0A
+ from validators.FileValidator import FileValidator%0A
app
|
484f845b50d1308dbf8b7f2496a1d565724f9e23
|
Rename jwt_token var as auth_token on authentication submodule #48
|
rest_framework_auth0/authentication.py
|
rest_framework_auth0/authentication.py
|
import base64
import jwt
from django.contrib.auth.backends import (
RemoteUserBackend,
get_user_model,
)
from django.contrib.auth.models import (
Group,
)
from django.utils.encoding import force_str
from django.utils.translation import ugettext as _
from rest_framework import exceptions
from rest_framework_auth0.settings import (
auth0_api_settings,
)
from rest_framework_auth0.utils import (
get_groups_from_payload,
)
from rest_framework.authentication import (
BaseAuthentication,
get_authorization_header
)
jwt_get_username_from_payload = auth0_api_settings.JWT_PAYLOAD_GET_USERNAME_HANDLER
class Auth0JSONWebTokenAuthentication(BaseAuthentication, RemoteUserBackend):
"""
Clients should authenticate by passing the token key in the "Authorization"
HTTP header, prepended with the string specified in the setting
`JWT_AUTH_HEADER_PREFIX`. For example:
Authorization: JWT eyJhbGciOiAiSFMyNTYiLCAidHlwIj
By default, the ``authenticate_credentials`` method creates ``User`` objects for
usernames that don't already exist in the database. Subclasses can disable
this behavior by setting the ``create_unknown_user`` attribute to
``False``.
"""
www_authenticate_realm = 'api'
# Create a User object if not already in the database?
create_unknown_user = True
def authenticate(self, request):
"""
You should pass a header of your request: clientcode: web
This function initialize the settings of JWT with the specific client's informations.
"""
client_code = request.META.get(
"HTTP_" + auth0_api_settings.CLIENT_CODE.upper()
) or 'default'
if client_code in auth0_api_settings.CLIENTS:
client = auth0_api_settings.CLIENTS[client_code]
else:
msg = _('Invalid Client Code.')
raise exceptions.AuthenticationFailed(msg)
# Code copied from rest_framework_jwt/authentication.py#L28
jwt_value = self.get_auth_token(request)
if jwt_value is None:
return None
try:
# RS256 Related configurations
if(client['AUTH0_ALGORITHM'].upper() == "RS256"):
payload = jwt.decode(
jwt_value,
client['PUBLIC_KEY'],
audience=client['AUTH0_AUDIENCE'],
algorithm=client['AUTH0_ALGORITHM'],
)
elif(client['AUTH0_ALGORITHM'].upper() == "HS256"):
client_secret = None
if client['CLIENT_SECRET_BASE64_ENCODED']:
client_secret = base64.b64decode(
client['AUTH0_CLIENT_SECRET'].replace("_", "/").replace("-", "+")
)
else:
client_secret = client['AUTH0_CLIENT_SECRET']
payload = jwt.decode(
jwt_value,
client_secret,
audience=auth0_api_settings.get('AUTH0_AUDIENCE'),
algorithm=client['AUTH0_ALGORITHM'],
)
else:
msg = _('Error decoding signature.')
raise exceptions.AuthenticationFailed(msg)
except jwt.ExpiredSignature:
msg = _('Signature has expired.')
raise exceptions.AuthenticationFailed(msg)
except jwt.DecodeError:
msg = _('Error decoding signature.')
raise exceptions.AuthenticationFailed(msg)
except jwt.InvalidTokenError:
raise exceptions.AuthenticationFailed()
# Add request param to authenticated_credentials() call
user = self.authenticate_credentials(request, payload)
return (user, payload)
def authenticate_credentials(self, request, payload):
"""
Returns an active user that matches the payload's user id and email.
"""
UserModel = get_user_model()
remote_user = jwt_get_username_from_payload(payload)
if not remote_user:
msg = _('Invalid payload.')
raise exceptions.AuthenticationFailed(msg)
# RemoteUserBackend behavior:
# return
user = None
if auth0_api_settings.REPLACE_PIPE_FOR_DOTS_IN_USERNAME:
username = self.clean_username(remote_user)
else:
username = remote_user
if self.create_unknown_user:
user, created = UserModel._default_manager.get_or_create(**{
UserModel.USERNAME_FIELD: username
})
if created:
user = self.configure_user(request, user)
else:
try:
user = UserModel._default_manager.get_by_natural_key(username)
except UserModel.DoesNotExist:
msg = _('Invalid signature.')
raise exceptions.AuthenticationFailed(msg)
# RemoteUserBackend behavior:
# pass
user = self.configure_user_permissions(user, payload)
return user if self.user_can_authenticate(user) else None
def authenticate_header(self, request):
"""
Return a string to be used as the value of the `WWW-Authenticate`
header in a `401 Unauthenticated` response, or `None` if the
authentication scheme should return `403 Permission Denied` responses.
"""
return '{0} realm="{1}"'.format(
auth0_api_settings.JWT_AUTH_HEADER_PREFIX,
self.www_authenticate_realm
)
def configure_user_permissions(self, user, payload):
"""
Validate if AUTHORIZATION_EXTENSION is enabled, defaults to False
If AUTHORIZATION_EXTENSION is enabled, created and associated groups
with the current user (the user of the token).
"""
if auth0_api_settings.AUTHORIZATION_EXTENSION:
user.groups.clear()
try:
groups = get_groups_from_payload(payload)
except Exception: # No groups where defined in Auth0?
return user
for user_group in groups:
group, created = Group.objects.get_or_create(name=user_group)
user.groups.add(group)
return user
def clean_username(self, username):
"""
Cleans the "username" prior to using it to get or create the user object.
Returns the cleaned username.
Auth0 default username (user_id) field returns, e.g. auth0|123456789...xyz
which contains illegal characters ('|').
"""
username = username.replace('|', '.')
return username
def get_auth_token(self, request):
auth = get_authorization_header(request).split()
auth_header_prefix = force_str(auth[0])
auth_token = force_str(auth[1])
expected_auth_header_prefix = auth0_api_settings.JWT_AUTH_HEADER_PREFIX
# If authorization header doesn't exists, use a cookie
if not auth:
if auth0_api_settings.JWT_AUTH_COOKIE:
return request.COOKIES.get(auth0_api_settings.JWT_AUTH_COOKIE)
return None
# If header prefix is diferent than expected, the user won't log in
if auth_header_prefix.lower() != expected_auth_header_prefix.lower():
return None
if len(auth) == 1:
msg = _('Invalid Authorization header. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
msg = _('Invalid Authorization header. Credentials string '
'should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
return auth_token
|
Python
| 0.000001
|
@@ -1995,25 +1995,26 @@
-jwt_value
+auth_token
= self.
@@ -2049,25 +2049,26 @@
if
-jwt_value
+auth_token
is None
@@ -2266,33 +2266,34 @@
-jwt_value
+auth_token
,%0A
@@ -2942,17 +2942,18 @@
-jwt_value
+auth_token
,%0A
|
a705892cd7e32a540c5fee61a2bf4c4d67abf477
|
add get_all_required_node_names method
|
xos/tosca/resources/xosresource.py
|
xos/tosca/resources/xosresource.py
|
class XOSResource(object):
xos_base_class = "XOSResource"
xos_model = None
provides = None
def __init__(self, user, nodetemplate):
self.dirty = False
self.user = user
self.nodetemplate = nodetemplate
def get_requirements(self, relationship_name, throw_exception=False):
""" helper to search the list of requirements for a particular relationship
type.
"""
results = []
for reqs in self.nodetemplate.requirements:
for (k,v) in reqs.items():
if (v["relationship"] == relationship_name):
results.append(v["node"])
if (not results) and throw_exception:
raise Exception("Failed to find requirement in %s using relationship %s" % (self.nodetemplate.name, relationship_name))
return results
def get_requirement(self, relationship_name, throw_exception=False):
reqs = self.get_requirements(relationship_name, throw_exception)
if not reqs:
return None
return reqs[0]
def get_xos_object(self, cls, **kwargs):
objs = cls.objects.filter(**kwargs)
if not objs:
raise Exception("Failed to find %s filtered by %s" % (cls.__name__, str(kwargs)))
return objs[0]
def get_existing_objs(self):
return self.xos_model.objects.filter(name = self.nodetemplate.name)
def get_xos_args(self):
return {}
def create_or_update(self):
existing_objs = self.get_existing_objs()
if existing_objs:
self.info("%s %s already exists" % (self.xos_model.__name__, self.nodetemplate.name))
self.update(existing_objs[0])
else:
self.create()
def create(self):
raise Exception("abstract method -- must override")
def update(self, obj):
pass
def info(self, s):
print s
|
Python
| 0.000003
|
@@ -235,16 +235,237 @@
mplate%0A%0A
+ def get_all_required_node_names(self):%0A results = %5B%5D%0A for reqs in self.nodetemplate.requirements:%0A for (k,v) in reqs.items():%0A results.append(v%5B%22node%22%5D)%0A return results%0A%0A
def
|
34facd924f34edc3e99af3687429b88d5b1cc43d
|
Rename method
|
packaging/build_tools.py
|
packaging/build_tools.py
|
import json
from subprocess import check_output, CalledProcessError
import subprocess
import logging
import shutil
import os
import tox.config
import tox.session
import virtualenv_tools
import delorean
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARK_CYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
def heading(x):
logging.info(BOLD + '========== ' + x + ' ==========' + END)
def error(x):
logging.error(RED + x + END)
def info(x):
logging.info(x)
def success(x):
logging.info(GREEN + x + END)
class Python(object):
def __init__(self, path):
self.path = path
def exists(self):
return os.path.exists(self.path)
def command(self, args=None):
if args is None:
args = []
return [self.path] + args
def run(self, args, env=None, cwd=None):
if env is None:
env = {}
env['PYTHONUNBUFFERED'] = '1'
command = self.command(args)
return run_command(command, env=env, cwd=cwd)
@classmethod
def clone(cls, path):
return cls(path)
class Virtualenv(object):
def __init__(self, path, python=None):
if python is None:
python = get_python()
self.path = path
self.python = python
venv_python_path = os.path.join(self.path, 'bin/python')
self.venv_python = python.clone(venv_python_path)
def create(self):
info('Creating virtualenv at %s ...' % self.path)
args = [
'-m', 'virtualenv',
'--quiet',
self.path
]
self.python.run(args)
def run(self, args, env=None, cwd=None):
return self.venv_python.run(args, env=env, cwd=cwd)
def update_paths(self, new_path):
virtualenv_tools.update_paths(self.path, new_path)
def install_package(self, package_name, env=None):
self.run(['-m', 'pip', 'install', package_name], env=env)
def install_requirements(self, path, env=None):
head, tail = os.path.split(path)
self.run(['-m', 'pip', 'install', '-r', tail], env=env, cwd=head)
def delete(self):
info('Deleting virtualenv at %s ...' % self.path)
shutil.rmtree(self.path)
def __enter__(self):
self.create()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.delete()
def run_command(args, env=None, cwd=None):
if cwd is None:
cwd = os.getcwd()
if env is None:
env = {}
info('Running {args} with environment {env} and working directory {cwd}'.format(
args=args,
env=env,
cwd=cwd,
))
p = subprocess.Popen(args, cwd=cwd, env=env, bufsize=1)
p.communicate()
if p.returncode != 0:
error('Command exited with code %d' % p.returncode)
raise SystemExit(1)
def get_python():
return Python('/usr/bin/python2.7')
def run_tox(args=None):
if args is None:
args = []
config = tox.config.parseconfig(args)
return_code = tox.session.Session(config).runcommand()
if return_code != 0:
raise SystemExit(1)
class Package(object):
def __init__(self, name, version, architecture, url):
self.name = name
self.version = version
self.architecture = architecture
self.before_install_script = None
self.url = url
self.dependencies = []
self.paths = []
self.config_files = []
def add_dependency(self, package_name):
self.dependencies.append(package_name)
def add_path(self, src, dst):
self.paths.append('%s=%s' % (src, dst))
def add_config_file(self, path):
self.config_files.append(path)
def build(self):
rpm_path = '%s-%s.%s.rpm' % (self.name, self.version, self.architecture)
args = [
'fpm',
'-s', 'dir',
'-t', 'rpm',
'--package', rpm_path,
'--name', self.name,
'--version', self.version,
'--url', self.url,
'--architecture', self.architecture,
'--epoch', '0',
'--force',
]
for package_name in self.dependencies:
args.extend(['--depends', package_name])
for path in self.config_files:
args.extend(['--config-files', path])
if self.before_install_script is not None:
args.extend(['--before-install', self.before_install_script])
for path in self.paths:
args.append(path)
run_command(args, env={'PATH': '/usr/local/bin:/usr/bin:/bin'})
return rpm_path
def get_version_from_package_json(path):
try:
package_data = open(path, 'rb').read()
except IOError:
return None
package = json.loads(package_data)
return package.get('version', None)
def get_api_src_path(root_path):
return os.path.join(root_path, 'api')
def get_radar_src_path(root_path):
return os.path.join(root_path, 'radar')
def get_client_src_path(root_path):
return os.path.join(root_path, 'client')
def get_commit_date():
output = check_output(['git', 'log', '-n', '1', '--format=%cd', '--date=iso-strict'])
return delorean.parse(output).datetime
|
Python
| 0.000002
|
@@ -5158,16 +5158,20 @@
def get_
+git_
commit_d
|
bd8219763cbccd8ffde79a200585ff65128a7f22
|
add defaults.update function
|
salt/modules/defaults.py
|
salt/modules/defaults.py
|
# -*- coding: utf-8 -*-
'''
Module to work with salt formula defaults files
'''
from __future__ import absolute_import
import copy
import json
import logging
import os
import yaml
import salt.fileclient
import salt.utils.data
import salt.utils.dictupdate as dictupdate
import salt.utils.files
import salt.utils.url
__virtualname__ = 'defaults'
log = logging.getLogger(__name__)
def _mk_client():
'''
Create a file client and add it to the context
'''
if 'cp.fileclient' not in __context__:
__context__['cp.fileclient'] = \
salt.fileclient.get_file_client(__opts__)
def _load(formula):
'''
Generates a list of salt://<formula>/defaults.(json|yaml) files
and fetches them from the Salt master.
Returns first defaults file as python dict.
'''
# Compute possibilities
_mk_client()
paths = []
for ext in ('yaml', 'json'):
source_url = salt.utils.url.create(formula + '/defaults.' + ext)
paths.append(source_url)
# Fetch files from master
defaults_files = __context__['cp.fileclient'].cache_files(paths)
for file_ in defaults_files:
if not file_:
# Skip empty string returned by cp.fileclient.cache_files.
continue
suffix = file_.rsplit('.', 1)[-1]
if suffix == 'yaml':
loader = yaml
elif suffix == 'json':
loader = json
else:
log.debug("Failed to determine loader for %r", file_)
continue
if os.path.exists(file_):
log.debug("Reading defaults from %r", file_)
with salt.utils.files.fopen(file_) as fhr:
defaults = loader.load(fhr)
log.debug("Read defaults %r", defaults)
return defaults or {}
def get(key, default=''):
'''
defaults.get is used much like pillar.get except that it will read
a default value for a pillar from defaults.json or defaults.yaml
files that are stored in the root of a salt formula.
CLI Example:
.. code-block:: bash
salt '*' defaults.get core:users:root
The defaults is computed from pillar key. The first entry is considered as
the formula namespace.
For example, querying ``core:users:root`` will try to load
``salt://core/defaults.yaml`` and ``salt://core/defaults.json``.
'''
# Determine formula namespace from query
if ':' in key:
namespace, key = key.split(':', 1)
else:
namespace, key = key, None
# Fetch and load defaults formula files from states.
defaults = _load(namespace)
# Fetch value
if key:
return salt.utils.data.traverse_dict_and_list(defaults, key, default)
else:
return defaults
def merge(dest, src, merge_lists=False, in_place=True):
'''
defaults.merge
Allows deep merging of dicts in formulas.
merge_lists : False
If True, it will also merge lists instead of replace their items.
in_place : True
If True, it will merge into dest dict,
if not it will make a new copy from that dict and return it.
CLI Example:
.. code-block:: bash
salt '*' default.merge a=b d=e
It is more typical to use this in a templating language in formulas,
instead of directly on the command-line.
'''
if in_place:
merged = dest
else:
merged = copy.deepcopy(dest)
return dictupdate.update(merged, src, merge_lists=merge_lists)
def deepcopy(source):
'''
defaults.deepcopy
Allows deep copy of objects in formulas.
By default, Python does not copy objects,
it creates bindings between a target and an object.
It is more typical to use this in a templating language in formulas,
instead of directly on the command-line.
'''
return copy.deepcopy(source)
|
Python
| 0.000003
|
@@ -3856,12 +3856,1616 @@
opy(source)%0A
+%0A%0Adef update(dest, defaults, merge_lists=True, in_place=True):%0A '''%0A defaults.update%0A Allows to set defaults for group of data set e.g. group for nodes.%0A%0A This function is a combination of defaults.merge%0A and defaults.deepcopy to avoid redundant in jinja.%0A%0A Example:%0A .. code-block:: yaml%0A%0A group01:%0A defaults:%0A enabled: True%0A extra:%0A - test%0A - stage%0A nodes:%0A host01:%0A index: foo%0A upstream: bar%0A host02:%0A index: foo2%0A upstream: bar2%0A%0A .. code-block::%0A %7B%25 do salt%5B'defaults.update'%5D(group01.nodes, group01.defaults) %25%7D%0A%0A Each node will look like the following:%0A .. code-block:: yaml%0A host01:%0A enabled: True%0A index: foo%0A upstream: bar%0A extra:%0A - test%0A - stage%0A%0A merge_lists : True%0A If True, it will also merge lists instead of replace their items.%0A%0A in_place : True%0A If True, it will merge into dest dict.%0A if not it will make a new copy from that dict and return it.%0A%0A It is more typical to use this in a templating language in formulas,%0A instead of directly on the command-line.%0A '''%0A%0A if in_place:%0A nodes = dest%0A else:%0A nodes = deepcopy(dest)%0A%0A for node_name, node_vars in nodes.items():%0A defaults_vars = deepcopy(defaults)%0A node_vars = merge(defaults_vars, node_vars, merge_lists=merge_lists)%0A nodes%5Bnode_name%5D = node_vars%0A%0A return nodes%0A
|
586faacdf9a80ab07cc9b8c717838e79540f1f3b
|
msgpack content-encoding must be 'binary' not 'utf-8'
|
kombu/serialization.py
|
kombu/serialization.py
|
"""
kombu.serialization
===================
Serialization utilities.
:copyright: (c) 2009 - 2010
:license: BSD, see LICENSE for more details.
"""
import codecs
class SerializerNotInstalled(StandardError):
"""Support for the requested serialization type is not installed"""
pass
class SerializerRegistry(object):
"""The registry keeps track of serialization methods."""
def __init__(self):
self._encoders = {}
self._decoders = {}
self._default_encode = None
self._default_content_type = None
self._default_content_encoding = None
def register(self, name, encoder, decoder, content_type,
content_encoding='utf-8'):
if encoder:
self._encoders[name] = (content_type, content_encoding, encoder)
if decoder:
self._decoders[content_type] = decoder
def _set_default_serializer(self, name):
"""
Set the default serialization method used by this library.
:param name: The name of the registered serialization method.
For example, `json` (default), `pickle`, `yaml`, `msgpack`,
or any custom methods registered using :meth:`register`.
:raises SerializerNotInstalled: If the serialization method
requested is not available.
"""
try:
(self._default_content_type, self._default_content_encoding,
self._default_encode) = self._encoders[name]
except KeyError:
raise SerializerNotInstalled(
"No encoder installed for %s" % name)
def encode(self, data, serializer=None):
if serializer == "raw":
return raw_encode(data)
if serializer and not self._encoders.get(serializer):
raise SerializerNotInstalled(
"No encoder installed for %s" % serializer)
# If a raw string was sent, assume binary encoding
# (it's likely either ASCII or a raw binary file, but 'binary'
# charset will encompass both, even if not ideal.
if not serializer and isinstance(data, str):
# In Python 3+, this would be "bytes"; allow binary data to be
# sent as a message without getting encoder errors
return "application/data", "binary", data
# For unicode objects, force it into a string
if not serializer and isinstance(data, unicode):
payload = data.encode("utf-8")
return "text/plain", "utf-8", payload
if serializer:
content_type, content_encoding, encoder = \
self._encoders[serializer]
else:
encoder = self._default_encode
content_type = self._default_content_type
content_encoding = self._default_content_encoding
payload = encoder(data)
return content_type, content_encoding, payload
def decode(self, data, content_type, content_encoding):
content_type = content_type or 'application/data'
content_encoding = (content_encoding or 'utf-8').lower()
# Don't decode 8-bit strings or unicode objects
if content_encoding not in ('binary', 'ascii-8bit') and \
not isinstance(data, unicode):
data = codecs.decode(data, content_encoding)
try:
decoder = self._decoders[content_type]
except KeyError:
return data
return decoder(data)
"""
.. data:: registry
Global registry of serializers/deserializers.
"""
registry = SerializerRegistry()
"""
.. function:: encode(data, serializer=default_serializer)
Serialize a data structure into a string suitable for sending
as an AMQP message body.
:param data: The message data to send. Can be a list,
dictionary or a string.
:keyword serializer: An optional string representing
the serialization method you want the data marshalled
into. (For example, `json`, `raw`, or `pickle`).
If :const:`None` (default), then json will be used, unless
`data` is a :class:`str` or :class:`unicode` object. In this
latter case, no serialization occurs as it would be
unnecessary.
Note that if `serializer` is specified, then that
serialization method will be used even if a :class:`str`
or :class:`unicode` object is passed in.
:returns: A three-item tuple containing the content type
(e.g., `application/json`), content encoding, (e.g.,
`utf-8`) and a string containing the serialized
data.
:raises SerializerNotInstalled: If the serialization method
requested is not available.
"""
encode = registry.encode
"""
.. function:: decode(data, content_type, content_encoding):
Deserialize a data stream as serialized using `encode`
based on `content_type`.
:param data: The message data to deserialize.
:param content_type: The content-type of the data.
(e.g., `application/json`).
:param content_encoding: The content-encoding of the data.
(e.g., `utf-8`, `binary`, or `us-ascii`).
:returns: The unserialized data.
"""
decode = registry.decode
"""
.. function:: register(name, encoder, decoder, content_type,
content_encoding="utf-8"):
Register a new encoder/decoder.
:param name: A convenience name for the serialization method.
:param encoder: A method that will be passed a python data structure
and should return a string representing the serialized data.
If :const:`None`, then only a decoder will be registered. Encoding
will not be possible.
:param decoder: A method that will be passed a string representing
serialized data and should return a python data structure.
If :const:`None`, then only an encoder will be registered.
Decoding will not be possible.
:param content_type: The mime-type describing the serialized
structure.
:param content_encoding: The content encoding (character set) that
the `decoder` method will be returning. Will usually be
utf-8`, `us-ascii`, or `binary`.
"""
register = registry.register
def raw_encode(data):
"""Special case serializer."""
content_type = 'application/data'
payload = data
if isinstance(payload, unicode):
content_encoding = 'utf-8'
payload = payload.encode(content_encoding)
else:
content_encoding = 'binary'
return content_type, content_encoding, payload
def register_json():
"""Register a encoder/decoder for JSON serialization."""
from anyjson import serialize as json_serialize
from anyjson import deserialize as json_deserialize
registry.register('json', json_serialize, json_deserialize,
content_type='application/json',
content_encoding='utf-8')
def register_yaml():
"""Register a encoder/decoder for YAML serialization.
It is slower than JSON, but allows for more data types
to be serialized. Useful if you need to send data such as dates"""
try:
import yaml
registry.register('yaml', yaml.safe_dump, yaml.safe_load,
content_type='application/x-yaml',
content_encoding='utf-8')
except ImportError:
def not_available(*args, **kwargs):
"""In case a client receives a yaml message, but yaml
isn't installed."""
raise SerializerNotInstalled(
"No decoder installed for YAML. Install the PyYAML library")
registry.register('yaml', None, not_available, 'application/x-yaml')
def register_pickle():
"""The fastest serialization method, but restricts
you to python clients."""
import cPickle
registry.register('pickle', cPickle.dumps, cPickle.loads,
content_type='application/x-python-serialize',
content_encoding='binary')
def register_msgpack():
"""See http://msgpack.sourceforge.net/"""
try:
import msgpack
registry.register('msgpack', msgpack.packs, msgpack.unpacks,
content_type='application/x-msgpack',
content_encoding='utf-8')
except ImportError:
def not_available(*args, **kwargs):
"""In case a client receives a msgpack message, but yaml
isn't installed."""
raise SerializerNotInstalled(
"No decoder installed for msgpack. "
"Install the msgpack library")
registry.register('msgpack', None, not_available,
'application/x-msgpack')
# Register the base serialization methods.
register_json()
register_pickle()
register_yaml()
register_msgpack()
# JSON is assumed to always be available, so is the default.
# (this matches the historical use of kombu.)
registry._set_default_serializer('json')
|
Python
| 0.998591
|
@@ -8244,37 +8244,38 @@
ntent_encoding='
-utf-8
+binary
')%0A except Im
|
52fe3b7055bac981ce79a30fd59ec67d064da819
|
mark test to run on cpu only
|
hoomd/hpmc/pytest/test_remove_drift.py
|
hoomd/hpmc/pytest/test_remove_drift.py
|
# Copyright (c) 2009-2021 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
"""Test hoomd.hpmc.update.RemoveDrift."""
import hoomd
from hoomd.conftest import operation_pickling_check
import pytest
import hoomd.hpmc.pytest.conftest
# note: The parameterized tests validate parameters so we can't pass in values
# here that require preprocessing
valid_constructor_args = [
dict(trigger=hoomd.trigger.Periodic(10),
ref_positions=[(0, 0, 0), (1, 0, 1)]),
dict(trigger=hoomd.trigger.After(10), ref_positions=[(0, 0, 0), (1, 0, 1)]),
dict(trigger=hoomd.trigger.Before(10), ref_positions=[(0, 0, 0), (1, 0, 1)])
]
valid_attrs = [('trigger', hoomd.trigger.Periodic(10000)),
('trigger', hoomd.trigger.After(100)),
('trigger', hoomd.trigger.Before(12345)),
('ref_positions', [(0, 0, 0), (1, 0, 1)])]
@pytest.mark.serial
@pytest.mark.parametrize("constructor_args", valid_constructor_args)
def test_valid_construction(device, constructor_args):
"""Test that RemoveDrift can be constructed with valid arguments."""
cl = hoomd.hpmc.update.RemoveDrift(**constructor_args)
# validate the params were set properly
for attr, value in constructor_args.items():
assert getattr(cl, attr) == value
@pytest.mark.serial
@pytest.mark.parametrize("constructor_args", valid_constructor_args)
def test_valid_construction_and_attach(device, simulation_factory,
two_particle_snapshot_factory,
constructor_args, valid_args):
"""Test that RemoveDrift can be attached with valid arguments."""
integrator = valid_args[0]
args = valid_args[1]
# Need to unpack union integrators
if isinstance(integrator, tuple):
inner_integrator = integrator[0]
integrator = integrator[1]
inner_mc = inner_integrator()
for i in range(len(args["shapes"])):
# This will fill in default values for the inner shape objects
inner_mc.shape["A"] = args["shapes"][i]
args["shapes"][i] = inner_mc.shape["A"]
mc = integrator()
mc.shape["A"] = args
mc.shape["B"] = args
cl = hoomd.hpmc.update.RemoveDrift(**constructor_args)
dim = 2 if 'polygon' in integrator.__name__.lower() else 3
sim = simulation_factory(
two_particle_snapshot_factory(particle_types=['A', 'B'],
dimensions=dim,
d=2,
L=50))
sim.operations.updaters.append(cl)
sim.operations.integrator = mc
sim.run(0)
# validate the params were set properly
for attr, value in constructor_args.items():
assert getattr(cl, attr) == value
@pytest.mark.serial
@pytest.mark.parametrize("attr,value", valid_attrs)
def test_valid_setattr(device, attr, value):
"""Test that RemoveDrift can get and set attributes."""
cl = hoomd.hpmc.update.RemoveDrift(trigger=hoomd.trigger.Periodic(10),
ref_positions=[(0, 0, 1), (-1, 0, 1)])
setattr(cl, attr, value)
assert getattr(cl, attr) == value
@pytest.mark.serial
@pytest.mark.parametrize("attr,value", valid_attrs)
def test_valid_setattr_attached(device, attr, value, simulation_factory,
two_particle_snapshot_factory, valid_args):
"""Test that RemoveDrift can get and set attributes while attached."""
integrator = valid_args[0]
args = valid_args[1]
# Need to unpack union integrators
if isinstance(integrator, tuple):
inner_integrator = integrator[0]
integrator = integrator[1]
inner_mc = inner_integrator()
for i in range(len(args["shapes"])):
# This will fill in default values for the inner shape objects
inner_mc.shape["A"] = args["shapes"][i]
args["shapes"][i] = inner_mc.shape["A"]
mc = integrator()
mc.shape["A"] = args
mc.shape["B"] = args
cl = hoomd.hpmc.update.RemoveDrift(trigger=hoomd.trigger.Periodic(10),
ref_positions=[(0, 0, 1), (-1, 0, 1)])
dim = 2 if 'polygon' in integrator.__name__.lower() else 3
sim = simulation_factory(
two_particle_snapshot_factory(particle_types=['A', 'B'],
dimensions=dim,
d=2,
L=50))
sim.operations.updaters.append(cl)
sim.operations.integrator = mc
sim.run(0)
setattr(cl, attr, value)
assert getattr(cl, attr) == value
@pytest.mark.serial
def test_pickling(simulation_factory, two_particle_snapshot_factory):
"""Test that RemoveDrift objects are picklable."""
sim = simulation_factory(two_particle_snapshot_factory())
mc = hoomd.hpmc.integrate.Sphere(d=0.1, a=0.1)
mc.shape['A'] = dict(diameter=1.1)
mc.shape['B'] = dict(diameter=1.3)
sim.operations.integrator = mc
cl = hoomd.hpmc.update.RemoveDrift(trigger=hoomd.trigger.Periodic(5),
ref_positions=[(0, 0, 1), (-1, 0, 1)])
operation_pickling_check(cl, sim)
|
Python
| 0
|
@@ -949,32 +949,49 @@
est.mark.serial%0A
+@pytest.mark.cpu%0A
@pytest.mark.par
@@ -1035,32 +1035,32 @@
nstructor_args)%0A
-
def test_valid_c
@@ -1380,32 +1380,49 @@
est.mark.serial%0A
+@pytest.mark.cpu%0A
@pytest.mark.par
@@ -2894,32 +2894,49 @@
est.mark.serial%0A
+@pytest.mark.cpu%0A
@pytest.mark.par
@@ -3311,32 +3311,49 @@
est.mark.serial%0A
+@pytest.mark.cpu%0A
@pytest.mark.par
@@ -4761,32 +4761,32 @@
ttr) == value%0A%0A%0A
-
@pytest.mark.ser
@@ -4789,16 +4789,33 @@
.serial%0A
+@pytest.mark.cpu%0A
def test
|
97f9f6570c8eda39f5cb824f46ca923c6291bbbe
|
create directories for logging
|
loggingServer.py
|
loggingServer.py
|
import json
import os, datetime
from flask import Flask, jsonify, render_template, request
app = Flask(__name__, template_folder='./')
class logserver:
firefile = ""
potfile = ""
missfile = ""
repfile = ""
confile = ""
conwfile = ""
def logFiring(self, log):
self.writeLog(self.firefile, log)
return "success"
def logPot(self, log):
self.writeLog(self.potfile, log)
return "success"
def logMiss(self, log):
self.writeLog(self.missfile, log)
return "success"
def logReplenish(self, log):
self.writeLog(self.repfile, log)
return "success"
def logConnection(self, log):
self.writeLog(self.confile, log)
return "success"
def logConW(self, log):
self.writeLog(self.conwfile, log)
return "success"
def writeLog(self, filename, data):
logfile = open(filename, 'a')
for entry in data:
logfile.write(entry)
del data[:]
logfile.close()
def createNewLogFiles(self):
logpath = os.path.dirname(os.getcwd())
logpath = os.path.normpath(os.path.join(logpath, 'BrainPowerLogs'))
conpath = os.path.normpath(os.path.join(logpath, 'Connections'))
conwpath = os.path.normpath(os.path.join(logpath, 'ConWeights'))
firepath = os.path.normpath(os.path.join(logpath, 'Firing'))
potpath = os.path.normpath(os.path.join(logpath, 'Potential'))
misspath = os.path.normpath(os.path.join(logpath, 'MissEnergy'))
reppath = os.path.normpath(os.path.join(logpath, 'ReplenishEnergy'))
timestampstr = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
self.firefile = os.path.join(firepath, timestampstr)
self.potfile = os.path.join(potpath, timestampstr)
self.missfile = os.path.join(misspath, timestampstr)
self.repfile = os.path.join(reppath, timestampstr)
self.confile = os.path.join(conpath, timestampstr)
self.conwfile = os.path.join(conwpath, timestampstr)
lfile = open(self.firefile, 'w')
lfile.close()
lfile = open(self.potfile, 'w')
lfile.close()
lfile = open(self.missfile, 'w')
lfile.close()
lfile = open(self.repfile, 'w')
lfile.close()
lfile = open(self.confile, 'w')
lfile.close()
lfile = open(self.conwfile, 'w')
lfile.close()
def main(self):
app.run(port=8080, debug=True)
# Module-level singleton shared by all route handlers below.
logserv = logserver()

@app.route("/")
def index():
    # Serve the landing page.
    return render_template('./index.html')

@app.route("/purplebrain.html")
def brain():
    # Serve the brain visualization page.
    return render_template('./purplebrain.html')

@app.route('/firing', methods=['POST'])
def logFiring():
    # Buffer the posted JSON payload via logserv.logFiring.
    log = request.get_json()
    logserv.logFiring(log)
    return "success"

@app.route('/potential', methods=['POST'])
def logPotential():
    # Buffer the posted JSON payload via logserv.logPot.
    log = request.get_json()
    logserv.logPot(log)
    return "success"

@app.route('/miss', methods=['POST'])
def logMiss():
    # Buffer the posted JSON payload via logserv.logMiss.
    log = request.get_json()
    logserv.logMiss(log)
    return "success"

@app.route('/replenish', methods=['POST'])
def logRep():
    # Buffer the posted JSON payload via logserv.logReplenish.
    log = request.get_json()
    logserv.logReplenish(log)
    return "success"

@app.route('/connection', methods=['POST'])
def logConnections():
    # Buffer the posted JSON payload via logserv.logConnection.
    log = request.get_json()
    logserv.logConnection(log)
    return "success"

@app.route('/conweights', methods=['POST'])
def logWeights():
    # Buffer the posted JSON payload via logserv.logConW.
    log = request.get_json()
    logserv.logConW(log)
    return "success"

@app.route('/createLogs', methods=['POST'])
def createLogFiles():
    # Rotate: start a fresh, timestamped set of log files.
    logserv.createNewLogFiles()
    return "success"

if __name__ == '__main__':
    logserv.main()
|
Python
| 0.000001
|
@@ -1431,16 +1431,141 @@
nergy'))
+%0A%0A%09%09for dir in %5Bconpath, conwpath, firepath, potpath, misspath, reppath%5D:%0A%09%09%09if not os.path.exists(dir):%0A%09%09%09%09os.makedirs(dir)
%0A%09%0A%09%09tim
|
baee05c39e1d9424348d5634825788e001a01fea
|
Update generator
|
CarND-Behavioral-Cloning-P3/model.py
|
CarND-Behavioral-Cloning-P3/model.py
|
import csv
import cv2
import numpy as np

# ---------------------------------------------------------------------------
# Data loading: read the simulator driving log and load the camera frames.
# ---------------------------------------------------------------------------
lines = []
# Open the csv log file and read the file name of the figure
with open('./data/driving_log.csv') as csvfile:
    reader = csv.reader(csvfile)
    for line in reader:
        lines.append(line)

images = []
measurements = []
for line in lines:
    # Columns 0-2 hold the center, left, and right camera image paths.
    for i in range(3):
        source_path = line[i] # read the middle, left, right images
        token = source_path.split('/')
        filename = token[-1]
        local_path = './data/IMG/' + filename # in order to run the code on AWS
        image = cv2.imread(local_path)
        images.append(image)
    # Steering correction for the side cameras: the left frame is paired
    # with measurement+0.2, the right frame with measurement-0.2.
    correction = 0.2
    measurement = float(line[3])
    measurements.append(measurement)
    measurements.append(measurement+correction)
    measurements.append(measurement-correction)
print(len(measurements))

# ---------------------------------------------------------------------------
# Augmentation: horizontal flips with negated steering double the data and
# balance the left/right turn bias of the track.
# ---------------------------------------------------------------------------
augmented_images = []
augmented_measurements = []
# flip the images to generate more images
for image, measurement in zip(images, measurements):
    augmented_images.append(image)
    augmented_measurements.append(measurement)
    flipped_image = cv2.flip(image, 1)
    flipped_measurement = measurement * -1.0
    augmented_images.append(flipped_image)
    augmented_measurements.append(flipped_measurement)
print(len(augmented_measurements))

X_train = np.array(augmented_images)
y_train = np.array(augmented_measurements)
print(X_train.shape)
print(X_train[1].shape)

import keras
from keras.models import Sequential
from keras.layers import Flatten, Dense, Lambda
from keras.layers.convolutional import Convolution2D, Cropping2D
from keras.layers.pooling import MaxPooling2D

# Nvidia End to End Self-driving Car CNN
model = Sequential()
# Normalize pixels to [-0.5, 0.5]; input is the raw 160x320 RGB frame.
model.add(Lambda(lambda x: x/255.0 - 0.5, input_shape = (160, 320, 3)))
# Crop 50 rows of sky and 20 rows of hood from every frame.
model.add(Cropping2D(cropping=((50,20),(0,0))))
model.add(Convolution2D(24,5,5, subsample=(2,2), activation='relu'))
model.add(Convolution2D(36,5,5, subsample=(2,2), activation='relu'))
model.add(Convolution2D(48,5,5, subsample=(2,2), activation='relu'))
model.add(Convolution2D(64,3,3, activation='relu'))
model.add(Convolution2D(64,3,3, activation='relu'))
model.add(Flatten())
model.add(Dense(100))
model.add(Dense(50))
model.add(Dense(10))
# Single output: the predicted steering angle (regression, hence MSE loss).
model.add(Dense(1))
print('model ready')

model.compile(optimizer='adam', loss = 'mse')
model.fit(X_train, y_train, validation_split=0.2, shuffle=True, batch_size=128, nb_epoch = 5)
model.save('model.h5')
print('Model Saved!')
|
Python
| 0.000001
|
@@ -2261,146 +2261,511 @@
')%0A%0A
-model.compile(optimizer='adam', loss = 'mse')%0Amodel.fit(X_train, y_train, validation_split=0.2, shuffle=True, batch_size=128, nb_epoch = 5
+batch_size = 128%0Anb_epoch = 5%0A%0A# Save model weights after each epoch%0Acheckpointer = ModelCheckpoint(filepath=%22./tmp/v2-weights.%7Bepoch:02d%7D-%7Bval_loss:.2f%7D.hdf5%22, verbose=1, save_best_only=False)%0A%0A# Train model using generator%0Amodel.fit_generator(train_generator, %0A samples_per_epoch=len(train_samples), %0A validation_data=validation_generator,%0A nb_val_samples=len(validation_samples), nb_epoch=nb_epoch,%0A callbacks=%5Bcheckpointer%5D
)%0A%0Am
|
daa3762ed79e363841a2bb56dbd6a75e7a703dae
|
Fix too long lines in soc.models.org_app module.
|
app/soc/models/org_app.py
|
app/soc/models/org_app.py
|
#!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains the Organization Application Model."""
__authors__ = [
'"Todd Larsen" <tlarsen@google.com>',
]
from google.appengine.ext import db
from django.utils.translation import ugettext
from soc.models import licenses
import soc.models.document
import soc.models.group_app
import soc.models.user
class OrgApplication(soc.models.group_app.GroupApplication):
  """Specialized questions for the Organization application.

  These questions are in addition to those in the GroupApplication Model.

  Eventually, this will be replaced with a Question/Answer/Quiz/Response
  approach.  At that time, existing OrgApplication entities will be migrated
  (converted) to their new representations in the Datastore.
  """

  # Free-form history questions (optional).
  prior_participation = db.TextProperty(required=False, verbose_name=ugettext(
      'Has your group participated previously?'
      ' If so, please summarize your involvement and any past successes'
      ' and failures.'))

  prior_application = db.TextProperty(required=False, verbose_name=ugettext(
      'If your group has not previously participated, have you applied in'
      ' the past? If so, for what sort of participation?'))

  # Restricted to the license names declared in soc.models.licenses.
  license_name = db.StringProperty(required=True, choices=licenses.LICENSES,
      verbose_name=ugettext('What license does your organization use?'))
  license_name.example_text=ugettext('See '
      '<a href="http://www.opensource.org/licenses/alphabetical"> the official list</a>.')

  # Must be a URL (LinkProperty validates it).
  ideas = db.LinkProperty(required=True, verbose_name=ugettext(
      'What is the URL to the ideas list of your organization?'))
  ideas.help_text = ugettext('For instance a link to a Melange public '
                             'document or some other URL')

  dev_mailing_list = db.StringProperty(required=True, verbose_name=ugettext(
    'What is the main development mailing list for your group?'))
  dev_mailing_list.help_text = ugettext(
    'Mailing list email address, URL to sign-up page, etc. If a mailing '
    'list is not used please specify another method of communication used '
    'within the group.')

  contrib_template = db.TextProperty(required=False, verbose_name=ugettext(
      'What is the application template you would like contributors'
      ' to your organization to use.'))
  contrib_template.help_text = ugettext(
      'This template can be used by contributors, such as students'
      ' and other non-member participants, when they apply to contribute'
      ' to the organization.')

  # Contingency-planning questions (required).
  contrib_disappears = db.TextProperty(required=True, verbose_name=ugettext(
      'What is your plan for dealing with disappearing contributors?'))
  contrib_disappears.help_text = ugettext(
      'Contributors include students and other non-member participants.')

  member_disappears = db.TextProperty(required=True, verbose_name=ugettext(
      'What is your plan for dealing with disappearing members?'))
  member_disappears.help_text = ugettext(
      'Members include mentors, administrators, and the like.')

  encourage_contribs = db.TextProperty(required=True, verbose_name=ugettext(
      'What steps will you take to encourage contributors to interact with'
      ' your community before, during, and after the program?'))
  # Intentionally reuses the contributor definition from contrib_disappears.
  encourage_contribs.help_text = contrib_disappears.help_text

  continued_contribs = db.TextProperty(required=True, verbose_name=ugettext(
      'What will you do to ensure that your accepted contributors stick'
      ' with the project after the program concludes?'))
  continued_contribs.help_text = contrib_disappears.help_text

  #: field storing whether the User has agreed to the site-wide Terms of Service.
  #: (Not a required field because the Terms of Service might not be present
  #: when the first User profile is created when bootstrapping the site.)
  agreed_to_admin_agreement = db.BooleanProperty(required=False, default=False,
      verbose_name=ugettext('I Agree to the Admin Agreement'))
  agreed_to_admin_agreement.help_text = ugettext(
      'Indicates whether the user agreed to the Admin Agreement.')
|
Python
| 0
|
@@ -1955,17 +1955,19 @@
ple_text
-=
+ =
ugettext
@@ -2040,16 +2040,25 @@
tical%22%3E
+'%0A '
the offi
@@ -4208,16 +4208,22 @@
te-wide
+%0A #:
Terms of
@@ -4231,21 +4231,16 @@
Service.
-%0A #:
(Not a
@@ -4271,24 +4271,30 @@
he Terms of
+%0A #:
Service migh
@@ -4309,21 +4309,16 @@
present
-%0A #:
when th
@@ -4341,16 +4341,22 @@
file is
+%0A #:
created
|
36a57b00d96e2ffed57572aaa3de074305e03244
|
Use attr** in solax (#62397)
|
homeassistant/components/solax/sensor.py
|
homeassistant/components/solax/sensor.py
|
"""Support for Solax inverter via local API."""
import asyncio
from datetime import timedelta
from solax import real_time_api
from solax.inverter import InverterError
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
SensorStateClass,
)
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT, TEMP_CELSIUS
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
DEFAULT_PORT = 80
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
SCAN_INTERVAL = timedelta(seconds=30)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Platform setup."""
    api = await real_time_api(config[CONF_IP_ADDRESS], config[CONF_PORT])
    endpoint = RealTimeDataEndpoint(hass, api)
    resp = await api.get_data()
    serial = resp.serial_number
    hass.async_add_job(endpoint.async_refresh)
    async_track_time_interval(hass, endpoint.async_refresh, SCAN_INTERVAL)

    # (device_class, state_class, unit override) per native unit string;
    # unknown units map to (None, None, None) with the unit kept as-is.
    unit_meta = {
        "C": (SensorDeviceClass.TEMPERATURE, SensorStateClass.MEASUREMENT, TEMP_CELSIUS),
        "kWh": (SensorDeviceClass.ENERGY, SensorStateClass.TOTAL_INCREASING, None),
        "V": (SensorDeviceClass.VOLTAGE, SensorStateClass.MEASUREMENT, None),
        "A": (SensorDeviceClass.CURRENT, SensorStateClass.MEASUREMENT, None),
        "W": (SensorDeviceClass.POWER, SensorStateClass.MEASUREMENT, None),
        "%": (SensorDeviceClass.BATTERY, SensorStateClass.MEASUREMENT, None),
    }

    entities = []
    for sensor, (idx, unit) in api.inverter.sensor_map().items():
        device_class, state_class, unit_override = unit_meta.get(
            unit, (None, None, None)
        )
        if unit_override is not None:
            unit = unit_override
        entities.append(
            Inverter(f"{serial}-{idx}", serial, sensor, unit, state_class, device_class)
        )
    endpoint.sensors = entities
    async_add_entities(entities)
class RealTimeDataEndpoint:
    """Polls the inverter API and pushes fresh values to the sensors."""

    def __init__(self, hass, api):
        """Initialize the endpoint."""
        self.hass = hass
        self.api = api
        self.ready = asyncio.Event()
        self.sensors = []

    async def async_refresh(self, now=None):
        """Fetch new state data for the sensor.

        This is the only method that should fetch new data for Home Assistant.
        """
        try:
            api_response = await self.api.get_data()
        except InverterError as err:
            # During initial setup (now is None) a failure means the
            # platform is not ready yet; on scheduled refreshes we just
            # mark the endpoint as not ready and wait for the next tick.
            if now is None:
                raise PlatformNotReady from err
            self.ready.clear()
            return
        self.ready.set()
        payload = api_response.data
        for sensor in self.sensors:
            if sensor.key not in payload:
                continue
            sensor.value = payload[sensor.key]
            sensor.async_schedule_update_ha_state()
class Inverter(SensorEntity):
    """Sensor exposing a single attribute reported by the inverter."""

    def __init__(
        self,
        uid,
        serial,
        key,
        unit,
        state_class=None,
        device_class=None,
    ):
        """Initialize an inverter sensor."""
        self._attr_state_class = state_class
        self._attr_device_class = device_class
        self.uid = uid
        self.serial = serial
        self.key = key
        self.unit = unit
        # Updated in place by RealTimeDataEndpoint.async_refresh.
        self.value = None

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def unique_id(self):
        """Return unique id."""
        return self.uid

    @property
    def name(self):
        """Name of this inverter attribute."""
        return f"Solax {self.serial} {self.key}"

    @property
    def native_value(self):
        """State of this inverter attribute."""
        return self.value

    @property
    def native_unit_of_measurement(self):
        """Return the unit of measurement."""
        return self.unit
|
Python
| 0
|
@@ -3332,32 +3332,63 @@
r a sensor.%22%22%22%0A%0A
+ _attr_should_poll = False%0A%0A
def __init__
@@ -3389,16 +3389,16 @@
init__(%0A
-
@@ -3577,17 +3577,29 @@
self.
-u
+_attr_unique_
id = uid
@@ -3616,46 +3616,44 @@
elf.
-serial = serial%0A self.key =
+_attr_name = f%22Solax %7Bserial%7D %7B
key
+%7D%22
%0A
@@ -3666,37 +3666,39 @@
elf.
-value = None%0A self.uni
+_attr_native_unit_of_measuremen
t =
@@ -3793,16 +3793,65 @@
ce_class
+%0A self.key = key%0A self.value = None
%0A%0A @p
@@ -3926,32 +3926,32 @@
r attribute.%22%22%22%0A
+
return s
@@ -3964,455 +3964,4 @@
lue%0A
-%0A @property%0A def unique_id(self):%0A %22%22%22Return unique id.%22%22%22%0A return self.uid%0A%0A @property%0A def name(self):%0A %22%22%22Name of this inverter attribute.%22%22%22%0A return f%22Solax %7Bself.serial%7D %7Bself.key%7D%22%0A%0A @property%0A def native_unit_of_measurement(self):%0A %22%22%22Return the unit of measurement.%22%22%22%0A return self.unit%0A%0A @property%0A def should_poll(self):%0A %22%22%22No polling needed.%22%22%22%0A return False%0A
|
bc6772cb8990039479f6fe2b238304765aafab41
|
make 70_web.py better
|
examples/70_web.py
|
examples/70_web.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Run this script, then try the following urls:
#
# 1. http://127.0.0.1:5000/?person_id=mosky
# 2. http://127.0.0.1:5000/?name=Mosky Liu
# 3. http://127.0.0.1:5000/?name like=%Mosky%
#
import psycopg2
from flask import Flask, request, jsonify
from mosql.query import select, left_join
app = Flask(__name__)
conn = psycopg2.connect(host='127.0.0.1')
@app.route('/')
def index():
    """Return persons (left-joined with their details) as JSON.

    The raw query-string arguments are passed straight to MoSQL's
    ``select`` as the WHERE clause, e.g. ``?person_id=mosky`` or
    ``?name like=%Mosky%``; with no arguments, all rows are returned.
    """
    cur = conn.cursor()
    try:
        cur.execute(select(
            'person',
            request.args or None,
            joins = left_join('detail', using=('person_id', )),
        ))
        rows = cur.fetchall()
    finally:
        # Fix: the cursor previously leaked if execute() raised.
        cur.close()
    return jsonify(data=rows)

if __name__ == '__main__':
    # Development server only; use a real WSGI server in production.
    app.run(debug=True)
|
Python
| 0.000151
|
@@ -330,72 +330,102 @@
oin%0A
-%0Aapp = Flask(__name__)%0A%0Aconn = psycopg2.connect(host='127.0.0.1'
+from mosql.db import Database%0A%0Adb = Database(psycopg2, host='127.0.0.1')%0A%0Aapp = Flask(__name__
)%0A%0A@
@@ -460,28 +460,28 @@
-cur = conn.cursor()%0A
+with db as cur:%0A
@@ -500,16 +500,20 @@
select(%0A
+
@@ -530,16 +530,20 @@
+
request.
@@ -556,16 +556,20 @@
r None,%0A
+
@@ -628,53 +628,19 @@
-))%0A
-rows = cur.fetchall()%0A cur.close()%0A
+))%0A
@@ -663,12 +663,17 @@
ata=
-rows
+list(cur)
)%0A%0Ai
|
c4e90fb96d57a5130f5e66958508f3c23ef24c4d
|
Correct ddt propagate invoice (almost all fields)
|
mexal_ddt/wizard/ddt_create_invoice.py
|
mexal_ddt/wizard/ddt_create_invoice.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Abstract (http://www.abstract.it)
# Copyright (C) 2014 Agile Business Group (http://www.agilebg.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, fields
from openerp.tools.translate import _
from openerp.exceptions import Warning
class DdTCreateInvoice(models.TransientModel):
    """Wizard that turns a selection of DDTs into one grouped invoice."""

    _inherit = 'ddt.create.invoice'

    # -------------------------------------------------------------------------
    # Override
    # -------------------------------------------------------------------------
    # original function in l10n_it_ddt module for check and update more things:
    def check_ddt_data(self, ddts):
        ''' Check that all DDT has common elements mandatory:

        Raises a Warning as soon as any of the checked fields differs
        between the first DDT and a later one.  Fields checked: carriage
        condition, goods description, transportation reason/method,
        payment term, bank account, destination and invoice partner.
        '''
        # NOTE(review): default_carrier_id (and the commented-out
        # mx_agent_id/parcels) are captured from the first DDT but never
        # compared below -- confirm whether that is intentional.
        i = 0
        for ddt in ddts:
            i += 1
            if i == 1: # Update with first DDT data (reference values):
                carriage_condition_id = ddt.carriage_condition_id.id
                goods_description_id = ddt.goods_description_id.id
                transportation_reason_id = ddt.transportation_reason_id.id
                transportation_method_id = ddt.transportation_method_id.id
                #mx_agent_id = ddt.mx_agent_id.id
                payment_term_id = ddt.payment_term_id.id
                used_bank_id = ddt.used_bank_id.id
                default_carrier_id = ddt.default_carrier_id.id
                #parcels = ddt.parcels # needed?

                destination_partner_id = ddt.destination_partner_id.id
                invoice_partner_id = ddt.invoice_partner_id.id
                continue # check second DDT

            # From the second DDT on, every field must match the first:
            if ddt.carriage_condition_id.id != carriage_condition_id:
                raise Warning(
                    _('Selected DDTs have different Carriage Conditions'))
            if ddt.goods_description_id.id != goods_description_id:
                raise Warning(
                    _('Selected DDTs have different Descriptions of Goods'))
            if ddt.transportation_reason_id.id != transportation_reason_id:
                raise Warning(
                    _('Selected DDTs have different Transportation Reasons'))
            if ddt.transportation_method_id.id != transportation_method_id:
                raise Warning(
                    _('Selected DDTs have different Transportation Methods'))
            #if ddt.mx_agent_id.id != mx_agent_id:
            #    raise Warning(
            #        _('Selected DDTs have different Agent'))

            if ddt.payment_term_id.id != payment_term_id:
                raise Warning(
                    _('Selected DDTs have different Payment terms'))
            if ddt.used_bank_id.id != used_bank_id:
                raise Warning(
                    _('Selected DDTs have different bank account'))
            if ddt.destination_partner_id.id != destination_partner_id:
                raise Warning(
                    _('Selected DDTs have different destination'))
            if ddt.invoice_partner_id.id != invoice_partner_id:
                raise Warning(
                    _('Selected DDTs have different invoice partner'))

    @api.multi
    def create_invoice(self):
        """Create one grouped invoice from the DDTs in the context's
        active_ids, propagate the DDT header fields onto it, and return
        an act_window action opening the new invoice."""
        ddt_model = self.env['stock.ddt']
        picking_pool = self.pool['stock.picking']
        ddts = ddt_model.browse(self.env.context['active_ids'])
        partners = set([ddt.partner_id for ddt in ddts])
        if len(partners) > 1:
            raise Warning(_('Selected DDTs belong to different partners'))
        # TODO check also destination and invoice address?!?!
        pickings = []
        self.check_ddt_data(ddts)
        # TODO check if there's some DDT yet invoiced!!!
        for ddt in ddts:
            # Refuse to re-invoice a DDT that already has an invoice.
            if ddt.invoice_id:
                raise Warning(_('There\' DDT yet invoiced: %s') % ddt.number)
            for picking in ddt.picking_ids:
                pickings.append(picking.id)
                #for move in picking.move_lines:
                #    # XXX forced as in done state!!!!
                #    if move.invoice_state != '2binvoiced':
                #        raise Warning(
                #            _('Move %s is not invoiceable') % move.name)
        # group=True: all pickings are merged into a single invoice.
        invoices = picking_pool.action_invoice_create(
            self.env.cr,
            self.env.uid,
            pickings,
            self.journal_id.id, group=True, context=None)
        # Save invoice created in ddt document (to no reinvoice again)
        if not invoices:
            raise Warning('No invoice created!!!')
            # NOTE(review): unreachable -- the raise above always fires.
            return # XXX error!!
        ddts.write({'invoice_id': invoices[0]})
        # Update with extra data taken from DDT elements:
        # (check_ddt_data guarantees these fields agree across all DDTs,
        # so copying them from ddts[0] is safe.)
        invoice_obj = self.env['account.invoice'].browse(invoices)
        # TODO complete with correct fields:
        invoice_obj.write({
            'carriage_condition_id': ddts[0].carriage_condition_id.id,
            'goods_description_id': ddts[0].goods_description_id.id,
            'transportation_reason_id': ddts[0].transportation_reason_id.id,
            'transportation_method_id': ddts[0].transportation_method_id.id,
            'mx_agent_id': ddts[0].partner_id.agent_id.id,
            'payment_term_id': ddts[0].payment_term_id.id, # TODO remove?
            'payment_term': ddts[0].payment_term_id.id,
            'partner_bank_id': ddts[0].used_bank_id.id,
            'used_bank_id': ddts[0].used_bank_id.id, # TODO remove?
            'default_carrier_id': ddts[0].default_carrier_id.id,
            'destination_partner_id': ddts[0].destination_partner_id.id,
            'invoice_partner_id': ddts[0].invoice_partner_id.id,
            # date?
            # TODO 'parcels': ddts[0].parcels, # calculate
            })

        # Build the client action that opens the created invoice.
        ir_model_data = self.env['ir.model.data']
        form_res = ir_model_data.get_object_reference(
            'account', 'invoice_form',)
        form_id = form_res and form_res[1] or False
        tree_res = ir_model_data.get_object_reference(
            'account', 'invoice_tree')
        tree_id = tree_res and tree_res[1] or False

        return {
            'name': 'Invoice',
            'view_type': 'form',
            'view_mode': 'form,tree',
            'res_model': 'account.invoice',
            'res_id': invoices[0],
            'view_id': False,
            'views': [(form_id, 'form'), (tree_id, 'tree')],
            'type': 'ir.actions.act_window',
            }
|
Python
| 0
|
@@ -2015,24 +2015,90 @@
agent_id.id%0A
+ #parcels = ddt.parcels # needed?%0A %0A
@@ -2260,57 +2260,8 @@
.id%0A
- #parcels = ddt.parcels # needed?%0A
@@ -3311,24 +3311,37 @@
nt Agent'))%0A
+ %0A
@@ -6055,32 +6055,33 @@
d,%0A%0A
+#
'payment_term_id
@@ -6540,32 +6540,94 @@
_partner_id.id,%0A
+ 'direct_invoice': False,%0A %0A
# da
|
cb5aeedc651773d1c298b167b07aa535dfd7beca
|
Fix typos/spelling in serializer docstrings (#2420)
|
parsl/serialize/concretes.py
|
parsl/serialize/concretes.py
|
import dill
import pickle
import logging
logger = logging.getLogger(__name__)
from parsl.serialize.base import SerializerBase
class PickleSerializer(SerializerBase):
    """Serializer backed by the stdlib ``pickle`` module.

    Pickle covers most Python objects, with some notable exceptions:

    * functions defined in an interpreter/notebook
    * classes defined in a local context and not importable using a
      fully qualified name
    * closures, generators and coroutines
    * [sometimes] issues with wrapped/decorated functions
    """

    _identifier = b'01\n'
    _for_code = True
    _for_data = True

    def serialize(self, data):
        return self.identifier + pickle.dumps(data)

    def deserialize(self, payload):
        return pickle.loads(self.chomp(payload))
class DillSerializer(SerializerBase):
    """Serializer backed by ``dill``.

    Dill handles a superset of the objects covered by pickle, but pickle
    is usually faster; for most callables the extra overhead can be
    amortized with an lru_cache.  Items dill handles that pickle does not:

    * functions defined in an interpreter/notebook
    * classes defined in a local context and not importable using a
      fully qualified name
    * functions that are wrapped/decorated by other functions/classes
    * closures
    """

    _identifier = b'02\n'
    _for_code = True
    _for_data = True

    def serialize(self, data):
        return self.identifier + dill.dumps(data)

    def deserialize(self, payload):
        return dill.loads(self.chomp(payload))
|
Python
| 0.000005
|
@@ -280,33 +280,33 @@
d in a interpret
-o
+e
r/notebook%0A *
@@ -392,25 +392,25 @@
me%0A * clo
-j
+s
ures, genera
@@ -1167,17 +1167,17 @@
nterpret
-o
+e
r/notebo
@@ -1345,17 +1345,17 @@
* clo
-j
+s
ures%0A
|
6891f7e7cf78d71d430341ea90fa8f1d65e29fdf
|
Fix super methods
|
VMEncryption/main/oscrypto/ubuntu_1404/Ubuntu1404EncryptionStateMachine.py
|
VMEncryption/main/oscrypto/ubuntu_1404/Ubuntu1404EncryptionStateMachine.py
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
import inspect
import os
import sys
import traceback
from time import sleep
scriptdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
maindir = os.path.abspath(os.path.join(scriptdir, '../../'))
sys.path.append(maindir)
transitionsdir = os.path.abspath(os.path.join(scriptdir, '../../transitions'))
sys.path.append(transitionsdir)
from oscrypto import *
from encryptstates import *
from Common import *
from CommandExecutor import *
from DiskUtil import *
from transitions import *
class Ubuntu1404EncryptionStateMachine(OSEncryptionStateMachine):
    """State machine driving in-place OS-disk encryption on Ubuntu 14.04.

    Fix: the super() calls and the Machine() construction previously
    referenced ``Ubuntu1604EncryptionStateMachine``, a name that does not
    exist in this module, so every state transition raised NameError.
    All references now use this class's own name.
    """

    states = [
        State(name='uninitialized'),
        State(name='prereq', on_enter='on_enter_state'),
        State(name='stripdown', on_enter='on_enter_state'),
        State(name='unmount_oldroot', on_enter='on_enter_state'),
        State(name='split_root_partition', on_enter='on_enter_state'),
        State(name='encrypt_block_device', on_enter='on_enter_state'),
        State(name='patch_boot_system', on_enter='on_enter_state'),
        State(name='completed'),
    ]

    transitions = [
        {
            'trigger': 'skip_encryption',
            'source': 'uninitialized',
            'dest': 'completed'
        },
        {
            'trigger': 'enter_prereq',
            'source': 'uninitialized',
            'dest': 'prereq'
        },
        {
            'trigger': 'enter_stripdown',
            'source': 'prereq',
            'dest': 'stripdown',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_unmount_oldroot',
            'source': 'stripdown',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'retry_unmount_oldroot',
            'source': 'unmount_oldroot',
            'dest': 'unmount_oldroot',
            'before': 'on_enter_state'
        },
        {
            'trigger': 'enter_split_root_partition',
            'source': 'unmount_oldroot',
            'dest': 'split_root_partition',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_encrypt_block_device',
            'source': 'split_root_partition',
            'dest': 'encrypt_block_device',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'enter_patch_boot_system',
            'source': 'encrypt_block_device',
            'dest': 'patch_boot_system',
            'before': 'on_enter_state',
            'conditions': 'should_exit_previous_state'
        },
        {
            'trigger': 'stop_machine',
            'source': 'patch_boot_system',
            'dest': 'completed',
            'conditions': 'should_exit_previous_state'
        },
    ]

    def on_enter_state(self):
        # Fix: was super(Ubuntu1604EncryptionStateMachine, ...) -> NameError.
        super(Ubuntu1404EncryptionStateMachine, self).on_enter_state()

    def should_exit_previous_state(self):
        # when this is called, self.state is still the "source" state in the transition
        # Fix: was super(Ubuntu1604EncryptionStateMachine, ...) -> NameError.
        return super(Ubuntu1404EncryptionStateMachine, self).should_exit_previous_state()

    def __init__(self, hutil, distro_patcher, logger, encryption_environment):
        # Fix: was super(Ubuntu1604EncryptionStateMachine, ...) -> NameError.
        super(Ubuntu1404EncryptionStateMachine, self).__init__(hutil, distro_patcher, logger, encryption_environment)

        # One handler object per encryptable state; keys match the State names.
        self.state_objs = {
            'prereq': PrereqState(self.context),
            'stripdown': StripdownState(self.context),
            'unmount_oldroot': UnmountOldrootState(self.context),
            'split_root_partition': SplitRootPartitionState(self.context),
            'encrypt_block_device': EncryptBlockDeviceState(self.context),
            'patch_boot_system': PatchBootSystemState(self.context),
        }

        self.state_machine = Machine(model=self,
                                     states=Ubuntu1404EncryptionStateMachine.states,
                                     transitions=Ubuntu1404EncryptionStateMachine.transitions,
                                     initial='uninitialized')

    def start_encryption(self):
        """Drive the machine through all states until the OS volume is encrypted.

        Skips everything if /dev/mapper/osencrypt is already mounted.  The
        unmount-oldroot step is retried up to 10 times (10s apart) because
        lingering processes can keep /oldroot busy.
        """
        proc_comm = ProcessCommunicator()
        self.command_executor.Execute(command_to_execute="mount",
                                      raise_exception_on_failure=True,
                                      communicator=proc_comm)

        if '/dev/mapper/osencrypt' in proc_comm.stdout:
            self.logger.log("OS volume is already encrypted")
            self.skip_encryption()
            self.log_machine_state()
            return

        self.log_machine_state()
        self.enter_prereq()
        self.log_machine_state()
        self.enter_stripdown()
        self.log_machine_state()

        oldroot_unmounted_successfully = False
        attempt = 1

        while not oldroot_unmounted_successfully:
            self.logger.log("Attempt #{0} to unmount /oldroot".format(attempt))
            try:
                if attempt == 1:
                    self.enter_unmount_oldroot()
                elif attempt > 10:
                    raise Exception("Could not unmount /oldroot in 10 attempts")
                else:
                    self.retry_unmount_oldroot()
                self.log_machine_state()
            except Exception as e:
                message = "Attempt #{0} to unmount /oldroot failed with error: {1}, stack trace: {2}".format(attempt,
                                                                                                             e,
                                                                                                             traceback.format_exc())
                self.logger.log(msg=message)
                # Report the failure but keep retrying until the limit.
                self.hutil.do_status_report(operation='EnableEncryptionOSVolume',
                                            status=CommonVariables.extension_error_status,
                                            status_code=str(CommonVariables.unmount_oldroot_error),
                                            message=message)
                sleep(10)
            else:
                oldroot_unmounted_successfully = True
            finally:
                attempt += 1

        self.enter_split_root_partition()
        self.log_machine_state()
        self.enter_encrypt_block_device()
        self.log_machine_state()
        self.enter_patch_boot_system()
        self.log_machine_state()
        self.stop_machine()
        self.log_machine_state()
|
Python
| 0.000414
|
@@ -3656,33 +3656,33 @@
super(Ubuntu1
-6
+4
04EncryptionStat
@@ -3865,33 +3865,33 @@
rn super(Ubuntu1
-6
+4
04EncryptionStat
@@ -4032,25 +4032,25 @@
uper(Ubuntu1
-6
+4
04Encryption
|
ef3969c84e1dea783c8b5d3389ec22d133601be8
|
Load supertypes, types, and subtypes into memory
|
src/magic_cards/utils/initial_import.py
|
src/magic_cards/utils/initial_import.py
|
import io
import json
import zipfile
from contextlib import closing
import requests
from django.db import transaction
from magic_cards.models import Artist, Card, CardSubtype, CardSupertype, CardType, Printing, Set
MTG_JSON_URL = 'https://mtgjson.com/json/AllSets-x.json.zip'
FALLBACK_MTG_JSON_URL = 'http://mtgjson.com/json/AllSets-x.json.zip'
class Everything:
    """
    Sentinel value for downloading all sets (i.e. skipping nothing).

    Used as the default for ``import_cards(set_codes=...)``; compared by
    identity (``set_codes is not Everything``), never instantiated.
    """
    pass
def fetch_data():
    """Download and decode the MTGJSON AllSets archive.

    Tries the HTTPS URL first and falls back to plain HTTP if the
    connection fails.  Returns the parsed JSON: a dict keyed by set code.
    """
    try:
        response = requests.get(MTG_JSON_URL)
    except requests.ConnectionError:
        response = requests.get(FALLBACK_MTG_JSON_URL)
    with closing(response), zipfile.ZipFile(io.BytesIO(response.content)) as archive:
        members = archive.infolist()
        # The archive is expected to contain exactly one JSON file.
        if len(members) != 1:
            raise RuntimeError("Found an unexpected number of files in the MTGJSON archive.")
        raw = archive.read(members[0])
    return json.loads(raw.decode('utf-8'))
def parse_rarity(string):
    """Map an MTGJSON rarity string to a ``Printing.Rarity`` value.

    Any unrecognized rarity falls back to ``SPECIAL``.
    """
    rarity_map = {
        'Mythic Rare': Printing.Rarity.MYTHIC,
        'Rare': Printing.Rarity.RARE,
        'Uncommon': Printing.Rarity.UNCOMMON,
        'Common': Printing.Rarity.COMMON,
        'Basic Land': Printing.Rarity.BASIC_LAND,
    }
    return rarity_map.get(string, Printing.Rarity.SPECIAL)
def parse_data(sets_data, set_codes):
    """Populate Set, Card, and Printing models from MTGJSON data.

    Args:
        sets_data: Mapping of set code -> set data, as returned by
            ``fetch_data()``.
        set_codes: Container of set codes to import, or ``Everything``
            to import every set.
    """
    # Performance fix: supertypes/types/subtypes are tiny tables, but the
    # old code issued a get_or_create query per type per card.  Load them
    # into memory once and only hit the database for genuinely new names.
    supertype_objs = {t.name: t for t in CardSupertype.objects.all()}
    type_objs = {t.name: t for t in CardType.objects.all()}
    subtype_objs = {t.name: t for t in CardSubtype.objects.all()}

    # Process the data set-by-set
    for code, data in sets_data.items():
        # Skip sets that have not been chosen
        if set_codes is not Everything and code not in set_codes:
            continue

        # Create the set
        magic_set, _ = Set.objects.get_or_create(code=code, name=data['name'])

        # Create cards
        all_cards_data = data['cards']
        for card_data in all_cards_data:
            # Tokens are not real cards; skip them.
            layout = card_data['layout']
            if layout == 'token':
                continue

            # Card info
            name = card_data['name']
            mana_cost = card_data.get('manaCost', '')
            text = card_data.get('text', '')
            power = card_data.get('power', '')
            toughness = card_data.get('toughness', '')
            card, _ = Card.objects.get_or_create(
                name=name, defaults={
                    'mana_cost': mana_cost,
                    'text': text,
                    'power': power,
                    'toughness': toughness,
                })

            supertypes = card_data.get('supertypes', [])
            types = card_data['types']
            subtypes = card_data.get('subtypes', [])

            for supertype_name in supertypes:
                supertype = supertype_objs.get(supertype_name)
                if supertype is None:
                    supertype = CardSupertype.objects.create(name=supertype_name)
                    supertype_objs[supertype_name] = supertype
                card.supertypes.add(supertype)
            for type_name in types:
                card_type = type_objs.get(type_name)
                if card_type is None:
                    card_type = CardType.objects.create(name=type_name)
                    type_objs[type_name] = card_type
                card.types.add(card_type)
            for subtype_name in subtypes:
                subtype = subtype_objs.get(subtype_name)
                if subtype is None:
                    subtype = CardSubtype.objects.create(name=subtype_name)
                    subtype_objs[subtype_name] = subtype
                card.subtypes.add(subtype)

            # Printing info
            artist_name = card_data['artist']
            artist, _ = Artist.objects.get_or_create(full_name=artist_name)
            multiverse_id = card_data.get('multiverseid', None)  # Missing on certain sets
            # NOTE(review): MTGJSON flavor text lives under the key
            # 'flavor'; 'flavor_text' may never match -- confirm upstream.
            flavor_text = card_data.get('flavor_text', '')
            rarity = card_data['rarity']
            number = card_data.get('number', '')  # Absent on old sets
            Printing.objects.get_or_create(
                card=card,
                set=magic_set,
                rarity=parse_rarity(rarity),
                flavor_text=flavor_text,
                artist=artist,
                number=number,
                multiverse_id=multiverse_id)
@transaction.atomic
def import_cards(set_codes=Everything):
    """Fetch MTGJSON data and import the chosen sets in one transaction."""
    parse_data(fetch_data(), set_codes)


if __name__ == "__main__":
    import_cards()
|
Python
| 0.000001
|
@@ -1471,16 +1471,268 @@
codes):%0A
+ # Load supertypes, types, and subtypes into memory%0A supertype_objs = %7Bt.name: t for t in CardSupertype.objects.all()%7D%0A type_objs = %7Bt.name: t for t in CardType.objects.all()%7D%0A subtype_objs = %7Bt.name: t for t in CardSubtype.objects.all()%7D%0A%0A
# Pr
@@ -2998,32 +2998,35 @@
+if
supertype, _ = C
@@ -3014,27 +3014,163 @@
if supertype
-, _
+_name in supertype_objs:%0A supertype = supertype_objs%5Bsupertype_name%5D%0A else:%0A supertype
= CardSuper
@@ -3174,39 +3174,32 @@
pertype.objects.
-get_or_
create(name=supe
@@ -3202,32 +3202,95 @@
supertype_name)%0A
+ supertype_objs%5Bsupertype_name%5D = supertype%0A
@@ -3368,24 +3368,146 @@
+if type_name in type_objs:%0A card_type = type_objs%5Btype_name%5D%0A else:%0A
card_type, _
@@ -3499,27 +3499,24 @@
card_type
-, _
= CardType.
@@ -3515,39 +3515,32 @@
ardType.objects.
-get_or_
create(name=type
@@ -3538,32 +3538,85 @@
name=type_name)%0A
+ type_objs%5Btype_name%5D = card_type%0A
@@ -3703,18 +3703,147 @@
+if
subtype
-, _
+_name in subtype_objs:%0A subtype = subtype_objs%5Bsubtype_name%5D%0A else:%0A subtype
= C
@@ -3857,31 +3857,24 @@
ype.objects.
-get_or_
create(name=
@@ -3879,32 +3879,89 @@
e=subtype_name)%0A
+ subtype_objs%5Bsubtype_name%5D = subtype%0A
|
7fd76d87cfda8f02912985cb3cf650ee8ff2e11e
|
Remove py2 Ska.DBI assert in report test
|
mica/report/tests/test_write_report.py
|
mica/report/tests/test_write_report.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import tempfile
import os
import shutil
import pytest
from .. import report
try:
import Ska.DBI
with Ska.DBI.DBI(server='sqlsao', dbi='sybase', user='aca_ops', database='axafocat') as db:
assert db.conn._is_connected == 1
HAS_SYBASE_ACCESS = True
except:
HAS_SYBASE_ACCESS = False
HAS_SC_ARCHIVE = os.path.exists(report.starcheck.FILES['data_root'])
@pytest.mark.skipif('not HAS_SYBASE_ACCESS', reason='Report test requires Sybase/OCAT access')
@pytest.mark.skipif('not HAS_SC_ARCHIVE', reason='Report test requires mica starcheck archive')
def test_write_reports():
"""
Make a report and database
"""
tempdir = tempfile.mkdtemp()
# Get a temporary file, but then delete it, because report.py will only
# make a new table if the supplied file doesn't exist
fh, fn = tempfile.mkstemp(dir=tempdir, suffix='.db3')
os.unlink(fn)
report.REPORT_ROOT = tempdir
report.REPORT_SERVER = fn
for obsid in [20001, 15175, 54778]:
report.main(obsid)
os.unlink(fn)
shutil.rmtree(tempdir)
|
Python
| 0
|
@@ -259,50 +259,8 @@
db:%0A
- assert db.conn._is_connected == 1%0A
|
bd4965042c251319d986fc918345a1453844ebeb
|
Fix log
|
laboratory/settings.py
|
laboratory/settings.py
|
import logging
import os
import sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = 'sbib5ss_=z^qngyjqw1om5)4w5l@_ba@pin(7ee^k=#6q=0b)!'
DEBUG = "DLIS" in os.environ
INTERNAL_IPS = ['127.0.0.1', '192.168.0.200', '192.168.0.101', '192.168.102.4', '192.168.0.128']
ALLOWED_HOSTS = ['lis.fc-ismu.local', 'lis', '127.0.0.1', 'localhost', 'testserver']
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_HSTS_SECONDS = 1
X_FRAME_OPTIONS = 'ALLOWALL'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admindocs',
'ajax_select',
'health',
'appconf',
'clients',
'users',
'dashboard',
'podrazdeleniya',
'results',
'researches',
'directions',
'receivematerial',
'construct',
'slog',
'directory',
'statistic',
'api',
'discharge',
'rmis_integration',
'debug_toolbar',
'debug_panel',
'rest_framework',
'cachalot',
'django_logtail',
)
MIDDLEWARE = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.admindocs.middleware.XViewMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'debug_panel.middleware.DebugPanelMiddleware',
'django.middleware.common.CommonMiddleware',
]
ROOT_URLCONF = 'laboratory.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'context_processors.utils.card_bases',
]
},
},
]
WSGI_APPLICATION = 'laboratory.wsgi.application'
LOGIN_URL = '/'
LOGIN_REDIRECT_URL = '/dashboard/'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'l2',
'USER': 'postgres',
'PASSWORD': '123456',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
'KEY_PREFIX': 'lis' + ("" if not DEBUG else "_DBG")
},
'debug-panel': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': '/var/tmp/debug-panel-cache-2',
'TIMEOUT': 300,
'OPTIONS': {
'MAX_ENTRIES': 200
}
}
}
LANGUAGE_CODE = 'ru-ru'
DATE_FORMAT = 'd.m.Y'
TIME_FORMAT = 'd.m.Y'
USE_TZ = True
TIME_ZONE = 'Asia/Irkutsk'
USE_I18N = True
USE_L10N = True
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
FIXTURE_DIRS = (os.path.join(BASE_DIR, 'fixtures'),)
AUTH_PROFILE_MODULE = 'users.models.DoctorsProfile'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(BASE_DIR, 'logs') + '/log.txt',
},
},
'loggers': {
'django.request': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'cachalot.panels.CachalotPanel',
)
LDAP = {
"enable": False,
"server": {
"host": "192.168.0.254",
"port": 389,
"user": "cn=Admin,dc=fc-ismu,dc=local",
"password": ""
},
"user_object": "(objectClass=*)",
"base": "dc=fc-ismu,dc=local"
}
SESSION_SAVE_EVERY_REQUEST = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_AGE = 15 * 60 * 60
class DisableMigrations(object):
def __contains__(self, item):
return True
def __getitem__(self, item):
return "notmigrations"
TESTS_IN_PROGRESS = False
if 'test' in sys.argv[1:] or 'jenkins' in sys.argv[1:]:
logging.disable(logging.CRITICAL)
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
DEBUG = False
TEMPLATE_DEBUG = False
TESTS_IN_PROGRESS = True
MIGRATION_MODULES = DisableMigrations()
CACHALOT_ENABLED = True
import warnings
warnings.filterwarnings('ignore', message='DateTimeField*', category=RuntimeWarning)
MAX_UPLOAD_SIZE = DATA_UPLOAD_MAX_MEMORY_SIZE = 104857600
DEBUG = False
LOGTAIL_FILES = {
'L2': os.path.join(BASE_DIR, 'logs', 'log.txt')
}
try:
from laboratory.local_settings import *
except ImportError:
pass
|
Python
| 0.000002
|
@@ -3706,36 +3706,38 @@
'level': '
-DEBU
+WARNIN
G',%0A
@@ -3956,20 +3956,22 @@
evel': '
-DEBU
+WARNIN
G',%0A
|
319d457e1e6511f6c240f5f4f5479181647f7cf6
|
Fix bug with test
|
scopus/tests/test_AffiliationSearch.py
|
scopus/tests/test_AffiliationSearch.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `AffiliationSearch` module."""
from collections import namedtuple
from nose.tools import assert_equal, assert_true
import scopus
s = scopus.AffiliationSearch('af-id(60021784)', refresh=True)
def test_affiliations():
received = s.affiliations
assert_true(isinstance(received, list))
order = 'eid name variant documents city country parent'
Affiliation = namedtuple('Affiliation', order)
expected = [Affiliation(eid='10-s2.0-60021784', name='New York University',
variant='', documents='101148', city='New York',
country='United States', parent='0')]
assert_true(int(received.documents) >= 101148)
assert_equal(received._replace(documents="0"), expected)
|
Python
| 0
|
@@ -348,17 +348,16 @@
list))%0A
-%0A
orde
@@ -479,9 +479,8 @@
d =
-%5B
Affi
@@ -539,24 +539,16 @@
rsity',%0A
-
@@ -570,22 +570,17 @@
uments='
-101148
+0
', city=
@@ -594,24 +594,8 @@
rk',
-%0A
cou
@@ -615,16 +615,24 @@
States',
+%0A
parent=
@@ -639,9 +639,8 @@
'0')
-%5D
%0A
@@ -664,16 +664,19 @@
received
+%5B0%5D
.documen
@@ -715,16 +715,19 @@
received
+%5B0%5D
._replac
|
28c29b6f3a481a0344f69c69490f49e020cacc73
|
remove some leftover comments
|
lala/plugins/quotes.py
|
lala/plugins/quotes.py
|
import sqlite3
import logging
import os
from time import sleep
from lala.util import command, initplz, msg
#@initplz
#class Plugin(object):
#def __init__(self):
#self._con = sqlite3.connect(
#os.path.join(os.path.expanduser("~/.lala"),"quotes.sqlite3"))
#self._con.execute("CREATE TABLE IF NOT EXISTS quotes(\
#quote TEXT);")
#self._con.commit()
#self._con.text_factory = sqlite3.OptimizedUnicode
#def __del__(self):
#self._con.close()
db_connection = None
@initplz
def setup():
global db_connection
db_connection = sqlite3.connect(
os.path.join(os.path.expanduser("~/.lala"),"quotes.sqlite3"))
db_connection.execute("CREATE TABLE IF NOT EXISTS quotes(\
quote TEXT);")
db_connection.commit()
db_connection.text_factory = sqlite3.OptimizedUnicode
@command("getquote")
def getquote( user, channel, text):
s_text = text.split()
if len(s_text) > 1:
quotenumber = s_text[1]
logging.debug("Trying to get quote number %s" % quotenumber)
with db_connection:
q = db_connection.execute("SELECT quote FROM quotes\
WHERE rowid = ?;", [quotenumber]).fetchall()
if len(q) > 0:
msg(channel, "[%s] %s" % (quotenumber, q[0][0]))
else:
msg(channel, "%s: There's no quote #%s" % (user,
quotenumber))
@command("addquote")
def addquote( user, channel, text):
s_text = text.split()
if len(s_text) > 1:
text = " ".join(s_text[1:])
logging.debug("Adding quote: %s" % text)
with db_connection:
c = db_connection.execute("INSERT INTO quotes (quote) values (?);",
[text])
msg(channel, "New quote: %s" % c.lastrowid)
else:
msg(channel, "%s: You didn't give me any text to quote " % user)
@command("delquote")
def delquote( user, channel, text):
s_text = text.split()
if len(s_text) > 1:
quotenumber = s_text[1]
logging.debug("Deleting quote: %s" % quotenumber)
with db_connection:
c = db_connection.execute("DELETE FROM quotes where ROWID = (?);",
[quotenumber]).fetchall()
db_connection.commit()
else:
msg(channel, "%s: There's no quote #%s" % (user,
quotenumber))
@command("lastquote")
def lastquote( user, channel, text):
with db_connection:
try:
(id, quote) = db_connection.execute("SELECT rowid, quote FROM quotes\
ORDER BY rowid DESC LIMIT 1;").fetchall()[0]
except IndexError, e:
return
msg(channel, "[%s] %s" % (id, quote))
@command("rquote")
def randomquote( user, channel, text):
with db_connection:
try:
(id, quote) = db_connection.execute("SELECT rowid, quote FROM quotes ORDER\
BY random() LIMIT 1;").fetchall()[0]
except IndexError, e:
return
msg(channel, "[%s] %s" % (id, quote))
@command("searchquote")
def searchquote( user, channel, text):
s_text = text.split()
logging.debug(s_text[1:])
with db_connection:
quotes = db_connection.execute("SELECT rowid, quote FROM quotes\
WHERE quote LIKE (?)", [
"".join(("%",
" ".join(s_text[1:]),
"%"))]
).fetchall()
for (id, quote) in quotes:
msg(channel, "[%s] %s" % (id, quote))
# TODO get rid of this ugly thing
sleep(1)
def join( event):
user = event[0][0]
channel = event[1]
with db_connection:
try:
(id, quote) = db_connection.execute("SELECT rowid, quote FROM quotes\
WHERE quote LIKE (?) ORDER BY random() LIMIT\
1;", ["".join(["%", user, "%"])]).fetchall()[0]
except IndexError, e:
# There's no matching quote,
return
msg(channel, "[%s] %s" % (id, quote))
|
Python
| 0
|
@@ -106,414 +106,8 @@
sg%0A%0A
-#@initplz%0A#class Plugin(object):%0A #def __init__(self):%0A #self._con = sqlite3.connect(%0A #os.path.join(os.path.expanduser(%22~/.lala%22),%22quotes.sqlite3%22))%0A #self._con.execute(%22CREATE TABLE IF NOT EXISTS quotes(%5C%0A #quote TEXT);%22)%0A #self._con.commit()%0A #self._con.text_factory = sqlite3.OptimizedUnicode%0A%0A #def __del__(self):%0A #self._con.close()%0A
db_c
|
ed0821bd41a10dd00727f09cf9ba82123bd2cf93
|
Fix output of permissions import script
|
scripts/import_permissions_and_roles.py
|
scripts/import_permissions_and_roles.py
|
#!/usr/bin/env python
"""Import permissions, roles, and their relations from a TOML file.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import click
from byceps.services.authorization import impex_service
from byceps.util.system import get_config_filename_from_env_or_exit
from _util import app_context
@click.command()
@click.argument('data_file', type=click.File())
def execute(data_file):
permission_count, role_count = impex_service.import_from_file(data_file)
click.secho(
'Imported {permission_count} permissions and {role_count} roles.',
fg='green',
)
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
|
Python
| 0.000003
|
@@ -550,16 +550,17 @@
+f
'Importe
|
52b6abfa11354792c01316fad7f886a4d892e013
|
drugs sorted alphabetically
|
gui_py3.py
|
gui_py3.py
|
from tkinter import Tk, RIGHT, LEFT, TOP, BOTTOM, BOTH, FLAT, RAISED, SUNKEN, RIDGE, END, SW, W, Listbox, StringVar, Toplevel
from tkinter import ttk
#ttk import Frame, Button, Style, Label
import sqlite3
class Database(object):
def __init__(self, filename, table):
self.connection = sqlite3.connect(filename)
with self.connection:
self.connection.row_factory = sqlite3.Row
self.cursor = self.connection.cursor()
self.cursor.execute("SELECT * FROM %s" % table)
self.contents = self.cursor.fetchall()
self.cursor.execute("SELECT Name FROM ANTIPSYCHOTICS")
self.drugs = self.cursor.fetchall()
def get_drug(self, query):
for drug in self.contents:
if query in drug:
return drug
def dict_gen(list):
d = {}
for i, row in enumerate(list):
l = []
for col in range(0, len(row)):
l.append(row[col])
d[i] = l
return d
def dump_drugs(self):
drug_list = []
for drug in self.drugs:
drug_list.append(drug)
return drug_list
class Example(ttk.Frame):
def __init__(self, parent):
ttk.Frame.__init__(self, parent)
self.parent = parent
self.initUI()
def initUI(self):
self.parent.title("Transmute")
self.style = ttk.Style()
self.style.theme_use("default")
box1 = Listbox(self, height=23)
box2 = Listbox(self, height=23)
self.antipsychotics = Database("drugs.sqlite", "ANTIPSYCHOTICS")
dumped = self.antipsychotics.dump_drugs()
d = {}
for i, row in enumerate(dumped):
l = []
for col in range(0, len(row)):
l.append(row[col])
d[i] = l
for drug in d.values():
drug_string = str(drug)
drug_string = drug_string.replace("'","").replace("[","").replace("]","")
box1.insert(END, drug_string)
box2.insert(END, drug_string)
box1.bind("<<ListboxSelect>>", self.onSelect1)
box1.pack(side=LEFT, padx=5, pady=3)
box2.bind("<<ListboxSelect>>", self.onSelect2)
box2.pack(side=LEFT)
self.pack(fill=BOTH, expand=1, side=LEFT)
self.dose_entry = ttk.Entry(self, width=35)
self.dose_entry.pack(side=TOP, anchor=W, padx=5, pady=3)
self.convert_from = StringVar()
self.convert_from.set("mg of one antipsychotic roughly equates to:")
self.label_from = ttk.Label(self, anchor=W, textvariable=self.convert_from, width=35)
self.label_from.pack(side=TOP, anchor=W, padx=5, pady=3)
self.result_given = StringVar()
self.result_given.set("?")
self.result_label = ttk.Label(self, anchor=W, textvariable=self.result_given, width=35, relief=RAISED, background="white")
self.result_label.pack(side=TOP, anchor=W, padx=5, pady=3)
self.convert_to = StringVar()
self.convert_to.set("mg of another antipsychotic.")
self.label_to = ttk.Label(self, anchor=W, textvariable=self.convert_to)
self.label_to.pack(side=TOP, anchor=W, padx=5, pady=3)
frame = ttk.Frame(self, relief=FLAT, borderwidth=1)
frame.pack(fill=BOTH, expand=1)
convert_button = ttk.Button(self, text="Convert", command = self.convert)
convert_button.pack(side=LEFT, padx=5, pady=3)
exit_button = ttk.Button(self, text="Exit", command = self.kill)
exit_button.pack(side=LEFT, padx=5, pady=3)
def give_result(self, result):
toplevel = Toplevel()
frame = ttk.Frame(self, borderwidth=1)
frame.pack()
label1 = ttk.Label(toplevel, text=result)
label1.pack()
def convert(self):
db_from = self.antipsychotics.get_drug(from_drug)
db_to = self.antipsychotics.get_drug(to_drug)
dose = self.dose_entry.get()
multiplier = float(db_from["CF"])/float(db_to["CF"])
result = float(dose) * multiplier
self.result_given.set(result)
# self.give_result(result)
def onSelect1(self, val):
sender = val.widget
idx = sender.curselection()
global from_drug
from_drug = sender.get(idx)
self.convert_from.set("mg of "+from_drug+" roughly equates to:")
def onSelect2(self, val):
sender = val.widget
idx = sender.curselection()
global to_drug
to_drug = sender.get(idx)
self.convert_to.set("mg of "+to_drug+".")
def kill(self):
self.quit()
def main():
root = Tk()
app = Example(root)
root.mainloop()
main()
|
Python
| 0.999229
|
@@ -1,28 +1,60 @@
+#transmute: antipsychotic tool%0A%0A
from tkinter import Tk, RIGH
@@ -180,48 +180,8 @@
tk%0A%0A
-#ttk import Frame, Button, Style, Label%0A
impo
@@ -1161,23 +1161,39 @@
%0A%0Aclass
-Example
+TransmuteAntipsychotics
(ttk.Fra
@@ -1379,16 +1379,32 @@
ransmute
+: Antipsychotics
%22)%0A
@@ -1882,16 +1882,23 @@
drug in
+sorted(
d.values
@@ -1899,16 +1899,17 @@
values()
+)
:%0A
@@ -3999,24 +3999,41 @@
ntry.get()%0A%0A
+ try:%0A
mult
@@ -4081,16 +4081,20 @@
%5B%22CF%22%5D)%0A
+
@@ -4127,32 +4127,36 @@
tiplier%0A
+
self.result_give
@@ -4169,16 +4169,96 @@
result)%0A
+ except:%0A self.result_given.set(%22Enter a numeric dose above.%22)
%0A#
@@ -4800,15 +4800,31 @@
p =
-Example
+TransmuteAntipsychotics
(roo
|
cf94a8a51d4e8eb3fd96ca3587af0f4c38e2deec
|
Fix kdtree example
|
examples/kdtree.py
|
examples/kdtree.py
|
from __future__ import print_function
import numpy as np
import pcl
pc_1 = pcl.PointCloud()
pc_1.from_array(points_1)
pc_2 = pcl.PointCloud()
pc_2.from_array(points_2)
kd = pcl.KdTreeFLANN(pc_1)
points_1 = np.array([[0, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 1, 0]], dtype=np.float32)
points_2 = np.array([[0, 0, 0.2],
[1, 0, 0],
[0, 1, 0],
[1.1, 1, 0.5]], dtype=np.float32)
print('pc_1:')
print(points_1)
print('\npc_2:')
print(points_2)
print('\n')
pc_1 = pcl.PointCloud(points_1)
pc_2 = pcl.PointCloud(points_2)
kd = pc_1.make_kdtree_flann()
# find the single closest points to each point in point cloud 2
# (and the sqr distances)
indices, sqr_distances = kd.nearest_k_search_for_cloud(pc_2, 1)
for i in range(pc_1.size):
print('index of the closest point in pc_1 to point %d in pc_2 is %d'
% (i, indices[i, 0]))
print('the squared distance between these two points is %f'
% sqr_distances[i, 0])
|
Python
| 0.000028
|
@@ -67,136 +67,8 @@
cl%0A%0A
-pc_1 = pcl.PointCloud()%0Apc_1.from_array(points_1)%0Apc_2 = pcl.PointCloud()%0Apc_2.from_array(points_2)%0Akd = pcl.KdTreeFLANN(pc_1)%0A%0A
poin
@@ -364,16 +364,144 @@
oat32)%0A%0A
+pc_1 = pcl.PointCloud()%0Apc_1.from_array(points_1)%0Apc_2 = pcl.PointCloud()%0Apc_2.from_array(points_2)%0Akd = pcl.KdTreeFLANN(pc_1)%0A%0A
print('p
|
881290c3d29ad28fb0fddfdd895fe493d7909262
|
make the wsgi.py work with all providers
|
django_deployer/paas_templates/wsgi.py
|
django_deployer/paas_templates/wsgi.py
|
import os
import sys
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__),'{{ project_name }}')))
os.environ['DJANGO_SETTINGS_MODULE'] = '{{ django_settings }}_stackato'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
Python
| 0
|
@@ -178,16 +178,22 @@
%7D%7D_
-stackato
+%7B%7B provider %7D%7D
'%0Aim
|
b7b32005867b0bba06dd132713aebce4ed48f6c3
|
update Param API
|
hwtLib/examples/hierarchy/unitWrapper.py
|
hwtLib/examples/hierarchy/unitWrapper.py
|
from hwt.hdl.constants import INTF_DIRECTION
from hwt.synthesizer.unit import Unit
from hwt.synthesizer.param import Param
class UnitWrapper(Unit):
"""
Class which creates wrapper around original unit instance,
original unit will be stored inside as subunit named baseUnit
:note: This is example of lazy loaded interfaces
and generating of external interfaces based on internal stucture.
"""
def __init__(self, baseUnit):
super(UnitWrapper, self).__init__()
self._baseUnit = baseUnit
def _copyParamsAndInterfaces(self):
for p_name in self._baseUnit._params:
myP = Param(getattr(self._baseUnit, p_name))
self._registerParameter(p_name, myP)
object.__setattr__(self, myP.name, myP)
origToWrapInfMap = {}
for intf in self.baseUnit._interfaces:
# clone interface
myIntf = intf.__copy__()
# subinterfaces are not instanciated yet
# myIntf._direction = intf._direction
myIntf._direction = INTF_DIRECTION.opposite(intf._direction)
self._registerInterface(intf._name, myIntf)
object.__setattr__(self, intf._name, myIntf)
origToWrapInfMap[intf] = myIntf
for i in self._interfaces:
self._loadInterface(i, True)
return origToWrapInfMap
def _getDefaultName(self):
return self._baseUnit.__class__.__name__
def _lazyLoadParamsAndInterfaces(self):
self._ctx.params = self._buildParams()
# prepare signals for interfaces
for i in self._interfaces:
assert i._isExtern
signals = i._signalsForInterface(self._ctx)
self._externInterf.extend(signals)
def _connectBaseUnitToThisWrap(self, origToWrapInfMap):
for baseIntf, wrapIntf in origToWrapInfMap.items():
if baseIntf._direction is INTF_DIRECTION.MASTER:
if isinstance(wrapIntf, list):
for i, _wrapIntf in enumerate(wrapIntf):
_wrapIntf(baseIntf[i])
else:
wrapIntf(baseIntf)
else:
if isinstance(wrapIntf, list):
for i, _wrapIntf in enumerate(wrapIntf):
baseIntf[i](_wrapIntf)
else:
baseIntf(wrapIntf)
def _impl(self):
self.baseUnit = self._baseUnit
origToWrapInfMap = self._copyParamsAndInterfaces()
self._lazyLoadParamsAndInterfaces()
self._connectBaseUnitToThisWrap(origToWrapInfMap)
|
Python
| 0.000001
|
@@ -583,21 +583,16 @@
for p
-_name
in self
@@ -660,17 +660,17 @@
eUnit, p
-_
+.
name))%0A
@@ -705,17 +705,17 @@
ameter(p
-_
+.
name, my
|
7b84c2bd59f455050a249da795d1a73021b12581
|
Add an import
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from ._error import NullNameError
from ._error import InvalidCharError
from ._error import InvalidLengthError
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
Python
| 0.000005
|
@@ -184,24 +184,68 @@
idCharError%0A
+from ._error import InvalidCharWindowsError%0A
from ._error
|
a18262572150e24416088cc548268edcd3468b79
|
Add .json to eventAPI path
|
sdk/python-sdk/predictionio/__init__.py
|
sdk/python-sdk/predictionio/__init__.py
|
__version__ = "0.8.0-SNAPSHOT"
# import packages
import re
try:
import httplib
except ImportError:
# pylint: disable=F0401
# http is a Python3 module, replacing httplib
from http import client as httplib
import json
import urllib
from predictionio.connection import Connection
from predictionio.connection import AsyncRequest
from predictionio.connection import PredictionIOAPIError
class NotCreatedError(PredictionIOAPIError):
pass
class NotFoundError(PredictionIOAPIError):
pass
class BaseClient(object):
def __init__(self, url, threads=1,
apiversion="", qsize=0, timeout=5):
"""Constructor of Client object.
"""
self.threads = threads
self.url = url
self.apiversion = apiversion
self.qsize = qsize
self.timeout = timeout
# check connection type
https_pattern = r'^https://(.*)'
http_pattern = r'^http://(.*)'
m = re.match(https_pattern, url)
self.https = True
if m is None: # not matching https
m = re.match(http_pattern, url)
self.https = False
if m is None: # not matching http either
raise InvalidArgumentError("url is not valid: %s" % url)
self.host = m.group(1)
self._uid = None # identified uid
self._connection = Connection(host=self.host, threads=self.threads,
qsize=self.qsize, https=self.https,
timeout=self.timeout)
def close(self):
"""Close this client and the connection.
Call this method when you want to completely terminate the connection
with PredictionIO.
It will wait for all pending requests to finish.
"""
self._connection.close()
def pending_requests(self):
"""Return the number of pending requests.
:returns:
The number of pending requests of this client.
"""
return self._connection.pending_requests()
def get_status(self):
"""Get the status of the PredictionIO API Server
:returns:
status message.
:raises:
ServerStatusError.
"""
path = "/"
request = AsyncRequest("GET", path)
request.set_rfunc(self._aget_resp)
self._connection.make_request(request)
result = request.get_response()
return result
def _acreate_resp(self, response):
if response.error is not None:
raise NotCreatedError("Exception happened: %s for request %s" %
(response.error, response.request))
elif response.status != httplib.CREATED:
raise NotCreatedError("request: %s status: %s body: %s" %
(response.request, response.status,
response.body))
return response
def _aget_resp(self, response):
if response.error is not None:
raise NotFoundError("Exception happened: %s for request %s" %
(response.error, response.request))
elif response.status != httplib.OK:
raise NotFoundError("request: %s status: %s body: %s" %
(response.request, response.status,
response.body))
data = json.loads(response.body) # convert json string to dict
return data
class DataClient(BaseClient):
"""Client for importing data into PredictionIO DataAPI Server."""
def __init__(self, app_id, data_url="http://localhost:7070",
threads=1, apiversion="", qsize=0, timeout=5):
super(DataClient, self).__init__(
data_url, threads, apiversion, qsize, timeout)
self.app_id = app_id
def acreate_event(self, data):
path = "/events"
request = AsyncRequest("POST", path, **data)
request.set_rfunc(self._acreate_resp)
self._connection.make_request(request)
return request
def create_event(self, data):
return self.acreate_event(data).get_response()
def aget_event(self, event_id):
enc_event_id = urllib.quote(event_id, "") # replace special char with %xx
path = "/events/%s" % enc_event_id
request = AsyncRequest("GET", path)
requset.set_rfunc(self._aget_resp)
self._connection.make_request(request)
return request
def aset_user(self, uid, properties={}):
"""set properties of an user"""
return self.acreate_event({
"event" : "$set",
"entityType" : "pio_user",
"entityId" : uid,
"properties" : properties,
"appId" : self.app_id
})
def aunset_user(self, uid, properties={}):
"""unset properties of an user"""
return self.acreate_event({
"event" : "$unset",
"entityType" : "pio_user",
"entityId" : uid,
"properties" : properties,
"appId" : self.app_id
})
def aset_item(self, iid, properties={}):
return self.acreate_event({
"event" : "$set",
"entityType" : "pio_item",
"entityId" : iid,
"properties" : properties,
"appId" : self.app_id
})
def aunset_item(self, iid, properties={}):
return self.acreate_event({
"event" : "$unset",
"entityType" : "pio_item",
"entityId" : iid,
"properties" : properties,
"appId" : self.app_id
})
def arecord_user_action_on_item(self, action, uid, iid, properties={}):
return self.acreate_event({
"event" : action,
"entityType" : "pio_user",
"entityId" : uid,
"targetEntityType" : "pio_item",
"targetEntityId": iid,
"properties" : properties,
"appId" : self.app_id
})
def set_user(self, uid, properties={}):
return self.aset_user(uid, properties).get_response()
def set_item(self, iid, properties={}):
return self.aset_item(iid, properties).get_response()
def unset_user(self, uid, properties={}):
return self.aunset_user(uid, properties).get_response()
def unset_item(self, iid, properties={}):
return self.aunset_item(iid, properties).get_response()
def record_user_action_on_item(self, action, uid, iid, properties={}):
return self.arecord_user_action_on_item(
action, uid, iid, properties).get_response()
class PredictionClient(BaseClient):
"""Client for extracting prediction results from PredictionIO Engine."""
def __init__(self, url="http://localhost:8000", threads=1,
apiversion="", qsize=0, timeout=5):
super(PredictionClient, self).__init__(
url, threads, apiversion, qsize, timeout)
def asend_query(self, data):
path = "/"
request = AsyncRequest("POST", path, **data)
request.set_rfunc(self._aget_resp)
self._connection.make_request(request)
return request
def send_query(self, data):
return self.asend_query(data).get_response()
|
Python
| 0.000001
|
@@ -3471,16 +3471,21 @@
%22/events
+.json
%22%0A re
@@ -3850,16 +3850,21 @@
vents/%25s
+.json
%22 %25 enc_
|
343ed4b5ce1685c00eb611982319e48047c46361
|
remove dead code
|
samples/test_a/test_a.py
|
samples/test_a/test_a.py
|
class project:
title = "Main Project"
estimate = 3
class task_a:
estimate = 8
class task_b:
title = "Task B"
estimate = 4
def deps(): return [project.task_a, other.task_d]
class task_c:
estimate = 6
def deps(): return [project.task_a]
class other:
class task_d:
class task_e:
estimate = 1
if __name__ == "__main__":
import sys
sys.path.append('../../modules')
import qplan
def print_tasks(tasks):
for name in sorted(tasks.keys()):
task = tasks[name]
print('{0} :'.format(name))
print(' deps:')
for dep_name in task.deps:
print(' ' + dep_name)
print(' waiters:')
for waiter_name in task.waiters:
print(' ' + waiter_name)
tasks = qplan.get_tasks(project)
schedule_items = qplan.schedule_naively(tasks, project)
for item in schedule_items:
print('{item.task.name}: {item.start_time} - {item.end_time}'.format(**locals()))
qplan.plot_gantt(schedule_items)
|
Python
| 0.999454
|
@@ -480,380 +480,8 @@
an%0A%0A
- def print_tasks(tasks):%0A for name in sorted(tasks.keys()):%0A task = tasks%5Bname%5D%0A print('%7B0%7D :'.format(name))%0A print(' deps:')%0A for dep_name in task.deps:%0A print(' ' + dep_name)%0A print(' waiters:')%0A for waiter_name in task.waiters:%0A print(' ' + waiter_name)%0A%0A
|
64becacba2458587ee524aeaef1f9c1995e0c921
|
fix cymrulib unicode problems
|
intelmq/bots/experts/cymru/cymrulib.py
|
intelmq/bots/experts/cymru/cymrulib.py
|
import binascii, dns.resolver
from intelmq.lib.utils import reverse_ip, decode
'''
Reference: http://www.team-cymru.org/Services/ip-to-asn.html#dns
'''
IP_QUERY = "%s.origin%s.asn.cymru.com"
ASN_QUERY = "AS%s.asn.cymru.com"
class Cymru():
@staticmethod
def query(ip, ip_version):
raw_result = Cymru.__ip_query(ip, ip_version)
result = Cymru.__ip_query_parse(raw_result)
if "asn" in result:
raw_result = Cymru.__asn_query(result['asn'])
extra_info = Cymru.__asn_query_parse(raw_result)
result.update(extra_info)
return result
@staticmethod
def __query(query):
try:
for query_result in dns.resolver.query(query, rdtype='TXT'):
return decode(query_result)
except dns.exception.DNSException:
return None
@staticmethod
def __ip_query(ip, ip_version):
reversed_ip = reverse_ip(ip)
version = ""
if ip_version == 6:
version = "6"
query = IP_QUERY % (reversed_ip, version)
return Cymru.__query(query)
@staticmethod
def __asn_query(asn):
query = ASN_QUERY % (asn)
return Cymru.__query(query)
@staticmethod
def __query_parse(text):
items = list()
for item in text.split('|'):
item = item.replace('"','')
item = item.strip()
items.append(item)
return items
@staticmethod
def __ip_query_parse(text):
# Example: "1930 | 193.136.0.0/15 | PT | ripencc |"
# Exception: "9395 17431 | 219.234.80.0/20 | CN | apnic | 2002-04-17"
result = dict()
if not text:
return result
items = Cymru.__query_parse(text)
if items[0].split(' ')[0] != "":
result['asn'] = items[0].split(' ')[0]
# In case of multiple ASNs received, get the first one.
if items[1] != "":
result['bgp_prefix'] = items[1]
if items[2] != "":
result['cc'] = items[2]
if items[3] != "":
result['registry'] = items[3]
if items[4] != "":
result['allocated'] = items[4]
return result
@staticmethod
def __asn_query_parse(text):
# Example: "23028 | US | arin | 2002-01-04 | TEAM-CYMRU - Team Cymru Inc.,US"
# Exception: "1930 | EU | ripencc | | RCCN Rede Ciencia Tecnologia e Sociedade (RCTS),PT"
result = dict()
if not text:
return result
items = Cymru.__query_parse(text)
if items[4] != "":
result['as_name'] = items[4]
return result
|
Python
| 0.000014
|
@@ -22,16 +22,26 @@
resolver
+, StringIO
%0Afrom in
@@ -77,16 +77,8 @@
e_ip
-, decode
%0A%0A''
@@ -402,25 +402,16 @@
result)%0A
- %0A
@@ -745,16 +745,20 @@
='TXT'):
+
%0A
@@ -770,35 +770,225 @@
-return decode(query_result)
+fp = StringIO.StringIO()%0A query_result.to_wire(fp)%0A value = fp.getvalue().decode('utf-8')%0A fp.close()%0A return value%5B1:%5D # ignore first character%0A
%0A
|
e3693a267df703a76210e7ba2011421a6bc533a8
|
Add, update and remove returns bool
|
pelix/remote/registry.py
|
pelix/remote/registry.py
|
#!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
Pelix remote services: Imported end points registry
:author: Thomas Calmant
:copyright: Copyright 2013, isandlaTech
:license: Apache License 2.0
:version: 0.1.1
:status: Beta
..
Copyright 2013 isandlaTech
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Module version
__version_info__ = (0, 1, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
# Remote Services constants
import pelix.remote
# iPOPO decorators
from pelix.ipopo.decorators import ComponentFactory, Requires, Provides, \
Instantiate, Invalidate, Validate, BindField
# Standard library
import logging
import threading
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
@ComponentFactory('pelix-remote-imports-registry-factory')
@Provides(pelix.remote.SERVICE_REGISTRY)
@Requires('_listeners', pelix.remote.SERVICE_IMPORT_ENDPOINT_LISTENER,
aggregate=True, optional=True)
@Instantiate('pelix-remote-imports-registry')
class ImportsRegistry(object):
"""
Registry of discovered end points. End points are identified by their UID
"""
def __init__(self):
"""
Sets up the component
"""
# Listeners (injected)
self._listeners = []
# Framework UID
self._fw_uid = None
# Framework UID -> [Endpoints]
self._frameworks = {}
# End point UID -> Endpoint
self._registry = {}
# Lock
self.__lock = threading.Lock()
# Validation flag
self.__validated = False
@BindField('_listeners')
def _bind_listener(self, field, listener, svc_ref):
"""
New listener bound
"""
with self.__lock:
if self.__validated:
# Late listener
for endpoint in self._registry.values():
try:
listener.endpoint_added(endpoint)
except Exception as ex:
_logger.exception("Error calling listener: %s", ex)
def add(self, endpoint):
"""
Registers an end point and notifies listeners. Does nothing if the
endpoint UID was already known.
:param endpoint: An ImportedEndpoint object
:return: True if the end point has been added
"""
with self.__lock:
# Check framework UID (avoid to import our own services)
if endpoint.framework == self._fw_uid:
return False
# Check if the end point already exists
if endpoint.uid in self._registry:
# Already known end point: do nothing
_logger.debug("Already known endpoint")
return False
# Store the end point
self._registry[endpoint.uid] = endpoint
if endpoint.framework:
self._frameworks.setdefault(endpoint.framework, []) \
.append(endpoint)
# Notify listeners (out of lock)
if self._listeners:
for listener in self._listeners[:]:
try:
listener.endpoint_added(endpoint)
except Exception as ex:
_logger.exception("Error calling listener: %s", ex)
return True
def update(self, uid, new_properties):
"""
Updates an end point and notifies listeners
:param uid: The UID of the end point
:param new_properties: The new properties of the end point
"""
try:
with self.__lock:
# Update the stored end point
stored_endpoint = self._registry[uid]
# Replace the stored properties
old_properties = stored_endpoint.properties.copy()
stored_endpoint.properties = new_properties.copy()
except KeyError:
# Unknown end point: ignore it
return
else:
# Notify listeners
if self._listeners:
for listener in self._listeners[:]:
try:
listener.endpoint_updated(stored_endpoint,
old_properties)
except Exception as ex:
_logger.exception("Error calling listener: %s", ex)
def remove(self, uid):
"""
Unregisters an end point and notifies listeners
:param uid: The UID of the end point to unregister
"""
# Remove the end point from the individual storage
try:
endpoint = self._registry.pop(uid)
except KeyError:
# Unknown end point
_logger.warning("Unknown end point UID: %s", uid)
return
# Remove it from its framework storage, if any
try:
framework_endpoints = self._frameworks[endpoint.framework]
if endpoint in framework_endpoints:
framework_endpoints.remove(endpoint)
if not framework_endpoints:
# Remove framework entry if there is no more endpoint
# from it
del self._frameworks[endpoint.framework]
except (KeyError, ValueError):
# Ignore the absence of reference in the framework storage
pass
# Notify listeners
if self._listeners:
for listener in self._listeners[:]:
try:
listener.endpoint_removed(endpoint)
except Exception as ex:
_logger.exception("Error calling listener: %s", ex)
def lost_framework(self, uid):
"""
Unregisters all the end points associated to the given framework UID
:param uid: The UID of a framework
"""
# Get the end points of this framework
endpoints = self._frameworks.pop(uid, None)
if endpoints:
for endpoint in endpoints:
with self.__lock:
# Remove endpoint from registry
try:
del self._registry[endpoint.uid]
except KeyError:
# The endpoint may have been removed by a listener
pass
# Notify listeners
if self._listeners:
for listener in self._listeners[:]:
try:
listener.endpoint_removed(endpoint)
except Exception as ex:
_logger.exception("Error calling listener: %s", ex)
@Validate
def validate(self, context):
"""
Component validated
"""
# Get the framework UID
self._fw_uid = context.get_property(pelix.framework.FRAMEWORK_UID)
# We are now validated
self.__validated = True
@Invalidate
def invalidate(self, context):
"""
Component invalidated: clean up storage
"""
# Update the validation flag
self.__validated = False
# Clean up
self._fw_uid = None
self._frameworks.clear()
self._registry.clear()
|
Python
| 0.99984
|
@@ -4353,32 +4353,91 @@
f the end point%0A
+ :return: True if the endpoint is known, else False%0A
%22%22%22%0A
@@ -4424,32 +4424,32 @@
lse%0A %22%22%22%0A
-
try:%0A
@@ -4837,32 +4837,38 @@
return
+ False
%0A%0A else:%0A
@@ -5254,32 +5254,57 @@
ener: %25s%22, ex)%0A%0A
+ return True%0A%0A
%0A def remove(
@@ -5443,16 +5443,64 @@
egister%0A
+ :return: True if the endpoint was known%0A
@@ -5760,16 +5760,22 @@
return
+ False
%0A%0A
@@ -6560,32 +6560,32 @@
xception as ex:%0A
-
@@ -6633,32 +6633,53 @@
ener: %25s%22, ex)%0A%0A
+ return True%0A%0A
%0A def lost_fr
|
de21965a8e94e1f73efa8bd420a8d39367fa7f26
|
fix for class based routing
|
sanic_openapi/openapi.py
|
sanic_openapi/openapi.py
|
import re
from itertools import repeat
from sanic.blueprints import Blueprint
from sanic.response import json
from sanic.views import CompositionView
from .doc import route_specs, RouteSpec, serialize_schema, definitions
blueprint = Blueprint('openapi', url_prefix='openapi')
_spec = {}
# Removes all null values from a dictionary
def remove_nulls(dictionary, deep=True):
return {
k: remove_nulls(v, deep) if deep and type(v) is dict else v
for k, v in dictionary.items()
if v is not None
}
@blueprint.listener('before_server_start')
def build_spec(app, loop):
_spec['swagger'] = '2.0'
_spec['info'] = {
"version": getattr(app.config, 'API_VERSION', '1.0.0'),
"title": getattr(app.config, 'API_TITLE', 'API'),
"description": getattr(app.config, 'API_DESCRIPTION', ''),
"termsOfService": getattr(app.config, 'API_TERMS_OF_SERVICE', None),
"contact": {
"email": getattr(app.config, 'API_CONTACT_EMAIL', None)
},
"license": {
"email": getattr(app.config, 'API_LICENSE_NAME', None),
"url": getattr(app.config, 'API_LICENSE_URL', None)
}
}
_spec['schemes'] = getattr(app.config, 'API_SCHEMES', ['http'])
# --------------------------------------------------------------- #
# Blueprint Tags
# --------------------------------------------------------------- #
for blueprint in app.blueprints.values():
if hasattr(blueprint, 'routes'):
for route in blueprint.routes:
route_spec = route_specs[route.handler]
route_spec.blueprint = blueprint
if not route_spec.tags:
route_spec.tags.append(blueprint.name)
paths = {}
for uri, route in app.router.routes_all.items():
if uri.startswith("/swagger") or uri.startswith("/openapi") \
or '<file_uri' in uri:
# TODO: add static flag in sanic routes
continue
# --------------------------------------------------------------- #
# Methods
# --------------------------------------------------------------- #
# Build list of methods and their handler functions
handler_type = type(route.handler)
if handler_type is CompositionView:
view = route.handler
method_handlers = view.handlers.items()
else:
method_handlers = zip(route.methods, repeat(route.handler))
methods = {}
for _method, _handler in method_handlers:
route_spec = route_specs.get(_handler) or RouteSpec()
if _method == 'OPTIONS' or route_spec.exclude:
continue
consumes_content_types = route_spec.consumes_content_type or \
getattr(app.config, 'API_CONSUMES_CONTENT_TYPES', ['application/json'])
produces_content_types = route_spec.produces_content_type or \
getattr(app.config, 'API_PRODUCES_CONTENT_TYPES', ['application/json'])
# Parameters - Path & Query String
route_parameters = []
for parameter in route.parameters:
route_parameters.append({
**serialize_schema(parameter.cast),
'required': True,
'in': 'path',
'name': parameter.name
})
for consumer in route_spec.consumes:
spec = serialize_schema(consumer.field)
if 'properties' in spec:
for name, prop_spec in spec['properties'].items():
route_param = {
**prop_spec,
'required': consumer.required,
'in': consumer.location,
'name': name
}
else:
route_param = {
**spec,
'required': consumer.required,
'in': consumer.location,
'name': consumer.field.name if hasattr(consumer.field, 'name') else 'body'
}
if '$ref' in route_param:
route_param["schema"] = {'$ref': route_param['$ref']}
del route_param['$ref']
route_parameters.append(route_param)
endpoint = remove_nulls({
'operationId': route_spec.operation or route.name,
'summary': route_spec.summary,
'description': route_spec.description,
'consumes': consumes_content_types,
'produces': produces_content_types,
'tags': route_spec.tags or None,
'parameters': route_parameters,
'responses': {
"200": {
"description": None,
"examples": None,
"schema": serialize_schema(route_spec.produces) if route_spec.produces else None
}
},
})
methods[_method.lower()] = endpoint
uri_parsed = uri
for parameter in route.parameters:
uri_parsed = re.sub('<'+parameter.name+'.*?>', '{'+parameter.name+'}', uri_parsed)
paths[uri_parsed] = methods
# --------------------------------------------------------------- #
# Definitions
# --------------------------------------------------------------- #
_spec['definitions'] = {obj.object_name: definition for cls, (obj, definition) in definitions.items()}
# --------------------------------------------------------------- #
# Tags
# --------------------------------------------------------------- #
# TODO: figure out how to get descriptions in these
tags = {}
for route_spec in route_specs.values():
if route_spec.blueprint and route_spec.blueprint.name in ('swagger', 'openapi'):
# TODO: add static flag in sanic routes
continue
for tag in route_spec.tags:
tags[tag] = True
_spec['tags'] = [{"name": name} for name in tags.keys()]
_spec['paths'] = paths
@blueprint.route('/spec.json')
def spec(request):
return json(_spec)
|
Python
| 0
|
@@ -143,16 +143,57 @@
tionView
+%0Afrom sanic.constants import HTTP_METHODS
%0A%0Afrom .
@@ -1583,24 +1583,611 @@
int.routes:%0A
+ if hasattr(route.handler, 'view_class'):%0A # class based view%0A view = route.handler.view_class%0A for http_method in HTTP_METHODS:%0A _handler = getattr(view, http_method.lower(), None)%0A if _handler:%0A route_spec = route_specs%5B_handler%5D%0A route_spec.blueprint = blueprint%0A if not route_spec.tags:%0A route_spec.tags.append(blueprint.name)%0A else:%0A
@@ -2222,32 +2222,36 @@
%5Broute.handler%5D%0A
+
@@ -2291,32 +2291,36 @@
+
if not route_spe
@@ -2319,32 +2319,36 @@
oute_spec.tags:%0A
+
@@ -3203,24 +3203,245 @@
d_handlers:%0A
+ if hasattr(_handler, 'view_class'):%0A view_handler = getattr(_handler.view_class, _method.lower())%0A route_spec = route_specs.get(view_handler) or RouteSpec()%0A else:%0A
|
7b0f431a896ce8cfb39e11c2a6f3e802085f2e09
|
Correct target file suffixes.
|
__init__.py
|
__init__.py
|
import SCons.Builder
import SCons.Scanner
def asciidoc_scanner(node, env, path):
"""Scans AsciiDoc files for include::[] directives"""
import os
import re
fname = str(node)
# TODO: maybe raise an error here?
if not os.path.isfile(fname):
return []
reg = re.compile('include::(.+)\[\]')
res = reg.findall(node.get_contents())
return res
# TODO: finish the emitter; it is mostly (only?) needed for temporary files left
# over when a2x fails (e.g., when the xmllint fails) so that SCons can clean
# them up
def asciidoc_emitter(target, source, env):
pass
def asciidoc_builder(env):
"""Returns an AsciiDoc builder"""
# TODO: experiment with docbook, as I have no experience with it
def gen_suffix(*kargs, **kwargs):
html_like = ('xhtml11', 'html', 'html4', 'html5', 'slidy', 'wordpress')
if env['ASCIIDOCBACKEND'] == 'pdf':
return '.pdf'
elif env['ASCIIDOCBACKEND'] == 'latex':
return '.tex'
elif env['ASCIIDOCBACKEND'].startswith('docbook'):
return '.xml'
elif env['ASCIIDOCBACKEND'] in html_like:
return '.html'
ad_action = '${ASCIIDOC} \
-b ${ASCIIDOCBACKEND} ${ASCIIDOCFLAGS} \
-o ${TARGET} ${SOURCE}'
ad_scanner = SCons.Scanner.Scanner(asciidoc_scanner, recursive=True)
asciidoc = SCons.Builder.Builder(
action = ad_action,
suffix = gen_suffix,
single_source = True,
source_scanner = ad_scanner,
)
return asciidoc
def a2x_builder(env):
"""Returns an a2x builder"""
# needed in case you want to do something with the target
# TODO: figure out chunked, docbook, htmlhelp and manpage
def gen_suffix(*kargs, **kwargs):
if env['A2XFORMAT'] == 'chunked':
return ''
elif env['A2XFORMAT'] == 'docbook':
return '.xml' # TODO: is it really one file?
elif env['A2XFORMAT'] == 'dvi':
return '.dvi'
elif env['A2XFORMAT'] == 'epub':
return '.epub'
elif env['A2XFORMAT'] == 'htmlhelp':
return ''
elif env['A2XFORMAT'] == 'manpage':
return ''
elif env['A2XFORMAT'] == 'pdf':
return '.pdf'
elif env['A2XFORMAT'] == 'ps':
return '.ps'
elif env['A2XFORMAT'] == 'tex':
return '.tex'
elif env['A2XFORMAT'] == 'text':
return '.txt'
elif env['A2XFORMAT'] == 'xhtml':
return '.html'
ad_scanner = SCons.Scanner.Scanner(asciidoc_scanner, recursive=True)
a2x = SCons.Builder.Builder(
action = '${A2X} -f ${A2XFORMAT} ${A2XFLAGS} ${SOURCE}',
suffix = gen_suffix,
single_source = True,
source_scanner = ad_scanner,
# emitter = asciidoc_emitter,
)
return a2x
def generate(env):
env['BUILDERS']['AsciiDoc'] = asciidoc_builder(env)
env['BUILDERS']['A2X'] = a2x_builder(env)
# set defaults; should match the asciidoc/a2x defaults
env['ASCIIDOC'] = 'asciidoc'
env['ASCIIDOCBACKEND'] = 'html'
env['A2X'] = 'a2x'
env['A2XFORMAT'] = 'pdf'
def exists(env):
# expect a2x to be there if asciidoc is
if not env.WhereIs("asciidoc"):
return None
return True
|
Python
| 0
|
@@ -1824,34 +1824,42 @@
return '
+.chunked
'%0A
-
elif env
@@ -2134,32 +2134,36 @@
return '
+.hhp
'%0A elif e
@@ -2212,16 +2212,20 @@
return '
+.man
'%0A
@@ -2455,16 +2455,16 @@
'text':%0A
-
@@ -2477,16 +2477,17 @@
turn '.t
+e
xt'%0A
|
b964efe5a0c7a620e1efb9a7ab43aa772f6eb5ff
|
Change config file path from user home to maya script directory Change config file format from plain text to json
|
__init__.py
|
__init__.py
|
from maya import cmds
import logging
import json
import sys
import imp
import os
# level = logging.DEBUG
level = logging.ERROR
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
logger.addHandler(handler)
logger.setLevel(level)
handler.setLevel(level)
def loadConfig():
""" Load config file
Return:
config(list): List of path module paths
"""
userDir = os.path.expanduser("~")
configPath = os.path.normpath(os.path.join(userDir, ".rushConfig"))
defaultModulePath = os.path.normpath(os.path.join(
cmds.internalVar(userScriptDir=True), 'rush', 'module'))
config = [defaultModulePath]
# Use only default module path if confi file does not exist
if not os.path.exists(configPath):
return config
# Open and load config file in use home dir and append it to the
# config list
try:
f = open(configPath, 'r')
extra_config = f.read().split()
f.close()
except IOError:
logger.debug("Failed to load config file")
config.extend(extra_config)
return config
def getModulePath(path):
""" Create and return a list of module paths
Args:
path (str): directory path to search modules
Return:
mods (list): List of module paths
None: if the path doesn't exist
"""
if not os.path.exists(path):
return None
# Get all files in the directory
allFiles = [os.path.join(root, f) for root, _, files in os.walk(path)
for f in files]
# Get only python files
pythonFiles = [i for i in allFiles if i.endswith(".py")]
# Remove __init__ and main plugin file
mods = [f for f in pythonFiles
if not f.endswith("__init__.py") and not f.endswith("Rush.py")]
return mods
def loadModule(path):
""" Load module
Args:
path (str): Full path to the python module
Return:
mod (module object): command module
None: if path doesn't exist
"""
# Create module names for import, for exapmle ...
#
# "rush/template"
# "animation/animate"
# "common/create"
# "common/display"
normpath = os.path.normpath(path)
if sys.platform == "win32":
name = os.path.splitext(normpath)[0].split("\\")
else:
name = os.path.splitext(normpath)[0].split("/")
name = "/".join(name[-2:])
try:
mod = imp.load_source(name, path)
return mod
except Exception:
logger.debug("Failed to load module : %s" % path)
return None
def getClassList(config):
""" Create and return a list of command classes
Args:
config (list): List of paths
Return:
commandClassList: list of classes
"""
# Create a single list of module paths
moduleList = []
for path in config:
logger.debug("Module path: %s " % path)
pathList = getModulePath(path)
if pathList is not None:
moduleList.extend(pathList)
# Create a list of module objects
moduleObjectList = []
for path in moduleList:
m = loadModule(path)
if m is not None:
moduleObjectList.append(m)
# Class only for the reload command
class Reload(object):
commandDict = {}
@classmethod
def _reloadRush(cls):
try:
cmds.unloadPlugin("Rush.py")
cmds.loadPlugin("Rush.py")
except Exception:
print "Failed to reload plugin"
commandDict['reloadRush'] = "sphere.png"
# Crate a list of classes
commandClassList = [i.Commands for i in moduleObjectList]
commandClassList.append(Reload)
logger.debug("All command classes: %s" % str(commandClassList))
# Create and write a list of all commands for the completer in main plugin
cmdsDict = {}
for c in commandClassList:
module_path = c.__module__
# Create temp dict for each command to store basic information
# about the command
tempDict = {}
for cmd in c.commandDict:
command_data = {}
command_data[cmd] = {}
command_data[cmd]['icon'] = c.commandDict[cmd]
command_data[cmd]['path'] = module_path
tempDict.update(command_data)
cmdsDict.update(tempDict)
outPath = os.path.normpath(
os.path.join(
cmds.internalVar(userScriptDir=True),
"rushCmds.json"))
saveCommands(outPath, cmdsDict)
return commandClassList
def saveCommands(path, cmdsDict):
""" Save all commands as a json file in the maya user directory
Args:
path (str): output path
cmdsDict (dict): All commands
Return:
None
"""
logger.debug("Saving command file to %s" % path)
try:
with open(path, 'w') as outFile:
json.dump(
cmdsDict,
outFile,
indent=4,
separators=(',', ':'),
sort_keys=True)
except IOError:
logger.debug("Failed to save command file")
class Temp(object):
pass
# Re-difine Temp class to inherit all comamnd classes
# Commands is new class definition, not class instance object
Commands = type('RushCommands', tuple(getClassList(loadConfig())), dict(Temp.__dict__))
|
Python
| 0
|
@@ -387,46 +387,8 @@
%22%22%22%0A
- userDir = os.path.expanduser(%22~%22)%0A
@@ -389,24 +389,28 @@
%22%0A config
+File
Path = os.pa
@@ -438,30 +438,66 @@
oin(
-userDir, %22.rushConfig%22
+%0A cmds.internalVar(userScriptDir=True), 'rush.json'
))%0A%0A
@@ -694,16 +694,17 @@
if confi
+g
file do
@@ -752,15 +752,97 @@
nfig
+File
Path):%0A
+ logger.debug(%22Additional config file not found: %25s%22 %25 configFilePath)%0A
@@ -979,16 +979,20 @@
n(config
+File
Path, 'r
@@ -1021,24 +1021,67 @@
g =
-f.read().split()
+json.load(f)%0A additionalPaths = extra_config%5B%22path%22%5D
%0A
@@ -1185,28 +1185,31 @@
.extend(
-extra_config
+additionalPaths
)%0A%0A r
@@ -5226,17 +5226,16 @@
ile%22)%0A%0A%0A
-%0A
class Te
@@ -5460,8 +5460,9 @@
dict__))
+%0A
|
ba0f68221ed0aa3d0fcf99efcb3180ddd9d89e0b
|
add imports to magenta/music/__init__.py for notebook functions (#246)
|
__init__.py
|
__init__.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Imports objects from music modules into the top-level music namespace."""
from constants import * # pylint: disable=wildcard-import
from melodies_lib import BadNoteException
from melodies_lib import extract_melodies
from melodies_lib import MelodyEncoderDecoder
from melodies_lib import MonophonicMelody
from melodies_lib import OneHotMelodyEncoderDecoder
from melodies_lib import PolyphonicMelodyException
from midi_io import midi_file_to_sequence_proto
from midi_io import midi_to_sequence_proto
from midi_io import MIDIConversionError
from midi_io import sequence_proto_to_midi_file
from sequence_generator import BaseSequenceGenerator
from sequence_generator import SequenceGeneratorException
from sequence_generator_bundle import GeneratorBundleParseException
from sequence_generator_bundle import read_bundle_file
from sequences_lib import BadTimeSignatureException
from sequences_lib import MultipleTimeSignatureException
from sequences_lib import NegativeTimeException
from sequences_lib import QuantizedSequence
|
Python
| 0
|
@@ -1179,24 +1179,135 @@
_midi_file%0A%0A
+from midi_synth import fluidsynth%0Afrom midi_synth import synthesize%0A%0Afrom notebook_utils import play_sequence%0A%0A
from sequenc
|
f21e1daa1324ec09e010ac7aa5b64134a97320db
|
Add test that runs `locust --help`
|
locust/test/test_main.py
|
locust/test/test_main.py
|
import os
from locust import main
from locust.argument_parser import parse_options
from locust.main import create_environment
from locust.core import HttpLocust, Locust, TaskSet
from .testcases import LocustTestCase
from .mock_locustfile import mock_locustfile
class TestLoadLocustfile(LocustTestCase):
def test_is_locust(self):
self.assertFalse(main.is_locust(Locust))
self.assertFalse(main.is_locust(HttpLocust))
self.assertFalse(main.is_locust({}))
self.assertFalse(main.is_locust([]))
class MyTaskSet(TaskSet):
pass
class MyHttpLocust(HttpLocust):
tasks = [MyTaskSet]
class MyLocust(Locust):
tasks = [MyTaskSet]
self.assertTrue(main.is_locust(MyHttpLocust))
self.assertTrue(main.is_locust(MyLocust))
class ThriftLocust(Locust):
abstract = True
self.assertFalse(main.is_locust(ThriftLocust))
def test_load_locust_file_from_absolute_path(self):
with mock_locustfile() as mocked:
docstring, locusts = main.load_locustfile(mocked.file_path)
self.assertIn('LocustSubclass', locusts)
self.assertNotIn('NotLocustSubclass', locusts)
def test_load_locust_file_from_relative_path(self):
with mock_locustfile() as mocked:
docstring, locusts = main.load_locustfile(os.path.join('./locust/test/', mocked.filename))
def test_load_locust_file_with_a_dot_in_filename(self):
with mock_locustfile(filename_prefix="mocked.locust.file") as mocked:
docstring, locusts = main.load_locustfile(mocked.file_path)
def test_return_docstring_and_locusts(self):
with mock_locustfile() as mocked:
docstring, locusts = main.load_locustfile(mocked.file_path)
self.assertEqual("This is a mock locust file for unit testing", docstring)
self.assertIn('LocustSubclass', locusts)
self.assertNotIn('NotLocustSubclass', locusts)
def test_create_environment(self):
options = parse_options(args=[
"--host", "https://custom-host",
"--reset-stats",
])
env = create_environment([], options)
self.assertEqual("https://custom-host", env.host)
self.assertTrue(env.reset_stats)
options = parse_options(args=[])
env = create_environment([], options)
self.assertEqual(None, env.host)
self.assertFalse(env.reset_stats)
|
Python
| 0.000001
|
@@ -2,16 +2,64 @@
mport os
+%0Aimport subprocess%0Afrom unittest import TestCase
%0A%0Afrom l
@@ -2589,12 +2589,584 @@
set_stats)%0A%0A
+%0Aclass LocustProcessIntegrationTest(TestCase):%0A def test_help_arg(self):%0A output = subprocess.check_output(%0A %5B%22locust%22, %22--help%22%5D, %0A stderr=subprocess.STDOUT,%0A ).decode(%22utf-8%22).strip()%0A self.assertTrue(output.startswith(%22Usage: locust %5BOPTIONS%5D %5BLocustClass ...%5D%22))%0A self.assertIn(%22Common options:%22, output)%0A self.assertIn(%22-f LOCUSTFILE, --locustfile LOCUSTFILE%22, output)%0A self.assertIn(%22Logging options:%22, output)%0A self.assertIn(%22--skip-log-setup Disable Locust's logging setup.%22, output)%0A%0A
|
0890014c18cd51f85534b096866c4d8b7cba6c3e
|
Create pool of processes and apply async
|
scripts4PAML/macse4cdsOrthofiles_TA.py
|
scripts4PAML/macse4cdsOrthofiles_TA.py
|
#!/usr/bin/python3
"""A program for aligning CDSs of a orthogroup.
USAGE: ./macse4cdsOrthofiles_TA.py
Author: Taruna Aggarwal
Affiliation: University of New Hampshire, Durham, NH, USA
Date: 01/27/2016
Purpose is
"""
import sys
import os
import subprocess
import argparse
from multiprocessing import pool
def runMACSE(input_file, rootdir="./", align_NT_dir="./NT",
align_AA_dir="./AA"):
fpath_full = os.path.join(rootdir, input_file)
runMACSE(args.root + currentFile,
NT_output_file = os.path.join(align_NT_dir,
input_file[:-3]+"_NT_aligned.fa")
AA_output_file = os.path.join(align_AA_dir,
input_file[:-3]+"_AA_aligned.fa")
MACSE_command = "java -jar /fungi/taruna/shared/bin/MACSE/macse_v1.01b.jar "
MACSE_command += "-prog alignSequences "
MACSE_command += "-seq {0} -out_NT {1} -out_AA {2}".format(fpath_full,
NT_output_file, AA_output_file)
# print(MACSE_command)
subprocess.call(MACSE_command, shell=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="This script runs aligns "
"coding sequences in files in a given directory.")
parser.add_argument('--root', default="/fungi/taruna/shared/testing_macse/",
help="PATH to the directory containing CDS orthogroup files.")
parser.add_argument('--align_NT_dir',
default="/fungi/taruna/shared/testing_macse/NT_aligned/",
help="PATH to the directory for NT aligned CDS orthogroup files.")
parser.add_argument('--align_AA_dir',
default="/fungi/taruna/shared/testing_macse/AA_aligned/",
help="PATH to the directory for AA aligned CDS orthogroup files.")
args = parser.parse_args()
Orig_file_dir = args.root
NT_align_file_dir = args.align_NT_dir
AA_align_file_dir = args.align_AA_dir
try:
os.makedirs(NT_align_file_dir)
os.makedirs(AA_align_file_dir)
except FileExistsError as e:
print(e)
# Create a list of files to apply the runMACSE function to
flist = [fname for fname in os.listdir(args.root) if fname.endswith(".fa")]
with Pool(10) as p:
# this is where I get lost. I don't even know if this is correct.
|
Python
| 0
|
@@ -270,12 +270,14 @@
rse%0A
-from
+import
mul
@@ -293,19 +293,13 @@
ing
-import pool
+as mp
%0A%0A%0Ad
@@ -1737,24 +1737,105 @@
up files.%22)%0A
+ parser.add_argument(%22--nproc%22, %22-n%22, default=2, help=%22Number of processors%22)%0A
args = p
@@ -2255,103 +2255,237 @@
)%5D%0A%0A
-%0A
-with Pool(10) as p:%0A # this is where I get lost. I don't even know if this is correct.
+# Create a pool%0A pool = mp.Pool(processes=args.nproc)%0A for f in flist:%0A pool.apply_async(runMACSE, args=(f, args.root, NT_align_file_dir,%0A AA_align_file_dir))%0A pool.close()
%0A
|
ddd80dc52599b1295b39759d7657ae5d17f08faa
|
Correct end events
|
marsem/opencv.py
|
marsem/opencv.py
|
#!/usr/bin/python3.4 -tt
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import marsem.protocol.car as car
import marsem.protocol.config as cfg
class Color():
def __init__(self):
""" Defaults to red color """
self.min = create_color_range([17, 15, 140])
self.max = create_color_range([50, 56, 200])
def set_min_max(self, xa, xb):
self.set_min(xa)
self.set_max(xb)
def set_min(self, xs):
self.min = create_color_range(xs)
def set_max(self, xs):
self.max = create_color_range(xs)
video_capture = cv2.VideoCapture()
kernel = np.ones((5,5), np.uint8)
current_frame = None
def create_color_range(lst):
return np.array(lst, dtype='uint8')
def update_current_frame(f):
global current_frame
current_frame = f
# Connects the video capture to its video source.
def connect(callback=None):
""" Connects to the videostream on the raspberry pi """
if video_capture.open(cfg.stream_file):
print("Success in connecting to remote file")
return True
else:
if callback:
callback()
print("Failed to open remote file, make sure the server is running and not busy")
return False
# This needs to be threaded, to prevent main thread block
def run(color=Color() ,samples=[], callback=None, timeout=60):
# Get the point in time where this def. was called to count from this point.
global current_frame
t_end = time.time() + timeout
while video_capture.isOpened() and t_end =< time.time():
ret, frame = video_capture.read()
mask = cv2.inRange(frame, color.min, color.max)
blue = cv2.bitwise_and(frame, frame, mask=mask)
gray = cv2.cvtColor(blue, cv2.COLOR_BGR2GRAY)
(thresh, im_bw) = cv2.threshold(gray, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)
im_bw = cv2.threshold(gray, thresh, 255, cv2.THRESH_BINARY)[1]
dilation = cv2.dilate(im_bw, kernel, iterations=10)
erosion = cv2.erode(dilation, kernel, iterations=14)
(_, contours, heir) = cv2.findContours(erosion.copy(), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
if contours:
contour = contours[0]
x, y, w, h = cv2.boundingRect(contour)
samples.append(x)
center = x + int(w / 2)
cv2.rectangle(frame, (center, 0), (center, 480), (0, 255, 0), 2)
else:
samples.append(0)
# At this point, the green line has been added to the frame and the frame can be made available.
update_global_frame(frame)
move_car(samples)
samples = []
cv2.imshow('M.A.R.S.E.M Vision', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
if callback:
stop(callback=callback)
Clock.unschedule(partial_def)
car.stream(False)
else:
stop()
Clock.unschedule(partial_def)
car.stream(False)
# Checking running time of OpenCV:
current_time = timer() # Current execution time to be compared with start_time.
diff = current_time - start_time # Calculate the difference.
if diff > 60.0: # If the difference is more than the set threshold, abort.
stop()
Clock.unschedule(partial_def)
car.stream(False)
def move_car(samples):
if len(samples) == 2:
value = sum(samples) / len(samples)
if value > 45:
car.move_right()
if value < 45:
car.move_forward()
# Returns a 'single' prepared frame from OpenCV
def get_video(callback=None):
if video_capture.isOpened():
return current_frame
else:
if callback:
callback() # If things are not connected
# Stops video capturing with OpenCV and stops the car stream (closes the camera).
def stop(callback=None):
video_capture.release()
# NEW, can we keep this?
car.stream(False)
if callback:
callback()
|
Python
| 0.999176
|
@@ -2818,608 +2818,37 @@
- Clock.unschedule(partial_def)%0A car.stream(False)%0A else:%0A stop()%0A Clock.unschedule(partial_def)%0A car.stream(False)%0A%0A # Checking running time of OpenCV:%0A current_time = timer() # Current execution time to be compared with start_time.%0A diff = current_time - start_time # Calculate the difference.%0A%0A if diff %3E 60.0: # If the difference is more than the set threshold, abort.%0A stop()%0A Clock.unschedule(partial_def)%0A car.stream(False)
+else:%0A stop()%0A
%0A%0Ade
@@ -2938,17 +2938,16 @@
amples)%0A
-%0A
@@ -3423,53 +3423,30 @@
-# NEW, can we keep this?%0A car.stream(False
+cv2.destroyAllWindows(
)%0A
|
3afd38c8ed9596e18bbefdb2ce964d3e029bd061
|
Fix #1 reversion not found bug
|
maslow/models.py
|
maslow/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.contrib.contenttypes.models import ContentType
from django.contrib.flatpages.models import FlatPage
from django.contrib.postgres.fields import JSONField
from mptt.models import MPTTModel, TreeForeignKey, TreeManyToManyField
from mptt.managers import TreeManager
from reversion import revisions as reversion
class NaturalManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name=name)
class ThingManager(models.Manager):
def get_by_natural_key(self, name, parent):
return self.get(name=name, parent__name=parent)
class NaturalModel(models.Model):
name = models.CharField(max_length=120, unique=True)
def natural_key(self):
return self.name,
objects = NaturalManager()
def __str__(self):
return self.name
class Meta:
abstract = True
class DataForm(NaturalModel):
form = models.TextField(blank=True, verbose_name=_('Data form'))
# calculated_values = ArrayField(models.CharField(max_length=100))
# action = models.CharField()
class Meta:
abstract = True
class DataMixin(models.Model):
description = models.TextField(verbose_name=_('Description'), blank=True)
extra_data = JSONField(verbose_name=_('Extra data'), null=True, blank=True)
# data_form = models.ForeignKey(DataForm, null=True, blank=True)
class Meta:
abstract = True
class AuditMixin(models.Model):
created_on = models.DateTimeField(auto_now_add=True)
updated_on = models.DateTimeField(auto_now=True)
# created_on = models.DateTimeField(default=timezone.now)
# updated_on = models.DateTimeField()
# def save(self, *args, **kwargs):
# self.updated_on = timezone.now()
# super().save(*args, **kwargs)
class Meta:
abstract = True
class NaturalDataModel(DataMixin, NaturalModel):
class Meta:
abstract = True
class MPTTMetaBase:
"""Base Meta class for MPTT models
"""
ordering = ('position', 'tree_id', 'lft')
unique_together = ('name', 'parent')
class Thing(MPTTModel):
""" AbstractThing as an abstract base class for all MPTT based
hierarchical models. It also defines a structural link type and
a jsonb datafield and form to extend models.
"""
name = models.CharField(max_length=250)
description = models.TextField(blank=True, default='')
AP = 'AP'
EC = 'EC'
GS = 'GS'
CI = 'CI'
SL = 'SL'
STRUCTURAL_LINK_OPTIONS = (
(AP, _('part')), # _('Aggregation Participation')),
(EC, _('characteristic')), # _('Exhibition Characterization')),
(GS, _('type')), # _('Generalization Specialization')),
(CI, _('instance')), # _('Classification Instantiation')),
(SL, _('state'))) # _('State')),)
link_type = models.CharField(
max_length=2, choices=STRUCTURAL_LINK_OPTIONS,
default=GS, verbose_name=_('Structural Link Type'),
help_text=_('https://en.wikipedia.org/wiki/Object_Process_Methodology#Structural_and_Procedural_Links'))
PHYSICAL = 'physical'
INFORMATIONAL = 'informational'
ESSENCE_OPTIONS = (
(PHYSICAL, _('physical')),
(INFORMATIONAL, _('informational')))
essence = models.CharField(
max_length=15, choices=ESSENCE_OPTIONS,
default=INFORMATIONAL, verbose_name=_('Is the object physical or informatical?'),
help_text=_('https://en.wikipedia.org/wiki/Object_Process_Methodology#OPM_Things'))
parent = TreeForeignKey('self', null=True, blank=True, related_name='children', db_index=True)
data = JSONField(blank=True, null=True)
data_form = models.ForeignKey(FlatPage, null=True, blank=True, related_name='%(class)s_data_form')
# TODO: Make order mean something.
position = models.PositiveIntegerField(blank=True, default=0)
# def get_absolute_url(self):
# return reverse('assessment:thing_detail', kwargs={'pk': str(self.id)})
def __str__(self):
return self.name
def save(self, *args, **kwargs):
with reversion.create_revision():
reversion.set_comment('Import or backend changes')
super().save(*args, **kwargs)
objects = models.Manager()
tree = TreeManager()
class MPTTMeta:
order_insertion_by = ['position']
class Meta:
abstract = True
ONE = 1
HUNDRED = 100
THOUSAND = 1000
# HUNDRED_THOUSAND = 100000
MILLION = 1000000
# BILLION = '1 000 000'
MULTIPLIER_OPTIONS = (
(ONE, _('one')),
(HUNDRED, _('hundred')),
(THOUSAND, _('thousand')),
# (HUNDRED_THOUSAND, _('hundred thousand')),
(MILLION, _('million')),
# (BILLION, _('hundred million')),
)
|
Python
| 0
|
@@ -432,16 +432,17 @@
ersion%0A%0A
+%0A
class Na
@@ -1058,16 +1058,17 @@
form'))%0A
+%0A
# ca
@@ -1392,24 +1392,25 @@
blank=True)%0A
+%0A
# data_f
@@ -1918,24 +1918,24 @@
ct = True%0A%0A%0A
+
class Natura
@@ -1971,17 +1971,16 @@
Model):%0A
-%0A
clas
@@ -4461,17 +4461,16 @@
True%0A%0A%0A
-%0A
ONE = 1%0A
|
b500c1277645350eb66610d796ffca3170e1bdc5
|
Add options for excluding files
|
hashDir.py
|
hashDir.py
|
import argparse
import sys
from Furtive import Furtive
''' '''
def main():
# If Python 2.6 or lower, should import optparse but erroring out now
if sys.hexversion < 0x02070000:
raise SystemError("Python version 2.7.0 or greater is required. You are running " + sys.version)
parser = argparse.ArgumentParser(description='Get Hash Values of Files within a directory.')
parser.add_argument('--dir', action="store", default=".",
help='''Directory containing files that will be
checked. Default: .''')
parser.add_argument('--hashes', action="store", dest="hashes", default=[''],
help="Hash algorithm to use. Currently supports sha1")
parser.add_argument('--manifest', action="store", dest="manifest",
default='.manifest.db',
help='''Location of the manifast file. Manifests may
be located outside the directory indicated by
--dir. Must provide path and filename of
the manifest file. Default: DIR/.manifest.db''')
parser.add_argument('--update-manifest',action="store_true",
default=False, help='''When this flag is present
update manifest with changes.
Default: False''')
parser.add_argument('--verbose',action="store_true",
default=False, help="Be verbose")
parser.add_argument('--version', action='version',
version='%(prog)s 1.0')
args = parser.parse_args()
hashes = Furtive(args.dir, args.verbose)
# Setting manifest file is optional. Defaults to ./manifest.db
hashes.set_manifest(args.manifest)
hashes.compare()
#hashList = hashes.hashFiles(fileSet)
#previousHashes = hashes.getPreviousHashes()
#report = hashes.compareFileLists(hashList,previousHashes)
print "Added: "
for file in hashes.added:
print " " + hashes.get_hash(file) + " " + file
print "Removed: "
for file in hashes.removed:
print " " + hashes.get_previous_hash(file) + " " + file
print "Unchanged: "
for file in hashes.unchanged:
print " " + hashes.get_previous_hash(file) + " " + hashes.get_hash(file) + " " + file
print "Changed: "
for file in hashes.changed:
print " " + hashes.get_previous_hash(file) + " " + hashes.get_hash(file) + " " + file
if args.update_manifest == True:
hashes.update_manifest()
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -726,16 +726,196 @@
rts sha1
+ only.%22)%0A parser.add_argument('--exclude', action=%22store%22, dest=%22excludes%22, default=%5B''%5D, %0A help=%22Patterns to exclude files and directories from manifest.
%22)%0A p
|
3db5577f860ac5c931fd267092433c3f547693ab
|
Upgrade selenium to version 2.52.0
|
seleniumbase/core/selenium_launcher.py
|
seleniumbase/core/selenium_launcher.py
|
""" Download and run the selenium server jar file """
import subprocess
import os
import socket
import urllib
import time
SELENIUM_JAR = ("http://selenium-release.storage.googleapis.com"
"/2.50/selenium-server-standalone-2.50.1.jar")
JAR_FILE = "selenium-server-standalone-2.50.1.jar"
def download_selenium():
"""
Downloads the selenium server jar file from its
online location and stores it locally.
"""
try:
local_file = open(JAR_FILE, 'wb')
remote_file = urllib.urlopen(SELENIUM_JAR)
print 'Please wait, downloading Selenium...\n'
local_file.write(remote_file.read())
local_file.close()
remote_file.close()
except Exception, details:
raise Exception("Error while downloading Selenium Server. Details: %s"
% details)
def is_running_locally(host, port):
socket_s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
socket_s.connect((host, port))
socket_s.close()
return True
except:
return False
def is_available_locally():
return os.path.isfile(JAR_FILE)
def start_selenium_server(selenium_jar_location, port, file_path):
"""
Starts selenium on the specified port
and configures the output and error files.
Throws an exeption if the server does not start.
"""
process_args = None
process_args = ["java", "-jar", selenium_jar_location, "-port", port]
selenium_exec = subprocess.Popen(
process_args,
stdout=open("%s/log_seleniumOutput.txt" % (file_path), "w"),
stderr=open("%s/log_seleniumError.txt" % (file_path), "w"))
time.sleep(2)
if selenium_exec.poll() == 1:
raise StartSeleniumException("The selenium server did not start."
"Do you already have one runing?")
return selenium_exec
def stop_selenium_server(selenium_server_process):
"""Kills the selenium server. We are expecting an error 143"""
try:
selenium_server_process.terminate()
return selenium_server_process.poll() == 143
except Exception, details:
raise Exception("Cannot kill selenium process, details: " + details)
class StartSeleniumException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def execute_selenium(host, port, file_path):
if is_running_locally(host, port):
return
if not is_available_locally():
download_selenium()
try:
return start_selenium_server(JAR_FILE, port, file_path)
except StartSeleniumException:
print "Selenium Server might already be running. Continuing... "
|
Python
| 0
|
@@ -203,17 +203,17 @@
%22/2.5
-0
+2
/seleniu
@@ -231,27 +231,27 @@
andalone-2.5
-0.1
+2.0
.jar%22)%0AJAR_F
@@ -291,11 +291,11 @@
-2.5
-0.1
+2.0
.jar
|
1fd2623c1e718a1b6685c82f40d4bcb11dd8541d
|
Add a get_collection method.
|
matgendb/util.py
|
matgendb/util.py
|
"""
Utility functions used across scripts
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Dec 1, 2012"
import bson
import datetime
import json
import os
DEFAULT_PORT = 27017
DEFAULT_SETTINGS = [
("host", "localhost"),
("port", DEFAULT_PORT),
("database", "vasp"),
("admin_user", None),
("admin_password", None),
("readonly_user", None),
("readonly_password", None),
("collection", "tasks"),
("aliases_config", None),
("mapi_key", None)
]
def get_settings(config_file):
if config_file:
with open(config_file) as f:
return json.load(f)
elif os.path.exists("db.json"):
with open("db.json") as f:
return json.load(f)
else:
return dict(DEFAULT_SETTINGS)
class MongoJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, bson.objectid.ObjectId):
return str(o)
if isinstance(o, datetime.datetime):
return o.isoformat()
return json.JSONEncoder.default(self, o)
|
Python
| 0
|
@@ -283,16 +283,48 @@
port os%0A
+from pymongo import Connection%0A%0A
%0ADEFAULT
@@ -917,16 +917,366 @@
INGS)%0A%0A%0A
+def get_collection(config_file, admin=False):%0A d = get_settings(config_file)%0A conn = Connection(d%5B%22host%22%5D, d%5B%22port%22%5D)%0A db = conn%5Bd%5B%22database%22%5D%5D%0A user = d%5B%22admin_user%22%5D if admin else d%5B%22readonly_user%22%5D%0A passwd = d%5B%22admin_password%22%5D if admin else d%5B%22readonly_password%22%5D%0A db.authenticate(user, passwd)%0A return db%5Bd%5B%22collection%22%5D%5D%0A%0A%0A
class Mo
|
0477f78f830f1d569ea31f9b1745fe373ad67c2b
|
correct double logging
|
pimat_server/__main__.py
|
pimat_server/__main__.py
|
#!/usr/bin/python
import datetime
import logging
import signal
import sys
import time
import Adafruit_DHT
import RPi.GPIO as GPIO
import configparser
import scheduler
from relays import Relays
from sqlalchemy import Column, Integer, String, Float, DateTime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
GPIO.setmode(GPIO.BCM)
# Bind the DB engine to the metadata of the Base class
Base = declarative_base()
class Schedules(Base):
__tablename__ = 'schedules'
id = Column('id', Integer, primary_key=True)
relay = Column(String(10))
switch = Column(String(50))
start_time = Column(String(5))
stop_time = Column(String(5))
enabled = Column(String(10))
class Sensors(Base):
__tablename__ = 'sensors'
id = Column('id', Integer, primary_key=True)
timestamp = Column(DateTime)
temperature1 = Column(Float)
temperature2 = Column(Float)
humidity = Column(Float)
light1 = Column(Float)
pressure = Column(Float)
altitude = Column(Float)
source = Column(String(100))
def __init__(self, temperature1, humidity, light1):
self.timestamp = datetime.datetime.now()
self.temperature1 = temperature1
self.humidity = humidity
self.light1 = light1
self.source = 'pimat_server'
def get_now():
# get the current date and time as a string
return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
def sigterm_handler(_signo, _stack_frame):
# When sysvinit sends the TERM signal, cleanup before exiting.
print("[" + get_now() + "] received signal {}, exiting...".format(_signo))
GPIO.cleanup()
scheduler.remove_all()
sys.exit(0)
def rc_time(pin_to_circuit):
count = 0
# Output on the pin for
GPIO.setup(pin_to_circuit, GPIO.OUT)
GPIO.output(pin_to_circuit, GPIO.LOW)
time.sleep(0.1)
# Change the pin back to input
GPIO.setup(pin_to_circuit, GPIO.IN)
# Count until the pin goes high
while GPIO.input(pin_to_circuit) == GPIO.LOW:
count += 1
return count
def main():
relay_config = configparser.ConfigParser()
relay_config.read('/opt/pimat/relays.ini')
pimat_config = configparser.ConfigParser()
pimat_config.read('/opt/pimat/config.ini')
sensors_log = logging.getLogger()
handler = logging.FileHandler('/var/log/pimat/sensors.log')
formatter = logging.Formatter('[%(levelname)s] [%(asctime)-15s] [PID: %(process)d] [%(name)s] %(message)s')
handler.setFormatter(formatter)
sensors_log.addHandler(handler)
sensors_log.setLevel(logging.DEBUG)
server_log = logging.getLogger()
handler = logging.FileHandler('/var/log/pimat/pimat-server.log')
formatter = logging.Formatter('[%(levelname)s] [%(asctime)-15s] [PID: %(process)d] [%(name)s] %(message)s')
handler.setFormatter(formatter)
server_log.addHandler(handler)
server_log.setLevel(logging.DEBUG)
server_log.info('Starting booting sequence at {0}'.format(get_now()))
engine = create_engine(pimat_config['database']['engine'])
session = sessionmaker(bind=engine)
db = session()
# Clean cron
scheduler.remove_all()
for relay in relay_config['pins']:
for pin in relay_config['pins'][relay]:
relay_object = Relays(relay, pin)
mode = relay_object.set_mode()
server_log.info('Setting mode {0} for {1}'.format(mode, relay))
time.sleep(0.5)
if relay_config['status'][relay] == '1':
status = relay_object.start()
server_log.info('{0} was {1}'.format(relay, status))
elif relay_config['status'][relay] == '0':
status = relay_object.stop()
server_log.info('{0} was {1}'.format(relay, status))
else:
server_log.error('Wrong status on ini file must be 1 or 0')
raise Exception('Wrong status on ini file must be 1 or 0')
schedules = db.query(Schedules).all()
for schedule in schedules:
server_log.info('Adding schedule with ID: {0} for {1}'.format(schedule.id, schedule.relay))
cron_schedule = scheduler.Cron(schedule.id)
cron_schedule.add_schedule(schedule.relay, schedule.start_time, schedule.stop_time)
try:
server_log.info('Pimat server started, collecting sensors data.')
while True:
total = 0
for x in range(0, 9):
total += rc_time(int(pimat_config['pins']['ldr_sensor']))
average = total / 10
try:
light = (1 / float(average)) * 10000
except ZeroDivisionError:
light = 10000
if light > 10000:
light = 10000
humidity, temperature = Adafruit_DHT.read_retry(Adafruit_DHT.AM2302, int(pimat_config['pins']['temp_sensor']))
if humidity is not None and temperature is not None and light is not None:
sensors_log.info('Temp={0:0.1f}* Humidity={1:0.1f}% Light={2:0.2f}'.format(temperature, humidity, light))
reading = Sensors(temperature, humidity, light)
db.add(reading)
db.commit()
else:
server_log.error('Failed to get reading. Try again!')
GPIO.cleanup()
scheduler.remove_all()
db.close()
raise Exception('Failed to get reading')
time.sleep(120)
except KeyboardInterrupt:
print('Program received a Ctrl+C signal')
finally:
GPIO.cleanup()
scheduler.remove_all()
db.close()
signal.signal(signal.SIGTERM, sigterm_handler)
if __name__ == '__main__':
main()
|
Python
| 0.000006
|
@@ -2324,335 +2324,8 @@
')%0A%0A
- sensors_log = logging.getLogger()%0A handler = logging.FileHandler('/var/log/pimat/sensors.log')%0A formatter = logging.Formatter('%5B%25(levelname)s%5D %5B%25(asctime)-15s%5D %5BPID: %25(process)d%5D %5B%25(name)s%5D %25(message)s')%0A handler.setFormatter(formatter)%0A sensors_log.addHandler(handler)%0A sensors_log.setLevel(logging.DEBUG)%0A%0A
@@ -4699,21 +4699,20 @@
se
-nsors
+rver
_log.inf
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.