Dataset schema (column name, type, observed value range):

commit         stringlengths   40 - 40
subject        stringlengths   1 - 3.25k
old_file       stringlengths   4 - 311
new_file       stringlengths   4 - 311
old_contents   stringlengths   0 - 26.3k
lang           stringclasses   3 values
proba          float64         0 - 1
diff           stringlengths   0 - 7.82k
00bfbae48af80fd12db31aecc663373dce3fa1a8
Format code
megaprojects/core/models.py
megaprojects/core/models.py
import uuid

from django.conf import settings
from django.db import models


class TimeStampedModel(models.Model):
    """
    An abstract base class model that provides self-updating ``created`` and
    ``modified`` fields.
    """
    created = models.DateTimeField(
        auto_now_add=True,
        help_text='The time when this entity was created.')
    changed = models.DateTimeField(
        auto_now=True,
        help_text='The time when this entity was most recently saved.')

    class Meta:
        abstract = True


class BaseModel(TimeStampedModel):
    """
    Abstract model for main entities. Provides a ``title`` and ``uuid`` field.
    """
    title = models.CharField(
        max_length=255,
        help_text='The title of this entity, always treated as non-markup plain text.')
    uuid = models.CharField('UUID', max_length=255, unique=True,
                            help_text='Unique Key: Universally unique identifier for this entity.')

    def __unicode__(self):
        return self.title

    def save(self, *args, **kwargs):
        if not self.uuid:
            self.uuid = uuid.uuid4()
        super(BaseModel, self).save(*args, **kwargs)

    class Meta:
        abstract = True


class AuthorModel(BaseModel):
    """
    Builds upon ``BaseModel`` by adding a ``author`` field.
    """
    author = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        help_text='The user that owns this entity; Initially, this is the user that created it.')

    class Meta:
        abstract = True


class ImageModel(BaseModel):
    """
    Abstract base class model for Image fields.
    """
    alt = models.CharField(max_length=255, blank=True,
                           help_text='Alternative image text, for the image\'s \'alt\' attribute.')
    status = models.BooleanField(
        default=True,
        help_text='Boolean indicating whether the entity is published (visible to non-administrators).')
    reviewed = models.BooleanField(
        help_text='Object has been reviewed (quality control).')
    thumbnail = models.BooleanField(help_text='Set as main object thumbnail.')

    def __unicode__(self):
        return self.uuid

    class Meta:
        abstract = True
Python
0.000002
@@ -2053,26 +2053,63 @@ xt=' -Set as main object +Boolean indicating whether the entity is the main model thu
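The hunk header above, @@ -2053,26 +2053,63 @@, appears to count characters of old_contents rather than lines, since each old_contents cell is stored as a single string; that reading is an inference from the data, not something the dump documents. A short sketch of inspecting such a span, where the file path is a hypothetical stand-in for the row's contents:

# A minimal sketch, assuming (unverified) that "@@ -start,len +start,len @@"
# headers index characters of old_contents rather than lines.
old_contents = open("models.py").read()    # hypothetical: the row's old file text
start, length = 2053, 26                   # offsets copied from the hunk header above
print(old_contents[start:start + length])  # the exact span the hunk replaces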
93b25421bb1cca24e6927304d23501edf1484a22
Add sstable count metric
metartg/checks/cassandra.py
metartg/checks/cassandra.py
#!/usr/bin/env python
import simplejson as json
from time import time
import subprocess
import os


def tpstats_metrics():
    p = subprocess.Popen([
        '/usr/bin/java', '-jar',
        '/usr/share/metartg/contrib/GenericJMXLogJSON.jar',
        'localhost', '8080',
        'org.apache.cassandra.concurrent:*',
    ], stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()

    now = int(time())
    metrics = {}
    for line in stdout.split('\n'):
        if not line:
            continue
        line = json.loads(line)
        name = line['name'].split('=', 1)[1]
        for label in ('ActiveCount', 'PendingTasks', 'CompletedTasks'):
            if label == 'CompletedTasks':
                datatype = 'COUNTER'
            else:
                datatype = 'GAUGE'
            metrics['%s_%s' % (name, label)] = {
                'ts': now,
                'type': datatype,
                'value': line[label],
            }
    return metrics


def sstables_metrics():
    metrics = {}
    for keyspace in os.listdir('/mnt/var/lib/penelope/data'):
        now = int(time())
        sizes = {}
        for filename in os.listdir('/mnt/var/lib/penelope/data/' + keyspace):
            if not filename.endswith('-Data.db'):
                continue
            columnfamily = filename.split('-', 1)[0]
            if not columnfamily in sizes:
                sizes[columnfamily] = []
            st = os.stat('/mnt/var/lib/penelope/data/%s/%s' % (keyspace, filename))
            sizes[columnfamily].append(st.st_size)

        for columnfamily in sizes:
            metrics['%s.%s.min' % (keyspace, columnfamily)] = {
                'ts': now,
                'type': 'GAUGE',
                'value': min(sizes[columnfamily]),
            }
            metrics['%s.%s.max' % (keyspace, columnfamily)] = {
                'ts': now,
                'type': 'GAUGE',
                'value': max(sizes[columnfamily]),
            }
            metrics['%s.%s.avg' % (keyspace, columnfamily)] = {
                'ts': now,
                'type': 'GAUGE',
                'value': (sum(sizes[columnfamily]) / len(sizes[columnfamily])),
            }
            metrics['%s.%s.total' % (keyspace, columnfamily)] = {
                'ts': now,
                'type': 'GAUGE',
                'value': sum(sizes[columnfamily]),
            }
    return metrics


def run_check(callback):
    callback('cassandra_tpstats', tpstats_metrics())
    callback('cassandra_sstables', sstables_metrics())
Python
0.002345
@@ -2341,32 +2341,223 @@ ),%0A %7D +%0A metrics%5B'%25s.%25s.count' %25 (keyspace, columnfamily)%5D = %7B%0A 'ts': now,%0A 'type': 'GAUGE',%0A 'value': len(sizes%5Bcolumnfamily%5D),%0A %7D %0A%0A return met
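The diff cells percent-encode special characters: %0A is a newline, %25 a literal percent sign, %5B/%5D square brackets, %7B/%7D braces, %22 a double quote. A minimal decoding sketch using only the standard library; the encoded fragment is copied from the diff above:

# Decode a percent-encoded diff cell into readable text.
from urllib.parse import unquote

encoded = "),%0A %7D +%0A metrics%5B'%25s.%25s.count' %25 (keyspace, columnfamily)%5D = %7B%0A"
print(unquote(encoded))  # newlines, braces and brackets restored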
cdcae64d095a7cbab99e439bc37ee7009fe5c482
Mark version 0.3.1
mezzanine_polls/__init__.py
mezzanine_polls/__init__.py
__version__ = 0.3
Python
0.000008
@@ -10,9 +10,11 @@ __ = 0.3 +.1 %0A
afaa2dd700a7474b81b981b266ee5aaa977d28d5
Update team_rank_request.py to use response.json()
team_rank_request.py
team_rank_request.py
import requests
from requests.auth import HTTPBasicAuth
import secret
import json

x = 0
y = 0
parameters = 'teamstats'

response = requests.get(
    'https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats',
    auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

rawdata = response.content
data = json.loads(rawdata.decode())

teamlist = data["playoffteamstandings"]["conference"][0]["teamentry"]

for afc_team_list in teamlist:
    afc_team_name = data["playoffteamstandings"]["conference"][0]["teamentry"][x]["team"]["Name"]
    afc_team_city = data["playoffteamstandings"]["conference"][0]["teamentry"][x]["team"]["City"]
    afc_team_id = data["playoffteamstandings"]["conference"][0]["teamentry"][x]["team"]["ID"]
    afc_team_abbr = data["playoffteamstandings"]["conference"][0]["teamentry"][x]["team"]["Abbreviation"]
    afc_rank = data["playoffteamstandings"]["conference"][0]["teamentry"][x]["rank"]
    print((afc_team_name), (afc_team_city), (afc_team_id), (afc_team_abbr), afc_rank)
    x = x + 1

for nfc_team_list in teamlist:
    nfc_team_name = data["playoffteamstandings"]["conference"][1]["teamentry"][y]["team"]["Name"]
    nfc_team_city = data["playoffteamstandings"]["conference"][1]["teamentry"][y]["team"]["City"]
    nfc_team_id = data["playoffteamstandings"]["conference"][1]["teamentry"][y]["team"]["ID"]
    nfc_team_abbr = data["playoffteamstandings"]["conference"][1]["teamentry"][y]["team"]["Abbreviation"]
    nfc_rank = data["playoffteamstandings"]["conference"][1]["teamentry"][y]["rank"]
    y = y + 1
    print((nfc_team_name), (nfc_team_city), (nfc_team_id), (nfc_team_abbr), nfc_rank)

last_update = data["playoffteamstandings"]["lastUpdatedOn"]
print(last_update)
Python
0.000001
@@ -312,16 +312,42 @@ f_pw))%0A%0A +data = response.json()%0A%0A# rawdata @@ -365,16 +365,18 @@ content%0A +# data = j @@ -404,16 +404,18 @@ ode())%0A%0A +%0A%0A teamlist
e16fbffeaa31fcffe2a2b511828427473217d3c2
Fix iOS template by adding checking on string icon corresponding to identifier
templates/tpl.ios.py
templates/tpl.ios.py
from string import Template

### Strings

licence = """The MIT License (MIT)

Copyright (c) 2015 Cobaltians

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""

### Templates

# FontName.h template
tplfonth = Template("""#import <Foundation/Foundation.h>
#import <Cobalt/CobaltFont.h>

@interface Font${fontkey} : NSObject <CobaltFont>

@end
""")

# FontName.m template
tplfontm = Template("""
#import "Font${fontkey}.h"

@implementation Font${fontkey}

/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark COBALT FONT
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

+ (UIImage *)imageWithIcon:(NSString *)identifier color:(UIColor *)color andSize:(CGSize)size {
    CGFloat scale = [UIScreen mainScreen].scale;
    if ([UIScreen instancesRespondToSelector:@selector(scale)]) {
        UIGraphicsBeginImageContextWithOptions(size, NO, scale);
    }
    else {
        UIGraphicsBeginImageContext(size);
    }

    NSString *icon = [Font${fontkey} stringForIcon:identifier];
    NSRange iconRange = NSMakeRange(0, icon.length);
    UIColor *backgroundColor = [UIColor clearColor];
    CGRect textRect = CGRectMake(0, 0, size.width, size.height);

    UIFont *font = [UIFont fontWithName:@"${fontkey}" size:size.height];

    NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init];
    paragraphStyle.alignment = NSTextAlignmentCenter;

    NSMutableAttributedString *text = [[NSMutableAttributedString alloc] initWithString:icon];
    [text addAttribute:NSFontAttributeName value:font range:iconRange];
    [text addAttribute:NSForegroundColorAttributeName value:color range:iconRange];
    [text addAttribute:NSBackgroundColorAttributeName value:backgroundColor range:iconRange];
    [text addAttribute:NSParagraphStyleAttributeName value:paragraphStyle range:iconRange];
    [text drawInRect:textRect];

    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}

/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark FONT
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

+ (NSString *)stringForIcon:(NSString *)identifier {
    return [[Font${fontkey} glyphDictionary] objectForKey:identifier];
}

+ (NSDictionary *)glyphDictionary {
    static NSDictionary *glyphDictionary = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        glyphDictionary = @{${tokenlistkey}};
    });
    return glyphDictionary;
}

@end
""")
Python
0
@@ -2135,24 +2135,51 @@ dentifier%5D;%0A + if (icon != nil) %7B%0A NSRange @@ -2219,16 +2219,20 @@ ength);%0A + UICo @@ -2272,24 +2272,28 @@ learColor%5D;%0A + CGRect t @@ -2346,24 +2346,28 @@ ight);%0A %0A + UIFont * @@ -2443,16 +2443,20 @@ + size:siz @@ -2467,24 +2467,28 @@ ight%5D;%0A %0A + NSMutabl @@ -2557,24 +2557,28 @@ loc%5D init%5D;%0A + paragrap @@ -2620,24 +2620,28 @@ enter;%0A %0A + NSMutabl @@ -2723,16 +2723,20 @@ :icon%5D;%0A + %5Btex @@ -2783,24 +2783,28 @@ + + value:font%0A @@ -2802,16 +2802,20 @@ ue:font%0A + @@ -2837,32 +2837,36 @@ iconRange%5D;%0A + + %5Btext addAttribu @@ -2912,24 +2912,28 @@ + value:color%0A @@ -2941,32 +2941,36 @@ + + range:iconRange%5D @@ -2963,32 +2963,36 @@ nge:iconRange%5D;%0A + %5Btext addAtt @@ -3042,24 +3042,28 @@ + + value:backgr @@ -3081,32 +3081,36 @@ + range:iconRange%5D @@ -3111,24 +3111,28 @@ Range%5D;%0A + + %5Btext addAtt @@ -3181,24 +3181,28 @@ + value:paragr @@ -3219,32 +3219,36 @@ + + range:iconRange%5D @@ -3245,24 +3245,28 @@ iconRange%5D;%0A + %5Btext dr @@ -3281,29 +3281,29 @@ :textRect%5D;%0A +%0A -%0A UIImage @@ -3352,24 +3352,28 @@ eContext();%0A + UIGraphi @@ -3389,24 +3389,28 @@ eContext();%0A + return i @@ -3415,16 +3415,22 @@ image;%0A + %7D%0A %7D%0A%0A/////
f326dd569e9240c2b883e9c5f436728f321a0c61
Add TransactionMiddleware
tenant/middleware.py
tenant/middleware.py
from django.core.urlresolvers import resolve
from django.shortcuts import get_object_or_404

from tenant.models import Tenant
from tenant.utils import connect_tenant_provider, disconnect_tenant_provider


class TenantMiddleware(object):
    def process_request(self, request):
        request.tenant = None
        name = resolve(request.path).kwargs.get('tenant') or request.GET.get('tenant')
        if name:
            tenant = get_object_or_404(Tenant, name=name)
            request.tenant = tenant
            connect_tenant_provider(request, tenant.name)

    def process_response(self, request, response):
        disconnect_tenant_provider(request)
        request.tenant = None
        return response
Python
0
@@ -84,16 +84,50 @@ t_or_404 +%0Afrom django.db import transaction %0A%0Afrom t @@ -761,8 +761,1182 @@ esponse%0A +%0A%0A%0Aclass TransactionMiddleware(object):%0A def get_tenant(self, request):%0A tenant = getattr(request, 'tenant', None)%0A if tenant:%0A return tenant.ident%0A %0A def process_request(self, request):%0A %22%22%22Enters transaction management%22%22%22%0A transaction.enter_transaction_management(using=self.get_tenant(request))%0A transaction.managed(True, using=self.get_tenant(request))%0A%0A def process_exception(self, request, exception):%0A %22%22%22Rolls back the database and leaves transaction management%22%22%22%0A if transaction.is_dirty(using=self.get_tenant(request)):%0A transaction.rollback(using=self.get_tenant(request))%0A transaction.leave_transaction_management(using=self.get_tenant(request))%0A%0A def process_response(self, request, response):%0A %22%22%22Commits and leaves transaction management.%22%22%22%0A if transaction.is_managed(using=self.get_tenant(request)):%0A if transaction.is_dirty(using=self.get_tenant(request)):%0A transaction.commit(using=self.get_tenant(request))%0A transaction.leave_transaction_management(using=self.get_tenant(request))%0A return response%0A
28332ebd6292223d5b5197b98160b0b3831c1ed4
Fix conversion warning
modules/nettokom/backend.py
modules/nettokom/backend.py
# -*- coding: utf-8 -*-

# Copyright(C) 2012 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.

from weboob.capabilities.bill import ICapBill, Subscription, SubscriptionNotFound, Detail
from weboob.tools.backend import BaseBackend, BackendConfig
from weboob.tools.value import ValueBackendPassword

from .browser import Nettokom

__all__ = ['NettoKomBackend']


class NettoKomBackend(BaseBackend, ICapBill):
    NAME = 'nettokom'
    MAINTAINER = u'Florent Fourcot'
    EMAIL = 'weboob@flo.fourcot.fr'
    VERSION = '0.i'
    LICENSE = 'AGPLv3+'
    DESCRIPTION = 'Nettokom website'
    CONFIG = BackendConfig(ValueBackendPassword('login',
                                                label='Account ID (phone number)',
                                                masked=False,
                                                regexp='^(\d{8,13}|)$'),
                           ValueBackendPassword('password',
                                                label='Password')
                           )
    BROWSER = Nettokom

    def create_default_browser(self):
        return self.create_browser(self.config['login'].get(),
                                   self.config['password'].get())

    def iter_subscription(self):
        for subscription in self.browser.get_subscription_list():
            yield subscription

    def get_subscription(self, _id):
        with self.browser:
            subscription = self.browser.get_subscription(_id)
            if subscription:
                return subscription
            else:
                raise SubscriptionNotFound()

    def iter_bills_history(self, subscription):
        with self.browser:
            for history in self.browser.get_history():
                yield history

    # The subscription is actually useless, but maybe for the futur...
    def get_details(self, subscription):
        with self.browser:
            for detail in self.browser.get_details():
                yield detail

    def get_balance(self, subscription):
        if not isinstance(subscription, Subscription):
            subscription = self.get_subscription(subscription)
        balance = Detail()
        balance.id = "%s-balance" % subscription.id
        balance.price = subscription._balance
        balance.label = u"Balance %s" % subscription.id
        balance.currency = 'EUR'
        return balance
Python
0
@@ -2957,16 +2957,17 @@ rency = +u 'EUR'%0A
fa5fdc92f9f0a2eee1486e65d495979c3190fad8
put back the join message tj took out when he was going to do it another way
modules/python/ircclient.py
modules/python/ircclient.py
# Copyright (c) 2012 Stuart Walsh
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

from ircd import register, event, Target, have_target
from ircd.user import *
from pythonwrap import Client, Channel

import numerics
import time


@register("NICK", min_args=1, max_args=1, access=0)
def handle_nick(client, nick):
    target = Client.find_by_name(nick)
    if target and target != client:
        client.numeric(numerics.ERR_NICKNAMEINUSE, nick)
        return

    old_str = str(client)

    if not Client.nick_changing.fire(client, nick):
        return

    Client.del_name(client)
    client.Name = nick
    Client.add_name(client)

    if not client.is_registered():
        check_and_register(client)
    else:
        client.send(":{oldstr} NICK :{nick}", oldstr=old_str, nick=client.Name)
        Client.nick_changed.fire(client, old_str)


@register("USER", min_args=4, max_args=4, access=0)
def handle_user(client, username, unused, unused2, realname):
    if client.is_registered():
        client.numeric(numerics.ERR_ALREADYREGISTERED)
        return

    client.Username = username
    client.Realname = realname
    check_and_register(client)


@register("PING", min_args=1, max_args=1)
def handle_ping(client, arg):
    client.send(":{me} PONG {me} :{arg}", arg=arg)


@register("PONG", min_args=1, max_args=2, access=0)
def handle_pong(client, *args):
    pass


@register("MODE", min_args=1, max_args=2)
def handle_mode(client, name, *arg):
    if name[:1] == '#':
        pass
    else:
        target = Client.find_by_name(name)
        if not target:
            client.numeric(numerics.ERR_NOSUCHNICK, name)
            return

        if target != client:
            client.numeric(numerics.ERR_USERSDONTMATCH)
            return

        if len(arg) > 0:
            set_user_mode(client, arg[0])
        else:
            set_user_mode(client, None)


@register("WHOIS", min_args=1, max_args=2)
@have_target(epilog=numerics.RPL_ENDOFWHOIS)
def handle_whois(client, target, *arg):
    client.numeric(numerics.RPL_WHOISUSER, target.Name, target.Username,
                   target.Host, target.Realname)
    client.numeric(numerics.RPL_WHOISIDLE, target.Name, target.Idletime);
    client.numeric(numerics.RPL_WHOISSERVER, target.Name, str(Client.Me),
                   Client.Me.Info)

    min_len = 1 + len(str(Client.Me)) + 1 + 3 + 1 + len(target.Name)  # ":foo.server.com 000 client :"
    channels = ""
    for channel in client.Channels:
        if min_len + len(channels) + len(channel.Name) >= 510:
            client.numeric(numerics.RPL_WHOISCHANNELS, target.Name, channels)
            channels = ""
        channels += channel.Name + " "
    client.numeric(numerics.RPL_WHOISCHANNELS, target.Name, channels)
    client.numeric(numerics.RPL_ENDOFWHOIS, target.Name)


@register("WHOWAS", min_args=1, max_args=2)
@have_target(numeric=numerics.ERR_WASNOSUCHNICK,
             epilog=numerics.RPL_ENDOFWHOWAS)
def handle_whowas(client, name, *arg):
    pass


@register("QUIT", min_args=0, max_args=1, access=0)
def handle_quit(client, *arg):
    if len(arg) > 0:
        reason = arg[0]
    else:
        reason = ""
    client.close(reason)


@register("PRIVMSG", min_args=2, max_args=2)
@have_target()
def handle_privmsg(client, target, message):
    target.send(":{client} PRIVMSG {name} :{message}", client=client,
                name=target.Name, message=message)
    client.LastMessage = int(time.time())


@register("NOTICE", min_args=2, max_args=2)
@have_target()
def handle_notice(client, target, message):
    target.send(":{client} NOTICE {name} :{message}", client=client,
                name=target.Name, message=message)


@register("MOTD", min_args=0, max_args=1)
def handle_motd(client, *args):
    send_motd(client)


@register("JOIN", min_args=1, max_args=2)
def handle_join(client, cname, *args):
    for name in cname.split(','):
        channel = Channel.find(name)
        if not channel:
            if name[0] != '#':
                client.numeric(numerics.ERR_BADCHANNAME, name)
                continue
            channel = Channel()
            channel.Name = name
            Channel.add(channel)

        if not channel.is_member(client) and Channel.joining.fire(channel, client):
            channel.add_member(client)
            channel.send_names(client)


@register("NAMES", min_args=1, max_args=1)
@have_target()
def handle_names(client, target):
    target.send_names(client)


@register("PART", min_args=1, max_args=2)
@have_target(Target.CHANNEL)
def handle_part(client, target, *args):
    if len(args) > 0:
        reason = args[0]
    else:
        reason = ""

    if not target.is_member(client):
        client.numeric(numerics.ERR_NOTONCHANNEL, target.Name)
        return

    target.send(":{client} PART {channel} :{reason}", client=client,
                channel=target.Name, reason=reason)
    client.send(":{client} PART {channel} :{reason}", channel=target.Name,
                reason=reason)

    client.remove_channel(target)
    target.remove_member(client)


@event(Client.closing)
def client_closing(client, reason):
    client.send_channels_common(":{client} QUIT :{reason}", reason=reason)


@event(Client.nick_changed)
def client_nick_changed(client, old_source):
    client.send_channels_common(":{old} NICK :{new}", old=old_source,
                                new=client.Name)


@event(Channel.joined)
def channel_joined(channel, client):
    channel.send(":{client} JOIN :{channel}", client=client)
Python
0
@@ -4939,24 +4939,88 @@ l, client):%0A + client.send(%22:%7Bclient%7D JOIN :%7Bchannel%7D%22, channel=channel)%0A channe
48d4b35f92d848613297bdb9b5154f32b2c45d3b
rename 'set', add a missing return and fix a couple of other mode bugs
modules/python/ircclient.py
modules/python/ircclient.py
# Copyright (c) 2012 Stuart Walsh
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.

from ircd import register, event
from pythonwrap import Client
from sets import Set


@register("NICK", min_args=1, max_args=2, access=0)
def handle_nick(client, nick):
    if Client.find_by_name(nick):
        client.numeric(433, nick)
        return

    client.Name = nick
    Client.add_name(client)
    check_and_register(client)


@register("USER", min_args=4, max_args=4, access=0)
def handle_user(client, username, unused, unused2, realname):
    if client.is_registered():
        client.numeric(462)
        return

    client.Username = username
    client.Realname = realname
    check_and_register(client)


@register("PING", min_args=1, max_args=1, access=1)
def handle_ping(client, arg):
    client.send(":{me} PONG {me} :{arg}", arg=arg)


@register("MODE", min_args=1, max_args=2, access=1)
def handle_mode(client, name, *arg):
    target = Client.find_by_name(name)
    if not target:
        client.numeric(401, name)
        return

    if target != client:
        client.numeric(502)
        return

    set_before = Set()
    if client.Invisible:
        set_before.add('i')

    if len(arg) == 0:
        mode = "+"
        for c in set_before:
            mode += c
        client.send("{client} MODE :{mode}", mode=mode)
        return

    set = True
    invalid = False
    set_after = Set()

    for c in arg[0]:
        if c == '+':
            set = True
        elif c == '-':
            set = False
        elif c == 'i':
            client.Invisible = set
            if set:
                set_after.add(c)
            else:
                if c in set_after:
                    set_after.remove(c)
        else:
            invalid = True

    if invalid:
        client.numeric(501)

    mode = ""
    added = set_after - set_before
    removed = set_before - set_after

    if len(added) > 0:
        mode += '+'
        for c in added:
            mode += c

    if len(removed) > 0:
        mode += '-'
        for c in removed:
            mode += c

    client.send(":{client} MODE {name} :{mode}", name=client.Name, mode=mode)


def check_and_register(client):
    if client.is_registered():
        return

    if client.Name and client.Username:
        Client.add(client)


@event(Client.disconnected)
def client_disconnected(client):
    if(client.Name):
        Client.del_name(client)
Python
0
@@ -2290,19 +2290,20 @@ rn%0A %0A -set +plus = True%0A @@ -2383,19 +2383,20 @@ :%0A -set +plus = True%0A @@ -2420,19 +2420,20 @@ :%0A -set +plus = False @@ -2477,19 +2477,20 @@ sible = -set +plus %0A i @@ -2491,19 +2491,20 @@ if -set +plus :%0A @@ -2893,16 +2893,38 @@ e += c%0A%0A + if len(mode) %3E 0:%0A client
7e2d3ca342749940e90cbc5d69df38bcc5a9f644
Fix queries too slow on list and get tests
moirai/database/__init__.py
moirai/database/__init__.py
# -*- coding: utf-8; -*-
#
# Copyright (c) 2016 Álan Crístoffer
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""
Database class. Connects to MongoDB and abstracts all communication with it.
"""

import time
import uuid

from pymongo import MongoClient


class DatabaseV1(object):
    """
    Database class. Connects to MongoDB and abstracts all communication with
    it.
    """

    def __init__(self):
        self.client = MongoClient()
        self.db = self.client.moirai
        self.token_lifespan = 24 * 3600

    def close(self):
        self.client.close()

    def settings_collection(self):
        return self.db.settings

    def set_setting(self, key, value):
        db = self.settings_collection()
        db.replace_one({"key": key}, {"key": key, "value": value}, upsert=True)

    def get_setting(self, key):
        db = self.settings_collection()
        document = db.find_one({"key": key})
        if document:
            return document['value']
        else:
            return None

    def verify_token(self, token):
        now = time.time()
        span = self.token_lifespan
        ts = self.get_setting('tokens')
        vs = [t for t in ts if t['token'] == token and now - t['time'] < span]
        if len(vs) > 0:
            ts = [t for t in ts if t['token'] != token]
            ts.append({'token': token, 'time': now})
            self.set_setting('tokens', ts)
            return True
        return False

    def generate_token(self):
        t = {
            'token': uuid.uuid4().hex,
            'time': time.time()
        }
        ts = self.get_setting('tokens') or []
        ts += [t]
        ts = [t for t in ts if time.time() - t['time'] < self.token_lifespan]
        self.set_setting('tokens', ts)
        return t['token']

    def save_test_sensor_value(self, test, sensor, value, time, start_time):
        db = self.db.test_sensor_values
        data = {
            'test': test,
            'sensor': sensor,
            'value': value,
            'time': time,
            'start_time': start_time
        }
        db.insert_one(data)

    def list_test_data(self):
        db = self.db.test_sensor_values
        cursor = db.aggregate([
            {
                '$group': {
                    '_id': {
                        'name': '$test',
                        'date': '$start_time'
                    }
                }
            },
            {
                '$replaceRoot': {
                    'newRoot': "$_id"
                }
            }
        ])
        return list(cursor)

    def get_test_data(self, test, start_time, skip=0):
        db = self.db.test_sensor_values
        cursor = db.find({
            'test': test,
            'start_time': start_time
        }, skip=skip, sort=[('time', 1)])

        points = [{
            'sensor': o['sensor'],
            'time': o['time'],
            'value': o['value']
        } for o in cursor]

        return points

    def remove_test(self, test, start_time):
        self.db.test_sensor_values.delete_many({
            'test': test,
            'start_time': start_time
        })
Python
0
@@ -1520,16 +1520,48 @@ 4 * 3600 +%0A self.__create_indexes() %0A%0A de @@ -3244,32 +3244,192 @@ %5B%0A %7B%0A + '$match': %7B%0A 'time': %7B%0A '$lt': 1%0A %7D%0A %7D%0A %7D,%0A %7B%0A @@ -3890,15 +3890,70 @@ db. -find(%7B%0A +aggregate(%5B%0A %7B%0A '$match': %7B%0A @@ -3978,32 +3978,40 @@ st,%0A + 'start_time': st @@ -4031,98 +4031,279 @@ -%7D, skip=skip, sort=%5B( + %7D%0A %7D,%0A %7B%0A '$sort': %7B%0A 'time' -, 1)%5D)%0A%0A points = %5B%7B%0A 'sensor': o%5B'sensor'%5D,%0A +: 1%0A %7D%0A %7D,%0A %7B%0A '$skip': skip%0A %7D,%0A %7B%0A '$project': %7B%0A 'sensor': 1,%0A @@ -4310,24 +4310,26 @@ + 'time': o%5B'time' @@ -4324,19 +4324,19 @@ e': -o%5B'time'%5D,%0A +1,%0A @@ -4356,45 +4356,82 @@ e': -o%5B'value'%5D%0A %7D for o in cursor%5D +1,%0A '_id': 0%0A %7D%0A %7D%0A %5D) %0A%0A @@ -4447,14 +4447,20 @@ urn -points +list(cursor) %0A%0A @@ -4625,8 +4625,252 @@ %7D)%0A +%0A def __create_indexes(self):%0A self.db.test_sensor_values.create_index('time', name='time')%0A self.db.test_sensor_values.create_index('st', name='start_time')%0A self.db.test_sensor_values.create_index('test', name='test')%0A
83184a9ada18a82f3dfa4d73539f7a927ce50f01
Fix contact search breadcrumbs which were not correctly referencing parents
molly/apps/contact/views.py
molly/apps/contact/views.py
import simplejson
import hashlib
import urllib2
from datetime import timedelta

from django.http import HttpResponse
from django.utils.translation import ugettext as _

from molly.utils.views import BaseView
from molly.utils.breadcrumbs import *
from molly.apps.contact.providers import TooManyResults

from .forms import GenericContactForm



class IndexView(BaseView):
    @BreadcrumbFactory
    def breadcrumb(self, request, context):
        return Breadcrumb(
            self.conf.local_name,
            None,
            _('Contact search'),
            lazy_reverse('index'),
        )

    def initial_context(self, request):
        return {
            'form': self.conf.provider.form(request.GET or None),
            'medium_choices': self.conf.provider.medium_choices,
        }

    def handle_GET(self, request, context):
        return self.render(request, context, 'contact/index',
                           expires=timedelta(days=28))


class ResultListView(IndexView):
    @BreadcrumbFactory
    def breadcrumb(self, request, context):
        return Breadcrumb(
            self.conf.local_name,
            None,
            _('Contact search'),
            lazy_reverse('result_list'),
        )

    def handle_GET(self, request, context):
        provider = self.conf.provider
        form = provider.form(request.GET or None)

        medium = request.GET.get('medium')
        if not medium in [m[0] for m in provider.medium_choices]:
            medium = provider.medium_choices[0][0]

        if form.is_valid():
            query = provider.normalize_query(form.cleaned_data, medium)
            try:
                people = provider.perform_query(**query)
            except TooManyResults:
                return self.handle_error(
                    request, context,
                    _("Your search returned too many results."))
            context.update({
                'results': people,
                'medium': medium,
            })

        context['form'] = form
        return self.render(request, context, 'contact/result_list',
                           expires=timedelta(days=7))

    def handle_error(self, request, context, message):
        context.update({
            'message': message,
        })
        return self.render(request, context, 'contact/result_list')


class ResultDetailView(BaseView):
    @BreadcrumbFactory
    def breadcrumb(self, request, context, id):
        return Breadcrumb(
            self.conf.local_name,
            None,
            _('Contact search'),
            lazy_reverse('result_detail', id),
        )

    def handle_GET(self, request, context, id):
        try:
            context['result'] = self.conf.provider.fetch_result(id)
        except BaseContactProvider.NoSuchResult:
            raise Http404

        return self.render(request, context, 'contact/result_detail')
Python
0.000002
@@ -109,16 +109,25 @@ Response +, Http404 %0Afrom dj @@ -288,16 +288,37 @@ s import + BaseContactProvider, TooMany @@ -331,16 +331,34 @@ s%0A%0Afrom +molly.apps.contact .forms i @@ -383,18 +383,16 @@ ctForm%0A%0A -%0A%0A class In @@ -1164,36 +1164,52 @@ me,%0A -None +lazy_parent('index') ,%0A _( @@ -2563,36 +2563,58 @@ me,%0A -None +lazy_parent('result_list') ,%0A _( @@ -2956,17 +2956,15 @@ /result_ -detail +list ')%0A
5168c98ea9b903a06cb52c79da81fe598abcb570
use correct import
mpf/devices/shot_profile.py
mpf/devices/shot_profile.py
"""Shot profiles.""" from mpfmc.core.mode import Mode from mpf.core.system_wide_device import SystemWideDevice from mpf.core.mode_device import ModeDevice class ShotProfile(ModeDevice, SystemWideDevice): """A shot profile.""" config_section = 'shot_profiles' collection = 'shot_profiles' class_label = 'shot_profile' def device_removed_from_mode(self, mode: Mode) -> None: """Remove from mode.""" pass
Python
0.000013
@@ -26,10 +26,8 @@ mpf -mc .cor
411b2a0758126a7a0eaafe1a8831c8c19abd903c
insert ICA reportlet title
mriqc/reports/individual.py
mriqc/reports/individual.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=no-member
#
# @Author: oesteban
# @Date:   2016-01-05 11:33:39
# @Email:  code@oscaresteban.es
# @Last modified by: oesteban
"""
Encapsulates report generation functions

"""
from __future__ import print_function, division, absolute_import, unicode_literals


def individual_html(in_iqms, in_plots=None):
    from os import path as op
    import datetime
    from json import load
    from mriqc import logging, __version__ as ver
    from mriqc.utils.misc import BIDS_COMP
    from mriqc.reports import REPORT_TITLES
    from mriqc.reports.utils import iqms2html, read_report_snippet
    from mriqc.data import IndividualTemplate
    report_log = logging.getLogger('mriqc.report')

    def _get_details(in_iqms, modality):
        in_prov = in_iqms.pop('provenance', {})
        warn_dict = in_prov.pop('warnings', None)
        sett_dict = in_prov.pop('settings', None)

        wf_details = []
        if modality == 'bold':
            bold_exclude_index = in_iqms.get('dumb_trs')
            if bold_exclude_index is None:
                report_log.warning('Building bold report: no exclude index was found')
            elif bold_exclude_index > 0:
                msg = """\
<span class="problematic">Non-steady state (strong T1 contrast) has been detected in the \
first {} volumes</span>. They were excluded before generating any QC measures and plots."""
                wf_details.append(msg.format(bold_exclude_index))

            hmc_fsl = sett_dict.pop('hmc_fsl')
            if hmc_fsl is not None:
                msg = 'Framewise Displacement was computed using '
                if hmc_fsl:
                    msg += 'FSL <code>mcflirt</code>'
                else:
                    msg += 'AFNI <code>3dvolreg</code>'
                wf_details.append(msg)

            fd_thres = sett_dict.pop('fd_thres')
            if fd_thres is not None:
                wf_details.append(
                    'Framewise Displacement threshold was defined at %f mm' % fd_thres)
        elif modality in ('T1w', 'T2w'):
            if warn_dict.pop('small_air_mask', False):
                wf_details.append(
                    '<span class="problematic">Detected hat mask was too small</span>')

            if warn_dict.pop('large_rot_frame', False):
                wf_details.append(
                    '<span class="problematic">Detected a zero-filled frame, has the original '
                    'image been rotated?</span>')

        return in_prov, wf_details

    with open(in_iqms) as jsonfile:
        iqms_dict = load(jsonfile)

    # Now, the in_iqms file should be correctly named
    fname = op.splitext(op.basename(in_iqms))[0]
    out_file = op.abspath(fname + '.html')

    # Extract and prune metadata
    metadata = iqms_dict.pop('bids_meta', None)
    mod = metadata.pop('modality', None)
    prov, wf_details = _get_details(iqms_dict, mod)

    file_id = [metadata.pop(k, None) for k in list(BIDS_COMP.keys())]
    file_id = [comp for comp in file_id if comp is not None]

    if in_plots is None:
        in_plots = []
    else:
        in_plots = [(REPORT_TITLES[mod][i], read_report_snippet(v))
                    for i, v in enumerate(in_plots)]

    pred_qa = None  # metadata.pop('mriqc_pred', None)
    config = {
        'modality': mod,
        'sub_id': '_'.join(file_id),
        'timestamp': datetime.datetime.now().strftime("%Y-%m-%d, %H:%M"),
        'version': ver,
        'imparams': iqms2html(iqms_dict, 'iqms-table'),
        'svg_files': in_plots,
        'workflow_details': wf_details,
        'provenance': iqms2html(prov, 'provenance-table'),
        'metadata': iqms2html(metadata, 'metadata-table'),
        'pred_qa': pred_qa
    }

    if config['metadata'] is None:
        config['workflow_details'].append(
            '<span class="warning">File has no metadata</span> '
            '<span>(sidecar JSON file missing or empty)</span>')

    tpl = IndividualTemplate()
    tpl.generate_conf(config, out_file)

    report_log.info('Generated individual log (%s)', out_file)
    return out_file
Python
0.000001
@@ -3248,32 +3248,157 @@ = %5B%5D%0A else:%0A + if any(('melodic_reportlet' in k for k in in_plots)):%0A REPORT_TITLES%5B'bold'%5D.insert(3, 'ICA components')%0A%0A in_plots
f553eb0a451c3e755d41d646964b0c0c8e22f0e2
Update Finland's municipality data URL to 2016
munigeo/importer/finland.py
munigeo/importer/finland.py
""" munigeo importer for Finnish nation-level data """ import re import os import zipfile import requests import io from django import db from django.contrib.gis.gdal import DataSource, SpatialReference, CoordTransform from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon, Point from munigeo.importer.base import Importer, register_importer from munigeo.importer.sync import ModelSyncher from munigeo.models import * from munigeo import ocd try: from concurrent.futures import ThreadPoolExecutor except ImportError: ThreadPoolExecutor = None # Disable threaded mode for now ThreadPoolExecutor = None MUNI_DATA_URL = 'http://kartat.kapsi.fi/files/kuntajako/kuntajako_1000k/etrs89/gml/TietoaKuntajaosta_2015_1000k.zip' @register_importer class FinlandImporter(Importer): name = "finland" def _process_muni(self, syncher, feat): muni_id = str(feat.get('nationalCode')) t = feat.get('text') m = re.match(r'\(2:([\w\s:-]+),([\w\s:-]+)\)', t) name_fi = m.groups()[0] name_sv = m.groups()[1] print(name_fi) munidiv = syncher.get(muni_id) if not munidiv: munidiv = AdministrativeDivision(origin_id=muni_id) munidiv.name_fi = name_fi munidiv.name_sv = name_sv munidiv.ocd_id = ocd.make_id(country='fi', kunta=name_fi) munidiv.type = self.muni_type munidiv.save() syncher.mark(munidiv) try: geom_obj = munidiv.geometry except AdministrativeDivisionGeometry.DoesNotExist: geom_obj = AdministrativeDivisionGeometry(division=munidiv) geom = feat.geom geom.transform(PROJECTION_SRID) # Store only the land boundaries #geom = geom.geos.intersection(self.land_area) geom = geom.geos if geom.geom_type == 'Polygon': geom = MultiPolygon(geom) geom_obj.boundary = geom geom_obj.save() try: muni = Municipality.objects.get(division=munidiv) except Municipality.DoesNotExist: muni = Municipality(division=munidiv) muni.name_fi = name_fi muni.name_sv = name_sv muni.id = munidiv.ocd_id.split('/')[-1].split(':')[-1] muni.save() def _setup_land_area(self): fin_bbox = Polygon.from_bbox(FIN_GRID) fin_bbox.srid = TM35_SRID fin_bbox.transform(4326) print("Loading global land shape") path = self.find_data_file('global/ne_10m_land.shp') ds = DataSource(path) land = ds[0][0] self.land_area = fin_bbox.intersection(land.geom.geos) self.land_area.transform(PROJECTION_SRID) def load_muni_data(self): print("Loading Finnish municipalities") resp = requests.get(MUNI_DATA_URL) with io.BytesIO(resp.content) as f: zf = zipfile.ZipFile(f) for name in zf.namelist(): if name.endswith('.xml'): break else: raise Exception('XML file not found in %s' % MUNI_DATA_URL) out_path = os.path.join(self, self.data_paths[0], 'fi') try: os.makedirs(out_path) except OSError: pass zf.extract(name, out_path) return os.path.join(out_path, name) def find_muni_data(self): for root_path in self.data_paths: base_path = os.path.join(root_path, 'fi') paths = os.listdir(base_path) for p in paths: if 'Kuntajaosta' in p: break else: return self.load_muni_data() xml_dir = p base_path = os.path.join(base_path, xml_dir) paths = os.listdir(base_path) for p in paths: if p.endswith('.xml'): break else: return self.load_muni_data() return os.path.join(base_path, p) def import_municipalities(self): #self._setup_land_area() print("Loading municipality boundaries") path = self.find_muni_data() ds = DataSource(path) lyr = ds[0] assert lyr.name == "AdministrativeUnit" defaults = {'name': 'Municipality'} muni_type, _ = AdministrativeDivisionType.objects.get_or_create(type='muni', defaults=defaults) self.muni_type = muni_type syncher = 
ModelSyncher(AdministrativeDivision.objects.filter(type=muni_type), lambda obj: obj.origin_id) # If running under Python 3, parallelize the heavy lifting. if ThreadPoolExecutor: executor = ThreadPoolExecutor(max_workers=8) futures = [] else: executor = None with db.transaction.atomic(): with AdministrativeDivision.objects.disable_mptt_updates(): for idx, feat in enumerate(lyr): if feat.get('nationalLevel') != '4thOrder': continue # Process the first in a single-threaded way to catch # possible exceptions early. if executor and idx > 0: futures.append(executor.submit(self._process_muni, syncher, feat)) else: self._process_muni(syncher, feat) if executor: for f in futures: res = f.result() executor.shutdown() AdministrativeDivision.objects.rebuild()
Python
0
@@ -729,17 +729,17 @@ osta_201 -5 +6 _1000k.z @@ -3456,32 +3456,117 @@ oot_path, 'fi')%0A + if not os.path.exists(base_path):%0A os.makedirs(base_path)%0A path
119677750f88be27d1f7df8652e5457e5a424008
Fix syntax error.
myfedora/widgets/widgets.py
myfedora/widgets/widgets.py
# Copyright (C) 2008 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details. You should have
# received a copy of the GNU General Public License along with this program; if
# not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA. Any Red Hat trademarks that are
# incorporated in the source code or documentation are not subject to the GNU
# General Public License and may only be used or replicated with the express
# permission of Red Hat, Inc.
#
# Author(s): Luke Macken <lmacken@redhat.com>

from tg import url
from tw.api import Widget, JSLink, js_function, js_callback
from tw.jquery import jquery_js, jQuery

from myfedora.streams import RSSDataStream
from myfedora.lib.app_factory import AppFactory

orbited_js = JSLink(link='http://localhost:8000/_/orbited.js')
rsswidget_js = JSLink(link='/javascript/rsswidget.js')


class RSSWidget(Widget):
    """ A generic RSS widget.

    This Widget is able to stream data using comet to the user. This means,
    that once the widget is rendered, it will automatically open a persistent
    connect back to the server where data will be pushed asynchronously.

    This widget uses jQuery to populate and animate new item creation.
    """
    params = ['name', 'entries', 'id']
    template = 'genshi:myfedora.templates.rsswidget'
    javascript=[orbited_js, jquery_js, rsswidget_js]
    include_dynamic_js_calls = True
    data = None      # A DataStream, or... ?
    event_cb = None  # a js_callback that handles new items

    def update_params(self, d):
        """ TODO:
            get the data feed name and user, and pass it to widget_connect ?
            allow for widgets to supply their own event_cb
        """
        super(RSSWidget, self).update_params(d)
        user = 'bobvila'
        event_cb = js_callback("""function(data) {
            $.each(data, function(i, entry){
                $("<div/>").hide().append(
                    $("<img/>").attr("src", entry["image"])
                               .attr("height", "32").attr("width", "32")
                ).append(
                    $("<a/>").attr("href", entry["link"]).text(entry["title"])
                ).prependTo("#%s_data").slideDown();
            });
        }""" % self.id)
        self.add_call(js_function('widget_connect')(user,self.data.id,event_cb))

    def __str__(self):
        return "<%s %s>" % (self.__class__.__name__, self.id)


class FedoraPeopleData(RSSDataStream):
    url = 'http://planet.fedoraproject.org/rss20.xml'
    id = 'fedorapeople'


class FedoraPeopleApp(AppFactory)
    entry_name = 'fedorapeople'

    def __init__(self, app_config_id, width=None, height=None):
        super(FedoraPeopleData, self).__init__(app_config_id, width, height)
        self.rss_stream = RSSDataStream()
        self.rss_stream.url = 'http://planet.fedoraproject.org/rss20.xml'
        self.rss_stream.id = 'fedorapeople'

    def get_data(self, force_refresh=False):
        data = super(FedoraPeopleData, self).get_data(force_refresh)
        data['rss-stream'] = FedoraPeopleData()
        return data


class FedoraPeopleWidget(RSSWidget):
    name = 'Fedora People'
    authors = ['Luke Macken <lmacken@redhat.com>']
    description = 'A streaming Fedora People widget'
    data = FedoraPeopleData
Python
0.000004
@@ -2989,16 +2989,17 @@ Factory) +: %0A ent
e7f6f0be68a1de0312996b610255c7a624674a41
Configure mailgun
mysite/config/production.py
mysite/config/production.py
# -*- coding: utf-8 -*-
'''
Production Configurations

- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values

# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
    from S3 import CallingFormat
    AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this where even if in Dev this class is called.
    pass

from .common import Common


class Production(Common):

    # This ensures that Django will be able to detect a secure connection
    # properly on Heroku.
    SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')

    # INSTALLED_APPS
    INSTALLED_APPS = Common.INSTALLED_APPS
    # END INSTALLED_APPS

    # SECRET KEY
    SECRET_KEY = values.SecretValue()
    # END SECRET KEY

    # django-secure
    INSTALLED_APPS += ("djangosecure", )

    # set this to 60 seconds and then to 518400 when you can prove it works
    SECURE_HSTS_SECONDS = 60
    SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
    SECURE_FRAME_DENY = values.BooleanValue(True)
    SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
    SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
    SESSION_COOKIE_SECURE = values.BooleanValue(False)
    SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
    # SECURE_SSL_REDIRECT = values.BooleanValue(True)
    # end django-secure

    # SITE CONFIGURATION
    # Hosts/domain names that are valid for this site
    # See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
    ALLOWED_HOSTS = ["*"]
    # END SITE CONFIGURATION

    INSTALLED_APPS += ("gunicorn", )

    # STORAGE CONFIGURATION
    # See: http://django-storages.readthedocs.org/en/latest/index.html
    # INSTALLED_APPS += (
    #     'storages',
    # )

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    # STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'

    # See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
    # AWS_ACCESS_KEY_ID = values.SecretValue()
    # AWS_SECRET_ACCESS_KEY = values.SecretValue()
    # AWS_STORAGE_BUCKET_NAME = values.SecretValue()
    AWS_AUTO_CREATE_BUCKET = True
    AWS_QUERYSTRING_AUTH = False

    # see: https://github.com/antonagestam/collectfast
    AWS_PRELOAD_METADATA = True
    INSTALLED_APPS += ('collectfast', )

    # AWS cache settings, don't change unless you know what you're doing:
    AWS_EXPIRY = 60 * 60 * 24 * 7
    AWS_HEADERS = {
        'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
            AWS_EXPIRY, AWS_EXPIRY)
    }

    # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
    STATIC_URL = '/static/'
    # END STORAGE CONFIGURATION

    # EMAIL
    DEFAULT_FROM_EMAIL = values.Value('mysite <noreply@example.com>')
    EMAIL_HOST = values.Value('smtp.sendgrid.com')
    # EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
    # EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
    EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
    EMAIL_SUBJECT_PREFIX = values.Value('[mysite] ', environ_name="EMAIL_SUBJECT_PREFIX")
    EMAIL_USE_TLS = True
    # SERVER_EMAIL = EMAIL_HOST_USER
    # END EMAIL

    # TEMPLATE CONFIGURATION
    # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
    TEMPLATE_LOADERS = (
        ('django.template.loaders.cached.Loader', (
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        )),
    )
    # END TEMPLATE CONFIGURATION

    # CACHING
    # Only do this here because thanks to django-pylibmc-sasl and pylibmc
    # memcacheify is painful to install on windows.
    try:
        # See: https://github.com/rdegges/django-heroku-memcacheify
        from memcacheify import memcacheify
        CACHES = memcacheify()
    except ImportError:
        CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
    # END CACHING

    # Your production stuff: Below this line define 3rd party libary settings
Python
0.000001
@@ -2937,31 +2937,36 @@ ue(' -mysite %3Cnoreply@example +hello %3Chello@digital-impacts .com @@ -3005,24 +3005,23 @@ e('smtp. -sendgrid +mailgun .com')%0A @@ -3018,26 +3018,24 @@ un.com')%0A - # EMAIL_HOST_ @@ -3097,24 +3097,23 @@ n_name=%22 -SENDGRID +MAILGUN _PASSWOR @@ -3115,26 +3115,24 @@ SSWORD%22)%0A - # EMAIL_HOST_ @@ -3194,16 +3194,15 @@ me=%22 -SENDGRID +MAILGUN _USE @@ -3250,11 +3250,11 @@ lue( -587 +465 , en @@ -3405,11 +3405,11 @@ USE_ -TLS +SSL = T @@ -3415,18 +3415,16 @@ True%0A - # SERVER_
2626be18570958cca7665168f34166f1845ec6da
add test
test/service_test.py
test/service_test.py
# vim: set expandtab sw=4 ts=4:
#
# Unit tests for Service
#
# Copyright (C) 2014-2015 Dieter Adriaenssens <ruleant@users.sourceforge.net>
#
# This file is part of buildtimetrend/service
# <https://github.com/buildtimetrend/service/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from service import is_repo_allowed
from buildtimetrend.settings import Settings
import unittest


class TestService(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        self.settings = Settings()

    def setUp(self):
        # reinit settings singleton
        if self.settings is not None:
            self.settings.__init__()

    def test_is_repo_allowed(self):
        # error is thrown when called without parameters
        self.assertRaises(TypeError, is_repo_allowed)

        # error is thrown when called with an invalid parameter
        self.assertFalse(is_repo_allowed(None))

        # repo is allowed by default
        self.assertTrue(is_repo_allowed("name/repo"))

    def test_is_repo_allowed_set_denied(self):
        # test denied repo
        self.settings.add_setting("denied_repo", {"test1"})

        self.assertTrue(is_repo_allowed("name/repo"))
        self.assertFalse(is_repo_allowed("name/test1"))
        self.assertTrue(is_repo_allowed("name/test2"))

    def test_is_repo_allowed_set_denied_multi(self):
        # test multiple denied repos
        self.settings.add_setting("denied_repo", {"test1", "test2"})

        self.assertTrue(is_repo_allowed("name/repo"))
        self.assertFalse(is_repo_allowed("name/test1"))
        self.assertFalse(is_repo_allowed("name/test2"))

    def test_is_repo_allowed_set_allowed(self):
        # test allowed repo
        self.settings.add_setting("allowed_repo", {"test1"})

        self.assertFalse(is_repo_allowed("name/repo"))
        self.assertTrue(is_repo_allowed("name/test1"))
        self.assertFalse(is_repo_allowed("name/test2"))

    def test_is_repo_allowed_set_allowed_multi(self):
        # test multiple allowed repos
        self.settings.add_setting("allowed_repo", {"test1", "test2"})

        self.assertFalse(is_repo_allowed("name/repo"))
        self.assertTrue(is_repo_allowed("name/test1"))
        self.assertTrue(is_repo_allowed("name/test2"))
Python
0.000002
@@ -2817,28 +2817,479 @@ repo_allowed(%22name/test2%22))%0A +%0A def test_is_repo_allowed_set_denied_allowed(self):%0A # set denied repo%0A self.settings.add_setting(%22denied_repo%22, %7B%22test1%22%7D)%0A # set allowed repo%0A self.settings.add_setting(%22allowed_repo%22, %7B%22name%22%7D)%0A%0A self.assertTrue(is_repo_allowed(%22name/repo%22))%0A self.assertFalse(is_repo_allowed(%22name/test1%22))%0A self.assertTrue(is_repo_allowed(%22name/test2%22))%0A self.assertFalse(is_repo_allowed(%22owner/repo%22))%0A
e35fd7126b5e82d9628185bbe6503e288600ff53
Fix flow formatting unit tests.
test/test_console.py
test/test_console.py
from libmproxy import console, proxy, filt, flow
import tutils
import libpry


class uState(libpry.AutoTree):
    def test_flow(self):
        """
            normal flow:

                connect -> request -> response
        """
        c = console.ConsoleState()
        f = self._add_request(c)
        assert f.request in c.flow_map
        assert c.get_focus() == (f, 0)

    def test_focus(self):
        """
            normal flow:

                connect -> request -> response
        """
        c = console.ConsoleState()
        f = self._add_request(c)

        assert c.get_focus() == (f, 0)
        assert c.get_from_pos(0) == (f, 0)
        assert c.get_from_pos(1) == (None, None)
        assert c.get_next(0) == (None, None)

        f2 = self._add_request(c)
        assert c.get_focus() == (f, 1)
        assert c.get_next(0) == (f, 1)
        assert c.get_prev(1) == (f2, 0)
        assert c.get_next(1) == (None, None)

        c.set_focus(0)
        assert c.get_focus() == (f2, 0)
        c.set_focus(-1)
        assert c.get_focus() == (f2, 0)

        c.delete_flow(f2)
        assert c.get_focus() == (f, 0)
        c.delete_flow(f)
        assert c.get_focus() == (None, None)

    def _add_request(self, state):
        r = tutils.treq()
        return state.add_request(r)

    def _add_response(self, state):
        f = self._add_request(state)
        r = tutils.tresp(f.request)
        state.add_response(r)

    def test_add_response(self):
        c = console.ConsoleState()
        f = self._add_request(c)
        r = tutils.tresp(f.request)
        c.focus = None
        c.add_response(r)

    def test_focus_view(self):
        c = console.ConsoleState()
        self._add_request(c)
        self._add_response(c)
        self._add_request(c)
        self._add_response(c)
        self._add_request(c)
        self._add_response(c)
        assert not c.set_limit("~q")
        assert len(c.view) == 3
        assert c.focus == 2


class uformat_keyvals(libpry.AutoTree):
    def test_simple(self):
        assert console.format_keyvals(
            [
                ("aa", "bb"),
                None,
                ("cc", "dd"),
                (None, "dd"),
                (None, "dd"),
            ]
        )


class uformat_flow(libpry.AutoTree):
    def test_simple(self):
        f = tutils.tflow()
        foc = ('focus', '>>')
        assert foc not in console.format_flow(f, False)
        assert foc in console.format_flow(f, True)
        assert foc not in console.format_flow(f, False, True)
        assert foc in console.format_flow(f, True, True)

        f.response = tutils.tresp()
        f.request = f.response.request
        f.backup()

        assert ('method', '[edited] ') in console.format_flow(f, True)
        assert ('method', '[edited] ') in console.format_flow(f, True, True)
        f.request.set_replay()
        assert ('method', '[replay] ') in console.format_flow(f, True)
        assert ('method', '[replay] ') in console.format_flow(f, True, True)

        f.response.code = 404
        assert ('error', '404') in console.format_flow(f, True, True)
        f.response.headers["content-type"] = ["text/html"]
        assert ('text', ' text/html') in console.format_flow(f, True, True)
        f.response = None
        f.error = proxy.Error(f.request, "error")
        assert ('error', 'error') in console.format_flow(f, True, True)


class uPathCompleter(libpry.AutoTree):
    def test_lookup_construction(self):
        c = console._PathCompleter()
        assert c.complete("/tm") == "/tmp/"
        c.reset()
        assert c.complete("./completion/a") == "./completion/aaa"
        c.reset()
        assert c.complete("./completion/aaa") == "./completion/aaa"
        assert c.complete("./completion/aaa") == "./completion/aab"

    def test_completion(self):
        c = console._PathCompleter(True)
        c.reset()
        c.lookup = [
            ("a", "x/a"),
            ("aa", "x/aa"),
        ]
        assert c.complete("a") == "a"
        assert c.final == "x/a"
        assert c.complete("a") == "aa"
        assert c.complete("a") == "a"

        c = console._PathCompleter(True)
        r = c.complete("l")
        assert c.final.endswith(r)

        c.reset()
        assert c.complete("/nonexistent") == "/nonexistent"
        assert c.final == "/nonexistent"
        c.reset()
        assert c.complete("~") != "~"

        c.reset()
        s = "thisisatotallynonexistantpathforsure"
        assert c.complete(s) == s
        assert c.final == s


class uOptions(libpry.AutoTree):
    def test_all(self):
        assert console.Options(kill=True)


tests = [
    uformat_keyvals(),
    uformat_flow(),
    uState(),
    uPathCompleter(),
    uOptions()
]
Python
0
@@ -2706,156 +2706,8 @@ ()%0A%0A - assert ('method', '%5Bedited%5D ') in console.format_flow(f, True)%0A assert ('method', '%5Bedited%5D ') in console.format_flow(f, True, True)%0A @@ -2760,33 +2760,32 @@ thod', '%5Breplay%5D - ') in console.fo @@ -2838,17 +2838,16 @@ %5Breplay%5D - ') in co
2af6533a366e008253f1ee07706ab07f6071f112
Remove duplicated import
vcs_info_panel/tests/test_clients/test_git.py
vcs_info_panel/tests/test_clients/test_git.py
import subprocess
import datetime
import pytz
from unittest.mock import patch
import datetime

from django.test import TestCase

from vcs_info_panel.clients.git import GitClient


def without_git_repository(*commands):
    def decorator(func):
        def inner(*args, **kwargs):
            with patch('subprocess.check_output') as _check_output:
                _check_output.side_effect = subprocess.CalledProcessError(128, commands, 'fatal: Not a git repository (or any of the parent directories): .git')
                return func(*args, **kwargs)
        return inner
    return decorator


class GitClientTestCase(TestCase):
    def setUp(self):
        self.client = GitClient()

    def _test_called_check_output(self, commands):
        with patch('subprocess.check_output') as _check_output:
            _check_output.assert_called_with(commands)

    def test_base_command(self):
        self.assertEqual(self.client.base_command, 'git')

    def test_is_repository_with_repository(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'true'
            self.assertEqual(self.client.is_repository(), True)
            _check_output.assert_called_once_with(['git', 'rev-parse', '--is-inside-work-tree'])

    @without_git_repository('git', 'rev-parse', '--is-inside-work-tree')
    def test_is_repository_without_repository(self):
        self.assertEqual(self.client.is_repository(), False)

    def test_get_short_hash(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'3218766'
            self.assertEqual(self.client.get_short_hash(), '3218766')
            _check_output.assert_called_once_with(['git', 'rev-parse', 'HEAD'])

    @without_git_repository('git', 'rev-parse', 'HEAD')
    def test_get_short_hash_without_repository(self):
        self.assertEqual(self.client.get_short_hash(), None)

    def test_get_short_hash(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'3218766'
            self.assertEqual(self.client.get_short_hash(), '3218766')
            _check_output.assert_called_once_with(['git', 'rev-parse', '--short', 'HEAD'])

    @without_git_repository('git', 'rev-parse', '--short', 'HEAD')
    def test_get_short_hash_without_repository(self):
        self.assertEqual(self.client.get_short_hash(), None)

    def test_get_current_branch_name(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'master'
            self.assertEqual(self.client.get_current_branch_name(), 'master')
            _check_output.assert_called_once_with(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])

    @without_git_repository('git', 'rev-parse', '--abbrev-ref', 'HEAD')
    def test_get_hash_without_repository(self):
        self.assertEqual(self.client.get_current_branch_name(), None)

    def test_get_author_name(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'giginet'
            self.assertEqual(self.client.get_author_name(), 'giginet')
            _check_output.assert_called_once_with(['git', 'show', '--format=%an', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%an', 'HEAD')
    def test_get_author_email_without_repository(self):
        self.assertEqual(self.client.get_author_email(), None)

    def test_get_author_email(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'giginet@kawaz.org'
            self.assertEqual(self.client.get_author_email(), 'giginet@kawaz.org')
            _check_output.assert_called_once_with(['git', 'show', '--format=%ae', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%ae', 'HEAD')
    def test_get_author_email_without_repository(self):
        self.assertEqual(self.client.get_author_email(), None)

    def test_get_committer_name(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'giginet'
            self.assertEqual(self.client.get_committer_name(), 'giginet')
            _check_output.assert_called_once_with(['git', 'show', '--format=%cn', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%cn', 'HEAD')
    def test_get_committer_email_without_repository(self):
        self.assertEqual(self.client.get_committer_email(), None)

    def test_get_committer_email(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'giginet@kawaz.org'
            self.assertEqual(self.client.get_committer_email(), 'giginet@kawaz.org')
            _check_output.assert_called_once_with(['git', 'show', '--format=%ce', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%ce', 'HEAD')
    def test_get_committer_email_without_repository(self):
        self.assertEqual(self.client.get_committer_email(), None)

    def test_get_date(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'2015-12-04 20:29:10 +0900'
            jst = pytz.timezone('Asia/Tokyo')
            self.assertEqual(self.client.get_date(), datetime.datetime(2015, 12, 4, 20, 29, 10, tzinfo=jst))
            _check_output.assert_called_once_with(['git', 'show', '--format=%ci', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%ci', 'HEAD')
    def test_get_date_without_repository(self):
        self.assertEqual(self.client.get_date(), None)

    def test_get_message(self):
        with patch('subprocess.check_output') as _check_output:
            _check_output.return_value = b'Fix issue'
            self.assertEqual(self.client.get_message(), 'Fix issue')
            _check_output.assert_called_once_with(['git', 'show', '--format=%b', 'HEAD'])

    @without_git_repository('git', 'show', '--format=%b', 'HEAD')
    def test_get_message_without_repository(self):
        self.assertEqual(self.client.get_message(), None)
Python
0.000002
@@ -11,32 +11,16 @@ process%0A -import datetime%0A import p
b39ac9a59c4e6a36baa67a7ec57e687ee673aa68
Check for required UFO info fields
Lib/fontbakery/specifications/ufo_sources.py
Lib/fontbakery/specifications/ufo_sources.py
# -*- coding: utf-8 -*-
#
# This file has been automatically formatted with `yapf --style '
# {based_on_style: google}'` and `docformatter`.
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from fontbakery.callable import check, condition
from fontbakery.checkrunner import ERROR, FAIL, PASS, Section, Spec
from fontbakery.constants import CRITICAL

default_section = Section('Default')


class UFOSpec(Spec):

  def setup_argparse(self, argument_parser):
    """Set up custom arguments needed for this spec."""
    import glob
    import logging
    import argparse

    def get_fonts(pattern):

      fonts_to_check = []
      # use glob.glob to accept *.ufo

      for fullpath in glob.glob(pattern):
        if fullpath.endswith(".ufo"):
          fonts_to_check.append(fullpath)
        else:
          logging.warning(
              ("Skipping '{}' as it does not seem "
               "to be valid UFO source directory.").format(fullpath))
      return fonts_to_check

    class MergeAction(argparse.Action):

      def __call__(self, parser, namespace, values, option_string=None):
        target = [item for l in values for item in l]
        setattr(namespace, self.dest, target)

    argument_parser.add_argument(
        'fonts',
        nargs='+',
        type=get_fonts,
        action=MergeAction,
        help='font file path(s) to check.'
        ' Wildcards like *.ufo are allowed.')

    return ('fonts',)


# ----------------------------------------------------------------------------
# This variable serves as an exportable anchor point, see e.g. the
# Lib/fontbakery/commands/check_ufo_sources.py script.
specification = UFOSpec(
    default_section=default_section,
    iterargs={'font': 'fonts'},
    derived_iterables={'ufo_sources': ('ufo_source', True)})

register_check = specification.register_check
register_condition = specification.register_condition

# ----------------------------------------------------------------------------


@register_condition
@condition
def ufo_source(font):
  import defcon
  return defcon.Font(font)


@register_check
@check(id='com.daltonmaag/check/ufolint', priority=CRITICAL)
def com_daltonmaag_check_ufolint(font):
  """Run ufolint on UFO source directory."""
  import subprocess
  ufolint_cmd = ["ufolint", font]

  try:
    subprocess.check_output(ufolint_cmd, stderr=subprocess.STDOUT)
  except subprocess.CalledProcessError as e:
    yield FAIL, ("ufolint failed the UFO source. Output follows :"
                 "\n\n{}\n").format(e.output)
  except OSError:
    yield ERROR, "ufolint is not available!"
  yield PASS, "ufolint passed the UFO source."


for section_name, section in specification._sections.items():
  print("There is a total of {} checks on {}.".format(
      len(section._checks), section_name))
Python
0
@@ -340,16 +340,22 @@ R, FAIL, + WARN, PASS, S @@ -2878,16 +2878,1259 @@ rce.%22%0A%0A%0A +@register_check%0A@check(id='com.daltonmaag/check/required-fields')%0Adef com_daltonmaag_check_required_fields(font):%0A %22%22%22Check that required fields are present in the UFO fontinfo.%0A%0A ufo2ft requires these info fields to compile a font binary: unitsPerEm,%0A ascender, descender, xHeight, capHeight and familyName.%0A%0A It will warn unless these fields are present: postscriptUnderlineThickness,%0A postscriptUnderlinePosition%0A %22%22%22%0A import defcon%0A%0A req_missing = %5B%5D%0A rec_missing = %5B%5D%0A ufo = defcon.Font(font)%0A%0A for required_field in %5B%0A %22_unitsPerEm%22, %22_ascender%22, %22_descender%22, %22_xHeight%22, %22_capHeight%22,%0A %22_familyName%22%0A %5D:%0A if ufo.info.__dict__.get(required_field) is None:%0A req_missing.append(required_field)%0A%0A for optional_field in %5B%0A %22_postscriptUnderlineThickness%22, %22_postscriptUnderlinePosition%22%0A %5D:%0A if ufo.info.__dict__.get(optional_field) is None:%0A rec_missing.append(optional_field)%0A%0A if req_missing:%0A yield FAIL, %22Required field(s) missing: %7B%7D%22.format(req_missing)%0A%0A if rec_missing:%0A yield WARN, %22Recommended field(s) missing: %7B%7D%22.format(rec_missing)%0A%0A yield PASS, %22Required and recommended fields present.%22%0A%0A%0A for sect
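As an aside, a minimal sketch of the rule this commit introduces — written against defcon's public info attributes rather than the check's internal __dict__ probing, with a hypothetical "Example.ufo" path — is shown below; it illustrates the validation, it is not code from the repository:

import defcon

# Fields ufo2ft needs to compile a binary (per the check's docstring).
REQUIRED = ["unitsPerEm", "ascender", "descender",
            "xHeight", "capHeight", "familyName"]

ufo = defcon.Font("Example.ufo")  # hypothetical UFO source directory
missing = [f for f in REQUIRED if getattr(ufo.info, f, None) is None]
if missing:
    print("FAIL: required field(s) missing: {}".format(missing))
else:
    print("PASS: required fields present.")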
b234ab98b9c1b23f16648e39b6091fd39d10ac89
change rollback states to failure states and handle missing finial cfn state as failure - add DELETE_COMPLETE to failure states
src/main/python/gaius/service.py
src/main/python/gaius/service.py
# -*- coding: utf-8 -*-
"""
Interface to the Crassus Lambda function.

This module notifies Crassus about updates to a CFN stack so Crassus will
trigger the update process.
"""

import json
import logging
from datetime import datetime
from dateutil import tz, parser as datetime_parser
from time import sleep

import boto3

logger = logging.getLogger('gaius')

FINAL_STATES = [
    'CREATE_COMPLETE',
    'DELETE_COMPLETE',
    'UPDATE_COMPLETE',
]
ERROR_STATES = [
    'CREATE_FAILED',
    'ROLLBACK_FAILED',
    'DELETE_FAILED',
    'ROLLBACK_COMPLETE',
    'UPDATE_ROLLBACK_COMPLETE',
    'UPDATE_ROLLBACK_FAILED'
]


def parse_parameters(parameters):
    """Parse input parameters from the command line"""
    parameter_list = [x for x in parameters.split(',')]
    return dict([y.split('=') for y in parameter_list])


def generate_message(stack_name, parameters, region, version=1):
    """Generate the update notification message"""
    message = {}
    message['version'] = version
    message['stackName'] = stack_name
    message['region'] = region
    message['parameters'] = parse_parameters(parameters)
    return message


def notify(stack_name, parameters, topic_arn, region):
    """Sends an update notification to Crassus"""
    message = generate_message(stack_name, parameters, region)
    sns_client = boto3.client('sns', region_name=region)
    json_answer = sns_client.publish(
        TopicArn=topic_arn,
        Message=json.dumps(message),
    )
    logger.debug(json_answer)


def is_related_message(message_dict, stack_name):
    """Checks if StackName belongs to client-session or is missing"""
    if message_dict.get('stackName') == stack_name:
        return True
    elif message_dict.get('stackName') is None:
        return True
    return False


def cleanup(back_channel_url, timeout, stack_name, region):
    """Cleans up old messages on the deployment pipeline"""
    sqs_resource = boto3.resource('sqs', region_name=region)
    queue = sqs_resource.Queue(url=back_channel_url)
    while timeout > 0:
        messages = queue.receive_messages(MaxNumberOfMessages=10)
        messages = filter_stack_related_messages(messages, stack_name)
        if not messages:
            break
        else:
            for message in messages:
                cleanup_old_messages(message, stack_name)
        timeout -= 1


def filter_stack_related_messages(messages, stack_name):
    return filter(lambda msg: json.loads(msg.body).get('stackName') == stack_name, messages)


def log_delete_message(message_dict):
    message_status = message_dict.get('status')
    message_payload = message_dict.get('message')
    message_rtype = message_dict.get('resourceType')
    message_stack_name = message_dict['stackName']
    logger.info('%s: %s: %s: %s', message_stack_name, message_status,
                message_rtype, message_payload)


def cleanup_old_messages(message, stack_name):
    now = datetime.now(tz=tz.tzutc())
    message_dict = json.loads(message.body)
    message_stack_name = message_dict['stackName']
    message_timestamp = message_dict['timestamp']
    message_datetime = datetime_parser.parse(message_timestamp)
    if (message_stack_name == stack_name and message_datetime < now):
        log_delete_message(message_dict)
        message.delete()
        return True


def receive(back_channel_url, timeout, stack_name, region, poll_interval=2):
    """Reads out the back-channel on the deployment pipeline"""
    timeout_orig = timeout
    sqs_resource = boto3.resource('sqs', region_name=region)
    queue = sqs_resource.Queue(url=back_channel_url)
    while timeout > 0:
        messages = queue.receive_messages(MaxNumberOfMessages=1)
        for message in messages:
            if process_message(message, stack_name):
                cleanup(back_channel_url, timeout, stack_name, region)
                logger.info('Final CFN message received')
                return
        timeout -= poll_interval
        sleep(poll_interval)
    # raise exception if we reach this point as presumably no final stage
    # is reached within the timeout
    raise DeploymentErrorException(
        'No final CFN message was received after {0} seconds'.format(
            timeout_orig))


def process_message(message, stack_name):
    message_dict = json.loads(message.body)
    message_status = message_dict.get('status')
    message_payload = message_dict.get('message')
    message_rtype = message_dict.get('resourceType')
    logger.debug(message_dict)
    if not is_related_message(message_dict, stack_name):
        message.change_visibility(VisibilityTimeout=0)
    else:
        log_delete_message(message_dict)
        message.delete()
        if message_status == 'failure':
            raise DeploymentErrorException(
                'Crassus failed with "{0}"'.format(message_payload))
        elif (message_rtype == 'AWS::CloudFormation::Stack'
                and message_status in ERROR_STATES):
            raise DeploymentErrorException(
                'Crassus failed with "{0}"'.format(message_payload))
        elif (message_rtype == 'AWS::CloudFormation::Stack'
                and message_status in FINAL_STATES):
            return True


class DeploymentErrorException(Exception):
    def __init__(self, message):
        self.message = message

    def __str__(self):
        return self.message
Python
0
@@ -397,31 +397,8 @@ E',%0A - 'DELETE_COMPLETE',%0A @@ -586,16 +586,39 @@ _FAILED' +,%0A 'DELETE_COMPLETE' %0A%5D%0A%0A%0Adef
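Decoded, the diff above moves 'DELETE_COMPLETE' from the final states to the failure states, so a deleted stack now counts as a failed deployment. A toy sketch of the resulting classification (illustrative only, not repository code):

FINAL_STATES = ['CREATE_COMPLETE', 'UPDATE_COMPLETE']
ERROR_STATES = [
    'CREATE_FAILED', 'ROLLBACK_FAILED', 'DELETE_FAILED',
    'ROLLBACK_COMPLETE', 'UPDATE_ROLLBACK_COMPLETE',
    'UPDATE_ROLLBACK_FAILED', 'DELETE_COMPLETE',
]

def classify(status):
    """Map a CloudFormation stack status onto a deployment outcome."""
    if status in ERROR_STATES:
        return 'failure'
    if status in FINAL_STATES:
        return 'success'
    return 'pending'

assert classify('DELETE_COMPLETE') == 'failure'
assert classify('UPDATE_COMPLETE') == 'success'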
367fed89f27e0a5a7947fc32540c99bcba58fbad
Improve HTTP error reporting.
zerver/management/commands/register_server.py
zerver/management/commands/register_server.py
import subprocess
from argparse import ArgumentParser
from typing import Any, Dict

import requests
from django.conf import settings
from django.core.management.base import CommandError
from django.utils.crypto import get_random_string
from requests.models import Response

from zerver.lib.management import ZulipBaseCommand, check_config
from zerver.lib.remote_server import PushBouncerSession

if settings.DEVELOPMENT:
    SECRETS_FILENAME = "zproject/dev-secrets.conf"
else:
    SECRETS_FILENAME = "/etc/zulip/zulip-secrets.conf"


class Command(ZulipBaseCommand):
    help = """Register a remote Zulip server for push notifications."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument(
            "--agree_to_terms_of_service",
            action="store_true",
            help="Agree to the Zulipchat Terms of Service: https://zulip.com/terms/.",
        )
        parser.add_argument(
            "--rotate-key",
            action="store_true",
            help="Automatically rotate your server's zulip_org_key",
        )

    def handle(self, *args: Any, **options: Any) -> None:
        if not settings.DEVELOPMENT:
            check_config()
        if not settings.ZULIP_ORG_ID:
            raise CommandError(
                "Missing zulip_org_id; run scripts/setup/generate_secrets.py to generate."
            )
        if not settings.ZULIP_ORG_KEY:
            raise CommandError(
                "Missing zulip_org_key; run scripts/setup/generate_secrets.py to generate."
            )
        if settings.PUSH_NOTIFICATION_BOUNCER_URL is None:
            if settings.DEVELOPMENT:
                settings.PUSH_NOTIFICATION_BOUNCER_URL = (
                    settings.EXTERNAL_URI_SCHEME + settings.EXTERNAL_HOST
                )
            else:
                raise CommandError(
                    "Please uncomment PUSH_NOTIFICATION_BOUNCER_URL "
                    "in /etc/zulip/settings.py (remove the '#')"
                )

        request = {
            "zulip_org_id": settings.ZULIP_ORG_ID,
            "zulip_org_key": settings.ZULIP_ORG_KEY,
            "hostname": settings.EXTERNAL_HOST,
            "contact_email": settings.ZULIP_ADMINISTRATOR,
        }
        if options["rotate_key"]:
            request["new_org_key"] = get_random_string(64)

        self._log_params(request)

        if not options["agree_to_terms_of_service"] and not options["rotate_key"]:
            print(
                "To register, you must agree to the Zulipchat Terms of Service: "
                "https://zulip.com/terms/"
            )
            tos_prompt = input("Do you agree to the Terms of Service? [Y/n] ")
            print("")
            if not (
                tos_prompt.lower() == "y"
                or tos_prompt.lower() == ""
                or tos_prompt.lower() == "yes"
            ):
                raise CommandError("Aborting, since Terms of Service have not been accepted.")

        response = self._request_push_notification_bouncer_url(
            "/api/v1/remotes/server/register", request
        )

        if response.json()["created"]:
            print(
                "You've successfully registered for the Mobile Push Notification Service!\n"
                "To finish setup for sending push notifications:"
            )
            print(
                "- Restart the server, using /home/zulip/deployments/current/scripts/restart-server"
            )
            print("- Return to the documentation to learn how to test push notifications")
        else:
            if options["rotate_key"]:
                print(f"Success! Updating {SECRETS_FILENAME} with the new key...")
                subprocess.check_call(
                    [
                        "crudini",
                        "--set",
                        SECRETS_FILENAME,
                        "secrets",
                        "zulip_org_key",
                        request["new_org_key"],
                    ]
                )
            print("Mobile Push Notification Service registration successfully updated!")

    def _request_push_notification_bouncer_url(self, url: str, params: Dict[str, Any]) -> Response:
        registration_url = settings.PUSH_NOTIFICATION_BOUNCER_URL + url
        session = PushBouncerSession()
        try:
            response = session.post(registration_url, params=params)
        except requests.RequestException:
            raise CommandError(
                "Network error connecting to push notifications service "
                f"({settings.PUSH_NOTIFICATION_BOUNCER_URL})",
            )
        try:
            response.raise_for_status()
        except requests.HTTPError:
            content_dict = response.json()
            raise CommandError("Error: " + content_dict["msg"])
        return response

    def _log_params(self, params: Dict[str, Any]) -> None:
        print("The following data will be submitted to the push notification service:")
        for key in sorted(params.keys()):
            print(f"  {key}: {params[key]}")
        print("")
Python
0
@@ -4692,16 +4692,21 @@ TTPError + as e :%0A @@ -4715,38 +4715,179 @@ -content_dict = response.json() +# Report nice errors from the Zulip API if possible.%0A try:%0A content_dict = response.json()%0A except Exception:%0A raise e%0A %0A
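For readability, the %0A-encoded hunk above decodes to the following fallback: when the bouncer's error response body is not valid JSON, the original HTTPError is re-raised instead of crashing inside response.json().

        try:
            response.raise_for_status()
        except requests.HTTPError as e:
            # Report nice errors from the Zulip API if possible.
            try:
                content_dict = response.json()
            except Exception:
                raise e

            raise CommandError("Error: " + content_dict["msg"])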
6e6e2e03da2f4ef141b51843ca16fdb52f0770ca
Use tokenized no-reply address in send_test_email.
zerver/management/commands/send_test_email.py
zerver/management/commands/send_test_email.py
from typing import Any

from django.conf import settings
from django.core.mail import mail_admins, mail_managers, send_mail
from django.core.management import CommandError
from django.core.management.commands import sendtestemail

from zerver.lib.send_email import FromAddress


class Command(sendtestemail.Command):
    def handle(self, *args: Any, **kwargs: str) -> None:
        if settings.WARN_NO_EMAIL:
            raise CommandError("Outgoing email not yet configured, see\n  "
                               "https://zulip.readthedocs.io/en/latest/production/email.html")
        message = ("Success! If you receive this message, you've "
                   "successfully configured sending email from your "
                   "Zulip server. Remember that you need to restart "
                   "the Zulip server with /home/zulip/deployments/current/scripts/restart-server "
                   "after changing the settings in /etc/zulip before your changes will take effect.")
        send_mail("Zulip email test", message, FromAddress.SUPPORT, kwargs['email'])
        send_mail("Zulip noreply email test", message, FromAddress.NOREPLY, kwargs['email'])

        if kwargs['managers']:
            mail_managers("Zulip manager email test", "This email was sent to the site managers.")

        if kwargs['admins']:
            mail_admins("Zulip admins email test", "This email was sent to the site admins.")
Python
0
@@ -1139,15 +1139,36 @@ ess. -NOREPLY +tokenized_no_reply_address() , kw
90af4693dce351499e6898f02a5b2f6c2a0ca99f
Fix https://github.com/mottosso/Qt.py/issues/24
Qt.py
Qt.py
"""Map all bindings to PySide2 This module replaces itself with the most desirable binding. Resolution order: - PySide2 - PyQt5 - PySide - PyQt4 Usage: >>> import sys >>> from Qt import QtWidgets >>> app = QtWidgets.QApplication(sys.argv) >>> button = QtWidgets.QPushButton("Hello World") >>> button.show() >>> app.exec_() """ import os import sys __version__ = "0.2.3" def _pyqt5(): import PyQt5.Qt # Remap PyQt5.QtCore.Signal = PyQt5.QtCore.pyqtSignal PyQt5.QtCore.Slot = PyQt5.QtCore.pyqtSlot PyQt5.QtCore.Property = PyQt5.QtCore.pyqtProperty # Add PyQt5.__wrapper_version__ = __version__ PyQt5.__binding__ = "PyQt5" PyQt5.__binding_version__ = PyQt5.QtCore.PYQT_VERSION_STR PyQt5.__qt_version__ = PyQt5.QtCore.PYQT_VERSION_STR return PyQt5 def _pyqt4(): import PyQt4.Qt # Remap PyQt4.QtWidgets = PyQt4.QtGui PyQt4.QtCore.Signal = PyQt4.QtCore.pyqtSignal PyQt4.QtCore.Slot = PyQt4.QtCore.pyqtSlot PyQt4.QtCore.Property = PyQt4.QtCore.pyqtProperty # Add PyQt4.__wrapper_version__ = __version__ PyQt4.__binding__ = "PyQt4" PyQt4.__binding_version__ = PyQt4.QtCore.PYQT_VERSION_STR PyQt4.__qt_version__ = PyQt4.QtCore.PYQT_VERSION_STR return PyQt4 def _pyside2(): import PySide2 # Add PySide2.__wrapper_version__ = __version__ PySide2.__binding__ = "PySide2" PySide2.__binding_version__ = PySide2.__version__ PySide2.__qt_version__ = PySide2.QtCore.qVersion() return PySide2 def _pyside(): import PySide import PySide.QtGui # Remap PySide.QtWidgets = PySide.QtGui PySide.QtCore.QSortFilterProxyModel = PySide.QtGui.QSortFilterProxyModel # Add PySide.__wrapper_version__ = __version__ PySide.__binding__ = "PySide" PySide.__binding_version__ = PySide.__version__ PySide.__qt_version__ = PySide.QtCore.qVersion() return PySide def _init(): """Try loading each binding in turn Please note: the entire Qt module is replaced with this code: sys.modules["Qt"] = binding() This means no functions or variables can be called after this has executed. """ preferred = os.getenv("QT_PREFERRED_BINDING") if preferred: available = { "PySide2": _pyside2, "PySide": _pyside, "PyQt5": _pyqt5, "PyQt4": _pyqt4 } if preferred not in available: raise ImportError("Preferred Qt binding \"%s\" " "not available" % preferred) sys.modules["Qt"] = available[preferred]() return else: for binding in (_pyside2, _pyqt5, _pyside, _pyqt4): try: sys.modules["Qt"] = binding() return except ImportError: continue # If not binding were found, throw this error raise ImportError("No Qt binding were found.") _init()
Python
0.000003
@@ -410,9 +410,9 @@ 0.2. -3 +4 %22%0A%0A%0A @@ -601,32 +601,90 @@ e.pyqtProperty%0A%0A + from PyQt5 import uic%0A PyQt5.load_ui = uic.loadUi%0A%0A # Add%0A Py @@ -1121,24 +1121,82 @@ qtProperty%0A%0A + from PyQt4 import uic%0A PyQt4.load_ui = uic.loadUi%0A%0A # Add%0A @@ -1654,208 +1654,780 @@ -return PySide2%0A%0A%0Adef _pyside():%0A import PySide%0A import PySide.QtGui%0A%0A # Remap%0A PySide.QtWidgets = PySide.QtGui%0A%0A PySide.QtCore.QSortFilterProxyModel = PySide.QtGui.QSortFilterProxyModel +# Remap%0A def load_ui(ui_filepath, *args, **kwargs):%0A %22%22%22Wrap QtUiTools.QUiLoader().load()%0A for compatibility against PyQt5.uic.loadUi()%0A %22%22%22%0A from PySide2 import QtUiTools%0A return QtUiTools.QUiLoader().load(ui_filepath)%0A PySide2.load_ui = load_ui%0A%0A return PySide2%0A%0A%0Adef _pyside():%0A import PySide%0A import PySide.QtGui%0A%0A # Remap%0A PySide.QtWidgets = PySide.QtGui%0A%0A PySide.QtCore.QSortFilterProxyModel = PySide.QtGui.QSortFilterProxyModel%0A%0A def load_ui(ui_filepath, *args, **kwargs):%0A %22%22%22Wrap QtUiTools.QUiLoader().load()%0A for compatibility against PyQt5.uic.loadUi()%0A %22%22%22%0A from PySide import QtUiTools%0A return QtUiTools.QUiLoader().load(ui_filepath)%0A PySide.load_ui = load_ui %0A%0A
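A usage sketch of the load_ui shim this commit adds: under PyQt it forwards to uic.loadUi, under PySide/PySide2 it wraps QtUiTools.QUiLoader().load. The .ui path here is hypothetical and this is an illustration, not code from the repository:

import sys
from Qt import QtWidgets, load_ui  # 'Qt' resolves to whichever binding loaded

app = QtWidgets.QApplication(sys.argv)
window = load_ui("dialog.ui")  # hypothetical .ui file
window.show()
app.exec_()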
077b7cf72cc8347f9509f564ba702cc25013a0e8
Refactor ca.py
ca.py
ca.py
import random
from time import sleep
from tqdm import tqdm

import numpy as np
import matplotlib.pyplot as plt


def new_mitotic_event():
    return np.random.randint(5,11)

def check_full_neighborhood(cell, cells):
    #TODO: check the grid to see whether the neighborhood has free space
    return True #Return a list of candidate positions that are free

def if_apply_kill_cell(tests_result):
    return tests_result[0] == '1'

def if_apply_mitotic(mitotic_candidate_cell, cells):
    return tests_result[1] == '1'

def postpone_mitotic_event(mitotics_events, new_event_time, cell_position):
    new_event_time = new_mitotic_event() + iteration
    if new_event_time in mitotics_events:
        events_aux = mitotics_events[new_event_time]
        events_aux.append(cell_position)
        mitotics_events[new_event_time] = events_aux
    else:
        mitotics_events.update({new_event_time: [cell_position]})
    return mitotics_events

#Tests: unless noted otherwise, they return '1' when mitosis applies and '0' when death applies.
def test_1(cell, a):
    if np.random.random() < 1/a:
        print("Random death")
        return '0'
    return '1'

def test_2(cell, e, cells):
    #If EA is not active and death applies, return '0'. Otherwise, return '1'.
    if cell in cells:
        cell_genome = cells[cell]
        n = cell_genome.mutations()
        if not cell_genome.ea:
            if np.random.randint(0,n) < n/e:
                print("Death by mutations")
                return '0'
        return '1'

def test_3(cell):
    #TODO: look up in the paper which procedure this test follows.
    return '1'

def test_4(cell, cells):
    full = check_full_neighborhood(cell, cells)
    if full and cell.igi and np.random.random() < 1/g:
        print("Kills a neighbor")
        return '1'
    return '0'

def test_5(cell):
    if cell.tl == 0 and not cell.ei:
        print("Death by telomere")
        return '0'
    else:
        return '1'


class Genome:
    def __init__(self, sg, igi, ea, ag, ei, mt, tl):
        self.sg = sg
        self.igi = igi
        self.ea = ea
        self.ag = ag
        self.ei = ei
        self.mt = mt
        self.tl = tl

    def __str__(self):
        return '[' + str(self.sg) + ', ' + str(self.igi) + ', ' + str(self.ea) + ', ' + str(self.ag) + ', ' + str(self.ei) + ', ' + str(self.mt) + ', ' + str(self.tl) + ']'

    def decrease_telomer(self):
        self.tl -= 1

    def mutations(self):
        count = 0
        if self.sg:
            count += 1
        if self.igi:
            count += 1
        if self.ea:
            count += 1
        if self.ag:
            count += 1
        if self.ei:
            count += 1
        if self.mt:
            count += 1
        return count


""" BEGIN Remove """
def mutate(mitotic_candidate_cell):
    mitotic_candidate_cell.sg = np.random.randint(0,2)
    mitotic_candidate_cell.igi = np.random.randint(0,2)
    mitotic_candidate_cell.ea = np.random.randint(0,2)
    mitotic_candidate_cell.ag = np.random.randint(0,2)
    mitotic_candidate_cell.ei = np.random.randint(0,2)
    mitotic_candidate_cell.mt = np.random.randint(0,2)
    mitotic_candidate_cell.tl -= 1
    return mitotic_candidate_cell
""" END Remove """


if __name__ == "__main__":
    #Global parameters definition and initialization
    #TODO: make this configurable from a file, using argparse
    sleep_time = 0.05
    time = 100*100
    iterations = range(time)
    grid_size = 10
    mutation_rate = 10**5
    telomer_length = 50
    death_probability = 10
    factor_increase_base_rate_mutation = 10**2
    kill_neighbor = 30
    random_death = 10**3

    #Rename several global parameters
    m = mutation_rate
    tl = telomer_length
    e = death_probability
    i = factor_increase_base_rate_mutation
    g = kill_neighbor
    a = random_death

    #Global structures definition
    """grid = np.array([['[*, *, *, *, *, *, *]' for j in range(grid_size)] for i in range(grid_size)]) #TODO: turn the string into some number"""

    #Global structures initialization
    half_grid = int(grid_size/2)
    """grid[half_grid][half_grid] = str(cells[0])"""

    #First cell
    first_cell_position = (half_grid, half_grid)
    cells = {first_cell_position: Genome(0, 0, 0, 0, 0, 0, tl)}

    #mitotics_events structure initialization
    mitotics_events = {new_mitotic_event(): [first_cell_position]}

    #Run
    for iteration in tqdm(iterations):
        if iteration in mitotics_events:
            events = mitotics_events[iteration]
            del mitotics_events[iteration]
            for event in events:
                #Event == Cell position
                if event in cells:
                    mitotic_candidate_cell = cells[event]
                    tests_result = test_1(mitotic_candidate_cell, a)
                    tests_result += test_2(mitotic_candidate_cell, e, cells)
                    tests_result += test_3(mitotic_candidate_cell,) #See TODO in the function.
                    tests_result += test_4(mitotic_candidate_cell, cells)
                    tests_result += test_5(mitotic_candidate_cell)
                    if if_apply_kill_cell(tests_result):
                        print("Cell death event succeeded!")
                    elif if_apply_mitotic(mitotic_candidate_cell, mitotic_candidate_cell):
                        print("Mitotic event succeeded!")
                    else:
                        #Schedule a new mitotic event
                        """ BEGIN Remove
                        cells[event] = mutate(mitotic_candidate_cell)
                        END Remove """
                        new_event_time = new_mitotic_event() + iteration
                        mitotics_events = postpone_mitotic_event(mitotics_events, new_event_time, event)
        sleep(sleep_time)
Python
0.000002
@@ -389,122 +389,229 @@ lt): -%0A return tests_result%5B0%5D == '1'%0A%0Adef if_apply_mitotic(mitotic_candidate_cell, cells):%0A return tests_result%5B1 + # Randomly death or mutation damage applied%0A return tests_result%5B0%5D == '1' or tests_result%5B1%5D == '1'%0A%0Adef if_apply_mitotic(tests_result):%0A return tests_result%5B2%5D == '1' and tests_result%5B3%5D == '1' and tests_result%5B4 %5D == @@ -5381,54 +5381,20 @@ tic( -mitotic_candidate_cell, mitotic_candidate_cell +tests_result ):%0A
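The core of the refactor recorded in the diff amounts to making both predicates operate on the concatenated five-character tests_result string of '0'/'1' flags, instead of on cell objects; the call site likewise becomes if_apply_mitotic(tests_result). Decoded for readability:

def if_apply_kill_cell(tests_result):
    # Randomly death or mutation damage applied
    return tests_result[0] == '1' or tests_result[1] == '1'

def if_apply_mitotic(tests_result):
    return tests_result[2] == '1' and tests_result[3] == '1' and tests_result[4] == '1'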
e6a807733c689d632c3d217962a78b49ba485516
Make sure we don't overwrite a result file
plumbium/processresult.py
plumbium/processresult.py
from __future__ import print_function
import datetime
from functools import wraps
import json
import os
import os.path
import shutil
from subprocess import check_output, STDOUT, CalledProcessError
import tarfile
import tempfile
import traceback
import sys


class Pipeline(object):
    def __init__(self):
        self.debug = False
        self.results = []

    def run(self, name, pipeline, base_dir, *input_files, **kwargs):
        self.results = []
        self.debug = kwargs.get('debug', False)
        self.name = name
        self.input_files = input_files
        self.base_dir = base_dir
        self.launched_dir = os.getcwd()
        self._copy_input_files_to_working_dir()
        self.start_date = datetime.datetime.now()
        os.chdir(self.working_dir)
        pipeline_exception = None
        try:
            pipeline(*input_files)
        except Exception as e:
            pipeline_exception = e
            traceback.print_exc()
        finally:
            os.chdir(self.launched_dir)
            self.save(pipeline_exception)

    def _copy_input_files_to_working_dir(self):
        self.working_dir = tempfile.mkdtemp(prefix='plumbium_{0}_'.format(self.name))
        for i in self.input_files:
            dest_dir = os.path.join(self.working_dir, os.path.dirname(i.filename))
            source = os.path.join(self.base_dir, i.filename)
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            shutil.copy(source, dest_dir)

    def _store_printed_output(self):
        with open('printed_output.txt', 'w') as printed_output_record:
            for r in self.results:
                printed_output_record.write(r.output)

    def record(self, result):
        self.results.append(result)

    def save(self, exception=None):
        results = {
            'name': self.name,
            'input_files': [repr(f) for f in self.input_files],
            'dir': self.launched_dir,
            'start_date': self.start_date.strftime('%Y%m%d %H:%M'),
        }
        if exception is not None:
            results['pipeline_exception'] = repr(exception)
        results['processes'] = [r.as_dict() for r in self.results]
        basename = '{0}-{1}'.format(
            self.name,
            self.start_date.strftime('%Y%m%d_%H%M')
        )
        with open(os.path.join(self.working_dir, basename + '.json'), 'w') as f:
            json.dump(results, f, indent=4, separators=(',', ': '))
        archive = tarfile.open(basename + '.tar.gz', 'w:gz')
        archive.add(self.working_dir, arcname=basename)
        archive.close()


pipeline = Pipeline()


class OutputRecorder(object):
    def reset(self):
        self.output = ''


_output_recorder = OutputRecorder()


def call(cmd, cwd=None):
    try:
        _output_recorder.output += check_output(cmd, stderr=STDOUT, cwd=cwd)
    except CalledProcessError as e:
        print(e.output)
        _output_recorder.output = e.output
        raise


def record(*output_names):
    def decorator(f):
        @wraps(f)
        def process_recorder(*args, **kwargs):
            returned_images = None
            exception = None
            _output_recorder.reset()
            try:
                returned_images = f(*args, **kwargs)
            except:
                traceback.print_exc(file=sys.stderr)
                exception = traceback.format_exc()
            if type(returned_images) is not tuple:
                returned_images = (returned_images,)
            named_images = dict(zip(output_names, returned_images))
            result = ProcessOutput(
                func=f,
                args=args,
                kwargs=kwargs,
                output=_output_recorder.output,
                exception=exception,
                **named_images
            )
            pipeline.record(result)
            return result
        return process_recorder
    return decorator


class ProcessOutput(object):
    def __init__(self, func, args, kwargs, output, exception, **output_images):
        self._results = output_images
        self.output = output
        self.function = func
        self.input_args = args
        self.input_kwargs = kwargs
        self.exception = exception

    def __repr__(self):
        r = self.function.__name__ + '('
        if self.input_args:
            r += ', '.join([repr(x) for x in self.input_args])
        if self.input_kwargs:
            r += ', '.join(['{0}={1!r}'.format(x) for x in self.input_kwargs])
        r += ')'
        return r

    def as_dict(self):
        d = {
            'function': self.function.__name__,
            'input_args': [repr(x) for x in self.input_args],
            'input_kwargs': {str(x[0]): repr(x[1]) for x in self.input_kwargs},
            'printed_output': self.output,
            'returned': [repr(r) for r in self._results.values()],
        }
        if self.exception:
            d['exception'] = repr(self.exception)
        return d

    def __getitem__(self, key):
        return self._results[key]
Python
0.000001
@@ -965,16 +965,71 @@ inally:%0A + self.finish_date = datetime.datetime.now()%0A @@ -2064,16 +2064,86 @@ H:%25M'),%0A + 'finish_date': self.finish_date.strftime('%25Y%25m%25d %25H:%25M'),%0A @@ -2607,24 +2607,50 @@ le.open( +self._clear_filename('.', basename + '.tar @@ -2641,18 +2641,17 @@ basename - + +, '.tar.g @@ -2652,16 +2652,17 @@ .tar.gz' +) , 'w:gz' @@ -2744,16 +2744,441 @@ lose()%0A%0A + def _clear_filename(self, directory, basename, ext):%0A tgt = os.path.join(directory, basename + ext)%0A if os.path.exists(tgt):%0A inc = 1%0A tgt = os.path.join(directory, '%7B0%7D-%7B1:02d%7D%7B2%7D'.format(basename, inc, ext))%0A while os.path.exists(tgt):%0A inc += 1%0A tgt = os.path.join(directory, '%7B0%7D-%7B1:02d%7D%7B2%7D'.format(basename, inc, ext))%0A return tgt%0A%0A %0Apipelin
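The collision handling this commit adds can be illustrated standalone; the simplified loop below (not repository code) behaves like the new _clear_filename helper:

import os

def clear_filename(directory, basename, ext):
    """Return basename+ext, or basename-NN+ext if the plain name is taken."""
    tgt = os.path.join(directory, basename + ext)
    inc = 0
    while os.path.exists(tgt):
        inc += 1
        tgt = os.path.join(directory, '{0}-{1:02d}{2}'.format(basename, inc, ext))
    return tgt

# e.g. with 'results.tar.gz' already present, this yields './results-01.tar.gz'
print(clear_filename('.', 'results', '.tar.gz'))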
a971b84541b991bbc14be73e94b633c88edcd567
Remove unused vars
programs/ledcube/audio.py
programs/ledcube/audio.py
#
# Copyright (c) 2014 PolyFloyd
#

import io
import numpy.fft
import os
import pyaudio

class Source:
    def get_spectrum(self, signal):
        n = len(signal)
        signal = numpy.array([(s + 1) / 2 for s in signal], dtype=float)
        spectrum = numpy.abs(numpy.fft.rfft(signal))
        freqs = numpy.fft.fftfreq(spectrum.size, 1 / self.get_sample_rate())
        spectrum = spectrum[1:]
        return (spectrum, freqs)

    def get_input(self):
        return self.input

    def set_input(self, input):
        if type(input) == str:
            self.input = os.fdopen(os.open(input, os.O_RDONLY), 'rb')
        else:
            self.input = input

    def get_signal(self, seconds):
        return [self.get_next_sample() for i in range(0, int(self.get_sample_rate() * seconds))]

    def get_next_sample(self):
        pass # virtual

    def get_sample_rate(self):
        pass # virtual


class PCMSource(Source):
    def __init__(self, input_file, sample_rate, sample_bits, sample_endianness='little', sample_sign='signed'):
        assert(sample_endianness == 'little' or sample_endianness == 'big')
        assert(sample_sign == 'signed' or sample_sign == 'unsigned')
        self.set_input(input_file)
        self.sample_rate = sample_rate
        self.sample_bits = sample_bits
        self.sample_endianness = sample_endianness
        self.sample_sign = sample_sign

    def sample_from_raw_data(self, raw_data):
        intval = int.from_bytes(raw_data, self.sample_endianness, signed=self.sample_sign == 'signed')
        return intval / (2 ** (len(raw_data) * 8 - 1))

    def get_next_sample(self):
        return self.sample_from_raw_data(self.get_input().read(self.sample_bits // 8))

    def get_sample_rate(self):
        return self.sample_rate
Python
0.000001
@@ -33,18 +33,8 @@ %0A#%0A%0A -import io%0A impo @@ -59,23 +59,8 @@ t os -%0Aimport pyaudio %0A%0Acl @@ -112,32 +112,8 @@ l):%0A - n = len(signal)%0A
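For context, a short usage sketch of the PCMSource class defined above; the FIFO path and the audio format are assumptions, not taken from the project:

# Read 16-bit little-endian signed PCM at 44.1 kHz from a FIFO and take the
# spectrum of a 50 ms window.
src = PCMSource('/tmp/audio.fifo', sample_rate=44100, sample_bits=16)
signal = src.get_signal(0.05)
spectrum, freqs = src.get_spectrum(signal)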
1c66a337dd14b7ce148e356b729d538413093b8c
Update admin for deform-fields
project/apps/api/admin.py
project/apps/api/admin.py
# import logging
# log = logging.getLogger(__name__)

from django.contrib import admin

from django_object_actions import (
    DjangoObjectActions,
    takes_instance_or_queryset,
)

from easy_select2 import select2_modelform

from .models import (
    Convention,
    Contest,
    Contestant,
    Group,
    Performance,
    Singer,
    Note,
    District,
    Director,
    Judge,
    Song,
)


class PerformanceInline(admin.TabularInline):
    form = select2_modelform(
        Performance,
        attrs={'width': '250px'},
    )
    model = Performance
    extra = 0
    show_change_link = True


class ContestantInline(admin.TabularInline):
    form = select2_modelform(
        Contestant,
        attrs={'width': '250px'},
    )
    model = Contestant
    extra = 0
    show_change_link = True
    fields = (
        'contest',
        'group',
        # 'director',
        # 'district',
        # 'tenor',
        # 'lead',
        # 'baritone',
        # 'bass',
        # 'prelim',
        # 'seed',
        # 'score',
        # 'place',
    )


@admin.register(Convention)
class ConventionAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Convention,
        attrs={'width': '250px'},
    )
    list_display = (
        'name',
        'location',
        'dates',
        'kind',
        'year',
        'district',
        'is_active',
    )
    fields = (
        'is_active',
        'name',
        'location',
        'dates',
        'timezone',
        'district',
        'kind',
        'year',
    )
    list_filter = (
        'kind',
        'year',
        'district',
    )
    readonly_fields = (
        'name',
    )
    save_on_top = True


@admin.register(Contest)
class ContestAdmin(DjangoObjectActions, admin.ModelAdmin):
    # @takes_instance_or_queryset
    # def import_scores(self, request, queryset):
    #     for obj in queryset:
    #         obj.import_scores()
    # import_scores.label = 'Import Scores'

    form = select2_modelform(
        Contest,
        attrs={'width': '250px'},
    )
    save_on_top = True
    # objectactions = [
    #     'import_scores',
    # ]
    # inlines = [
    #     ContestantInline,
    # ]
    list_filter = (
        'level',
        'kind',
        'year',
        'district',
    )
    list_display = (
        'name',
        'district',
        'convention',
        'level',
        'kind',
        'year',
        'panel',
        'scoresheet_pdf',
        'scoresheet_csv',
        'is_active',
    )
    fields = (
        ('is_active', 'is_complete',),
        'name',
        'convention',
        'level',
        'kind',
        'year',
        'district',
        'panel',
        'scoresheet_pdf',
        'scoresheet_csv',
    )
    readonly_fields = (
        'name',
    )


@admin.register(Performance)
class PerformanceAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Performance,
        attrs={'width': '250px'},
    )
    list_display = (
        'id',
        'draw',
        'session',
        'stagetime',
        'place',
        'points',
        'song1',
        'mus1',
        'prs1',
        'sng1',
        'song2',
        'mus2',
        'prs2',
        'sng2',
    )
    list_filter = (
        'contestant__contest__level',
        'contestant__contest__kind',
        'round',
        'contestant__contest__year',
    )
    ordering = (
        'place',
        'draw',
    )
    save_on_top = True


@admin.register(Group)
class GroupAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Group,
        attrs={'width': '250px'},
    )
    search_fields = (
        'name',
    )
    list_display = (
        'name',
        'location',
        'website',
        'facebook',
        'twitter',
        'email',
        'phone',
        'chapter_name',
        'chapter_code',
        # 'bsmdb_id',
        'picture',
    )
    inlines = [
        ContestantInline,
    ]
    list_filter = (
        'kind',
    )
    save_on_top = True


@admin.register(Contestant)
class ContestantAdmin(admin.ModelAdmin):
    @takes_instance_or_queryset
    def update_contestants(self, request, queryset):
        for obj in queryset:
            obj.save()
    update_contestants.label = 'Update Contestants'

    form = select2_modelform(
        Contestant,
        attrs={'width': '250px'},
    )
    objectactions = [
        'update_contestants',
    ]
    list_display = (
        'name',
        'district',
        'director',
        'lead',
        'tenor',
        'baritone',
        'bass',
        'seed',
        'prelim',
        'place',
        'score',
        'men',
    )
    search_fields = (
        'name',
    )
    list_filter = (
        'contest__level',
        'contest__kind',
        'contest__year',
    )
    # inlines = [
    #     PerformanceInline,
    # ]
    # fields = (
    #     'contest',
    #     'group',
    #     'district',
    #     'director',
    #     'seed',
    #     'prelim',
    #     'place',
    #     'score',
    # )
    save_on_top = True


@admin.register(Singer)
class SingerAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Singer,
        attrs={'width': '250px'},
    )
    save_on_top = True
    fields = (
        'name',
    )


@admin.register(Director)
class DirectorAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Director,
        attrs={'width': '250px'},
    )
    save_on_top = True
    fields = (
        'name',
    )


@admin.register(Judge)
class JudgeAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Judge,
        attrs={'width': '250px'},
    )
    save_on_top = True
    fields = (
        'name',
    )


@admin.register(Note)
class NoteAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Note,
        attrs={'width': '250px'},
    )
    save_on_top = True
    fields = (
        'user',
        'performance',
        'text',
    )


@admin.register(Song)
class SongAdmin(admin.ModelAdmin):
    form = select2_modelform(
        Song,
        attrs={'width': '250px'},
    )
    save_on_top = True
    fields = (
        'name',
    )


@admin.register(District)
class DistrictAdmin(admin.ModelAdmin):
    form = select2_modelform(
        District,
        attrs={'width': '250px'},
    )
    search_fields = (
        'name',
    )
    list_display = (
        'name',
        'location',
        'website',
        'facebook',
        'twitter',
        'email',
        'phone',
        'picture',
        'long_name',
        'kind',
    )
    list_filter = (
        'kind',
    )
    save_on_top = True
Python
0
@@ -3975,32 +3975,152 @@ 'kind',%0A )%0A%0A + readonly_fields = (%0A 'director',%0A 'lead',%0A 'tenor',%0A 'baritone',%0A 'bass',%0A )%0A%0A save_on_top
573b34ef9a07b47549d8074548fc0b7a7238b016
fix flake8 error
project/ctnotify/views.py
project/ctnotify/views.py
# -*- coding: utf-8 -*-
import json
# from werkzeug import Response
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from google.appengine.api import urlfetch
from google.appengine.ext import deferred
from kay.utils import render_to_response

from ctnotify.queue import get_process_que, get_sqs_connection, get_s3_object_key
from core.models import CTNotification, CTNotificationLog


def get_s3_object(aws_access_key_id, aws_secret_access_key, bucket_name, object_key):
    # remove later access key and id
    s3_connection = S3Connection(aws_access_key_id, aws_secret_access_key)
    bucket = s3_connection.get_bucket(bucket_name)
    s3_object = Key(bucket)
    s3_object.key = object_key
    s3_object_result = s3_object.get_contents_as_string()
    return s3_object_result


def parse_ctlog(json_data, filter_string):
    parse_result = []
    filters = filter_string.split(',')
    for ctlog in json_data['Records']:
        for filter in filters:
            if filter in ctlog['eventName']:
                parse_result.append({'event_name': ctlog['eventName'], 'ctlog': ctlog})
    return parse_result


def notify_to_slack(ctlogs, webhook_url):
    for ctlog in ctlogs:
        raw_content = '```\n'+json.dumps(ctlog['ctlog'], indent=4) + '\n```'
        payloads = json.dumps(
            {'text': raw_content,
             'username': u'notify-bot'}, ensure_ascii=True)
        urlfetch.fetch(
            url=webhook_url,
            payload=payloads,
            method=urlfetch.POST,
            deadline=30,
            follow_redirects=False,
        )
        entity = CTNotificationLog(event_name=ctlog['event_name'], raw_content=raw_content)
        entity.put()


def check_ctlog(ct_setting):
    sqs_connection = get_sqs_connection(ct_setting.access_key_id, ct_setting.secret_access_key)
    parse_task_que = get_process_que(
        sqs_connection, ct_setting.que_name)
    if parse_task_que.count() > 0:
        fetch_que_result = sqs_connection.receive_message(parse_task_que)
        bucket_name, object_key = get_s3_object_key(fetch_que_result[0].get_body())
        ctlog = json.loads(get_s3_object(ct_setting.access_key_id, ct_setting.secret_access_key, bucket_name, object_key))
        parse_result = parse_ctlog(ctlog, ct_setting.event_name)
        notify_to_slack(parse_result, ct_setting.slack_webhook)
        parse_task_que.delete_message(fetch_que_result[0])
        deferred.defer(check_ctlog, ct_setting)


def index(request):
    ct_notif_settings = CTNotification.all().fetch(1000)
    for ct_setting in ct_notif_settings:
        check_ctlog(ct_setting)
    return render_to_response('ctnotify/index.html', {'message': 'Hello'})
Python
0
@@ -1231,17 +1231,19 @@ '%60%60%60%5Cn' -+ + + json.dum @@ -2194,16 +2194,45 @@ ss_key,%0A +
3dd0d906832d353ff46df699e9ed78757641c50f
Update settings.py
projfd/projfd/settings.py
projfd/projfd/settings.py
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import getpass, imp, os
from django.utils.crypto import get_random_string
from subprocess import check_output

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SETTINGS_DIR = os.path.dirname(__file__)
PROJECT_PATH = os.path.join(SETTINGS_DIR, os.pardir)
PROJECT_PATH = os.path.abspath(PROJECT_PATH)
SECRET_KEY = get_random_string()

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            "django.contrib.auth.context_processors.auth",
            "django.template.context_processors.debug",
            "django.template.context_processors.i18n",
            "django.template.context_processors.media",
            "django.template.context_processors.static",
            "django.template.context_processors.tz",
            "django.contrib.messages.context_processors.messages",
            'social.apps.django_app.context_processors.backends',
            'social.apps.django_app.context_processors.login_redirect'
        ]
    },
},]

ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    'django_extensions',
    'social.apps.django_app.default',
    'mod_wsgi.server',
    'rest_framework',
    'appfd',
    'behave_django',
    'apifd'
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # disable csrf because prevents api calls to all views
    # 'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
)

ROOT_URLCONF = 'projfd.urls'

AUTHENTICATION_BACKENDS = (
    'social.backends.facebook.FacebookOAuth2',
    'social.backends.google.GoogleOAuth2',
    'social.backends.twitter.TwitterOAuth',
    'django.contrib.auth.backends.ModelBackend',
)

WSGI_APPLICATION = 'projfd.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'dbfd',
        'USER': getpass.getuser(), # gets current user running this file
        'PORT': 5432,
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = "/home/usrfd/firstdraft/projfd/static/"
STATIC_URL = '/static/'
DEFAULT_CHARSET = 'utf-8'
FILE_CHARSET = 'utf-8'

MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media')

# For Emailing
"""
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = EMAIL_HOST_USER
EMAIL_HOST_PASSWORD = EMAIL_HOST_PASSWORD
EMAIL_PORT = 587
EMAIL_USE_TLS = True
ACCOUNT_ACTIVATION_DAYS = 7
REGISTRATION_OPEN=True
"""

# For Python Social Auth
LOGIN_REDIRECT_URL = '/'

REST_FRAMEWORK = {
    'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',),
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'PAGE_SIZE': 10
}

SENDFILE_BACKEND = 'sendfile.backends.mod_wsgi'
SENDFILE_ROOT = 'home/usrfd/maps'
SENDFILE_URL = '/maps'

#://pythonhosted.org/django-guardian/configuration.html
ANONYMOUS_USER_ID = -1
Python
0.000001
@@ -1671,32 +1671,15 @@ cial -.apps.django_app.default +_django ',%0A @@ -2380,32 +2380,37 @@ S = (%0A 'social +_core .backends.facebo @@ -2431,32 +2431,37 @@ th2',%0A 'social +_core .backends.google @@ -2486,16 +2486,21 @@ 'social +_core .backend
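Decoded, the diff migrates from the monolithic python-social-auth package to its split successors; only the dotted paths change. The renamed entries, per the hunks above:

INSTALLED_APPS = (
    # ...
    'social_django',  # was 'social.apps.django_app.default'
)

AUTHENTICATION_BACKENDS = (
    'social_core.backends.facebook.FacebookOAuth2',  # was 'social.backends...'
    'social_core.backends.google.GoogleOAuth2',
    'social_core.backends.twitter.TwitterOAuth',
    'django.contrib.auth.backends.ModelBackend',
)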
60887e7ffb6954bc78b0be7ece93690211837149
remove debug code
hs.py
hs.py
#!/usr/bin/python3
################################################################################
# hs.py - Hokie Stalker
# Query the Virginia Tech people search service for information about a person.
# Licensed under the New BSD License.
#
# https://github.com/mutantmonkey/hokiestalker
# author: mutantmonkey <mutantmonkey@mutantmonkey.in>
################################################################################

import sys
import lxml.etree
import urllib.parse
import urllib.request

SEARCH_URL = "https://webapps.middleware.vt.edu/peoplesearch/PeopleSearch?query={0}&dsml-version=2"
NS = '{urn:oasis:names:tc:DSML:2:0:core}'
rows = []


"""Return a formatted row for printing."""
def row(name, data):
    if data is None:
        return

    if type(data) == str:
        rows.append("{0:20s}{1}".format(name + ':', data))
    else:
        rows.append("{0:20s}{1}".format(name + ':', data[0]))

        # print additional lines if necessary, trimming off the first row
        if len(data) > 1:
            for line in data[1:]:
                rows.append("{0:20s}{1}".format('', line))


"""Parse the address from an LDAP response where $ is used to separate lines."""
def parse_addr(data):
    if data is None:
        return None

    addr = data.split('$')
    return addr


"""Check if the current attribute has the desired key."""
def is_attr(attr, key):
    return attr.attrib['name'] == key


"""Search LDAP using the argument as a query. Argument must be a valid LDAP query."""
def search(query):
    query = urllib.parse.quote(query)
    r = urllib.request.Request(SEARCH_URL.format(query), headers={
        'User-agent' : 'hokiestalker/2.0',
    })
    f = urllib.request.urlopen(r)

    xml = lxml.etree.parse(f)
    results = xml.findall('{0}searchResponse/{0}searchResultEntry'.format(NS))
    if len(results) <= 0:
        return False

    for entry in results:
        entry_data = {}
        for attr in entry:
            entry_data[attr.attrib['name']] = attr[0].text

        print(entry_data)

        names = []
        if 'displayName' in entry_data:
            names.append(entry_data['displayName'])
        if 'givenName' in entry_data and 'sn' in entry_data:
            if 'middleName' in entry_data:
                names.append('{0} {1} {2}'.format(entry_data['givenName'], entry_data['middleName'], entry_data['sn']))
            else:
                names.append('{0} {1}'.format(entry_data['givenName'], entry_data['sn']))

        row('Name', names)
        if 'uid' in entry_data:
            row('UID', entry_data['uid'])
        if 'uupid' in entry_data:
            row('PID', entry_data['uupid'])
        if 'major' in entry_data:
            row('Major', entry_data['major'])
        elif 'department' in entry_data:
            row('Department', entry_data['department'])
        if 'title' in entry_data:
            row('Title', entry_data['title'])
        if 'postalAddress' in entry_data:
            row('Office', parse_addr(entry_data['postalAddress']))
        if 'mailStop' in entry_data:
            row('Mail Stop', entry_data['mailStop'])
        if 'telephoneNumber' in entry_data:
            row('Office Phone', entry_data['telephoneNumber'])
        if 'localPostalAddress' in entry_data:
            row('Mailing Address', parse_addr(entry_data['localPostalAddress']))
        if 'localPhone' in entry_data:
            row('Phone Number', entry_data['localPhone'])
        if 'mail' in entry_data:
            row('Email Address', entry_data['mail'])

        print("\n".join(rows))
        del rows[:]

        if entry.getnext() is not None and entry.getnext().tag != \
                '{0}searchResultDone'.format(NS):
            print()

    return True

q = sys.argv[1:]
s = search(' '.join(q))
if not s:
    print("No results found")
Python
0.02323
@@ -2006,35 +2006,8 @@ xt%0A%0A - print(entry_data)%0A%0A
50628685c310703fb24f266dfd4d72b666eecfa4
Update version to 1.0.0 (not yet tagged)
py/desisurvey/_version.py
py/desisurvey/_version.py
__version__ = '0.8.2.dev415'
Python
0
@@ -12,18 +12,11 @@ = ' -0.8.2.dev415 +1.0.0 '%0A
aa13f33ec07b6a1dd9728d311c2c05b2a333b5a6
Fix wrong reference to self in internalServerErrorHandler.
src/sheared/web/server/server.py
src/sheared/web/server/server.py
# vim:nowrap:textwidth=0
#
# Sheared -- non-blocking network programming library for Python
# Copyright (C) 2003  Sune Kirkeby <sune@mel.interspace.dk>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
#

import sys, traceback, types, errno

from sheared import error
from sheared.protocol import http
from sheared.python import io
from sheared.python import log
from sheared.web import cookie

import oh_nine, one_oh

def movedHandler(server, exception, request, reply):
    reply.send("This page has moved. You can now find it here:\r\n"
               "  %s\r\n" % reply.headers.get('Location'))

def unauthorizedErrorHandler(server, exception, request, reply):
    reply.send("I need to see some credentials.\r\n")

def forbiddenErrorHandler(server, exception, request, reply):
    reply.send("Forbidden.\r\n")

def notFoundErrorHandler(server, exception, request, reply):
    reply.send("Not found.\r\n")

def internalServerErrorHandler(server, exception, request, reply):
    reply.send('Internal Server Error.\r\n')
    self.logInternalError(exception.args)

def defaultErrorHandler(server, exception, request, reply):
    reply.send("I am terribly sorry, but an error (%d) occured "
               "while processing your request.\r\n" % e.statusCode)
    server.logInternalError(exception)

class HTTPServer:
    def __init__(self):
        self.hosts = {}
        self.default_host = None

        self.oh_nine = oh_nine.Server()
        self.one_oh = one_oh.Server()

        self.errorlog = None
        self.massageRequestCallbacks = []
        self.requestCompletedCallbacks = []
        self.errorHandlers = [
            (Exception, defaultErrorHandler),
            (error.web.InternalServerError, internalServerErrorHandler),
            (error.web.NotFoundError, notFoundErrorHandler),
            (error.web.ForbiddenError, forbiddenErrorHandler),
            (error.web.UnauthorizedError, unauthorizedErrorHandler),
            (error.web.Moved, movedHandler),
        ]

    def addVirtualHost(self, name, vhost):
        self.hosts[name] = vhost

    def setDefaultHost(self, name):
        self.default_host = name

    def setErrorLog(self, l):
        self.errorlog = l

    def startup(self, transport):
        try:
            client = transport.other
            transport = io.BufferedReader(transport)
            rl = transport.readline().rstrip()
            if not rl:
                transport.close()
                return

            try:
                requestline = http.HTTPRequestLine(rl)
            except ValueError:
                raise error.web.BadRequestError, 'could not parse request-line: %r' % rl

            if requestline.version[0] == 0:
                # HTTP/0.9
                request, reply = self.oh_nine.parse(transport, requestline)
                request.other = client
                self.handle(request, reply)
            elif requestline.version[0] == 1:
                # HTTP/1.x
                request, reply = self.one_oh.parse(transport, requestline)
                request.other = client
                self.handle(request, reply)
            else:
                # FIXME -- is this the Right Thing?
                raise error.web.NotImplementedError, 'HTTP Version not supported'

        except error.web.WebServerError, e:
            if len(e.args) == 1 and isinstance(e.args[0], types.StringType):
                err = e.args[0]
            else:
                err = 'Unknown why?!'
            transport.write('HTTP/1.0 %d %s\r\n' % (e.statusCode, err))
            transport.write('Content-Type: text/plain\r\n\r\n')
            transport.write('Crashing in flames!\r\n')

        except OSError, e:
            if e.errno in (errno.ECONNRESET, errno.EPIPE):
                pass
            else:
                raise

        except:
            self.logInternalError(sys.exc_info())

        transport.close()

    def handle(self, request, reply):
        try:
            if not request.headers.has_key('Host'):
                if self.default_host:
                    request.headers.setHeader('Host', self.default_host)
                else:
                    raise error.web.NotFoundError, 'no Host header and no default host'

            try:
                vhost = self.hosts[request.headers['Host']]
                request.hostname = request.headers['Host']
            except KeyError:
                if self.default_host:
                    vhost = self.hosts[self.default_host]
                    request.hostname = self.default_host
                else:
                    raise error.web.NotFoundError, 'unknown host and no default host'

            for cb in self.massageRequestCallbacks:
                cb(request, reply)

            try:
                vhost.handle(request, reply)
            except error.web.WebServerError:
                raise
            except:
                raise error.web.InternalServerError, sys.exc_info()

        except error.web.WebServerError, e:
            if not reply.decapitated:
                reply.setStatusCode(e.statusCode)
                reply.headers.setHeader('Content-Type', 'text/plain')
            self.handleWebServerError(e, request, reply)

        for cb in self.requestCompletedCallbacks:
            try:
                cb(request, reply)
            except:
                self.logInternalError(sys.exc_info())

    def handleWebServerError(self, exception, request, reply):
        handler = None
        for kls, hnd in self.errorHandlers:
            if isinstance(exception, kls):
                if handler:
                    if issubclass(kls, handler[0]):
                        handler = kls, hnd
                else:
                    handler = kls, hnd
        if not handler:
            raise error.web.InternalServerError
        handler[1](self, exception, request, reply)

    def logInternalError(self, ex):
        if self.errorlog:
            self.errorlog.exception(ex)
        else:
            log.default.exception(ex)

__all__ = ['HTTPServer']
Python
0.999918
@@ -1673,26 +1673,28 @@ r%5Cn')%0A se -lf +rver .logInternal
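The diff above only swaps self for server inside internalServerErrorHandler; the more interesting machinery in this record is the (exception class, handler) list that handleWebServerError scans for the most specific match. A minimal self-contained sketch of that lookup, using illustrative exception names rather than Sheared's own:

class NotFoundError(Exception):
    pass

class Moved(NotFoundError):
    pass

def pick_handler(exc, handlers):
    # Walk every (class, handler) pair and keep the handler whose class
    # is the most derived one that still matches the exception instance.
    best = None
    for kls, hnd in handlers:
        if isinstance(exc, kls):
            if best is None or issubclass(kls, best[0]):
                best = (kls, hnd)
    return best

handlers = [(Exception, "generic"),
            (NotFoundError, "not-found"),
            (Moved, "moved")]
assert pick_handler(Moved(), handlers)[1] == "moved"
assert pick_handler(NotFoundError(), handlers)[1] == "not-found"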
5934d94c9644eaea850a27773db5890b68078477
Load all API items
pybossa_analyst/client.py
pybossa_analyst/client.py
# -*- coding: utf8 -*- """API client module for pybossa-analyst.""" import enki class PyBossaClient(object): """A class for interacting with PyBossa.""" def __init__(self, app=None): """Init method.""" self.app = app if app is not None: # pragma: no cover self.init_app(app) def init_app(self, app): self.api_key = app.config['API_KEY'] self.endpoint = app.config['ENDPOINT'] enki.pbclient.set('api_key', self.api_key) enki.pbclient.set('endpoint', self.endpoint) def get_results(self, project_id, **kwargs): """Return results.""" return enki.pbclient.find_results(project_id, all=1, **kwargs) def get_tasks(self, project_id, **kwargs): """Return tasks.""" return enki.pbclient.find_tasks(project_id, all=1, **kwargs) def get_task_runs(self, project_id, **kwargs): """Return task runs.""" return enki.pbclient.find_taskruns(project_id, all=1, **kwargs) def get_projects(self, **kwargs): """Return projects.""" return enki.pbclient.find_project(all=1, **kwargs) def update_result(self, result): """Update a result.""" return enki.pbclient.update_result(result) def get_task_run_dataframe(self, project_id, task_id): """Return a dataframe containing all task run info for a task.""" p = self.get_projects(id=project_id)[0] e = enki.Enki(self.api_key, self.endpoint, p.short_name, all=1) e.get_tasks(task_id=task_id) e.get_task_runs() t = e.tasks[0] return e.task_runs_df[t.id]
Python
0
@@ -556,578 +556,1659 @@ def -get_results(self, project_id, **kwargs):%0A %22%22%22Return results.%22%22%22%0A return enki.pbclient.find_results(project_id, all=1, **kwargs)%0A%0A def get_tasks(self, project_id, **kwargs):%0A %22%22%22Return tasks.%22%22%22%0A return enki.pbclient.find_tasks(project_id, all=1, **kwargs)%0A%0A def get_task_runs(self, project_id, **kwargs):%0A %22%22%22Return task runs.%22%22%22%0A return enki.pbclient.find_taskruns(project_id, all=1, **kwargs)%0A%0A def get_projects(self, **kwargs):%0A %22%22%22Return projects.%22%22%22%0A return enki.pbclient.find_project(all=1, **kwargs +_load(self, func, query):%0A items = func(**query)%0A last_fetched = items%0A while self._not_exhausted(last_fetched, query):%0A query%5B'last_id'%5D = last_fetched%5B-1%5D.id%0A last_fetched = func(**query)%0A items += last_fetched%0A return items%0A%0A def _not_exhausted(self, last_fetched, query):%0A return (len(last_fetched) != 0%0A and len(last_fetched) == query%5B'limit'%5D%0A and query.get('id') is None)%0A%0A def get_results(self, project_id, **kwargs):%0A %22%22%22Return results.%22%22%22%0A query = dict(project_id=project_id, all='1', limit=100, **kwargs)%0A if kwargs.get('limit'):%0A return enki.pbclient.find_results(**query)%0A return self._load(enki.pbclient.find_results, query)%0A%0A def get_tasks(self, project_id, **kwargs):%0A %22%22%22Return tasks.%22%22%22%0A query = dict(project_id=project_id, all='1', limit=100, **kwargs)%0A if kwargs.get('limit'):%0A return enki.pbclient.find_tasks(**query)%0A return self._load(enki.pbclient.find_tasks, query)%0A%0A def get_task_runs(self, project_id, **kwargs):%0A %22%22%22Return task runs.%22%22%22%0A query = dict(project_id=project_id, all='1', limit=100, **kwargs)%0A if kwargs.get('limit'):%0A return enki.pbclient.find_taskruns(**query)%0A return self._load(enki.pbclient.find_taskruns, query)%0A%0A def get_projects(self, **kwargs):%0A %22%22%22Return projects.%22%22%22%0A query = dict(all='1', limit=100, **kwargs)%0A if kwargs.get('limit'):%0A return enki.pbclient.find_project(**query)%0A return self._load(enki.pbclient.find_project, query )%0A%0A @@ -2502,16 +2502,25 @@ oject_id +, limit=1 )%5B0%5D%0A
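The heart of this change is keyset pagination: keep requesting pages of size limit, feeding the last seen id back as last_id, until a short page signals the end. A hedged sketch of that loop, where fetch_page stands in for the enki.pbclient.find_* functions and is assumed to return at most query['limit'] objects, each carrying an .id attribute:

def load_all(fetch_page, query):
    items = fetch_page(**query)
    last = items
    # A full page means there may be more; resume after the last id seen.
    while len(last) == query['limit']:
        query['last_id'] = last[-1].id
        last = fetch_page(**query)
        items = items + last
    return items

(The real _load additionally stops early when the caller filtered on a specific id.)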
701405657d4edcf3c953a7f45866829f7294d2d0
Make tests a bit more robust about the cwd
pycket/test/test_basic.py
pycket/test/test_basic.py
import pytest from pycket.expand import expand, to_ast from pycket.interpreter import * from pycket.values import * from pycket.prims import * def run_fix(p,v): e = expand(p) ast = to_ast(e) val = interpret_one(ast) assert isinstance(val, W_Fixnum) assert val.value == v def run(p,v): e = expand(p) ast = to_ast(e) val = interpret_one(ast) assert equal_loop(val,v) with file("../stdlib.sch") as f: stdlib = f.read() def run_top(p,v=None): pp = "(let () \n %s \n %s\n %s)"%(stdlib, p, "" if v else "(void)") e = expand(pp) ast = to_ast(e) val = interpret([ast]) if v: assert equal_loop(val,v) def test_constant(): prog = "1" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 1 def test_read_err (): with pytest.raises(Exception): expand ("(") with pytest.raises(Exception): expand ("1 2") with pytest.raises(Exception): expand ("(1 2) 3") def test_plus(): prog = "(+ 2 3)" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 5 def test_thunk(): prog = "((lambda () 1))" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 1 def test_thunk2(): prog = "((lambda () 1 2))" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 2 def test_call(): prog = "((lambda (x) (+ x 1)) 2)" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 3 def test_curry(): prog = "(((lambda (y) (lambda (x) (+ x y))) 2) 3)" val = interpret_one(to_ast(expand(prog))) assert isinstance(val, W_Fixnum) assert val.value == 5 def test_arith(): run_fix("(+ 1 2)", 3) run_fix("(* 1 2)", 2) run_fix("(- 1 2)", -1) run_fix("(* -1 2)", -2) def test_letrec(): run_fix("(letrec ([x 1]) x)", 1) run_fix("(letrec ([x 1] [y 2]) y)", 2) run_fix("(letrec ([x 1] [y 2]) (+ x y))", 3) run_fix("(let ([x 0]) (letrec ([x 1] [y x]) (+ x y)))", 2) run_fix("(letrec ([x (lambda (z) x)]) 2)", 2) def test_let(): run_fix("(let () 1)", 1) run_fix("(let ([x 1]) x)", 1) run_fix("(let ([x 1] [y 2]) y)", 2) run_fix("(let ([x 1] [y 2]) (+ x y))", 3) run_fix("(let ([x 0]) (let ([x 1] [y x]) (+ x y)))", 1) def test_fac(): run_fix("(letrec ([fac (lambda (n) (if (= n 0) 1 (* n (fac (- n 1)))))]) (fac 5))", 120) def test_fib(): run_fix("(letrec ([fib (lambda (n) (if (< n 2) 1 (+ (fib (- n 1)) (fib (- n 2)))))]) (fib 2))", 2) run_fix("(letrec ([fib (lambda (n) (if (< n 2) 1 (+ (fib (- n 1)) (fib (- n 2)))))]) (fib 3))", 3) def test_void(): run ("(void)", w_void) run ("(void 1)", w_void) run ("(void 2 3 #true)", w_void) def test_cons(): run_fix ("(car (cons 1 2))", 1) run_fix ("(cdr (cons 1 2))", 2) def test_set_car(): run_fix ("(letrec ([x (cons 1 2)]) (set-car! x 3) (car x))", 3) run_fix ("(letrec ([x (cons 1 2)]) (set-cdr! x 3) (cdr x))", 3) def test_set_bang(): run("((lambda (x) (set! x #t) x) 1)", w_true) run("(letrec([x 0]) ((lambda (x) (set! 
x #t) x) 1))", w_true) def test_bools(): run ("#t", w_true) run ("#true", w_true) run ("#T", w_true) run ("#f", w_false) run ("#false", w_false) run ("#F", w_false) run ("true", w_true) run ("false", w_false) def test_lists(): run ("null", w_null) run ("(list)", w_null) run ("(list #t)", to_list ([w_true])) def test_fib(): Y = """ (lambda (f) ((lambda (x) (x x)) (lambda (g) (f (lambda (z) ((g g) z)))))) """ fac = """ (lambda (f) (lambda (x) (if (< x 2) 1 (* x (f (- x 1)))))) """ fib = """ (lambda (f) (lambda (x) (if (< x 2) x (+ (f (- x 1)) (f (- x 2)))))) """ run_fix("((%s %s) 2)"%(Y,fib), 1) run_fix("((%s %s) 2)"%(Y,fac), 2) def test_vararg(): run_fix ("((lambda x (car x)) 1)", 1) run_fix ("((lambda (a . x) a) 1)", 1) run ("((lambda (a . x) x) 1)", w_null) def test_callcc(): run_fix ("(call/cc (lambda (k) 1))", 1) run_fix ("(+ 1 (call/cc (lambda (k) 1)))", 2) run_fix ("(+ 1 (call/cc (lambda (k) (k 1))))", 2) run_fix ("(+ 1 (call/cc (lambda (k) (+ 5 (k 1)))))", 2) def test_define(): run_top("(define x 1) x", W_Fixnum(1))
Python
0.000001
@@ -7,16 +7,26 @@ pytest%0A +import os%0A from pyc @@ -147,16 +147,100 @@ port *%0A%0A +stdlib_fn = os.path.join(os.path.dirname(os.path.dirname(__file__)), %22stdlib.sch%22)%0A%0A def run_ @@ -505,23 +505,17 @@ ile( -%22../ stdlib -.sch%22 +_fn ) as
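The fix itself is small enough to restate: anchor the stdlib path to the test module's own location instead of the process's working directory, so the suite passes no matter where pytest is launched from. Roughly, with open() as the portable spelling of the record's Python-2 file() and os.path.abspath as an extra safeguard not present in the diff:

import os

stdlib_fn = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    "stdlib.sch")

with open(stdlib_fn) as f:  # replaces the old open("../stdlib.sch")
    stdlib = f.read()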
73b7d0670414ec65a152d239a5c5c60464ce8ff9
Fix bad import. Fixes #2196
pymatgen/core/__init__.py
pymatgen/core/__init__.py
# coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """ This package contains core modules and classes for representing structures and operations on them. """ import os try: from ruamal import yaml except ImportError: try: import ruamel_yaml as yaml # type: ignore # noqa except ImportError: import yaml # type: ignore # noqa from .composition import Composition # noqa from .lattice import Lattice # noqa from .operations import SymmOp # noqa from .periodic_table import DummySpecies, Element, Species # noqa from .sites import PeriodicSite, Site # noqa from .structure import IMolecule, IStructure, Molecule, Structure # noqa from .units import ArrayWithUnit, FloatWithUnit, Unit # noqa __author__ = "Pymatgen Development Team" __email__ = "pymatgen@googlegroups.com" __maintainer__ = "Shyue Ping Ong" __maintainer_email__ = "shyuep@gmail.com" __version__ = "2022.0.10" SETTINGS_FILE = os.path.join(os.path.expanduser("~"), ".pmgrc.yaml") def _load_pmg_settings(): # Load environment variables by default as backup d = {} for k, v in os.environ.items(): if k.startswith("PMG_"): d[k] = v elif k in ["VASP_PSP_DIR", "MAPI_KEY", "DEFAULT_FUNCTIONAL"]: d["PMG_" + k] = v # Override anything in env vars with that in yml file try: with open(SETTINGS_FILE, "rt") as f: d_yml = yaml.safe_load(f) d.update(d_yml) except IOError: # If there are any errors, default to using environment variables # if present. pass d = d or {} return dict(d) SETTINGS = _load_pmg_settings() locals().update(SETTINGS)
Python
0.000001
@@ -240,17 +240,17 @@ rom ruam -a +e l import
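The one-character fix (ruamal -> ruamel) restores the intended three-tier YAML import; for reference, the corrected fallback chain reads:

try:
    from ruamel import yaml           # ruamel.yaml, the preferred parser
except ImportError:
    try:
        import ruamel_yaml as yaml    # conda's underscore-named package
    except ImportError:
        import yaml                   # plain PyYAML as a last resort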
afd6afa06e60676d1d633de9529dde0a5c4b6683
Remove unnecessary yield
python/ciphers/polyval.py
python/ciphers/polyval.py
# Copyright 2021 Google LLC # # Use of this source code is governed by an MIT-style # license that can be found in the LICENSE file or at # https://opensource.org/licenses/MIT. import ciphers.cipher import ciphers.gf import parsers.polyval class Hash(ciphers.cipher.Cipher): def make_testvector(self, input, description): return { 'cipher': self.variant, 'description': description, 'input': input, 'hash': self.hash(**input), } def check_testvector(self, tv): self.variant = tv['cipher'] assert tv['hash'] == self.hash(**tv['input']) def linux_testvec_struct(self): return 'hash_testvec' class Polyval(Hash): def __init__(self): super().__init__() self.gf = ciphers.gf.GF([128, 127, 126, 121, 0]) self.polyval_const = self.gf.from_int( sum(1 << x for x in [127, 124, 121, 114, 0])) self.choose_variant(lambda x: True) def variant_name(self): return self.name() def variants(self): yield { 'cipher': 'Polyval', 'lengths': { 'key': self.gf.blocksize, } } def test_input_lengths(self): v = dict(self.lengths()) for mlen in range(0, 80, 16): yield {**v, "message": mlen} yield {**v, "message": 256} def hash(self, key, message): blocksize = self.lengths()['key'] assert len(message) % blocksize == 0 hgen = self.gf.from_bytes(key, byteorder="little") hpoly = hgen * self.polyval_const hash_result = self.gf.from_int(0) for i in range(0, len(message), blocksize): hash_result += self.gf.from_bytes( message[i:i + blocksize], byteorder='little') hash_result *= hpoly return hash_result.to_bytes(byteorder='little') def external_testvectors(self, tvdir): for tv in parsers.polyval.parse_tvs(tvdir): yield { 'cipher': self.variant, 'description': "From RFC", 'input': { 'key': tv['Record authentication key'], 'message': tv['POLYVAL input'], }, 'hash': tv['POLYVAL result'], } def testvec_fields(self): return ['key', 'message', 'hash'] def convert_testvec(self, v): return { 'key': v['input']['key'], 'message': v['input']['message'], 'hash': v['hash'], } def linux_convert_testvec(self, v): return { 'key': v['input']['key'], 'plaintext': v['input']['message'], 'digest': v['hash'], 'psize': len(v['input']['message']), 'ksize': len(v['input']['key']), }
Python
0.000015
@@ -1280,24 +1280,32 @@ in -range(0, 80, 16) +%5B0, 16, 32, 48, 64, 256%5D :%0A @@ -1346,44 +1346,8 @@ len%7D -%0A yield %7B**v, %22message%22: 256%7D %0A%0A
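The diff trades two yields (a range plus a lone 256 case) for a single explicit list; a quick self-contained check that the covered message lengths are unchanged, with v standing in for the record's lengths dict:

v = {"key": 16}
old = [dict(v, message=m) for m in range(0, 80, 16)] + [dict(v, message=256)]
new = [dict(v, message=m) for m in [0, 16, 32, 48, 64, 256]]
assert old == new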
d55f2b98822faa7d71f5fce2bfa980f8265e0610
Use take() instead of takeSample() in PySpark kmeans example.
python/examples/kmeans.py
python/examples/kmeans.py
""" This example requires numpy (http://www.numpy.org/) """ import sys import numpy as np from pyspark import SparkContext def parseVector(line): return np.array([float(x) for x in line.split(' ')]) def closestPoint(p, centers): bestIndex = 0 closest = float("+inf") for i in range(len(centers)): tempDist = np.sum((p - centers[i]) ** 2) if tempDist < closest: closest = tempDist bestIndex = i return bestIndex if __name__ == "__main__": if len(sys.argv) < 5: print >> sys.stderr, \ "Usage: PythonKMeans <master> <file> <k> <convergeDist>" exit(-1) sc = SparkContext(sys.argv[1], "PythonKMeans") lines = sc.textFile(sys.argv[2]) data = lines.map(parseVector).cache() K = int(sys.argv[3]) convergeDist = float(sys.argv[4]) kPoints = data.takeSample(False, K, 34) tempDist = 1.0 while tempDist > convergeDist: closest = data.map( lambda p : (closestPoint(p, kPoints), (p, 1))) pointStats = closest.reduceByKey( lambda (x1, y1), (x2, y2): (x1 + x2, y1 + y2)) newPoints = pointStats.map( lambda (x, (y, z)): (x, y / z)).collect() tempDist = sum(np.sum((kPoints[x] - y) ** 2) for (x, y) in newPoints) for (x, y) in newPoints: kPoints[x] = y print "Final centers: " + str(kPoints)
Python
0
@@ -835,24 +835,76 @@ gv%5B4%5D)%0A%0A +# TODO: change this after we port takeSample()%0A # kPoints = da @@ -931,16 +931,43 @@ K, 34)%0A + kPoints = data.take(K)%0A temp
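Beyond the takeSample -> take swap, the record's main loop is a standard Lloyd iteration expressed as map/reduceByKey. A local, non-Spark sketch of one such iteration (points and kPoints are assumed to be lists of equal-length numpy arrays), mirroring the closest/pointStats/newPoints pipeline:

import numpy as np

def lloyd_step(points, kPoints):
    sums = {}
    for p in points:
        # Index of the closest current center, as closestPoint computes.
        i = min(range(len(kPoints)),
                key=lambda j: np.sum((p - kPoints[j]) ** 2))
        total, count = sums.get(i, (0.0, 0))
        sums[i] = (total + p, count + 1)
    # Each new center is the mean of the points assigned to it.
    return [(i, total / count) for i, (total, count) in sums.items()]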
d9f0f53997bbaea4a55588f2d7edcb2ae9e85184
Remove install-time PySerial dependency and don't fail if installed as non-root
python/fibre/discovery.py
python/fibre/discovery.py
""" Provides functions for the discovery of Fibre nodes """ import sys import json import time import threading import traceback import fibre.protocol import fibre.utils import fibre.remote_object import fibre.serial_transport from fibre.utils import Event, Logger from fibre.protocol import ChannelBrokenException # Load all installed transport layers channel_types = {} try: import fibre.usbbulk_transport channel_types['usb'] = fibre.usbbulk_transport.discover_channels except ModuleNotFoundError: pass try: import fibre.serial_transport channel_types['serial'] = fibre.serial_transport.discover_channels except ModuleNotFoundError: pass try: import fibre.tcp_transport channel_types['tcp'] = fibre.tcp_transport.discover_channels except ModuleNotFoundError: pass try: import fibre.udp_transport channel_types['udp'] = fibre.udp_transport.discover_channels except ModuleNotFoundError: pass def noprint(text): pass def find_all(path, serial_number, did_discover_object_callback, search_cancellation_token, channel_termination_token, logger): """ Starts scanning for Fibre nodes that match the specified path spec and calls the callback for each Fibre node that is found. This function is non-blocking. """ def did_discover_channel(channel): """ Inits an object from a given channel and then calls did_discover_object_callback with the created object This queries the endpoint 0 on that channel to gain information about the interface, which is then used to init the corresponding object. """ try: logger.debug("Connecting to device on " + channel._name) try: json_bytes = channel.remote_endpoint_read_buffer(0) except (TimeoutError, ChannelBrokenException): logger.debug("no response - probably incompatible") return json_crc16 = fibre.protocol.calc_crc16(fibre.protocol.PROTOCOL_VERSION, json_bytes) channel._interface_definition_crc = json_crc16 try: json_string = json_bytes.decode("ascii") except UnicodeDecodeError: logger.debug("device responded on endpoint 0 with something that is not ASCII") return logger.debug("JSON: " + json_string.replace('{"name"', '\n{"name"')) logger.debug("JSON checksum: 0x{:02X} 0x{:02X}".format(json_crc16 & 0xff, (json_crc16 >> 8) & 0xff)) try: json_data = json.loads(json_string) except json.decoder.JSONDecodeError as error: logger.debug("device responded on endpoint 0 with something that is not JSON: " + str(error)) return json_data = {"name": "fibre_node", "members": json_data} obj = fibre.remote_object.RemoteObject(json_data, None, channel, logger) obj.__dict__['_json_data'] = json_data['members'] obj.__dict__['_json_crc'] = json_crc16 device_serial_number = fibre.utils.get_serial_number_str(obj) if serial_number != None and device_serial_number != serial_number: logger.debug("Ignoring device with serial number {}".format(device_serial_number)) return did_discover_object_callback(obj) except Exception: logger.debug("Unexpected exception after discovering channel: " + traceback.format_exc()) # For each connection type, kick off an appropriate discovery loop for search_spec in path.split(','): prefix = search_spec.split(':')[0] the_rest = ':'.join(search_spec.split(':')[1:]) if prefix in channel_types: threading.Thread(target=channel_types[prefix], args=(the_rest, serial_number, did_discover_channel, search_cancellation_token, channel_termination_token, logger)).start() else: raise Exception("Invalid path spec \"{}\"".format(search_spec)) def find_any(path="usb", serial_number=None, search_cancellation_token=None, channel_termination_token=None, timeout=None, 
logger=Logger(verbose=False)): """ Blocks until the first matching Fibre node is connected and then returns that node """ result = [ None ] done_signal = Event(search_cancellation_token) def did_discover_object(obj): result[0] = obj done_signal.set() find_all(path, serial_number, did_discover_object, done_signal, channel_termination_token, logger) try: done_signal.wait(timeout=timeout) finally: done_signal.set() # terminate find_all return result[0]
Python
0
@@ -195,38 +195,8 @@ ect%0A -import fibre.serial_transport%0A from
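The pattern being preserved here deserves a name: each transport registers its discover function only if its imports succeed, so a missing optional dependency (such as pyserial) merely disables that backend instead of breaking the package at import time. A hedged generic sketch, where register_optional is a hypothetical helper rather than fibre's API; catching ImportError (the base class of ModuleNotFoundError) would also keep it working on Pythons older than 3.6:

channel_types = {}

def register_optional(name, module_path, attr):
    try:
        module = __import__(module_path, fromlist=[attr])
        channel_types[name] = getattr(module, attr)
    except ImportError:
        pass  # optional dependency missing: skip this transport

register_optional('serial', 'fibre.serial_transport', 'discover_channels')
register_optional('tcp', 'fibre.tcp_transport', 'discover_channels')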
e0c3a6d602549633fd4ec732b88ca8e6ef2e7d27
Optimize parameter names
python/paddle/v2/layer.py
python/paddle/v2/layer.py
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import paddle.trainer_config_helpers as conf_helps from paddle.trainer_config_helpers.config_parser_utils import \ parse_network_config as __parse__ from paddle.trainer_config_helpers.default_decorators import wrap_name_default import collections def parse_network(*outputs): """ parse all output layers and then generate a model config proto. :param outputs: :return: """ def __real_func__(): context = dict() real_output = [each.to_proto(context=context) for each in outputs] conf_helps.outputs(real_output) return __parse__(__real_func__) class Layer(object): def __init__(self, name, parent_layer): assert isinstance(parent_layer, dict) assert isinstance(name, basestring) self.name = name self.__parent_layer__ = parent_layer def to_proto(self, context): """ function to set proto attribute """ kwargs = dict() for param_name in self.__parent_layer__: if not isinstance(self.__parent_layer__[param_name], collections.Sequence): param_value = self.__parent_layer__[param_name].to_proto( context=context) else: param_value = map(lambda x: x.to_proto(context=context), self.__parent_layer__[param_name]) kwargs[param_name] = param_value if self.name not in context: context[self.name] = self.to_proto_impl(**kwargs) return context[self.name] def to_proto_impl(self, **kwargs): raise NotImplementedError() def __convert_to_v2__(method_name, name_prefix, parent_names): if name_prefix is not None: wrapper = wrap_name_default(name_prefix=name_prefix) else: wrapper = None class V2LayerImpl(Layer): def __init__(self, name=None, **kwargs): parent_layers = dict() other_kwargs = dict() for pname in parent_names: parent_layers[pname] = kwargs[pname] for key in kwargs.keys(): if key not in parent_names: other_kwargs[key] = kwargs[key] super(V2LayerImpl, self).__init__(name, parent_layers) self.__other_kwargs__ = other_kwargs if wrapper is not None: __init__ = wrapper(__init__) def to_proto_impl(self, **kwargs): args = dict() for each in kwargs: args[each] = kwargs[each] for each in self.__other_kwargs__: args[each] = self.__other_kwargs__[each] return getattr(conf_helps, method_name)(name=self.name, **args) return V2LayerImpl data = __convert_to_v2__('data_layer', None, []) fc = __convert_to_v2__('fc_layer', name_prefix='fc', parent_names=['input']) max_id = __convert_to_v2__( 'maxid_layer', name_prefix='maxid_layer', parent_names=['input']) classification_cost = __convert_to_v2__( 'classification_cost', name_prefix='classification_cost', parent_names=['input', 'label']) cross_entropy_cost = __convert_to_v2__( 'cross_entropy', name_prefix='cross_entropy', parent_names=['input', 'label']) __all__ = [ 'parse_network', 'data', 'fc', 'max_id', 'classification_cost', 'cross_entropy_cost' ] if __name__ == '__main__': pixel = data(name='pixel', size=784) label = data(name='label', size=10) hidden = fc(input=pixel, size=100, act=conf_helps.SigmoidActivation()) inference = fc(input=hidden, size=10, act=conf_helps.SoftmaxActivation()) 
maxid = max_id(input=inference) cost1 = classification_cost(input=inference, label=label) cost2 = cross_entropy_cost(input=inference, label=label) print parse_network(cost1) print parse_network(cost2) print parse_network(cost1, cost2) print parse_network(cost2) print parse_network(inference, maxid)
Python
0.00001
@@ -1265,24 +1265,25 @@ parent_layer +s ):%0A a @@ -1311,16 +1311,17 @@ nt_layer +s , dict)%0A @@ -1416,16 +1416,17 @@ nt_layer +s __ = par @@ -1434,16 +1434,17 @@ nt_layer +s %0A%0A de @@ -1570,21 +1570,21 @@ for -param +layer _name in @@ -1603,16 +1603,17 @@ nt_layer +s __:%0A @@ -1653,32 +1653,33 @@ parent_layer +s __%5B -param +layer _name%5D,%0A @@ -1743,27 +1743,24 @@ -param_value +v1_layer = self. @@ -1769,32 +1769,33 @@ parent_layer +s __%5B -param +layer _name%5D.to_pr @@ -1870,27 +1870,24 @@ -param_value +v1_layer = map(l @@ -1951,27 +1951,24 @@ - self.__paren @@ -1978,16 +1978,17 @@ ayer +s __%5B -param +layer _nam @@ -2010,21 +2010,21 @@ kwargs%5B -param +layer _name%5D = @@ -2028,19 +2028,16 @@ %5D = -param_value +v1_layer %0A%0A
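Worth distilling from this record is the lazy, memoized graph construction: to_proto resolves its parents first and caches each layer's result by name in a shared context dict, so a node reachable along several paths is built exactly once. A stripped-down sketch in which build stands in for the real conf_helps call:

class Layer(object):
    def __init__(self, name, parents):
        self.name = name
        self.parents = parents

    def to_proto(self, context):
        if self.name not in context:
            inputs = [p.to_proto(context) for p in self.parents]
            context[self.name] = self.build(inputs)
        return context[self.name]

    def build(self, inputs):
        return (self.name, inputs)  # placeholder for the v1 config call

a = Layer('a', [])
b = Layer('b', [a])
c = Layer('c', [a, b])  # 'a' is reachable twice, yet built only once
ctx = {}
c.to_proto(ctx)
assert sorted(ctx) == ['a', 'b', 'c']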
07f8b59e5708d423da709d91170f2ecd3946a397
Log whether break_on_open is enabled
pythonx/vdebug/session.py
pythonx/vdebug/session.py
from __future__ import print_function import socket import vim from . import dbgp from . import error from . import event from . import listener from . import log from . import opts from . import util class SessionHandler: def __init__(self, ui, breakpoints): self.__ui = ui self.__breakpoints = breakpoints self.__ex_handler = util.ExceptionHandler(self) self.__session = None self.listener = None def dispatch_event(self, name, *args): event.Dispatcher(self).dispatch_event(name, *args) def ui(self): return self.__ui def breakpoints(self): return self.__breakpoints def session(self): return self.__session def listen(self): if self.listener and self.listener.is_listening(): print("Waiting for a connection: none found so far") elif self.listener and self.listener.is_ready(): print("Found connection, starting debugger") self.__new_session() else: self.start_listener() def start_listener(self): self.listener = listener.Listener.create() print("Vdebug will wait for a connection in the background") util.Environment.reload() if self.is_open(): self.ui().set_status("listening") self.listener.start() self.start_if_ready() def stop_listening(self): if self.listener: self.listener.stop() self.ui().say("Vdebug stopped waiting for a connection") if self.__session: self.__session.close_connection() def run(self): if self.is_connected(): self.dispatch_event("run") else: self.listen() def stop(self): if self.is_connected(): self.__session.close_connection() elif self.is_listening(): self.stop_listening() elif self.is_open(): self.__ui.close() else: self.__ui.say("Vdebug is not running") def close(self): self.stop_listening() if self.is_connected(): self.__session.close_connection() if self.is_open(): self.__ui.close() def is_connected(self): return self.__session and self.__session.is_connected() def is_listening(self): return self.listener and self.listener.is_listening() def is_open(self): return self.__ui.is_open def status(self): if self.is_connected(): return "running" return self.listener.status() def status_for_statusline(self): return "vdebug(%s)" % self.status() def start_if_ready(self): try: if self.listener.is_ready(): print("Found connection, starting debugger") self.__new_session() return True return False except Exception as e: print("Error starting Vdebug: %s" % self.__ex_handler.exception_to_string(e)) def __new_session(self): log.Log("create session", log.Logger.DEBUG) self.__session = Session(self.__ui, self.__breakpoints, util.Keymapper()) log.Log("start session", log.Logger.DEBUG) status = self.__session.start(self.listener.create_connection()) log.Log("refresh event", log.Logger.DEBUG) self.dispatch_event("refresh", status) class Session: def __init__(self, ui, breakpoints, keymapper): self.__ui = ui self.__breakpoints = breakpoints self.__keymapper = keymapper self.__api = None self.cur_file = None self.cur_lineno = None self.context_names = None def api(self): return self.__api def keymapper(self): return self.__keymapper def set_api(self, api): self.__api = api def is_connected(self): return self.__api is not None def is_open(self): return self.__ui.is_open def ui(self): return self.__ui def close(self): """ Close both the connection and vdebug.ui. """ self.close_connection() self.__ui.close() self.__keymapper.unmap() def close_connection(self, stop=True): """ Close the connection to the debugger. 
""" self.__ui.mark_as_stopped() try: if self.is_connected(): log.Log("Closing the connection") if stop: if opts.Options.get('on_close') == 'detach': try: self.__api.detach() except dbgp.CmdNotImplementedError: self.__ui.error('Detach is not supported by the ' 'debugger, stopping instead') opts.Options.overwrite('on_close', 'stop') self.__api.stop() else: self.__api.stop() self.__api.conn.close() self.__api = None self.__breakpoints.unlink_api() else: self.__api = None self.__breakpoints.unlink_api() except EOFError: self.__api = None self.__ui.say("Connection has been closed") except socket.error: self.__api = None self.__ui.say("Connection has been closed") def start(self, connection): util.Environment.reload() if self.__ui.is_modified(): raise error.ModifiedBufferError("Modified buffers must be saved " "before debugging") try: self.__api = dbgp.Api(connection) if not self.is_open(): self.__ui.open() self.__keymapper.map() self.__ui.set_listener_details(opts.Options.get('server'), opts.Options.get('port'), opts.Options.get('ide_key')) addr = self.__api.conn.address log.Log("Found connection from %s" % str(addr), log.Logger.INFO) self.__ui.set_conn_details(addr[0], addr[1]) self.__collect_context_names() self.__set_features() self.__initialize_breakpoints() if opts.Options.get('break_on_open', int) == 1: status = self.__api.step_into() else: status = self.__api.run() return status except Exception: self.close() raise def detach(self): """Detach the debugger engine, and allow it to continue execution. """ self.__ui.say("Detaching the debugger") self.__api.detach() self.close_connection(False) def __set_features(self): """Evaluate vim dictionary of features and pass to debugger. Errors are caught if the debugger doesn't like the feature name or value. This doesn't break the loop, so multiple features can be set even in the case of an error.""" features = vim.eval('g:vdebug_features') for name, value in features.items(): try: self.__api.feature_set(name, value) except dbgp.DBGPError as e: error_str = "Failed to set feature %s: %s" % (name, e.args[0]) self.__ui.error(error_str) def __initialize_breakpoints(self): self.__breakpoints.update_lines( self.__ui.get_breakpoint_sign_positions()) self.__breakpoints.link_api(self.__api) def __collect_context_names(self): cn_res = self.__api.context_names() self.context_names = cn_res.names() log.Log("Available context names: %s" % self.context_names, log.Logger.DEBUG)
Python
0
@@ -6502,57 +6502,229 @@ -status = self.__api.step_into()%0A else: +log.Log('starting with step_into (break_on_open = 1)', log.Logger.DEBUG)%0A status = self.__api.step_into()%0A else:%0A log.Log('starting with run (break_on_open = 0)', log.Logger.DEBUG) %0A
b5f2e4127a36d8509cd4ab3932ae20408726229d
Rename clear_list() -> clear_object(), add random_string()
pywincffi/dev/testutil.py
pywincffi/dev/testutil.py
""" Test Utility ------------ This module is used by the unittests. """ import os import subprocess import sys from cffi import FFI, CDefError try: # The setup.py file installs unittest2 for Python 2 # which backports newer test framework features. from unittest2 import TestCase as _TestCase except ImportError: # pragma: no cover # pylint: disable=wrong-import-order from unittest import TestCase as _TestCase from pywincffi.core.config import config from pywincffi.core.logger import get_logger logger = get_logger("core.testutil") # To keep lint on non-windows platforms happy. try: WindowsError except NameError: # pragma: no cover WindowsError = OSError # pylint: disable=redefined-builtin # Load in our own kernel32 with the function(s) we need # so we don't have to rely on pywincffi.core libtest = None # pylint: disable=invalid-name ffi = FFI() try: ffi.cdef("void SetLastError(DWORD);") libtest = ffi.dlopen("kernel32") # pylint: disable=invalid-name except (AttributeError, OSError, CDefError): # pragma: no cover if os.name == "nt": logger.warning("Failed to build SetLastError()") class TestCase(_TestCase): """ A base class for all test cases. By default the core test case just provides some extra functionality. """ def setUp(self): if os.name == "nt": # pragma: no cover # Always reset the last error to 0 between tests. This # ensures that any error we intentionally throw in one # test does not causes an error to be raised in another. self.SetLastError(0) config.load() # pylint: disable=invalid-name def SetLastError(self, value=0, lib=None): # pragma: no cover """Calls the Windows API function SetLastError()""" if os.name != "nt": self.fail("Only an NT system should call this method") if lib is None: lib = libtest if lib is None: self.fail("`lib` was not defined") if not isinstance(value, int): self.fail("Expected int for `value`") return lib.SetLastError(ffi.cast("DWORD", value)) def _terminate_process(self, process): # pylint: disable=no-self-use """ Calls terminnate() on ``process`` and ignores any errors produced. """ try: process.terminate() except Exception: # pylint: disable=broad-except pass def create_python_process(self, command): """Creates a Python process that run ``command``""" process = subprocess.Popen( [sys.executable, "-c", command], stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.addCleanup(self._terminate_process, process) return process def clear_list(self, list_): """ Clear a Python list of entries in place. This function exists for compatibility across Python versions. """ assert isinstance(list_, list) del list_[:]
Python
0.000059
@@ -105,16 +105,94 @@ port sys +%0Afrom random import choice%0Afrom string import ascii_lowercase, ascii_uppercase %0A%0Afrom c @@ -2887,19 +2887,21 @@ ear_ -lis +objec t(self, list @@ -2896,21 +2896,21 @@ t(self, -list_ +thing ):%0A @@ -2928,189 +2928,1014 @@ -Clear a Python list of entries in place. This function exists%0A for compatibility across Python versions.%0A %22%22%22%0A assert isinstance(list_, list)%0A del list_%5B:%5D +Attempts to clear all data out of %60%60thing%60%60. This will fail the test%0A if we can't handle the type of object provided.%0A %22%22%22%0A try:%0A thing.clear()%0A except AttributeError:%0A if isinstance(thing, list): # Older version of Python.%0A del thing%5B:%5D%0A else:%0A self.fail(%22Don't know how to clear %25s%22 %25 type(thing))%0A%0A def random_string(self, length):%0A %22%22%22%0A Returns a random string as long as %60%60length%60%60. The first character%0A will always be a letter. All other characters will be A-F,%0A A-F or 0-9.%0A %22%22%22%0A if length %3C 1:%0A self.fail(%22Length must be at least 1.%22)%0A%0A # First character should always be a letter so the string%0A # can be used in object names.%0A output = choice(ascii_lowercase)%0A length -= 1%0A%0A while length:%0A length -= 1%0A output += choice(ascii_lowercase + ascii_uppercase + %220123456789%22)%0A%0A return output %0A
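Condensed, the two helpers this diff introduces behave like the following self-contained sketch (simplified: the real methods live on the TestCase and fail the test on bad input). clear_object prefers the .clear() method and falls back to slice deletion for lists on older Pythons; random_string guarantees a leading letter so the result can serve as an object name:

from random import choice
from string import ascii_lowercase, ascii_uppercase

def clear_object(thing):
    try:
        thing.clear()   # dicts, sets and Python 3 lists
    except AttributeError:
        del thing[:]    # Python 2 lists have no clear()

def random_string(length):
    out = choice(ascii_lowercase)  # names must start with a letter
    while len(out) < length:
        out += choice(ascii_lowercase + ascii_uppercase + "0123456789")
    return out

assert len(random_string(12)) == 12
assert random_string(12)[0].isalpha()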
e7d379af6040f223f7c5627ed7727c3410eb3950
Update at 2017-07-21 16-38-19
tt.py
tt.py
import json import random from pathlib import Path import numpy as np import scipy.misc from moviepy.editor import VideoFileClip from tqdm import tqdm class WindowNpyGenerator(object): def __init__(self, n_train=None, n_val=None, fps=1, timesteps=5, overlap=4, target_dir=None): self.video_dirs = [] self.n_train = n_train or 100 self.n_val = n_val or 20 self.fps = fps self.timesteps = timesteps self.overlap = overlap self.target_dir = target_dir or Path('./npz/') def extract_windows(self, video_dir): video = VideoFileClip(str(video_dir / 'video.mp4')) info = json.load((video_dir / 'info.json').open()) n_frames = int(video.duration) * self.fps timesteps = self.timesteps overlap = self.overlap label = np.zeros(n_frames, dtype=np.uint8) for s, e in zip(info['starts'], info['ends']): fs = round(s * self.fps) fe = round(e * self.fps) label[fs:fe + 1] = 1 windows = [(video, f - timesteps, f, label[f - 1]) for f in range(timesteps, n_frames, timesteps - overlap)] return windows def gen_npz(self, windows, chunk_size=10000): for idx in tqdm(range(0, len(windows), chunk_size), desc='chunk'): chunk_s, chunk_e = idx, min(idx + chunk_size, len(windows)) chunk = windows[chunk_s:chunk_e] n = len(chunk) xs = np.zeros((n, self.timesteps, 224, 224, 3), dtype=np.float32) ys = np.zeros(n, dtype=np.uint8) for i, (video, s, e, y) in enumerate(tqdm(chunk, desc='data')): for j in range(e - s): img = video.get_frame((s + j) / self.fps) xs[i][j] = scipy.misc.imresize(img, (224, 224)) ys[i] = y npz_path = self.target_dir / '{:04d}.npz'.format(idx // chunk_size) np.savez(npz_path, xs=xs, ys=ys) def fit(self, video_dirs): train, val = [], [] for video_dir in video_dirs: windows = self.extract_windows(video_dir) random.shuffle(windows) pivot = round( (self.n_train) / (self.n_train + self.n_val) * len(windows)) train.extend(windows[:pivot]) val.extend(windows[pivot:]) print(video_dir, len(windows)) if len(train) < self.n_train: print('Not enough train data') return if len(val) < self.n_val: print('Not enough val data') return train = random.sample(train, k=self.n_train) val = random.sample(val, k=self.n_val) self.target_dir.mkdir(exist_ok=True) self.gen_npz(train) self.gen_npz(val) def flow(self, batch_size=80): npzs = sorted(self.target_dir.glob('*.npz')) idx = 0 x_batch = np.zeros( (batch_size, self.timesteps, 224, 224, 3), dtype=np.float32) y_batch = np.zeros((batch_size, 1), dtype=np.uint8) while True: for npz in npzs: for x, y in zip(npz['xs'], npz['ys']): x_batch[idx] = x y_batch[idx] = y if idx + 1 == batch_size: yield x_batch, y_batch idx = (idx + 1) % batch_size def main(): dataset = Path('~/tthl-dataset/').expanduser() video_dirs = sorted(dataset.glob('video*/')) gen = WindowNpyGenerator(n_train=10000, n_val=2000, fps=3, timesteps=2, overlap=1) gen.fit(video_dirs) if __name__ == '__main__': main()
Python
0
@@ -1393,16 +1393,18 @@ , desc=' + chunk'): @@ -1739,20 +1739,23 @@ , desc=' -data +windows ')):%0A
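One caveat worth flagging: flow() above iterates npz['xs'] directly on the Path objects returned by glob, but the saved archives first have to be opened with np.load. A hedged sketch of the missing step (iter_samples is illustrative, not the class's API):

import numpy as np

def iter_samples(npz_paths):
    for path in npz_paths:
        archive = np.load(str(path))  # Path -> NpzFile
        for x, y in zip(archive['xs'], archive['ys']):
            yield x, y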
e6104a60ba37cf7c34d371d51bce107ce69a0d06
Rename handlers to handler-names in tests
src/unittest/python/cli_tests.py
src/unittest/python/cli_tests.py
from __future__ import print_function, absolute_import, division from unittest import TestCase import monocyte.cli as cli class CliTest(TestCase): def test_cloudwatch_can_be_deactivated(self): test_config = { "cloudwatchlogs": {} } expected_config = { "cloudwatchlogs": {} } cli.apply_default_config(test_config) self.assertEqual(test_config, expected_config) def test_default_cloudwatch_config_used_when_no_cloudwatch_config_is_given(self): test_config = { "cloudwatchlogs": {"groupname": "test"} } expected_config = { "cloudwatchlogs": { 'region': 'eu-central-1', 'log_level': 20, 'groupname': 'test' } } cli.apply_default_config(test_config) self.assertEqual(test_config, expected_config) def test_default_cloudwatch_config_used_when_loglevel_is_translated(self): test_config = { "cloudwatchlogs": { "groupname": "test", "log_level": "debug" } } expected_config = { "cloudwatchlogs": { 'region': 'eu-central-1', 'log_level': 10, 'groupname': 'test' } } cli.apply_default_config(test_config) self.assertEqual(test_config, expected_config) def test_region_can_be_configured(self): test_config = { "cloudwatchlogs": {"region": "my_region"} } expected_config = { "cloudwatchlogs": { 'region': 'my_region', 'log_level': 20, 'groupname': 'monocyte_logs' } } cli.apply_default_config(test_config) self.assertEqual(test_config, expected_config) class ArgumentsToConfigTest(TestCase): def setUp(self): self.arguments = { # Only an explicit 'False' may trigger deletion of resources. '--dry-run': "something", # These parameters should support "," as separator, ignoring whitespace. '--use-handlers': "handler_a, handler_b,handler_c ", '--allowed-regions-prefixes': "region_a, region_b,region_c ", '--ignored-regions': "ignored_a, ignored_b,ignored_c ", '--ignored-resources': "resource.a, resource.b,resource.c", '--config-path': "/foo/bar/batz", '--cwl-groupname': None } self.expected_config = { 'dry_run': True, 'handlers': ['handler_a', 'handler_b', 'handler_c'], 'allowed_regions_prefixes': ["region_a", "region_b", "region_c"], 'ignored_regions': ["ignored_a", "ignored_b", "ignored_c"], 'ignored_resources': {"resource": ["a", "b", "c"]}, 'cloudwatchlogs': {} } def test_basic_translation(self): config_path, config = cli.convert_arguments_to_config(self.arguments) self.assertEqual(config_path, "/foo/bar/batz") self.assertEqual(config, self.expected_config) def test_dry_run_can_be_deactivated(self): self.arguments['--dry-run'] = 'False' self.expected_config['dry_run'] = False _, config = cli.convert_arguments_to_config(self.arguments) self.assertEqual(config, self.expected_config) def test_cloudwatchlogs_groupname_is_configurable(self): self.arguments['--cwl-groupname'] = 'my_groupname' self.expected_config['cloudwatchlogs'] = {'groupname': 'my_groupname'} _, config = cli.convert_arguments_to_config(self.arguments) self.assertEqual(config, self.expected_config)
Python
0
@@ -2201,19 +2201,20 @@ '-- -use- handler +-name s': @@ -2636,16 +2636,21 @@ 'handler +_name s': %5B'ha
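The option values these tests feed in encode lists as comma-separated strings with stray whitespace, so the expected configs imply a tolerant splitter along these lines (split_option is illustrative, not monocyte's actual helper):

def split_option(value):
    return [part.strip() for part in value.split(',') if part.strip()]

assert split_option("handler_a, handler_b,handler_c ") == \
    ['handler_a', 'handler_b', 'handler_c']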
d680fca8bef783bd6fad7c71989ca51fb4725bc8
Upgrade to latest ChatExchange + fix
ws.py
ws.py
#requires https://pypi.python.org/pypi/websocket-client/ import websocket import threading import json,os,sys,getpass,time from findspam import FindSpam from ChatExchange.chatexchange.client import * import HTMLParser parser=HTMLParser.HTMLParser() if("ChatExchangeU" in os.environ): username=os.environ["ChatExchangeU"] else: print "Username: " username=raw_input() if("ChatExchangeP" in os.environ): password=os.environ["ChatExchangeP"] else: password=getpass.getpass("Password: ") lasthost=None lastid=None wrap=Client("stackexchange.com") wrap.login(username,password) wrapm=Client("meta.stackexchange.com") wrapm.login(username,password) s="[ [SmokeDetector](https://github.com/Charcoal-SE/SmokeDetector) ] SmokeDetector started" room = wrap.get_room("11540") roomm = wrapm.get_room("89") room.send_message(s) def checkifspam(data): global lasthost,lastid d=json.loads(json.loads(data)["data"]) s= d["titleEncodedFancy"] print time.strftime("%Y-%m-%d %H:%M:%S"),parser.unescape(s).encode("ascii",errors="replace") site = d["siteBaseHostAddress"] site=site.encode("ascii",errors="replace") sys.stdout.flush() test=FindSpam.testpost(s,site) if (0<len(test)): if(lastid==d["id"] and lasthost == d["siteBaseHostAddress"]): return False # Don't repost. Reddit will hate you. lastid=d["id"] lasthost = d["siteBaseHostAddress"] return True return False def handlespam(data): try: d=json.loads(json.loads(data)["data"]) reason=",".join(FindSpam.testpost(d["titleEncodedFancy"],d["siteBaseHostAddress"])) s="[ [SmokeDetector](https://github.com/Charcoal-SE/SmokeDetector) ] %s: [%s](%s) on `%s`" % (reason,d["titleEncodedFancy"],d["url"],d["siteBaseHostAddress"]) print parser.unescape(s).encode('ascii',errors='replace') wrap.sendMessage("11540",s) wrapm.sendMessage("89",s) except UnboundLocalError: print "NOP" ws = websocket.create_connection("ws://qa.sockets.stackexchange.com/") ws.send("155-questions-active") room.join() def watcher(ev,wrap2): if ev.type_id != 1: return; if(msg.content.startswith("!!/stappit")): if(str(msg.data["user_id"]) in ["31768","103081","73046"]): room.send_message("11540","Goodbye, cruel world") os._exit(1) room.watch_socket(watcher) while True: a=ws.recv() if(a!= None and a!= ""): if(checkifspam(a)): threading.Thread(target=handlespam,args=(a,)).start() s="[ [SmokeDetector](https://github.com/Charcoal-SE/SmokeDetector) ] SmokeDetector aborted" room.sendMessage(s)
Python
0
@@ -846,16 +846,30 @@ (data):%0A + return True%0A global @@ -1810,63 +1810,52 @@ -wrap +room .send -M +_m essage( -%2211540%22, s)%0A -wrap +room m.send -M +_m essage( -%2289%22, s)%0A @@ -2074,19 +2074,18 @@ n;%0A if( -msg +ev .content @@ -2127,11 +2127,10 @@ str( -msg +ev .dat @@ -2204,16 +2204,8 @@ ge(%22 -11540%22,%22 Good
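A detail that trips people up in this code: the Stack Exchange websocket frame is JSON whose "data" field is itself a JSON-encoded string, hence the nested json.loads calls in checkifspam and handlespam. A minimal illustration with a fabricated frame (the extra "action" key is an assumption; only "data" is confirmed by the record):

import json

frame = json.dumps({
    "action": "155-questions-active",
    "data": json.dumps({"id": 42, "titleEncodedFancy": "Why?",
                        "siteBaseHostAddress": "example.stackexchange.com"}),
})
payload = json.loads(json.loads(frame)["data"])
assert payload["id"] == 42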
e480a5e93015989d87331c5d8c0b251c73d40e2c
Use raw_id_fields in the TokenAdmin
readthedocs/core/admin.py
readthedocs/core/admin.py
# -*- coding: utf-8 -*- """Django admin interface for core models.""" from datetime import timedelta from django.contrib import admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from readthedocs.core.models import UserProfile from readthedocs.projects.models import Project class UserProjectInline(admin.TabularInline): model = Project.users.through verbose_name = 'User project' verbose_name_plural = 'User projects' extra = 1 raw_id_fields = ('project',) class UserProjectFilter(admin.SimpleListFilter): """Filter users based on project properties.""" parameter_name = 'project_state' title = _('user projects') PROJECT_ACTIVE = 'active' PROJECT_BUILT = 'built' PROJECT_RECENT = 'recent' def lookups(self, request, model_admin): return ( (self.PROJECT_ACTIVE, _('has active project')), (self.PROJECT_BUILT, _('has built project')), (self.PROJECT_RECENT, _('has project with recent builds')), ) def queryset(self, request, queryset): """ Add filters to queryset filter. ``PROJECT_ACTIVE`` and ``PROJECT_BUILT`` look for versions on projects, ``PROJECT_RECENT`` looks for projects with builds in the last year """ if self.value() == self.PROJECT_ACTIVE: return queryset.filter(projects__versions__active=True) if self.value() == self.PROJECT_BUILT: return queryset.filter(projects__versions__built=True) if self.value() == self.PROJECT_RECENT: recent_date = timezone.now() - timedelta(days=365) return queryset.filter(projects__builds__date__gt=recent_date) class UserAdminExtra(UserAdmin): """Admin configuration for User.""" list_display = ( 'username', 'email', 'first_name', 'last_name', 'is_staff', 'is_banned', ) list_filter = (UserProjectFilter,) + UserAdmin.list_filter actions = ['ban_user'] inlines = [UserProjectInline] def is_banned(self, obj): return hasattr(obj, 'profile') and obj.profile.banned is_banned.short_description = 'Banned' is_banned.boolean = True def ban_user(self, request, queryset): users = [] for profile in UserProfile.objects.filter(user__in=queryset): profile.banned = True profile.save() users.append(profile.user.username) self.message_user(request, 'Banned users: %s' % ', '.join(users)) ban_user.short_description = 'Ban user' class UserProfileAdmin(admin.ModelAdmin): list_display = ('user', 'homepage') search_fields = ('user__username', 'homepage') raw_id_fields = ('user',) admin.site.unregister(User) admin.site.register(User, UserAdminExtra) admin.site.register(UserProfile, UserProfileAdmin)
Python
0
@@ -409,16 +409,238 @@ Project%0A +from rest_framework.authtoken.admin import TokenAdmin%0A%0A# Monkeypatch raw_id_fields onto the TokenAdmin%0A# https://www.django-rest-framework.org/api-guide/authentication/#with-django-admin%0ATokenAdmin.raw_id_fields = %5B'user'%5D %0A%0Aclass
6d8880c10dab2614e0c0856da68a0dd1413fb655
Add SSL configuration option for PostgreSQL
redash/query_runner/pg.py
redash/query_runner/pg.py
import json import logging import select import sys import psycopg2 from redash.query_runner import * from redash.utils import JSONEncoder logger = logging.getLogger(__name__) types_map = { 20: TYPE_INTEGER, 21: TYPE_INTEGER, 23: TYPE_INTEGER, 700: TYPE_FLOAT, 1700: TYPE_FLOAT, 701: TYPE_FLOAT, 16: TYPE_BOOLEAN, 1082: TYPE_DATE, 1114: TYPE_DATETIME, 1184: TYPE_DATETIME, 1014: TYPE_STRING, 1015: TYPE_STRING, 1008: TYPE_STRING, 1009: TYPE_STRING, 2951: TYPE_STRING } def _wait(conn, timeout=None): while 1: try: state = conn.poll() if state == psycopg2.extensions.POLL_OK: break elif state == psycopg2.extensions.POLL_WRITE: select.select([], [conn.fileno()], [], timeout) elif state == psycopg2.extensions.POLL_READ: select.select([conn.fileno()], [], [], timeout) else: raise psycopg2.OperationalError("poll() returned %s" % state) except select.error: raise psycopg2.OperationalError("select.error received") class PostgreSQL(BaseSQLQueryRunner): noop_query = "SELECT 1" @classmethod def configuration_schema(cls): return { "type": "object", "properties": { "user": { "type": "string" }, "password": { "type": "string" }, "host": { "type": "string", "default": "127.0.0.1" }, "port": { "type": "number", "default": 5432 }, "dbname": { "type": "string", "title": "Database Name" } }, "order": ['host', 'port', 'user', 'password'], "required": ["dbname"], "secret": ["password"] } @classmethod def type(cls): return "pg" def __init__(self, configuration): super(PostgreSQL, self).__init__(configuration) values = [] for k, v in self.configuration.iteritems(): values.append("{}={}".format(k, v)) self.connection_string = " ".join(values) def _get_definitions(self, schema, query): results, error = self.run_query(query, None) if error is not None: raise Exception("Failed getting schema.") results = json.loads(results) for row in results['rows']: if row['table_schema'] != 'public': table_name = '{}.{}'.format(row['table_schema'], row['table_name']) else: table_name = row['table_name'] if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} schema[table_name]['columns'].append(row['column_name']) def _get_tables(self, schema): query = """ SELECT table_schema, table_name, column_name FROM information_schema.columns WHERE table_schema NOT IN ('pg_catalog', 'information_schema'); """ self._get_definitions(schema, query) materialized_views_query = """ SELECT ns.nspname as table_schema, mv.relname as table_name, atr.attname as column_name FROM pg_class mv JOIN pg_namespace ns ON mv.relnamespace = ns.oid JOIN pg_attribute atr ON atr.attrelid = mv.oid AND atr.attnum > 0 AND NOT atr.attisdropped WHERE mv.relkind = 'm'; """ self._get_definitions(schema, materialized_views_query) return schema.values() def run_query(self, query, user): connection = psycopg2.connect(self.connection_string, async=True) _wait(connection, timeout=10) cursor = connection.cursor() try: cursor.execute(query) _wait(connection) if cursor.description is not None: columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description]) rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor] data = {'columns': columns, 'rows': rows} error = None json_data = json.dumps(data, cls=JSONEncoder) else: error = 'Query completed but it returned no data.' json_data = None except (select.error, OSError) as e: error = "Query interrupted. Please retry." 
json_data = None except psycopg2.DatabaseError as e: error = e.message json_data = None except (KeyboardInterrupt, InterruptException): connection.cancel() error = "Query cancelled by user." json_data = None finally: connection.close() return json_data, error class Redshift(PostgreSQL): @classmethod def type(cls): return "redshift" @classmethod def configuration_schema(cls): return { "type": "object", "properties": { "user": { "type": "string" }, "password": { "type": "string" }, "host": { "type": "string" }, "port": { "type": "number" }, "dbname": { "type": "string", "title": "Database Name" } }, "required": ["dbname", "user", "password", "host", "port"], "secret": ["password"] } register(PostgreSQL) register(Redshift)
Python
0.000001
@@ -1841,32 +1841,196 @@ %22Database Name%22%0A + %7D,%0A %22sslmode%22: %7B%0A %22type%22: %22string%22,%0A %22title%22: %22SSL Mode%22,%0A %22default%22: %22prefer%22%0A
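With the new sslmode property, the key=value join in __init__ yields a standard libpq-style DSN, so nothing else has to change for SSL support. A hedged illustration of the resulting string (key order may vary on Pythons before 3.7; "require" is just one of libpq's accepted modes, and the diff defaults to "prefer"):

configuration = {"host": "127.0.0.1", "port": 5432, "dbname": "postgres",
                 "user": "redash", "sslmode": "require"}
connection_string = " ".join(
    "{}={}".format(k, v) for k, v in configuration.items())
# e.g. "host=127.0.0.1 port=5432 dbname=postgres user=redash sslmode=require"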
af5b574fb785e65fd1292bb28e90d005be1ecd03
Fix PEP 8 errors
reporter/test/test_osm.py
reporter/test/test_osm.py
# coding=utf-8 """Test cases for the OSM module. :copyright: (c) 2013 by Tim Sutton :license: GPLv3, see LICENSE for more details. """ import os from reporter.utilities import LOGGER from reporter.osm import load_osm_document, extract_buildings_shapefile from reporter.test.helpers import FIXTURE_PATH from reporter.test.logged_unittest import LoggedTestCase class OsmTestCase(LoggedTestCase): """Test the OSM retrieval functions.""" def test_load_osm_document(self): """Check that we can fetch an osm doc and that it caches properly.""" # # NOTE - INTERNET CONNECTION NEEDED FOR THIS TEST # myUrl = ('http://overpass-api.de/api/interpreter?data=' '(node(-34.03112731086964,20.44997155666351,' '-34.029571310785315,20.45501410961151);<;);out+meta;') myFilePath = '/tmp/test_load_osm_document.osm' if os.path.exists(myFilePath): os.remove(myFilePath) # We test twice - once to ensure its fetched from the overpass api # and once to ensure the cached file is used on second access # Note: There is a small chance the second test could fail if it # exactly straddles the cache expiry time. try: myFile = load_osm_document(myFilePath, myUrl) except: myMessage = 'load_osm_document from overpass test failed %s' % myUrl LOGGER.exception(myMessage) raise myString = myFile.read() myMessage = 'load_osm_document from overpass content check failed.' assert 'Jacoline' in myString, myMessage #myFile = load_osm_document(myFilePath, myUrl) myFileTime = os.path.getmtime(myFilePath) # # This one should be cached now.... # load_osm_document(myFilePath, myUrl) myFileTime2 = os.path.getmtime(myFilePath) myMessage = 'load_osm_document cache test failed.' self.assertEqual(myFileTime, myFileTime2, myMessage) def test_extract_buildings_shapefile(self): """Test the osm to shp converter.""" myZipPath = extract_buildings_shapefile(FIXTURE_PATH) print myZipPath self.assertTrue(os.path.exists(myZipPath), myZipPath)
Python
0.000217
@@ -1375,21 +1375,16 @@ verpass -test failed %25
acf2d57fa49a5ed25275a279d04946178f8cedde
Fix formatting and add docstrings
reporting_scripts/base.py
reporting_scripts/base.py
import csv from pymongo import MongoClient class BaseEdX(object): def __init__(self, args): self.url = args.url client = MongoClient(self.url) self.db = client[args.db_name] self.collections = None self.output_directory = args.output_directory self.row_limit = args.row_limit self.csv_data = None self.list_of_headers = None def generate_csv(self, csv_data, list_of_headers, output_file): self.csv_data = csv_data self.list_of_headers = list_of_headers number_of_rows = len(csv_data) + 1 if number_of_rows <= self.row_limit: self._write_to_csv(self.output_file) else: if number_of_rows % self.row_limit: number_of_splits = number_of_rows // self.row_limit + 1 else: number_of_splits = number_of_rows // self.row_limit def _write_to_csv(self, output_file, number_of_splits=0): with open(output_file, 'w') as csv_file: writer = csv.writer(csv_file) writer.writerow(self.list_of_headers) for row in self.csv_data[number_of_splits * self.row_limit : (number_of_splits + 1) * self.row_limit]: # This loop looks for unicode objects and encodes them to ASCII to avoif Unicode errors, # for e.g. UnicodeEncodeError: 'ascii' codec can't encode character u'\xf1' for index,item in enumerate(row[:]): if type(item) is unicode: row[index] = item.encode('ascii', 'ignore') writer.writerow(row) for index in xrange(number_of_splits): self._write_to_csv(output_file.split('.')[0] + '_' + str(index) + '.csv', index)
Python
0.000002
@@ -466,16 +466,106 @@ _file):%0A + %09%22%22%22%0A %09Genersate csv report from generated data and given list of headers%0A %09%22%22%22%0A %09sel @@ -586,16 +586,16 @@ sv_data%0A - %09sel @@ -716,18 +716,20 @@ limit:%0A%09 -%09%09 + self._wr @@ -757,25 +757,24 @@ t_file)%0A -%09 %09else:%0A%09 %09%09if num @@ -765,18 +765,20 @@ %09else:%0A%09 -%09%09 + if numbe @@ -805,18 +805,16 @@ _limit:%0A -%09%09 %09%09number @@ -868,18 +868,18 @@ 1%0A%09 -%09%09 + else:%0A -%09%09 %09%09nu @@ -933,73 +933,285 @@ it%0A%09 -%09%09%09%0A%09def _write_to_csv(self, output_file, number_of_splits=0):%0A%09%09 + for index in xrange(number_of_splits):%0A%09 self._write_to_csv(output_file.split('.')%5B0%5D + '_' + str(index) + '.csv', index)%0A%09 %0A def _write_to_csv(self, output_file, number_of_splits=0):%0A %09%22%22%22%0A %09Helper method to write rows to csv files%0A %09%22%22%22%0A with @@ -1248,18 +1248,20 @@ _file:%0A%09 -%09%09 + writer = @@ -1283,18 +1283,20 @@ _file)%0A%09 -%09%09 + writer.w @@ -1326,18 +1326,20 @@ aders)%0A%09 -%09%09 + for row @@ -1434,19 +1434,24 @@ imit%5D:%0A%09 -%09%09%09 + # This l @@ -1531,18 +1531,16 @@ errors,%0A -%09%09 %09%09# for @@ -1609,18 +1609,16 @@ u'%5Cxf1'%0A -%09%09 %09%09for in @@ -1648,18 +1648,16 @@ ow%5B:%5D):%0A -%09%09 %09%09 if @@ -1682,19 +1682,20 @@ code:%0A%09%09 -%09%09%09 + row%5B @@ -1734,18 +1734,16 @@ gnore')%0A -%09%09 %09%09writer @@ -1763,129 +1763,5 @@ )%0A%09%09 -%09for index in xrange(number_of_splits):%0A%09%09%09%09self._write_to_csv(output_file.split('.')%5B0%5D + '_' + str(index) + '.csv', index) %0A
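Abstracted away from the class above, the chunked-export idea reduces to: write at most row_limit data rows per file, and suffix _0, _1, ... only when a split is actually needed. A self-contained sketch (write_chunked is illustrative, not the project's API):

import csv

def write_chunked(rows, headers, base_path, row_limit):
    chunks = [rows[i:i + row_limit]
              for i in range(0, len(rows), row_limit)] or [[]]
    for n, chunk in enumerate(chunks):
        path = base_path if len(chunks) == 1 else \
            "{}_{}.csv".format(base_path.rsplit('.', 1)[0], n)
        with open(path, 'w') as handle:
            writer = csv.writer(handle)
            writer.writerow(headers)
            writer.writerows(chunk)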
6d74c9d233aa44fd7072a269fa200bea610026b7
Move WSfactory declaration so insecure connections work
Web.py
Web.py
from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol, listenWS from twisted.web.server import Site from twisted.web.static import File from twisted.internet import reactor, task, ssl from twisted.web.util import Redirect from twisted.web.resource import Resource from Addresses import CustomAddress import Config import json openSockets = [] class WebHandler(): def __init__(self, queue): self.queue = queue # set up the handler to drop emails from the message 'queue' into the main processing queue lc = task.LoopingCall(self.processSockets) lc.start(2) # fire up static server while we're here staticResource = File('./static/dist') staticFactory = Site(staticResource) if Config.useSSL: contextFactory = ssl.DefaultOpenSSLContextFactory( Config.keyFile, Config.certFile ) # static HTTPS serving reactor.listenSSL(443, staticFactory, contextFactory) # WSS WSfactory=WebSocketServerFactory(u"wss://localhost:9000", debug=False) WSfactory.protocol = self.MyServerProtocol listenWS(WSfactory, contextFactory) else: # static HTTP serving reactor.listenTCP(80, staticFactory) # WS WSfactory.protocol = self.MyServerProtocol WSfactory=WebSocketServerFactory(u"ws://localhost:9000", debug=False) listenWS(WSfactory) reactor.run(installSignalHandlers=0) # no handlers because threads def processSockets(self): while self.queue.qsize() > 0: emailEntry = self.queue.get() if emailEntry['to'] not in [socket.id for socket in openSockets]: print 'Dropping message to', emailEntry['to'] + ': no such box' else: for openSocket in openSockets: if emailEntry['to'] == openSocket.id: openSocket.sendMessage(json.dumps(emailEntry).encode("utf8")) class MyServerProtocol(WebSocketServerProtocol): def __init__(self, *args, **kwargs): super(WebSocketServerProtocol, self).__init__(*args, **kwargs) self.id = CustomAddress().get() def onConnect(self, request): print "Client connection from " + self.id + " at: {0}".format(request.peer) def onOpen(self): print "Sent identification to " + self.id openSockets.append(self) self.sendMessage(json.dumps([self.id, Config.bindingPort, Config.dropSize]).encode("utf8")) def onClose(self, wasClean, code, reason): print "Client connection closed with " + self.id try: # we have some weird issue where this is called twice... openSockets.remove(self) except ValueError: pass
Python
0
@@ -1357,63 +1357,8 @@ WS%0A - WSfactory.protocol = self.MyServerProtocol%0A @@ -1427,32 +1427,87 @@ %22, debug=False)%0A + WSfactory.protocol = self.MyServerProtocol%0A list
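The insecure branch of the old file assigns WSfactory.protocol one line before WSfactory is constructed, so startup dies with a NameError before the server ever listens; the diff simply swaps the two statements. A minimal sketch of the working order, using an illustrative echo protocol rather than the record's MyServerProtocol:

from twisted.internet import reactor
from autobahn.twisted.websocket import (WebSocketServerFactory,
                                        WebSocketServerProtocol, listenWS)

class EchoProtocol(WebSocketServerProtocol):
    def onMessage(self, payload, isBinary):
        self.sendMessage(payload, isBinary)  # echo each frame back

# Construct the factory first; only then can .protocol be assigned.
factory = WebSocketServerFactory(u"ws://localhost:9000")
factory.protocol = EchoProtocol
listenWS(factory)
reactor.run()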
16c8a739ab69e0888334923b3d7d8039620f59a3
Return the correctly sorted list of teams for playoffs
api.py
api.py
from flask.ext import restful from flask.ext.restful import Resource, Api, fields, marshal_with from fllipit import APP from models import DB, Team import pypyodbc API = Api(APP) # Get teams from the database def getTeams(): teams = [] if APP.config['TEST_DB']: # In test mode, use the sqlite database teams = Team.query.all() for team in teams: team.sortScores() else: try: # In production mode, get the data from the Access database # Create the database connection conn = pypyodbc.connect( r"Driver={Microsoft Access Driver (*.mdb, *.accdb)};" + r"Dbq="+APP.config['DB_FILE']+";") cur = conn.cursor() # Get the data from the database cur.execute( ''' SELECT TeamNumber, TeamName, Affiliation, Trial1Score, Trial2Score, Trial3Score, Trial4Score, Trial5Score, Trial6Score, Trial7Score, ToRound4, ToRound5, ToRound6, ToRound7, Trial1PenaltyCount, Trial2PenaltyCount, Trial3PenaltyCount, Trial4PenaltyCount, Trial5PenaltyCount, Trial6PenaltyCount, Trial7PenaltyCount FROM ScoringSummaryQuery ''') # Build the list of Team objects for row in cur.fetchall(): # Build the team object team = Team( number=row[0], name=row[1], affiliation=row[2], round1=row[3], round2=row[4], round3=row[5], round4=row[6], round5=row[7], round6=row[8], round7=row[9], advanceTo4=row[10], advanceTo5=row[11], advanceTo6=row[12], advanceTo7=row[13], round1Penalties=row[14], round2Penalties=row[15], round3Penalties=row[16], round4Penalties=row[17], round5Penalties=row[18], round6Penalties=row[19], round7Penalties=row[20],) # Add the current team to the list of all teams teams.append(team) # Close the database connection cur.close() conn.close() except Exception as excp: print(str(excp)) return teams def rankTeams(teams): return sorted( teams, key=lambda x: (x.bestScore, x.secondBestScore, x.worstScore, -x.bestScorePenalties, -x.secondBestScorePenalties, -x.worstScorePenalties), reverse=True) # Setup the fields that will be used in the JSON output teamFields = { "number": fields.Integer, "name": fields.String, "affiliation": fields.String, "round1": fields.Integer, "round2": fields.Integer, "round3": fields.Integer, "bestScore": fields.Integer, "rank": fields.Integer } playoffFields = { "number": fields.Integer, "name": fields.String, "score": fields.Integer } class Rankings(Resource): """Setup a REST resource for the Team data.""" @marshal_with(teamFields) def get(self): teams = getTeams() rankedTeams = rankTeams(teams) i = 1 for team in rankedTeams: team.rank = i i += 1 return rankedTeams class Playoffs(Resource): """Setup a REST resource for the playoff data""" @marshal_with(playoffFields) def get(self, roundNumber): # Get only the teams that are marked to advance to the selected round teams = [t for t in getTeams() if t.isAdvancingToRound(roundNumber)] # Add a temporary attribute 'score' to the team objects, for generic REST output for team in teams: team.score = team.getRoundScore(roundNumber) if roundNumber <= 4: # If this is the first playoff round, return team list sorted by qualifying rank return rankTeams(teams) else: # Return team list sorted by scores from previous round return sorted( teams, key=lambda x: (x.getRoundScore(roundNumber-1), -x.getRoundPenalties(roundNumber-1)), reverse=True) # map resource to URL API.add_resource(Rankings, '/api/teams') API.add_resource(Playoffs, '/api/playoffs/<int:roundNumber>')
Python
0.999999
@@ -4091,193 +4091,9 @@ er)%0A - %0A - if roundNumber %3C= 4:%0A # If this is the first playoff round, return team list sorted by qualifying rank%0A return rankTeams(teams)%0A else:%0A @@ -4156,28 +4156,24 @@ und%0A - return sorte @@ -4171,28 +4171,24 @@ urn sorted(%0A - @@ -4202,28 +4202,24 @@ - key=lambda x @@ -4248,18 +4248,16 @@ ndNumber --1 ), -x.ge @@ -4287,18 +4287,12 @@ mber --1 )),%0A -
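The replacement sort leans on a standard trick for mixing directions in a single key: with reverse=True, any component that should still sort ascending (here, penalty counts) is negated. In isolation, with made-up (name, score, penalties) tuples:

# Higher score wins; fewer penalties break ties.
teams = [("A", 40, 2), ("B", 40, 0), ("C", 55, 5)]

ranked = sorted(teams,
                key=lambda t: (t[1], -t[2]),  # negate the ascending key
                reverse=True)

print(ranked)  # [('C', 55, 5), ('B', 40, 0), ('A', 40, 2)]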
0e58ef45f45df0192be6c52cd34df5f1b5c5a028
correct if condition
app.py
app.py
#!/usr/bin/env python import urllib import json import os from flask import Flask from flask import request from flask import make_response # Flask app should start in global layout app = Flask(__name__) @app.route('/webhook', methods=['POST']) def webhook(): req = request.get_json(silent=True, force=True) print("Request:") print(json.dumps(req, indent=4)) res = makeWebhookResult(req) res = json.dumps(res, indent=4) print(res) r = make_response(res) r.headers['Content-Type'] = 'application/json' return r def makeWebhookResult(req): if req.get("result").get("action") = "tell.hours": result = req.get("result") parameters = result.get("parameters") timetype = parameters.get("time-type") portaltype = parameters.get("portal-types") DteTime = {'CS':'9 hours', 'PTO':'8 hours'} StaffitTime = {'CS':'8 hours', 'PTO':'8 hours'} #if time-type == "DTE" speech = "You should book" + str(DteTime[timetype]) + "for" + timetype #speech = "Webhook called!!" print("Response:") print(speech) return { "speech": speech, "displayText": speech, #"data": {}, # "contextOut": [], "source": "apiai-onlinestore-shipping" } if __name__ == '__main__': port = int(os.getenv('PORT', 5000)) print "Starting app on port %d" % port app.run(debug=True, port=port, host='0.0.0.0')
Python
0.999998
@@ -622,16 +622,17 @@ tion%22) = += %22tell.h
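The one-character diff matters more than it looks: a bare = inside an if is a SyntaxError in Python, so the old module failed at import time rather than silently misbehaving. The corrected comparison, against a hypothetical request dict:

req = {"result": {"action": "tell.hours"}}

# With "=" the parser rejects the line outright; "==" compares.
if req.get("result").get("action") == "tell.hours":
    print("handling tell.hours")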
ceba6831778d896a9e320d04cea5d5d4e3accc13
rename sms handling function
app.py
app.py
#!/usr/bin/env python import os import random from datetime import datetime, timedelta import twilio.twiml from flask import Flask, request from flask.ext.pymongo import PyMongo, DESCENDING app = Flask(__name__) CONFIGS = ( 'AUTH_TOKEN', 'CELL_NUM', 'MONGOLAB_URI', 'TWILIO_NUM', ) # Set up the app app = Flask(__name__) app.config.update(**{v: os.environ[v] for v in CONFIGS}) app.config['MONGO_URI'] = app.config['MONGOLAB_URI'] # for flask-pymongo # Initialize extensions pymongo = PyMongo(app) # Some constants SMS_CODE_RESET = timedelta(seconds=30) SMS_CODE_GRACE = timedelta(seconds=15) USER_CHECKIN_EXPIRE = timedelta(minutes=15) USER_POST_THROTTLE = timedelta(seconds=10) """Collection schemas users: { phone_number: string created: datetime last_checkin: datetime } codes: { code: string created: datetime } posts: { message: string poster_id: user_id submitted: datetime showtime: datetime # not present if it hasn't been shown yet extender_ids: [user_ids] } """ class InvalidCodeException(Exception): """When user uses code that doesn't exist""" class NotCheckedInException(Exception): """When user tries to vote or post before checking in""" class ChillOut(Exception): """When users get too excited (try to re-up-vote or re-post too soon).""" def notz(dt): """Remove the timezone info from a datetime object""" return dt.replace(tzinfo=None) def create_sms_code(): """Create a new code. More races woo!""" while True: code = ''.join(random.choice('abcdefghijklmnopqrstuvwxyz1234567890') for _ in range(6)) existing_with_code = pymongo.db.codes.find_one({'code': code}) if existing_with_code is None: break new_sms = { 'code': code, 'created': datetime.now() } pymongo.db.codes.insert(new_sms) return new_sms def get_sms_code(): """Fetches the most up-to-date SMS code for the billboard. This may trigger a new code if one is due. """ codes = pymongo.db.codes.find().sort('created', DESCENDING) try: current = next(codes) except StopIteration: # empty database current = create_sms_code() if notz(current['created']) + SMS_CODE_RESET < datetime.now(): # yo, WARNING: off to the races! current = create_sms_code() return current['code'] def check_sms_code(test_code): """Checks whether the SMS code is currently valid.""" codes = pymongo.db.codes.find().sort('created', DESCENDING) current = next(codes) if test_code == current['code']: return True else: previous = next(codes) if (test_code == previous['code'] and datetime.now() - notz(current['created']) < SMS_CODE_GRACE): return True else: return False def get_queue(): """Fetch all posts currently queued.""" unshown = pymongo.db.posts.find({'showtime': {'$exists': False}}) queue_in_order = unshown.sort('submitted') return queue_in_order def get_user_from_phone(phone_number): """Get a user given their phone number, or None if they don't exist""" return pymongo.db.users.find_one({'phone_number': phone_number}) def is_checked_in(user): """Test whether a user is checked in or not.""" if user is None: return False a_ok = notz(user['last_checkin']) + USER_CHECKIN_EXPIRE > datetime.now() return a_ok def check_in_with_sms_code(phone_number, code): """Check in (and possibly create) a user, verified by the active code. Returns the user's data, or raises InvalidCodeException if the code is wrong or expired. 
""" if not check_sms_code(code): raise InvalidCodeException("You fucked up") user = pymongo.db.users.find_one({'phone_number': phone_number}) if user is None: # so racey user = { 'phone_number': phone_number, 'created': datetime.now(), } user['last_checkin'] = datetime.now() pymongo.db.users.save(user) return user def post_message(user, message): """Try to queue a message for a user. Returns the message's position in the queue. Raises ChillOut if the user has posted too many messages recently. """ user_id = user['_id'] prev = pymongo.db.posts.find_one({'poster_id': user_id})\ .sort('submitted', DESCENDING) if (prev is not None and notz(prev['submitted']) + USER_POST_THROTTLE < datetime.now()): raise ChillOut('Whoa. Chill out, hey. So many messages.') post = { 'message': message, 'poster_id': user_id, 'submitted': datetime.now(), 'extenders': [user_id], } pymongo.db.posts.insert(post) return len(get_queue()) def save_vote(phone_number): """Register a vote for a user. Returns 1 if the vote was counted. Raises ChillOut if the user has already voted for the showing post. Currently it is hard-coded to always succeed """ return 1 @app.route('/sms', methods=['GET','POST']) def send_sms(): #Get number and response from_number = request.values.get('From', None) from_response = request.values.get('Body',None) first_word = from_response.lower().split(' ',1)[0]; resp = twilio.twiml.Response() user = get_user_from_phone(from_number) #Checks if user already checked in if is_checked_in(user): #Check if user response is vote if "vote" in first_word: if save_vote(): message="Vote successful" else: message="Vote unsuccessful" #Check if user response is a post elif "post" in first_word: queue_num = post_message(user, from_response.lower().split(' ',1)[1]) message = "Your message is queued in position {}".format(queue_num) else: #check if code is correct try: check = check_in_with_sms_code(from_number, from_response); except InvalidCodeException: #error handling message="fucked up" resp.message(message) return str(resp) message = ''' Thanks for checking in! To vote, Please text 'vote', otherwise text 'post' and type in your message ''' #User hasn't checked in but is checking in now elif "post" not in first_word and "vote" not in first_word: #check if code is correct try: check = check_in_with_sms_code(from_number, from_response); except InvalidCodeException: #error handling message="fucked up" resp.message(message) return str(resp) message = ''' Thanks for checking in! To vote, Please text 'vote', otherwise text 'post' and type in your message ''' else: #error handling message="Not checked in" resp.message(message) return str(resp) resp.message(message) return str(resp) # dev stuff def push(): """Push a test request context""" ctx = app.test_request_context() ctx.push() return ctx if __name__ == '__main__': app.run(debug=True)
Python
0.000056
@@ -5095,12 +5095,14 @@ def -send +handle _sms
844c66cf2c484758567903e7c559ef005b44c85e
Check for lack of javascript and add usage message.
app.py
app.py
from PIL import Image, ImageFile import flask import os from gevent.event import AsyncResult, Timeout from gevent.queue import Empty, Queue from shutil import rmtree from hashlib import sha1 from stat import S_ISREG, ST_CTIME, ST_MODE DATA_DIR = 'data' KEEP_ALIVE_DELAY = 45 MAX_IMAGE_SIZE = 800, 600 MAX_IMAGES = 10 app = flask.Flask(__name__, static_folder=DATA_DIR) broadcast_queue = Queue() try: # Reset saved files on each start rmtree(DATA_DIR, True) os.mkdir(DATA_DIR) except OSError: pass def broadcast(message): """Notify all waiting waiting gthreads of message.""" waiting = [] try: while True: waiting.append(broadcast_queue.get(block=False)) except Empty: pass print('Broadcasting {0} messages'.format(len(waiting))) for item in waiting: item.set(message) def receive(): """Generator that yields a message at least every KEEP_ALIVE_DELAY seconds. yields messages sent by `broadcast`. """ tmp = None while True: if not tmp: tmp = AsyncResult() broadcast_queue.put(tmp) try: yield tmp.get(timeout=KEEP_ALIVE_DELAY) tmp = None except Timeout: yield '' def save_normalized_image(path, data): image_parser = ImageFile.Parser() try: image_parser.feed(data) image = image_parser.close() except IOError: raise return False image.thumbnail(MAX_IMAGE_SIZE, Image.ANTIALIAS) image.save(path) return True def event_stream(client): try: for message in receive(): yield 'data: {0}\n\n'.format(message) finally: print('{0} disconnected from stream'.format(client)) @app.route('/post', methods=['POST']) def post(): sha1sum = sha1(flask.request.data).hexdigest() target = os.path.join(DATA_DIR, '{0}.jpg'.format(sha1sum)) if save_normalized_image(target, flask.request.data): broadcast(target) # Notify subscribers of completion return '' @app.route('/stream') def stream(): return flask.Response(event_stream(flask.request.remote_addr), mimetype='text/event-stream') @app.route('/') def home(): # Code adapted from: http://stackoverflow.com/questions/168409/ image_infos = [] for filename in os.listdir(DATA_DIR): filepath = os.path.join(DATA_DIR, filename) file_stat = os.stat(filepath) if S_ISREG(file_stat[ST_MODE]): image_infos.append((file_stat[ST_CTIME], filepath)) images = [] for i, (_, path) in enumerate(sorted(image_infos, reverse=True)): if i >= MAX_IMAGES: os.unlink(path) continue images.append('<div><img alt="User uploaded image" src="{0}" /></div>' .format(path)) return """ <!doctype html> <title>Image Uploader</title> <meta charset="utf-8" /> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <script src="//ajax.googleapis.com/ajax/libs/jqueryui/1.10.1/jquery-ui.min.js"></script> <style> body { max-width: 500px; margin: auto; padding: 1em; background: black; color: #fff; font: 16px/1.6 menlo, monospace; } </style> <h3>Image Uploader</h3> <p>Image: <input id="file" type="file" /></p> <h3>Uploaded Image</h3> <div id="images">%s</div> <script> function sse() { var source = new EventSource('/stream'); source.onmessage = function(e) { if (e.data == '') return; console.log(e.data); var image = $('<img>', {alt: 'User uploaded image', src: e.data}).hide(); var container = $('<div>', {html: image}); $('#images').prepend(container); image.load(function(){ image.show('blind', {}, 1000); }); }; } $('#file').change(function(e){ var xhr = new XMLHttpRequest(); xhr.open('POST', '/post', true); xhr.send(e.target.files[0]); e.target.value = ''; }); sse(); </script> """ % '\n'.join(images) if __name__ == '__main__': app.debug = True app.run('0.0.0.0', threaded=True)
Python
0
@@ -3285,17 +3285,286 @@ /h3%3E%0A%3Cp%3E -I +Upload an image for everyone to see. Valid images are pushed to everyone%0Acurrently connected, and only the most recent 10 images are saved.%3C/p%3E%0A%3Cnoscript%3ENote: You must have javascript enabled in order to upload and%0Adynamically view new images.%3C/noscript%3E%0A%3Cp%3ESelect an i mage: %3Ci @@ -3615,16 +3615,40 @@ ed Image +s (updated in real-time) %3C/h3%3E%0A%3Cd
56d221e2b9a1f04ff8ca14c6ad3cb628633e35fd
Simplify query.
app.py
app.py
import chdb import flask import flask_sslify from flask.ext.compress import Compress import os import collections def get_db(): db = getattr(flask.g, '_db', None) if db is None: db = flask.g._db = chdb.init_db() return db Category = collections.namedtuple('Category', ['id', 'title']) CATEGORY_ALL = Category('all', '') def get_categories(include_default = True): categories = getattr(flask.g, '_categories', None) if categories is None: cursor = get_db().cursor() cursor.execute(''' SELECT id, title FROM categories WHERE id != "unassigned" ORDER BY title; ''') categories = [CATEGORY_ALL] + [Category(*row) for row in cursor] flask.g._categories = categories return categories if include_default else categories[1:] def get_category_by_id(catid, default = None): for c in get_categories(): if catid == c.id: return c return default def select_snippet_by_id(id): # The query below may match snippets with unassigned categories. That's # fine, we don't display the current category in the UI anyway. cursor = get_db().cursor() cursor.execute(''' SELECT snippets.snippet, snippets.section, articles.url, articles.title FROM snippets, articles WHERE snippets.id = ? AND snippets.article_id = articles.page_id;''', (id,)) ret = cursor.fetchone() return ret def select_random_id(cat = CATEGORY_ALL): cursor = get_db().cursor() ret = None if cat is not CATEGORY_ALL: cursor.execute(''' SELECT snippets.id FROM snippets, categories, articles WHERE categories.id = ? AND snippets.article_id = articles.page_id AND articles.category_id = categories.id ORDER BY RANDOM() LIMIT 1;''', (cat.id,)) ret = cursor.fetchone() if ret is None: cursor.execute(''' SELECT id FROM snippets ORDER BY RANDOM() LIMIT 1;''') ret = cursor.fetchone() assert ret and len(ret) == 1 return ret[0] app = flask.Flask(__name__) if 'DYNO' in os.environ: flask_sslify.SSLify(app) Compress(app) @app.route('/') def citation_hunt(): id = flask.request.args.get('id') cat = flask.request.args.get('cat') if cat is not None: cat = get_category_by_id(cat) if cat is None: # invalid category, normalize to "all" and try again by id cat = CATEGORY_ALL return flask.redirect( flask.url_for('citation_hunt', id = id, cat = cat.id)) else: cat = CATEGORY_ALL if id is not None: # pick snippet by id and just echo back the category, even # if the snippet doesn't belong to it. sinfo = select_snippet_by_id(id) if sinfo is None: # invalid id flask.abort(404) snippet, section, aurl, atitle = sinfo return flask.render_template('index.html', snippet = snippet, section = section, article_url = aurl, article_title = atitle, current_category = cat) id = select_random_id(cat) return flask.redirect( flask.url_for('citation_hunt', id = id, cat = cat.id)) @app.route('/categories.html') def categories_html(): return flask.render_template('categories.html', categories = get_categories(include_default = False)); @app.after_request def add_cache_header(response): if response.status_code != 302 and response.cache_control.max_age is None: response.cache_control.public = True response.cache_control.max_age = 3 * 24 * 60 * 60 return response @app.teardown_appcontext def close_db(exception): db = getattr(flask.g, '_db', None) if db is not None: db.close() if __name__ == '__main__': port = int(os.environ.get('PORT', 5000)) debug = 'DEBUG' in os.environ app.run(host = '0.0.0.0', port = port, debug = debug)
Python
0.999999
@@ -1623,20 +1623,8 @@ ets, - categories, art @@ -1651,30 +1651,8 @@ ERE -categories.id = ? AND snip @@ -1729,21 +1729,9 @@ d = -categories.id +? ORD
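The simplified query drops a table that was joined in only so its primary key could meet the bound parameter; binding the parameter against articles.category_id directly is equivalent. A runnable reconstruction, with sqlite3 standing in for the project's own database layer:

import sqlite3

db = sqlite3.connect(":memory:")
db.executescript("""
    CREATE TABLE articles (page_id INTEGER PRIMARY KEY, category_id TEXT);
    CREATE TABLE snippets (id INTEGER PRIMARY KEY, article_id INTEGER);
    INSERT INTO articles VALUES (1, 'history');
    INSERT INTO snippets VALUES (10, 1);
""")

# Before: FROM snippets, categories, articles
#         WHERE categories.id = ? AND articles.category_id = categories.id ...
# After, with the categories hop folded away:
row = db.execute(
    "SELECT snippets.id FROM snippets, articles"
    " WHERE snippets.article_id = articles.page_id"
    " AND articles.category_id = ?"
    " ORDER BY RANDOM() LIMIT 1;", ("history",)).fetchone()
print(row)  # (10,)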
925ccb3c563da694866aa7396b71222d3ef1c2d6
Update path again
app.py
app.py
import json import datetime import random import os from flask import Flask from flask import render_template app = Flask(__name__) def isWeekend(date): if date.weekday() == 5 or date.weekday() == 6: return True else: return False def fuckShitUp(s): # Divide into words words = s.split(' ') # Pick a random spot to insert the fucking n = random.randint(0, len(words)-1) s = ' '.join(words[:n]) + ' Fucking ' + ' '.join(words[n:]) return s # Needed so we can use in the jinja templates app.jinja_env.globals.update(isWeekend=isWeekend) app.jinja_env.globals.update(fuckShitUp=fuckShitUp) currentDate = datetime.datetime.today() currentPath = "storage/menus/" dateNamePath = currentPath + str(currentDate.year) + "-" + str(currentDate.month) + "-" + str(currentDate.day) print("date name path:" + dateNamePath) # check if file exists yet if os.path.exists(dateNamePath) and os.path.isfile(dateNamePath): file = open(dateNamePath, "r") menuJSON = file.read() file.close() menus = json.loads(menuJSON) else: menus = {"Breakfast": [], "Lunch": [], "Dinner": []} @app.route('/') def showIndexPage(): if isWeekend(currentDate): if 3 <= currentDate.hour <= 10: menu = menus["Breakfast"] elif 10 < currentDate.hour <= 12+2: menu = menus["Lunch"] else: menu = menus["Dinner"] else: if 3 <= currentDate.hour <= 12+2: menu = menus["Lunch"] else: menu = menus["Dinner"] # dinner return render_template('main.html', menu=menu, currentDate=currentDate) @app.route('/breakfast') def showBreakfastPage(): if not isWeekend(currentDate): menu = menus["Breakfast"] return render_template('main.html', menu=menu, currentDate=currentDate) else: failText = "There's no breakfast on the weekend, motherfucker." return render_template('fail.html', failText=failText, currentDate=currentDate) @app.route('/brunch') def showBrunchPage(): if isWeekend(currentDate): menu = menus["Lunch"] return render_template('main.html', menu=menu, currentDate=currentDate) else: failText = "There's no brunch on weekdays, motherfucker." return render_template('fail.html', failText=failText, currentDate=currentDate) @app.route('/lunch') def showLunchPage(): if not isWeekend(currentDate): menu = menus["Lunch"] return render_template('main.html', menu=menu, currentDate=currentDate) else: failText = "There's no lunch on the weekend, motherfucker." return render_template('fail.html', failText=failText, currentDate=currentDate) menu = menus["Lunch"] return render_template('main.html', menu=menu, currentDate=currentDate) @app.route('/dinner') def showDinnerPage(): menu = menus["Dinner"] return render_template('main.html', menu=menu, currentDate=currentDate) if __name__ == "__main__": app.run()
Python
0
@@ -691,16 +691,17 @@ Path = %22 +/ storage/
e33a06ad4d4a7494f925a96e9d272e32e4dc18ba
Return json when error occurs
app.py
app.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from flask import Flask, request, json from flask.ext.cors import CORS import database import rsser import logging import threading logging.basicConfig(level=logging.INFO) logging.getLogger("gospellibrary").setLevel(logging.WARNING) logging.getLogger("requests").setLevel(logging.WARNING) logger = logging.getLogger(__name__) # Update data base in background logger.info("Updating database in background...") threading.Thread(target=database.update_database).start() logger.info("Starting server...") app = Flask(__name__) CORS(app) @app.route('/speakercast/speakers') def speakers(): logger.info("Getting speakers") speakers = [{'name': name, 'talks': count} for count, name in database.get_all_speaker_and_counts()] return json.dumps(speakers) @app.route('/speakercast/generate', methods=['POST', 'OPTIONS']) def generate(): if request.method == 'OPTIONS': return "" data = json.loads(request.data) speakers = data.get('speakers') if speakers is None: logger.error("No \"speakers\" field in request data!") return "ERROR" if len(speakers) == 0: logger.warning("Speaker list was empty. Ignoring request.") return "" id_ = database.generate_id(speakers) logger.info("Generated id ({}) for {}".format(id_, speakers)) return id_ @app.route('/speakercast/feed/<id>') def feed(id): speakers = database.get_speakers(id) if speakers is None: # TODO: Send some error logger.error("No speakers match {}!".format(id)) return "ERROR" talks = database.get_talks(speakers) logger.info("Creating RSS feed for {}: {}".format(id, speakers)) return rsser.create_rss_feed(talks=talks, speakers=list(speakers)) if __name__ == "__main__": app.run(debug=True)
Python
0.000142
@@ -1130,31 +1130,87 @@ return -%22ERROR%22 +json.dumps(%7B%22error%22: %22No %5C%22speakers%5C%22 field in request data!%22%7D) %0A%0A if len @@ -1305,26 +1305,90 @@ return -%22%22 +json.dumps(%7B%22error%22: %22Speaker list was empty. Ignoring request.%22%7D) %0A%0A id_ = @@ -1727,15 +1727,65 @@ urn -%22ERROR%22 +json.dumps(%7B%22error%22: %22No speakers match %7B%7D!%22.format(id)%7D) %0A%0A
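Each bare 'ERROR' return becomes a JSON body with an "error" key, giving clients a stable field to branch on instead of prose to pattern-match. The pattern in a minimal Flask route (lookup_speakers is a hypothetical stand-in for the record's database calls):

import json
from flask import Flask

app = Flask(__name__)

def lookup_speakers(id):
    return None  # hypothetical: nothing ever matches in this sketch

@app.route('/speakercast/feed/<id>')
def feed(id):
    speakers = lookup_speakers(id)
    if speakers is None:
        # Machine-readable failure instead of the bare string "ERROR".
        return json.dumps({"error": "No speakers match {}!".format(id)})
    return json.dumps({"speakers": sorted(speakers)})

flask.jsonify would additionally set the application/json content type; the record keeps plain json.dumps, matching its existing responses.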
d177d63ebc87208fdba4227377b2e1aebda8f077
Add port code for Heroku
app.py
app.py
from flask import Flask, Response, request from hypermedia_resource import HypermediaResource from hypermedia_resource.wrappers import HypermediaResponse, ResponseBuilder import maze app = Flask(__name__) # Helper functions for the views def maze_resource(type_of): """ Sets up a HypermediaResource for the resource """ resource = HypermediaResource() resource.meta.attributes.add("type", type_of) return resource def maze_response(resource): """ Build a HypermediaResponse """ response_builder = ResponseBuilder("application/vnd.amundsen.maze+xml") response = response_builder.build(resource, request.headers.get("Accept")) return Response(response.body, mimetype=response.media_type) # Route and views @app.route('/', methods=["GET"]) def root(): """ Root resource """ resource = maze_resource(type_of='item') resource.links.add(rel='start', href=maze.link_to_cell(0)) return maze_response(resource) @app.route('/cells/999', methods=["GET"]) def exit(): """ Exit resource """ resource = maze_resource(type_of='completed') resource.links.add(rel='start', href=maze.link_to_cell(0)) return maze_response(resource) @app.route('/cells/<cell_num>', methods=["GET"]) def cell(cell_num): """ Cell resource """ resource = maze_resource(type_of='cell') links = maze.get_links_for_cell(int(cell_num)) for rel, link in links.iteritems(): resource.links.add(rel=rel, href=link) return maze_response(resource) if __name__ == "__main__": app.debug = True app.run()
Python
0.000003
@@ -1,12 +1,22 @@ +import os%0A from flask i @@ -1578,35 +1578,84 @@ -app.debug = True%0A app.run( +port = int(os.environ.get(%22PORT%22, 5000))%0A app.run(host='0.0.0.0', port=port )%0A
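This is the standard Heroku boot pattern: the platform hands the dyno its listening port through the PORT environment variable, and the app must bind 0.0.0.0 so the router can reach it; a hard-coded port only works locally. Stripped down to just that:

import os
from flask import Flask

app = Flask(__name__)

if __name__ == '__main__':
    # Heroku injects PORT; fall back to 5000 for local development.
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)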
874f63fff012c00a39c553e7eb9ca0ffcb4dcd18
error message: ERROR
app.py
app.py
#System Imports import sys, os import json import static import time import random from shutil import copyfile import operator import urllib2 import itertools import subprocess import math # from filechunkio import FileChunkIO from celery import Celery from collections import defaultdict, OrderedDict import collections import requests #Flask Imports from werkzeug import secure_filename from flask import Flask, Blueprint, make_response, render_template, render_template_string, request, session, flash, redirect, url_for, jsonify, get_flashed_messages, send_from_directory from flask.ext.bcrypt import Bcrypt from flask.ext.login import LoginManager, UserMixin, current_user, login_user, logout_user, login_required from flask.ext.mail import Mail, Message from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand from flask_bootstrap import Bootstrap from flask_bootstrap import __version__ as FLASK_BOOTSTRAP_VERSION from flask_nav import Nav from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator from flask_sqlalchemy import SQLAlchemy from markupsafe import escape import wtforms from flask_wtf import Form import random import jinja2 from flask_restful import reqparse, abort, Api, Resource from sqlalchemy import create_engine from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, Boolean from sqlalchemy.dialects import postgresql from sqlalchemy.dialects.postgresql import JSON, JSONB, ARRAY, BIT, VARCHAR, INTEGER, FLOAT, NUMERIC, OID, REAL, TEXT, TIME, TIMESTAMP, TSRANGE, UUID, NUMRANGE, DATERANGE from sqlalchemy.sql import select from sqlalchemy.orm import sessionmaker, scoped_session app = Flask(__name__) api = Api(app) # Get appropriate port from Heroku (or where ever we host) try: port = int(os.environ['PORT']) except: port = 5001 print 'will run on port ' + str(port) @app.route('/') def index(): return render_template("index.html") # DATA-GATHERING FUNCTIONS def post_to_iedb_mhci(protein_sequence, method='smm', length='9', allele='HLA-A*01:01'): data = { 'sequence_text': protein_sequence, 'length': length, 'method': method, 'allele': allele, } url = 'http://tools-api.iedb.org/tools_api/mhci/' response = requests.post(url, data=data) if response.ok: return response.text else: return 'Something went wrong' def post_to_iedb_mhcii(protein_sequence, method='nn_align', length='9', allele='HLA-DRB1*01:01'): data = { 'sequence_text': protein_sequence, 'length': length, 'method': method, 'allele': allele, } url = 'http://tools-api.iedb.org/tools_api/mhcii/' response = requests.post(url, data=data) if response.ok: return response.text else: return 'Something went wrong' # API RESOURCES BELOW class ProteinQuery(Resource): def get(self, protein_sequence): iedb_mhci_response = post_to_iedb_mhci(protein_sequence) iedb_mhcii_response = post_to_iedb_mhcii(protein_sequence) return {protein_sequence: { 'iedb_mhci': iedb_mhci_response, 'iedb_mhcii': iedb_mhcii_response, } } api.add_resource(ProteinQuery, '/query/<string:protein_sequence>') if __name__ == '__main__': app.run(port=port, debug=True)
Python
0.999999
@@ -2699,36 +2699,21 @@ return ' -Something went wrong +ERROR '%0A%0A%0A%0A%0A#
b03ac5fec972ba3014ecd68337f5f390e607a791
Add link to source. Center layout.
app.py
app.py
from PIL import Image, ImageFile import flask import os from gevent.event import AsyncResult, Timeout from gevent.queue import Empty, Queue from shutil import rmtree from hashlib import sha1 from stat import S_ISREG, ST_CTIME, ST_MODE DATA_DIR = 'data' KEEP_ALIVE_DELAY = 45 MAX_IMAGE_SIZE = 800, 600 MAX_IMAGES = 10 app = flask.Flask(__name__, static_folder=DATA_DIR) broadcast_queue = Queue() try: # Reset saved files on each start rmtree(DATA_DIR, True) os.mkdir(DATA_DIR) except OSError: pass def broadcast(message): """Notify all waiting waiting gthreads of message.""" waiting = [] try: while True: waiting.append(broadcast_queue.get(block=False)) except Empty: pass print('Broadcasting {0} messages'.format(len(waiting))) for item in waiting: item.set(message) def receive(): """Generator that yields a message at least every KEEP_ALIVE_DELAY seconds. yields messages sent by `broadcast`. """ tmp = None while True: if not tmp: tmp = AsyncResult() broadcast_queue.put(tmp) try: yield tmp.get(timeout=KEEP_ALIVE_DELAY) tmp = None except Timeout: yield '' def save_normalized_image(path, data): image_parser = ImageFile.Parser() try: image_parser.feed(data) image = image_parser.close() except IOError: raise return False image.thumbnail(MAX_IMAGE_SIZE, Image.ANTIALIAS) image.save(path) return True def event_stream(client): try: for message in receive(): yield 'data: {0}\n\n'.format(message) finally: print('{0} disconnected from stream'.format(client)) @app.route('/post', methods=['POST']) def post(): sha1sum = sha1(flask.request.data).hexdigest() target = os.path.join(DATA_DIR, '{0}.jpg'.format(sha1sum)) if save_normalized_image(target, flask.request.data): broadcast(target) # Notify subscribers of completion return '' @app.route('/stream') def stream(): return flask.Response(event_stream(flask.request.remote_addr), mimetype='text/event-stream') @app.route('/') def home(): # Code adapted from: http://stackoverflow.com/questions/168409/ image_infos = [] for filename in os.listdir(DATA_DIR): filepath = os.path.join(DATA_DIR, filename) file_stat = os.stat(filepath) if S_ISREG(file_stat[ST_MODE]): image_infos.append((file_stat[ST_CTIME], filepath)) images = [] for i, (_, path) in enumerate(sorted(image_infos, reverse=True)): if i >= MAX_IMAGES: os.unlink(path) continue images.append('<div><img alt="User uploaded image" src="{0}" /></div>' .format(path)) return """ <!doctype html> <title>Image Uploader</title> <meta charset="utf-8" /> <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script> <script src="//ajax.googleapis.com/ajax/libs/jqueryui/1.10.1/jquery-ui.min.js"></script> <style> body { max-width: 500px; margin: auto; padding: 1em; background: black; color: #fff; font: 16px/1.6 menlo, monospace; } </style> <h3>Image Uploader</h3> <p>Upload an image for everyone to see. 
Valid images are pushed to everyone currently connected, and only the most recent 10 images are saved.</p> <noscript>Note: You must have javascript enabled in order to upload and dynamically view new images.</noscript> <p>Select an image: <input id="file" type="file" /></p> <h3>Uploaded Images (updated in real-time)</h3> <div id="images">%s</div> <script> function sse() { var source = new EventSource('/stream'); source.onmessage = function(e) { if (e.data == '') return; console.log(e.data); var image = $('<img>', {alt: 'User uploaded image', src: e.data}).hide(); var container = $('<div>', {html: image}); $('#images').prepend(container); image.load(function(){ image.show('blind', {}, 1000); }); }; } $('#file').change(function(e){ var xhr = new XMLHttpRequest(); xhr.open('POST', '/post', true); xhr.send(e.target.files[0]); e.target.value = ''; }); sse(); </script> """ % '\n'.join(images) if __name__ == '__main__': app.debug = True app.run('0.0.0.0', threaded=True)
Python
0
@@ -3125,17 +3125,17 @@ -width: -5 +8 00px;%0A @@ -3245,16 +3245,67 @@ ospace;%0A + text-align:center;%0A %7D%0A%0A a %7B%0A color: #fff;%0A %7D%0A%3C/st @@ -3455,18 +3455,18 @@ recent -10 +%25s images @@ -3480,16 +3480,229 @@ ed.%3C/p%3E%0A +%3Cp%3EThe complete source for this Flask web service can be found at:%0A%3Ca href=%22https://github.com/bboe/flask-image-uploader/blob/master/app.py%22%3Ehttps://github.com/bboe/flask-image-uploader/blob/master/app.py%3C/a%3E%3C/p%3E%0A %3Cnoscrip @@ -4617,16 +4617,29 @@ t%3E%0A%22%22%22 %25 + (MAX_IMAGES, '%5Cn'.jo @@ -4648,16 +4648,20 @@ (images) +)%0A%0A%0A %0A%0A%0Aif __
096cd81f318e8446855fb806772c674328adc6b2
Create app.py
app.py
app.py
#!/usr/bin/env python from __future__ import print_function from future.standard_library import install_aliases install_aliases() from urllib.parse import urlparse, urlencode from urllib.request import urlopen, Request from urllib.error import HTTPError import json import os from flask import Flask from flask import request from flask import make_response # Flask app should start in global layout app = Flask(__name__) @app.route('/webhook', methods=['POST']) def webhook(): req = request.get_json(silent=True, force=True) print("Request:") print(json.dumps(req, indent=4)) res = processRequest(req) res = json.dumps(res, indent=4) # print(res) r = make_response(res) r.headers['Content-Type'] = 'application/json' return r def processRequest(req): if req.get("result").get("action") != "news.search": return {} baseurl = "https://newsapi.org/v1/articles?source=the-times-of-india&sortBy=top&apiKey=60969da0a38e4cf1aad619158c413030" if yql_query is None: return {} yql_url = baseurl + urlencode({'': yql_query}) + "&format=json" result = urlopen(yql_url).read() data = json.loads(result) res = makeWebhookResult(data) return res #def makeYqlQuery(req): # result = req.get("result") # parameters = result.get("parameters") #city = parameters.get("news.search") #if city is None: # return None def makeWebhookResult(res): articles = res.get('articles') if articles is None: return {} author = articles.get('author') if author is None: return {} title = articles.get('title') if title is None: return {} description= articles.get('description') url = articles.get('url') #units = channel.get('units') #condition = item.get('condition') #if condition is None: # return {} print(json.dumps(item, indent=4)) speech = "latest news" +author.get()+""+title.get()+""+description.get()+""+url.get() #print("Response:") print(speech) return { "speech": speech, "displayText": speech, # "data": data, # "contextOut": [], "source": "apiai-news-search" } if __name__ == '__main__': port = int(os.getenv('PORT', 5000)) print("Starting app on port %d" % port) app.run(debug=False, port=port, host='0.0.0.0')
Python
0.000003
@@ -1223,17 +1223,16 @@ n res%0A%0A%0A -# def make @@ -1249,17 +1249,16 @@ eq):%0A -# result @@ -1280,17 +1280,16 @@ lt%22)%0A -# paramet @@ -1327,13 +1327,12 @@ -#city +news = p @@ -1368,16 +1368,15 @@ -# if -city +news is @@ -1380,33 +1380,32 @@ is None:%0A -# return None%0A%0Ade
60cdd82801a4193901c24ffd224086dd5b1a0163
fix #1 and issues in the verification
app.py
app.py
import os import base64 from io import StringIO from flask import Flask, render_template, redirect, url_for, flash, session, \ abort from werkzeug.security import generate_password_hash, check_password_hash from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.login import LoginManager, UserMixin, login_user, logout_user, \ current_user from flask.ext.bootstrap import Bootstrap from flask.ext.wtf import Form from wtforms import StringField, PasswordField, SubmitField, RadioField from wtforms.validators import Required, Length, EqualTo # create application instance app = Flask(__name__) app.config.from_object('config') # initialize extensions bootstrap = Bootstrap(app) db = SQLAlchemy(app) lm = LoginManager(app) def generateToken(): return 123456 def sendToken(): print 'Use twilio API' class User(UserMixin, db.Model): """User model.""" __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True) username = db.Column(db.String(64), index=True) password_hash = db.Column(db.String(128)) phone = db.Column(db.String(64)) method = db.Column(db.String(16)) @property def password(self): raise AttributeError('password is not a readable attribute') @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password) @lm.user_loader def load_user(user_id): """User loader callback for Flask-Login.""" return User.query.get(int(user_id)) class RegisterForm(Form): """Registration form.""" username = StringField('Username', validators=[Required(), Length(1, 64)]) password = PasswordField('Password', validators=[Required()]) password_again = PasswordField('Password again', validators=[Required(), EqualTo('password')]) phone = StringField('Phone', validators=[Required(), Length(1, 64)]) method = RadioField('Preferred method:', choices=[('SMS','You will receice the code in SMS'),('Voice','You will receive the code in a Call')]) submit = SubmitField('Register') class LoginForm(Form): """Login form.""" username = StringField('Username', validators=[Required(), Length(1, 64)]) password = PasswordField('Password', validators=[Required()]) submit = SubmitField('Login') class TwoFactorForm(Form): """Verification code form.""" token = StringField('Token', validators=[Required(), Length(6, 6)]) submit = SubmitField('Verification') @app.route('/') def index(): return render_template('index.html') @app.route('/register', methods=['GET', 'POST']) def register(): """User registration route.""" if current_user.is_authenticated(): # if user is logged in we get out of here return redirect(url_for('index')) form = RegisterForm() if form.validate_on_submit(): user = User.query.filter_by(username=form.username.data).first() if user is not None: flash('Username already exists.') return redirect(url_for('register')) # add new user to the database user = User(username=form.username.data, password=form.password.data, phone=form.phone.data, method=form.method.data) db.session.add(user) db.session.commit() return render_template('register.html', form=form) @app.route('/verification', methods=['GET', 'POST']) def verification(): """two factor auth route.""" if current_user.is_authenticated(): # if user is logged in we check the token form = TwoFactorForm() if form.validate_on_submit(): if session['token'] == form.token.data: # log user in login_user(user) flash('You are now logged in!') return redirect(url_for('index')) flash('Invalid username, password or token.') return 
redirect(url_for('verification')) return render_template('verification.html', form=form) @app.route('/login', methods=['GET', 'POST']) def login(): """User login route.""" if current_user.is_authenticated(): # if user is logged in we get out of here return redirect(url_for('index')) form = LoginForm() if form.validate_on_submit(): user = User.query.filter_by(username=form.username.data).first() if user is None or not user.verify_password(form.password.data): flash('Invalid username, password or token.') return redirect(url_for('login')) # redirect to the two-factor auth page, passing token in the session session['code'] = generateToken() return redirect(url_for('verification')) return render_template('login.html', form=form) @app.route('/logout') def logout(): """User logout route.""" logout_user() return redirect(url_for('index')) # create database tables if they don't exist yet db.create_all() if __name__ == '__main__': app.run(host='0.0.0.0', debug=True)
Python
0
@@ -766,14 +766,16 @@ urn +%22 123456 +%22 %0A%0Ade @@ -1121,16 +1121,100 @@ ing(16)) +%0A %0A def __init__(self, **kwargs):%0A super(User, self).__init__(**kwargs) %0A%0A @p @@ -3688,28 +3688,66 @@ we -check the token%0A +get out of here%0A return redirect(url_for('index'))%0A @@ -3769,20 +3769,16 @@ rForm()%0A - if f @@ -3814,13 +3814,83 @@ -%09 + user = User.query.filter_by(username=session%5B'username'%5D).first()%0A +%09 if s @@ -3937,20 +3937,16 @@ - - # log us @@ -3951,20 +3951,16 @@ user in%0A - @@ -3988,28 +3988,24 @@ - - flash('You a @@ -4028,36 +4028,32 @@ !')%0A - - return redirect( @@ -4078,35 +4078,32 @@ - flash(' + Invalid username @@ -4094,38 +4094,16 @@ Invalid -username, password or token.') @@ -4095,36 +4095,32 @@ nvalid token.')%0A - return r @@ -4669,17 +4669,20 @@ username -, + or passwor @@ -4686,18 +4686,8 @@ word - or token. ')%0A @@ -4825,20 +4825,70 @@ ession%5B' -code +username'%5D = form.username.data%0A session%5B'token '%5D = gen
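Two things happen in this fix: the token becomes a string (so it compares cleanly with what the form posts back), and the pending login is threaded through the session, with the password step recording the username and token and the verification step reading both back instead of touching an undefined user variable. A plain-dict sketch of that handshake, where the dict stands in for flask.session:

import random

session = {}  # stands in for flask.session inside a request

def generate_token():
    # A string token compares cleanly against string form input.
    return "{:06d}".format(random.randint(0, 999999))

def start_verification(username):
    # Password step: remember who is mid-login and the token sent to them.
    session['username'] = username
    session['token'] = generate_token()

def finish_verification(form_token):
    # Token step: the pending user is looked up from the session again.
    return session.get('token') == form_token

start_verification('alice')
print(finish_verification(session['token']))  # True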
948100fde4a648e0f269c08fcb22ebdbc16948f1
move every get method into the new condor
app.py
app.py
# bultin library # external libraries from sanic import Sanic from sanic.response import text, json from condor.dbutil import requires_db from condor.models import Bibliography, RankingMatrix, TermDocumentMatrix, Document app = Sanic(__name__) @app.route("/ping") async def start(request): return text("pong") @app.route("/ranking", methods=["GET"]) @requires_db async def ranking(db, request): ranking_matrices = [ { "kind": matrix.kind, "build_options": matrix.build_options, "ranking_matrix_path": matrix.ranking_matrix_path } for matrix in list_ranking_matrices_from_db(db, count=10) ] return json(ranking_matrices) def list_ranking_matrices_from_db(db, count): """ List all ranking matrices """ ranking_matrices = db.query(RankingMatrix).order_by( RankingMatrix.created.desc() ).limit(count) return ranking_matrices @app.route("/bibliography") @requires_db async def format_list_bibliography(db, request): to_return = [{ "eid": bib.eid, "description": bib.description, "created": bib.created, "modified": bib.modified } for bib in list_bibliography_from_db(db, count=10)] return json(to_return) @app.route('/document') @requires_db async def list_documents(database, request): """ List the documents associated with a bibliography. """ bibliography_eid = request.args.get('bibliography', None) if not bibliography_eid: return json({ 'error': 'You must suply a bibliography eid.', 'details': 'Fill in the bibliography field.' }, status=400) to_return = [ { 'eid': doc.eid, 'title': doc.title, 'description': doc.description, 'created': doc.created, 'modified': doc.modified } for doc in Document.list(database, bibliography_eid) ] return json(to_return) def list_bibliography_from_db(db, count): """ List all the document sets. """ bibliography_sets = db.query(Bibliography).order_by( Bibliography.created.desc() ).limit(count) return bibliography_sets.all() @app.route("/matrix") @requires_db async def list_term_document_matrices(db, request): to_return = [{ "bibliography_eid": document.bibliography_eid, "bibliography_options": document.bibliography_options, "processing_options": document.processing_options, "term_list_path": document.term_list_path, "matrix_path": document.matrix_path } for document in list_term_document_matrices_from_db(db, count=10)] return json(to_return) def list_term_document_matrices_from_db(db, count): """ Lis all the term document matrix """ document_matrix_sets = db.query(TermDocumentMatrix).order_by( TermDocumentMatrix.created.desc() ).limit(count) return document_matrix_sets.all() if __name__ == "__main__": app.run( debug=True, host="0.0.0.0", port=8000 )
Python
0
@@ -615,50 +615,29 @@ in -list_r +R anking -_m +M atri -ces_from_db(db, count=10 +x.list(db )%0A @@ -679,241 +679,8 @@ s)%0A%0A -def list_ranking_matrices_from_db(db, count):%0A %22%22%22%0A List all ranking matrices%0A %22%22%22%0A ranking_matrices = db.query(RankingMatrix).order_by(%0A RankingMatrix.created.desc()%0A ).limit(count)%0A return ranking_matrices%0A %0A@ap @@ -731,15 +731,8 @@ def -format_ list @@ -743,17 +743,19 @@ liograph -y +ies (db, req @@ -926,22 +926,17 @@ bib in -list_b +B ibliogra @@ -938,37 +938,24 @@ iography -_from_db(db, count=10 +.list(db )%5D%0A r @@ -1698,247 +1698,8 @@ )%0A%0A%0A -def list_bibliography_from_db(db, count):%0A %22%22%22%0A List all the document sets.%0A %22%22%22%0A bibliography_sets = db.query(Bibliography).order_by(%0A Bibliography.created.desc()%0A ).limit(count)%0A return bibliography_sets.all()%0A%0A%0A @app @@ -2098,56 +2098,34 @@ in -list_term_d +TermD ocument -_m +M atri -ces_from_db(db, count=10 +x.list(db )%5D%0A @@ -2156,280 +2156,8 @@ )%0A%0A%0A -def list_term_document_matrices_from_db(db, count):%0A %22%22%22%0A Lis all the term document matrix%0A %22%22%22%0A document_matrix_sets = db.query(TermDocumentMatrix).order_by(%0A TermDocumentMatrix.created.desc()%0A ).limit(count)%0A return document_matrix_sets.all()%0A%0A%0A if _
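The refactor relocates each free list_*_from_db helper onto the model it queries, so every handler collapses to a call like RankingMatrix.list(db). A sketch of what such a classmethod could look like, written against stock SQLAlchemy in place of the project's actual condor.models (whose API may differ):

import datetime
from sqlalchemy import Column, DateTime, Integer, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class RankingMatrix(Base):
    __tablename__ = 'ranking_matrix'
    id = Column(Integer, primary_key=True)
    created = Column(DateTime, default=datetime.datetime.utcnow)

    @classmethod
    def list(cls, db, count=10):
        # The query the old module-level helper owned, now on the model.
        return db.query(cls).order_by(cls.created.desc()).limit(count).all()

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(RankingMatrix())
session.commit()
print(RankingMatrix.list(session))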
e98b499704fbd9043be74e80dfe4cbae29ae47b9
send images in conversations
app.py
app.py
import os import config from flask import Flask, request, render_template from flask_sqlalchemy import SQLAlchemy from analytics import Analytics # --------------------------------------------------------------- # ACCESS_TOKEN = os.environ.get('ACCESS_TOKEN', config.ACCESS_TOKEN) VERIFY_TOKEN = os.environ.get('VERIFY_TOKEN', config.VERIFY_TOKEN) PAGE_ID = os.environ.get('PAGE_ID', config.PAGE_ID) ADMIN_ID = os.environ.get('ADMIN_ID', config.ADMIN_ID) # --------------------------------------------------------------- # app = Flask(__name__) app.config.from_object('config') db = SQLAlchemy(app) # --------------------------------------------------------------- # from models import User, WaitingListUser, ActiveChatsUser from templates import TextTemplate from DB_Wrappers import * usersdb = UsersDB(db=db) waitlistdb = WaitingListDB(db=db) activechatsdb = ActiveChatsDB(db=db) from modules import * setup_all() metrics = Analytics() Int = Interrupts() # --------------------------------------------------------------- # @app.route('/webview/', methods=['POST']) def getProfile(): try: print("FORM SUBMITTED", dict(request.form)) bio = request.form['bio'] interests = request.form['interests'] id = request.form['psid'] print("USER ID", id) user = usersdb.get(id) user.interests = interests user.bio = bio user.liked = True db.session.commit() return render_template('result.html') except Exception, e: print("FORM ERROR", str(e)) @app.route('/webview/', methods=['GET']) def render(): id = request.args.get('id') print("PROFILE ID", id) user = usersdb.get(id) bio = user.bio interests = user.interests liked = user.liked if liked == True: liked = '1' else: liked = '0' if bio is None: bio = "" if interests is None: interests = "" return render_template('profile.html', id=id, bio=bio, interests=interests, liked=liked) @app.route('/webhook/', methods=['GET', 'POST']) def webhook(): if request.method == 'POST': data = request.get_json(force=True) # analytics api post metrics.record(entry=data["entry"]) messaging_events = data['entry'][0]['messaging'] for event in messaging_events: sender = event['sender']['id'] print("EVENT", event) try: if sender != PAGE_ID and usersdb.hasDataOf(sender) is False: usersdb.add(sender) except Exception, e: print("ERROR", str(e)) try: if 'postback' in event and 'payload' in event['postback']: postback_payload = event['postback']['payload'] print("postback payload", postback_payload) handle_postback(payload=postback_payload, sender=sender) print("postback handled") continue elif 'message' in event and 'text' in event['message']: if Int.isValidCommand(event['message']['text']): print("interrupt detected", event['message']['text']) Int.handleCommand(command=event['message']['text'], sender=sender) print("interrupt handled") continue else: print("NOT POSTBACK OR INTERRUPT") except Exception, e: print("POSTBACK/INTERRUPT ERROR", str(e)) db.session.rollback() return '' if activechatsdb.isActive(sender): alias = activechatsdb.get_alias(sender) if 'message' in event and 'text' in event['message']: text = event['message']['text'] if 'quick_reply' in event['message'] and 'payload' in event['message']['quick_reply']: quick_reply_payload = event['message']['quick_reply']['payload'] handle_quick_reply(sender=sender, payload=quick_reply_payload) else: message = TextTemplate(text=alias+": "+text) recipient = activechatsdb.get_partner(sender) send_message(message=message.get_message(), id=recipient) elif 'message' in event and 'attachments' in event['message'] and 'type' in 
event['message']['attachments'][0]: print("IMAGE 1") if event['message']['attachments'][0]['type'] == "image": print("IMAGE 2") handle_image(id=sender, url=event['message']['attachments'][0]['payload']['url']) else: recipient = sender if 'message' in event and 'text' in event['message']: text = event['message']['text'] try: if text[:3] == ":::": handle_debug(text, id=sender) message = TextTemplate(text="Debug command executed") send_message(message.get_message(), id=recipient) continue except Exception, e: print("DEBUG ERROR", str(e)) if 'quick_reply' in event['message'] and 'payload' in event['message']['quick_reply']: quick_reply_payload = event['message']['quick_reply']['payload'] handle_quick_reply(sender=sender, payload=quick_reply_payload) else: if(isGreeting(text)): handle_greetings(text, sender, usersdb.get(sender).first_name) continue message = TextTemplate(text="I didn't understand what you intended. Type \"help\" to"+ " get the set of available commands. Use those commands or"+ " the menu options to interact with the bot") send_message(message.get_message(), id=recipient) return '' # 200 OK elif request.method == 'GET': # Verification if request.args.get('hub.verify_token') == VERIFY_TOKEN: return request.args.get('hub.challenge') else: return 'Error, wrong validation token' if __name__ == '__main__': app.run() #, port=int(os.environ.get('PORT', 4431))
Python
0
@@ -4692,18 +4692,22 @@ e_image( -id +sender =sender,
8fc38abecd4a9cba6579c7a422b957748115f450
disable CSRF token
app.py
app.py
from flask import Flask, render_template, flash from flask_wtf import Form from flask_wtf.file import FileField from tools import s3_upload import json app = Flask(__name__) app.config.from_object('config') class UploadForm(Form): example = FileField('Example File') @app.route('/', methods=['POST', 'GET']) def upload_page(): form = UploadForm() if form.validate_on_submit(): output = s3_upload(form.example) flash('{src} uploaded to S3 as {dst} and its urs is {url}'.format(src=form.example.data.filename, dst=output.split(" ")[0], url=output.split(" ")[1])) response = {} response['url'] = output.split(" ")[1] return json.dumps(response, indent=4) return render_template('example.html', form=form) if __name__ == '__main__': app.run()
Python
0.000001
@@ -368,16 +368,34 @@ oadForm( +csrf_enabled=False )%0D%0A i
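In the Flask-WTF of this era, passing csrf_enabled=False to a form constructor skipped token validation for that one form; current releases spell the same opt-out declaratively on the form's Meta class. Both spellings, hedged against version drift:

from flask_wtf import FlaskForm  # this class was simply named Form before
from flask_wtf.file import FileField

class UploadForm(FlaskForm):
    example = FileField('Example File')

    class Meta:
        csrf = False  # modern, declarative per-form opt-out

# Legacy spelling, as in the record's diff:
# form = UploadForm(csrf_enabled=False)

Skipping CSRF like this suits token-authenticated API endpoints; a browser-facing session form should keep the token.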
e33174b6fcf8110478ec84016781ed65df7eb055
Add web-interface to utility
app.py
app.py
#!notify/bin/python3 import hug import os from pushbullet import Pushbullet @hug.cli() def create_note(title: hug.types.text, content: hug.types.text): api_key = os.environ["PB_API_KEY"] pb = Pushbullet(api_key) pb.push_note(title, content) if __name__ == '__main__': create_note.interface.cli()
Python
0.999987
@@ -72,16 +72,27 @@ ullet%0A%0A%0A +@hug.get()%0A @hug.cli
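hug lets one function carry several interfaces at once, so stacking @hug.get() above the existing @hug.cli() publishes the same callable over HTTP (routed as /create_note, from the function name) without duplicating any logic. The dual exposure, reduced to a self-contained greeter:

import hug

@hug.get()   # HTTP: GET /greet?name=world
@hug.cli()   # CLI:  python app.py world
def greet(name: hug.types.text):
    return "hello {0}".format(name)

if __name__ == '__main__':
    greet.interface.cli()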
3bfbc1de88aadc5e0e20dea8443cc26f99c34c63
Remove type ignore
bot.py
bot.py
from __future__ import annotations import asyncio import logging import lzma import os import sys import tarfile from asyncio import Task from datetime import datetime from pathlib import Path from typing import Any, Iterable, Optional, Type, TypeVar, overload import aiohttp import toml from asyncqlio.db import DatabaseInterface from discord import AllowedMentions, Game, Intents, Message from discord.ext import commands from discord.ext.commands import Bot, Context, when_mentioned_or from discord.http import HTTPClient from config import Config from context import BContext from help import BHelp from utils import contextmanagers, exceptions from utils.aioutils import do_every C = TypeVar("C", bound=Context) class BeattieBot(Bot): """A very cute robot boy""" command_ignore = (commands.CommandNotFound, commands.CheckFailure) general_ignore = (ConnectionResetError,) archive_task: Optional[Task[Any]] http: HTTPClient session: aiohttp.ClientSession extra: dict[str, Any] def __init__( self, prefixes: tuple[str, ...], debug: bool = False, ): async def prefix_func(bot: Bot, message: Message) -> Iterable[str]: prefix = prefixes if guild := message.guild: guild_conf = await bot.config.get_guild(guild.id) # type: ignore if guild_pre := guild_conf.get("prefix"): prefix = prefix + (guild_pre,) return when_mentioned_or(*prefix)(self, message) help_command: commands.HelpCommand = BHelp() game = Game(name=f"{prefixes[0]}help") super().__init__( prefix_func, activity=game, case_insensitive=True, help_command=help_command, intents=Intents.all(), allowed_mentions=AllowedMentions.none(), log_handler=None, ) with open("config/config.toml") as file: data = toml.load(file) password = data.get("config_password", "") self.loglevel = data.get("loglevel", logging.WARNING) self.debug = debug dsn = f"postgresql://beattie:{password}@localhost/beattie" self.db = DatabaseInterface(dsn) self.config = Config(self) self.uptime = datetime.now().astimezone() self.extra = {} if debug: self.loglevel = logging.DEBUG self.archive_task = None else: self.archive_task = do_every(60 * 60 * 24, self.swap_logs) self.new_logger() async def setup_hook(self) -> None: self.session = aiohttp.ClientSession() await self.db.connect() await self.config.async_init() extensions = [f"cogs.{f.stem}" for f in Path("cogs").glob("*.py")] extensions.append("jishaku") for extension in extensions: try: await self.load_extension(extension) except Exception as e: print(f"Failed to load extension {extension}\n{type(e).__name__}: {e}") async def close(self) -> None: await self.session.close() await self.db.close() if self.archive_task is not None: self.archive_task.cancel() await super().close() async def swap_logs(self, new: bool = True) -> None: if new: self.new_logger() await asyncio.to_thread(self.archive_logs) def new_logger(self) -> None: logger = logging.getLogger() logger.setLevel(self.loglevel) now = datetime.now().astimezone() if self.debug: pre = "debug" else: pre = "discord" filename = now.strftime(f"{pre}%Y%m%d%H%M.log") handler = logging.FileHandler(filename=filename, encoding="utf-8", mode="w") handler.setFormatter( logging.Formatter("%(asctime)s:%(levelname)s:%(name)s: %(message)s") ) logger.addHandler(handler) self.logger = logger def archive_logs(self) -> None: logname = "logs.tar" if os.path.exists(logname): mode = "a" else: mode = "w" # get all logfiles but newest old_logs = sorted(Path(".").glob("discord*.log"), key=os.path.getmtime)[:-1] with tarfile.open(logname, mode) as tar: for log in old_logs: name = f"{log.name}.xz" with open(log, 
"rb") as r, lzma.open(name, "w") as w: for line in r: w.write(line) tar.add(name) os.unlink(name) log.unlink() async def handle_error(self, ctx: Context, e: Exception) -> None: if isinstance(e, (commands.CommandInvokeError, commands.ExtensionFailed)): e = e.original if isinstance(e, commands.MissingRequiredArgument): await ctx.send("Missing required arguments.") elif isinstance(e, commands.BadArgument): await ctx.send("Bad arguments.") elif isinstance(e, exceptions.ResponseError): await ctx.send( f"An HTTP request to <{e.url}> failed with error code {e.code}" ) elif not isinstance(e, self.command_ignore): await ctx.send(f"{type(e).__name__}: {e}") if ctx.command is not None: message = f"An error occurred in {ctx.command.name}" else: message = ( f"An error occured in guild {ctx.guild} channel #{ctx.channel}" ) self.logger.exception(message, exc_info=(type(e), e, e.__traceback__)) raise e from None async def on_ready(self) -> None: assert self.user is not None print("Logged in as") print(self.user.name) print(self.user.id) print("------") @overload async def get_context(self, message: Message) -> BContext: ... @overload async def get_context(self, message: Message, *, cls: Type[C]) -> C: ... async def get_context( self, message: Message, *, cls: Type[Context] = None ) -> Context: return await super().get_context(message, cls=cls or BContext) async def on_command_error(self, ctx: Context, e: Exception) -> None: if not hasattr(ctx.command, "on_error"): await self.handle_error(ctx, e) async def on_error(self, event_method: str, *args: Any, **kwargs: Any) -> None: _, e, _ = sys.exc_info() if isinstance(e, (commands.CommandInvokeError, commands.ExtensionFailed)): e = e.original if not isinstance(e, self.general_ignore): await super().on_error(event_method, *args, **kwargs) def get(self, *args: Any, **kwargs: Any) -> contextmanagers.get: return contextmanagers.get(self.session, *args, **kwargs)
Python
0.000001
@@ -1149,16 +1149,23 @@ nc(bot: +Beattie Bot, mes @@ -1335,24 +1335,8 @@ .id) - # type: ignore %0A
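A rough reconstruction of the patched closure (whitespace approximate, since the stored contents are newline-flattened): annotating the callback parameter as BeattieBot instead of Bot lets the type checker see the custom config attribute, so the # type: ignore suppression can go.

async def prefix_func(bot: BeattieBot, message: Message) -> Iterable[str]:
    prefix = prefixes
    if guild := message.guild:
        # config is declared on BeattieBot, so no ignore comment is needed
        guild_conf = await bot.config.get_guild(guild.id)
        if guild_pre := guild_conf.get("prefix"):
            prefix = prefix + (guild_pre,)
    return when_mentioned_or(*prefix)(self, message)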
ed3ee7caae9ce754e2ec098e8889bfbed2198aa6
Print log msg before trying to write to log file
bot.py
bot.py
#!/usr/bin/python import init_twit as tw import markovgen, time, re, random, codecs # make a separate file for these reusable functions: bot.py # main bot-specific app logic in app.py corpus_file = 'corpus.txt' with open(corpus_file) as text: markov = markovgen.Markov(text) def log(msg): with codecs.open('log','a','utf-8') as f: f.write(msg+"\n") print msg def genTweet(): wc = random.randint(6,18) return markov.generate_markov_text(size=wc) def tweet(status,irtsi=None,at=None): try: if at and irtsi: status = "@"+at+" "+status tw.poster.statuses.update(status=status,in_reply_to_status_id=irtsi) else: pass tw.poster.statuses.update(status=status) except tw.TwitterError as error: log(error.response_data) else: if irtsi: status = "In reply to "+irtsi+": "+status log(status) def reply(txt,mention): asker = mention['from_user'] log(asker + " said " + mention['text']) status_id = str(mention['id']) if tw.last_id_replied < status_id: tw.last_id_replied = status_id while len(txt) > 123: txt = genTweet() tweet(txt,status_id,asker) while True: results = [] #results = tw.twitter.search(q="@"+tw.handle,since_id=tw.last_id_replied)['results'] #retweets = re.compile('rt\s',flags=re.I) #results = [response for response in results if not retweets.search(response['text'])] if not results: log("Nobody's talking to me...") else: [reply(genTweet(),result) for result in results] tweet(genTweet()) log("Sweet Dreams...") time.sleep(7600) # waits for two hours
Python
0
@@ -286,16 +286,27 @@ g(msg):%0A +%09print msg%0A %09with co @@ -359,27 +359,16 @@ sg+%22%5Cn%22) -%0A%09print msg %0A%0Adef ge
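Applying the two hunks to the log helper yields roughly the following; printing before opening the file means the message still reaches the console even if the log file cannot be written:

def log(msg):
    print msg                                   # Python 2 print statement, as in the source
    with codecs.open('log','a','utf-8') as f:
        f.write(msg+"\n")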
2f4c3f721649359f475dbfcbbf50bd3816ca436d
Set game and online status in init, change is_owner implementation for selfbots
bot.py
bot.py
import datetime import sys import aiohttp import discord from discord.ext import commands from config import Config from utils import contextmanagers, exceptions class BContext(commands.Context): """An extension of Context to add reply and mention methods, as well as support use with self bots""" async def reply(self, content, sep=',\n'): if self.me.bot and not isinstance(self.channel, discord.DMChannel): content = f'{self.author.display_name}{sep}{content}' return await self.send(content) async def mention(self, content, sep=',\n'): if self.me.bot and not isinstance(self.channel, discord.DMChannel): content = f'{self.author.mention}{sep}{content}' return await self.send(content) async def send(self, content=None, *, embed=None, **kwargs): if self.me.bot: return await super().send(content, embed=embed, **kwargs) elif content is not None: content = f'{self.message.content}\n{content}' await self.message.edit(content=content) return self.message elif embed is not None: await self.message.delete() return await super().send(embed=embed, **kwargs) def typing(self): if self.me.bot: return super().typing() else: return contextmanagers.null() class BeattieBot(commands.Bot): """An extension of Bot. Allows use with self bots and handles errors in an orderly way""" command_ignore = (commands.CommandNotFound, commands.CheckFailure) general_ignore = (ConnectionResetError, ) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.session = aiohttp.ClientSession(loop=self.loop) self.uptime = datetime.datetime.utcnow() self.config = Config(self) def __del__(self): self.session.close() try: delete = super().__del__ except AttributeError: pass else: delete() async def is_owner(self, member): if self.user.bot: return await super().is_owner(member) else: return True async def handle_error(self, ctx, e): e = getattr(e, 'original', e) if isinstance(e, commands.MissingRequiredArgument): await ctx.send('Missing required arguments.') elif isinstance(e, commands.BadArgument): await ctx.send('Bad arguments.') elif isinstance(e, exceptions.ResponseError): await ctx.send(f'An HTTP request failed with error code {e.code}') elif not isinstance(e, self.command_ignore): await ctx.send(f'{type(e).__name__}: {e}') raise e from None async def on_ready(self): print('Logged in as') print(self.user.name) print(self.user.id) print('------') if self.user.bot: await self.change_presence(game=discord.Game(name='b>help')) else: await self.change_presence(status=discord.Status.idle, afk=True) async def on_message(self, message): ctx = await self.get_context(message, cls=BContext) if ctx.prefix is not None: command = ctx.invoked_with if command: ctx.command = self.get_command(command.lower()) await self.invoke(ctx) async def on_guild_join(self, guild): bots = sum(m.bot for m in guild.members) if bots / len(guild.members) > 0.5: await guild.leave() else: await self.config.add(guild.id) async def on_guild_remove(self, guild): await self.config.remove(guild.id) async def on_member_join(self, member): if not self.user.bot: return guild = member.guild guild_conf = await self.config.get(guild.id) message = guild_conf['welcome'] if message: await guild.default_channel.send(message.format(member.mention)) async def on_member_remove(self, member): if not self.user.bot: return guild = member.guild guild_conf = await self.config.get(guild.id) message = guild_conf['farewell'] if message: await guild.default_channel.send(message.format(member.mention)) async def on_command_error(self, ctx, e): if not hasattr(ctx.command,
'on_error'): await self.handle_error(ctx, e) async def on_error(self, event_method, *args, **kwargs): _, e, _ = sys.exc_info() e = getattr(e, 'original', e) if not isinstance(e, self.general_ignore): await super().on_error(event_method, *args, **kwargs) def get(self, *args, **kwargs): return contextmanagers.get(self.session, *args, **kwargs) def tmp_dl(self, *args, **kwargs): return contextmanagers.tmp_dl(self.session, *args, **kwargs)
Python
0
@@ -1668,40 +1668,409 @@ s -uper().__init__(*args, **kwargs) +elf_bot = kwargs.get('self_bot')%0A if self_bot:%0A game = None%0A status = discord.Status.idle%0A else:%0A game = discord.Game(name='b%3Ehelp')%0A status = None%0A super().__init__(*args, **kwargs, game=game, status=status)%0A if self_bot:%0A self.loop.create_task(self.change_presence(afk=True))%0A self.owner_id = self.user.id %0A @@ -2402,161 +2402,8 @@ ()%0A%0A - async def is_owner(self, member):%0A if self.user.bot:%0A return await super().is_owner(member)%0A else:%0A return True%0A%0A @@ -3078,24 +3078,24 @@ lf.user.id)%0A + prin @@ -3109,198 +3109,8 @@ --') -%0A if self.user.bot:%0A await self.change_presence(game=discord.Game(name='b%3Ehelp'))%0A else:%0A await self.change_presence(status=discord.Status.idle, afk=True) %0A%0A
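The replaced __init__, reconstructed from the hunk (indentation approximate). Presence is chosen up front, and for self-bots owner_id is pinned to the bot's own id, which is presumably why the separate is_owner override is deleted in the same commit:

def __init__(self, *args, **kwargs):
    self_bot = kwargs.get('self_bot')
    if self_bot:
        game = None
        status = discord.Status.idle
    else:
        game = discord.Game(name='b>help')
        status = None
    super().__init__(*args, **kwargs, game=game, status=status)
    if self_bot:
        self.loop.create_task(self.change_presence(afk=True))
        self.owner_id = self.user.id  # makes the default is_owner() true for the self-bot user
    self.session = aiohttp.ClientSession(loop=self.loop)
    self.uptime = datetime.datetime.utcnow()
    self.config = Config(self)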
0d00da01ef8aa6534247e933c81c66b477d1ab3c
Use send_message instead of whisper in on_command_error.
bot.py
bot.py
from discord.ext import commands import discord from cogs.utils import checks import datetime, re import json, asyncio import copy description = """ Hello! I am a bot written by Danny to provide some nice utilities. """ initial_extensions = [ 'cogs.meta', 'cogs.splatoon', 'cogs.rng', 'cogs.mod', 'cogs.profile', 'cogs.tags' ] shitty_log = open('rdanny.log', 'w', encoding='utf-8') help_attrs = dict(hidden=True) bot = commands.Bot(command_prefix=['?', '!', '\u2757'], description=description, pm_help=None, help_attrs=help_attrs) @bot.event async def on_command_error(error, ctx): if isinstance(error, commands.NoPrivateMessage): await bot.whisper('This command cannot be used in private messages.') elif isinstance(error, commands.DisabledCommand): await bot.whisper('Sorry. This command is disabled and cannot be used.') @bot.event async def on_ready(): print('Logged in as:') print('Username: ' + bot.user.name) print('ID: ' + bot.user.id) print('------') bot.uptime = datetime.datetime.utcnow() bot.commands_executed = 0 for extension in initial_extensions: try: bot.load_extension(extension) except Exception as e: print('Failed to load extension {}\n{}: {}'.format(extension, type(e).__name__, e)) @bot.event async def on_command(command, ctx): bot.commands_executed += 1 message = ctx.message destination = None if message.channel.is_private: destination = 'Private Message' else: destination = '#{0.channel.name} ({0.server.name})'.format(message) print('{0.timestamp}: {0.author.name} in {1}: {0.content}'.format(message, destination), file=shitty_log) @bot.event async def on_message(message): mod = bot.get_cog('Mod') if mod is not None and not checks.is_owner_check(message): # check if the user is bot banned if message.author.id in mod.config.get('plonks', []): return # check if the channel is ignored # but first, resolve their permissions perms = message.channel.permissions_for(message.author) bypass_ignore = perms.manage_roles # if we don't have manage roles then we should # check if it's the owner of the bot or they have Bot Admin role. if not bypass_ignore: if not message.channel.is_private: bypass_ignore = discord.utils.get(message.author.roles, name='Bot Admin') is not None # now we can finally realise if we can actually bypass the ignore. if not bypass_ignore: if message.channel.id in mod.config.get('ignored', []): return # if someone private messages us with something that looks like a URL then # we should try to see if it's an invite to a discord server and join it if so. if message.channel.is_private and message.content.startswith('http'): try: invite = await bot.get_invite(message.content) await bot.accept_invite(invite) await bot.send_message(message.channel, 'Joined the server.') except: # if an error occurs at this point then ignore it and move on. 
pass finally: return await bot.process_commands(message) @bot.command(hidden=True) @checks.is_owner() async def load(*, module : str): """Loads a module.""" module = module.strip() try: bot.load_extension(module) except Exception as e: await bot.say('\U0001f52b') await bot.say('{}: {}'.format(type(e).__name__, e)) else: await bot.say('\U0001f44c') @bot.command(hidden=True) @checks.is_owner() async def unload(*, module : str): """Unloads a module.""" module = module.strip() try: bot.unload_extension(module) except Exception as e: await bot.say('\U0001f52b') await bot.say('{}: {}'.format(type(e).__name__, e)) else: await bot.say('\U0001f44c') @bot.command(pass_context=True, hidden=True) @checks.is_owner() async def debug(ctx, *, code : str): """Evaluates code.""" code = code.strip('` ') python = '```py\n{}\n```' result = None try: result = eval(code) except Exception as e: await bot.say(python.format(type(e).__name__ + ': ' + str(e))) return if asyncio.iscoroutine(result): result = await result await bot.say(python.format(result)) @bot.command(hidden=True) @checks.is_owner() async def announcement(*, message : str): # we copy the list over so it doesn't change while we're iterating over it servers = list(bot.servers) for server in servers: try: await bot.send_message(server, message) except discord.Forbidden: # we can't send a message for some reason in this # channel, so try to look for another one. me = server.me def predicate(ch): text = ch.type == discord.ChannelType.text return text and ch.permissions_for(me).send_messages channel = discord.utils.find(predicate, server.channels) if channel is not None: await bot.send_message(channel, message) finally: print('Sent message to {}'.format(server.name.encode('utf-8'))) # to make sure we don't hit the rate limit, we send one # announcement message every 5 seconds. await asyncio.sleep(5) @bot.command(pass_context=True, hidden=True) async def do(ctx, times : int, *, command): """Repeats a command a specified number of times.""" msg = copy.copy(ctx.message) msg.content = command for i in range(times): await bot.process_commands(msg) @bot.command() async def changelog(): """Gives a URL to the current bot changelog.""" await bot.say('https://gist.github.com/Rapptz/7a0d72b836dd0d9620f0') def load_credentials(): with open('credentials.json') as f: return json.load(f) if __name__ == '__main__': credentials = load_credentials() bot.run(credentials['email'], credentials['password']) shitty_log.close()
Python
0
@@ -676,24 +676,49 @@ ait bot. -whisper( +send_message(ctx.message.author, 'This co @@ -837,16 +837,41 @@ bot. -whisper( +send_message(ctx.message.author, 'Sor
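The patched handler, per the hunks, DMs the invoking user explicitly; in this era of discord.py, bot.whisper infers its target from the command currently being processed, which is likely unavailable inside a raw event handler:

@bot.event
async def on_command_error(error, ctx):
    if isinstance(error, commands.NoPrivateMessage):
        await bot.send_message(ctx.message.author, 'This command cannot be used in private messages.')
    elif isinstance(error, commands.DisabledCommand):
        await bot.send_message(ctx.message.author, 'Sorry. This command is disabled and cannot be used.')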
132b422e81c8a3f3de4d1600acdc6a71327bfc1e
Update bro .py
bro.py
bro.py
def combiner(verbose, files=[]): import subprocess import os import time files = files.split("\n") files.pop(-1) if verbose == False: epoch = time.time() path = os.getcwd() os.mkdir(path + "/" + str(epoch)) os.chdir(path + "/" + str(epoch)) for file in files: subprocess.check_output(["bro","-r",file]) newfiles = subprocess.check_output(["ls"]).split() for newfile in newfiles: combinedfile = open("combined_" + str(newfile),"a+") newfile = open(newfile,"r") combinedfile.write(newfile.read()) combinedfile.close() newfile.close() elif verbose == True: count = 1 epoch = time.time() path = os.getcwd() os.mkdir(path + "/" + str(epoch)) os.chdir(path + "/" + str(epoch)) print "Creating the folder " + str(epoch) + " in order to store Bro Logs safely." for file in files: print "Working on " + str(count) + " out of " + str(len(files)) subprocess.check_output(["bro","-r",file]) newfiles = subprocess.check_output(["ls"]).split() for newfile in newfiles: combinedfile = open("combined_" + str(newfile),"a+") newfile = open(newfile,"r") combinedfile.write(newfile.read()) combinedfile.close() newfile.close() count += 1
Python
0.000001
@@ -1,16 +1,11 @@ def -combiner +bro (ver
4aad9aeb5acf0c8aba609a53f20107ec48cdfa2b
Initialise gpio
car.py
car.py
import time, os, sys import wiringpi as io class light(object): def __init__(self, pin): #make pins into output io.pinMode(pin,1) #set output low io.digitalWrite(pin,0) #set variables self.status = 0 self.pin = pin def on(self): #turn light on io.digitalWrite(self.pin,1) self.status = 1 def off(self): io.digitalWrite(self.pin,0) self.status = 0 def blink(self,times): for _ in range(times): self.on() time.sleep(2) self.off() time.sleep(2) lightFL = light(21) lightFL.on() lights = { "LeftFront":{"pin":21,"status":0}, "RightFront":{"pin":16,"status":0} } def lightCtrl(names,status): for i in names: io.digitalWrite(lights[i]["pin"],status) lights[i]["status"] = status def initResource(): try: io.wiringPiSetupGpio() except: print "GPIO issue", sys.exc_info()[0] for key, value in lights.items(): #make pins into output io.pinMode(value["pin"],1) #set output low io.digitalWrite(value["pin"],0) def lightTest(): lightCtrl(["LeftFront"],1) time.sleep(3) lightCtrl(["LeftFront"],0) lightCtrl(["RightFront"],1) time.sleep(3) lightCtrl(["RightFront"],0) time.sleep(1) lightCtrl(["LeftFront","RightFront"],1) time.sleep(3) lightCtrl(["LeftFront","RightFront"],0) #initResource() #lightTest()
Python
0.000003
@@ -602,16 +602,111 @@ eep(2)%0A%0A +#initialise%0Atry:%0A io.wiringPiSetupGpio()%0Aexcept:%0A print %22GPIO issue%22, sys.exc_info()%5B0%5D%0A%0A lightFL
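The inserted module-level block (Python 2 print, as in the source) initializes wiringpi with Broadcom pin numbering before the first light(...) constructor calls io.pinMode:

#initialise
try:
    io.wiringPiSetupGpio()   # wiringpi setup using BCM pin numbers
except:
    print "GPIO issue", sys.exc_info()[0]

lightFL = light(21)
lightFL.on()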
43a26a77d84fb8547564518a8469be69ed852cf1
add discourse to csp
csp.py
csp.py
csp = { 'default-src': '\'self\'', 'style-src': [ '\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com' ], 'script-src': [ '\'self\'', 'cdn.httparchive.org', 'www.google-analytics.com', 'use.fontawesome.com', 'cdn.speedcurve.com', 'spdcrv.global.ssl.fastly.net' ], 'font-src': [ '\'self\'', 'fonts.gstatic.com' ], 'connect-src': [ '\'self\'', 'cdn.httparchive.org', 'discuss.httparchive.org', 'raw.githubusercontent.com', 'www.webpagetest.org' ], 'img-src': [ '\'self\'', 'discuss.httparchive.org', 'www.google-analytics.com', 's.g.doubleclick.net', 'stats.g.doubleclick.net' ] }
Python
0.002031
@@ -532,32 +532,59 @@ tparchive.org',%0A +%09%09'avatars.discourse.org',%0A %09%09'www.google-an @@ -654,8 +654,9 @@ et'%0A%09%5D%0A%7D +%0A
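The context lines place the new host inside the img-src directive, right after discuss.httparchive.org, presumably so avatars served from the Discourse CDN can render:

'img-src': [
    '\'self\'',
    'discuss.httparchive.org',
    'avatars.discourse.org',
    'www.google-analytics.com',
    's.g.doubleclick.net',
    'stats.g.doubleclick.net'
]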
fa954f6db40cf59779dcdb2303f6afc1b18388f0
Remove debugging prints and increase float precision.
csv.py
csv.py
#!/usr/bin/python #This script will slurp in a trace file and output a CSV formatted #file with the timestamp as the first column and subsequent vehicle #data keys as separate columns in undefined order. #TODO: This implementation is dead simple -- it will read through the #tracefile once to identify all unique keys and build a dictionary #mapping key names to column numbers. It will then loop back through #the file to generate the CSV. #The next iteration will speculatively #read in some number of seconds of the tracefile and speculatively #assume that we have picked up all unique keys. If it encounters #another unique key after the speculative execution period, it can #default back to the correct two file read implementation. #Assumptions: each JSON object has a unique timestamp. import json import sys if len(sys.argv) < 3: sys.exit("Specify name of both an input file and an output file") input_file_name = sys.argv[1] output_file_name = sys.argv[2] current_column = 1 #The current column number. Column 0 is timestamp column_map = {} #csv_row returns a string with the value formatted in correct csv #order. def csv_row(timestamp, key, value, column_map): #Determine which column the key is in column_num = column_map[key] print "Column num of " + key + " is " + str(column_num) #Timestamp is always column 0 ret_string = str(timestamp) + "," for i in range(1, len(column_map) + 1): if (i == column_num): #Place the value in the correct column #print str(value) + "," ret_string += str(value) + "," else: ret_string += ',' #print "DEBUG: " + ret_string return ret_string #Open the input file and loop through with open(input_file_name, 'r') as input_file: for line in input_file: if not line.rstrip() : continue obj = json.loads(line) if not obj['name'] in column_map: column_map[obj['name']] = current_column current_column += 1 print column_map #Open the input file and loop through a second time with open(input_file_name, 'r') as input_file,\ open(output_file_name, 'w') as output_file: #Print a header for the output file specifying column values output_file.write("#timestamp," +\ ','.join(sorted(column_map, key = column_map.get))\ + "\n") for line in input_file: if not line.rstrip() : continue obj = json.loads(line) output_file.write(csv_row(obj['timestamp'],\ obj['name'],\ obj['value'],\ column_map) + "\n")
Python
0
@@ -1266,16 +1266,17 @@ y%5D%0A%0A +# print %22C @@ -1375,20 +1375,25 @@ tring = -str( +%22%25.3f%22 %25 timestam @@ -1393,17 +1393,16 @@ imestamp -) + %22,%22%0A @@ -2030,16 +2030,17 @@ n += 1%0A%0A +# print co
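Patched csv_row per the hunks: both debug prints gain a leading # (the module-level print column_map likewise), and the timestamp is emitted with a fixed three-decimal format rather than str():

def csv_row(timestamp, key, value, column_map):
    #Determine which column the key is in
    column_num = column_map[key]
    #print "Column num of " + key + " is " + str(column_num)
    #Timestamp is always column 0
    ret_string = "%.3f" % timestamp + ","
    for i in range(1, len(column_map) + 1):
        if (i == column_num):
            #Place the value in the correct column
            ret_string += str(value) + ","
        else:
            ret_string += ','
    return ret_string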
76c30e257119cda7a0372c40b9543ed825675626
Replace print statement by function, fix string literal to unicode
d2h.py
d2h.py
# -*- coding: utf-8 -*- import argparse import datetime import os import smtplib import time import urllib import urllib2 from xml.etree import ElementTree from email.mime.text import MIMEText try: from ConfigParser import SafeConfigParser except ImportError: from configparser import SafeConfigParser DELICIOUS_FEED_URL = 'https://api.del.icio.us/v1/posts/all' GMAIL_HOST = 'smtp.gmail.com' GMAIL_PORT = 587 def fetch_posts(username, password, offset): """ Fetch the feed from delicious and return a list of posts username delicious user name password delicious password offset offset in minutes """ prev_time = datetime.datetime.fromtimestamp( time.mktime(datetime.datetime.utcnow().timetuple()) - offset * 60) data = urllib.urlencode( {'fromdt': prev_time.strftime('%Y-%m-%dT%H:%M:0Z')}) # Fetch the recent feed with Basic authentication manager = urllib2.HTTPPasswordMgrWithDefaultRealm() manager.add_password(None, DELICIOUS_FEED_URL, username, password) handler = urllib2.HTTPBasicAuthHandler(manager) opener = urllib2.build_opener(handler) response = opener.open(DELICIOUS_FEED_URL, data=data) print('Fetch %s?%s' % (DELICIOUS_FEED_URL, data)) # Extract the posts into a list of dicts elem = ElementTree.parse(response) posts = [] for post in elem.findall('.//post'): posts.append({ 'title': post.get('description'), 'url': post.get('href'), 'tags': post.get('tag').split(), 'note': post.get('extended'), }) print 'Append `%(title)s %(tags)s%(note)s`' % posts[-1] return posts def sendmail(from_addr, to_addr, posts, gmail_auth=None): """ Send delicious posts to the given address from_addr sender address (any address) to_addr destination address (the posting address issued by Hatena Bookmark) posts list of posts (dicts) built by fetch_posts gmail_auth (username, password) tuple when using Gmail as the SMTP server """ charset = 'iso-2022-jp' if gmail_auth: s = smtplib.SMTP(GMAIL_HOST, GMAIL_PORT) s.ehlo() s.starttls() s.login(*gmail_auth) print('SMTP connected: %s' % GMAIL_HOST) else: s = smtplib.SMTP() s.connect() print('SMTP connected: localhost') for post in posts: body = post['url'] + '\n' if post['tags']: body += u''.join([u'[%s]' % t for t in post['tags']]) if post['note']: body += post['note'] msg = MIMEText( body.encode(charset, 'ignore'), 'plain', charset) msg['Subject'] = post['title'].encode(charset, 'ignore') msg['From'] = from_addr msg['To'] = to_addr s.sendmail(from_addr, [to_addr], msg.as_string()) print u'Sent `%s` to %s.' % (post['title'], to_addr) s.close() print('SMTP closed') def command(): parser = argparse.ArgumentParser( description=u'Send delicious bookmarks to Hatena Bookmark') parser.add_argument( 'offset', type=int, help=u'Offset from the current time (minutes); posts newer than this are sent') parser.add_argument( '--config', default='~/.d2h', help=u'Path to the config file, default: ~/.d2h') parser.add_argument( '--delicious-username', dest='delicious_username', help=u'delicious user name') parser.add_argument( '--delicious-password', dest='delicious_password', help=u'delicious password') parser.add_argument( '--hatebu-address', dest='mail_to_addr', help=u'Posting email address for Hatena Bookmark') parser.add_argument( '--from-address', dest='mail_from_addr', help=u'Sender email address for posts to Hatena Bookmark') parser.add_argument( '--gmail-username', dest='gmail_username', help=u'Gmail user name' ) parser.add_argument( '--gmail-password', dest='gmail_password', help=u'Gmail password' ) ns = parser.parse_args() config = parse_config(ns) for sec in ('delicious', 'mail'): for k, v in config[sec].items(): if not v: raise parser.error( '%s_%s does not exist.'
% (sec.upper(), k.upper())) posts = fetch_posts( config['delicious']['username'], config['delicious']['password'], ns.offset) print u'Got %s items' % len(posts) if posts: if config['gmail']['username'] and config['gmail']['password']: sendmail( config['mail']['from_addr'], config['mail']['to_addr'], posts, gmail_auth=(config['gmail']['username'], config['gmail']['password'])) else: sendmail( config['mail']['from_addr'], config['mail']['to_addr'], posts) def parse_config(ns): """ Read parameters from the config file and CLI overrides """ if ns.config: filename = os.path.expanduser(ns.config) config = { 'delicious': {'username': '', 'password': ''}, 'mail': {'from_addr': '', 'to_addr': ''}, 'gmail': {'username': '', 'password': ''} } parser = SafeConfigParser() if os.path.exists(filename): parser.read(filename) for sec in config: for opt in config[sec]: if parser.has_option(sec, opt): config[sec][opt] = parser.get(sec, opt) override = getattr(ns, '%s_%s' % (sec, opt)) if override: config[sec][opt] = override return config if __name__ == '__main__': command()
Python
0.998662
@@ -1130,16 +1130,17 @@ print( +u 'Fetch %25 @@ -1510,17 +1510,18 @@ print - +(u 'Append @@ -1561,16 +1561,17 @@ osts%5B-1%5D +) %0A ret @@ -2038,32 +2038,33 @@ )%0A print( +u 'SMTP connected: @@ -2149,24 +2149,25 @@ print( +u 'SMTP connec @@ -2671,17 +2671,17 @@ print - +( u'Sent %60 @@ -2718,16 +2718,17 @@ to_addr) +) %0A s.c @@ -2744,16 +2744,17 @@ print( +u 'SMTP cl @@ -4140,17 +4140,17 @@ print - +( u'Got %25s @@ -4169,16 +4169,17 @@ n(posts) +) %0A if
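Representative patched lines: every logging statement becomes a print(...) call with an explicit u'' literal, making the output path uniform for non-ASCII titles, e.g.:

print(u'Fetch %s?%s' % (DELICIOUS_FEED_URL, data))         # was print('Fetch ...')
print(u'Append `%(title)s %(tags)s%(note)s`' % posts[-1])  # was a bare print statement
print(u'Sent `%s` to %s.' % (post['title'], to_addr))
print(u'Got %s items' % len(posts))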
5000dc1045d2771b85528b60991e9ac2aad7d69d
fix bug to merge dict
sync_settings/libs/exceptions.py
sync_settings/libs/exceptions.py
# -*- coding: utf-8 -*- import json import sys import traceback class GistException(Exception): def to_json(self): json_error = json.loads(json.dumps(self.args[0])) trace = traceback.extract_tb(sys.exc_info()[2])[-1] return json_error.update({ 'filename': str(trace[0]), 'line': str(trace[1]) })
Python
0
@@ -238,25 +238,12 @@ urn -json_error.update +dict (%7B%0A @@ -307,10 +307,24 @@ %5D)%0A %7D +, **json_error )%0A
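The bug: dict.update() mutates in place and returns None, so the original to_json always returned None. The fix builds the trace fields into a fresh dict and splices the serialized error back in:

class GistException(Exception):
    def to_json(self):
        json_error = json.loads(json.dumps(self.args[0]))
        trace = traceback.extract_tb(sys.exc_info()[2])[-1]
        # dict(base, **overrides) returns a merged copy, unlike dict.update()
        return dict({
            'filename': str(trace[0]),
            'line': str(trace[1])
        }, **json_error)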
26f6d383fba8f515a129df640724efe895e872a5
Fix missing comma
synthtool/gcp/gapic_generator.py
synthtool/gcp/gapic_generator.py
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from pathlib import Path import tempfile import platform import venv from synthtool import _tracked_paths from synthtool import cache from synthtool import log from synthtool import shell from synthtool.sources import git ARTMAN_VERSION = os.environ.get('SYNTHTOOL_ARTMAN_VERSION', 'latest') ARTMAN_VENV = cache.get_cache_dir() / 'artman_venv' GOOGLEAPIS_URL: str = 'git@github.com:googleapis/googleapis.git' GOOGLEAPIS_PRIVATE_URL: str = ( 'git@github.com:googleapis/googleapis-private.git') # Docker on mac by default cannot use the default temp file location # instead use the more standard *nix /tmp location\ if platform.system() == 'Darwin': tempfile.tempdir = '/tmp' class GAPICGenerator: def __init__(self): self._ensure_dependencies_installed() self._install_artman() self._clone_googleapis() def py_library(self, service: str, version: str, **kwargs) -> Path: ''' Generates the Python Library files using artman/GAPIC returns a `Path` object library: path to library. 'google/cloud/speech' version: version of lib. 'v1' ''' return self._generate_code(service, version, 'python', **kwargs) def node_library(self, service: str, version: str, **kwargs) -> Path: return self._generate_code(service, version, 'nodejs', **kwargs) nodejs_library = node_library def ruby_library(self, service: str, version: str, **kwargs) -> Path: return self._generate_code(service, version, 'ruby', **kwargs) def php_library(self, service: str, version: str, **kwargs) -> Path: return self._generate_code(service, version, 'php', **kwargs) def _generate_code(self, service, version, language, config_path=None, artman_output_name=None, private=False): # map the language to the artman argument and subdir of genfiles GENERATE_FLAG_LANGUAGE = { 'python': ('python_gapic', 'python'), 'nodejs': ('nodejs_gapic', 'js'), 'ruby': ('ruby_gapic', 'ruby'), 'php': ('php_gapic', 'php'), } if language not in GENERATE_FLAG_LANGUAGE: raise ValueError("provided language unsupported") gapic_arg, gen_language = GENERATE_FLAG_LANGUAGE[language] # Determine which googleapis repo to use if not private: googleapis = self.googleapis else: googleapis = self.googleapis_private if googleapis is None: raise RuntimeError( f'Unable to generate {config_path}, the googleapis repository' 'is unavailable.') # Run the code generator. 
# $ artman --config path/to/artman_api.yaml generate python_gapic if config_path is None: config_path = ( Path('google/cloud') / service / f"artman_{service}_{version}.yaml") elif Path(config_path).is_absolute(): config_path = Path(config_path).relative_to('/') else: config_path = Path('google/cloud') / service / Path(config_path) if not (googleapis / config_path).exists(): raise FileNotFoundError( f"Unable to find configuration yaml file: {config_path}.") log.debug(f"Running generator for {config_path}.") result = shell.run([ ARTMAN_VENV / 'bin' / 'artman', '--image', f'googleapis/artman:{ARTMAN_VERSION}' '--config', config_path, 'generate', gapic_arg], cwd=googleapis) if result.returncode: raise Exception(f"Failed to generate from {config_path}") # Expect the output to be in the artman-genfiles directory. # example: /artman-genfiles/python/speech-v1 if artman_output_name is None: artman_output_name = f"{service}-{version}" genfiles_dir = googleapis / 'artman-genfiles' / gen_language genfiles = genfiles_dir/artman_output_name if not genfiles.exists(): raise FileNotFoundError( f"Unable to find generated output of artman: {genfiles}.") log.success(f"Generated code into {genfiles}.") _tracked_paths.add(genfiles) return genfiles def _ensure_dependencies_installed(self): log.debug("Ensuring dependencies.") dependencies = ['docker', 'git'] failed_dependencies = [] for dependency in dependencies: return_code = shell.run( ['which', dependency], check=False).returncode if return_code: failed_dependencies.append(dependency) if failed_dependencies: raise EnvironmentError( f"Dependencies missing: {', '.join(failed_dependencies)}") def _install_artman(self): if not ARTMAN_VENV.exists(): venv.main([str(ARTMAN_VENV)]) if ARTMAN_VERSION != 'latest': version_specifier = f'=={ARTMAN_VERSION}' else: version_specifier = '' shell.run([ ARTMAN_VENV / 'bin' / 'pip', 'install', '--upgrade', f'googleapis-artman{version_specifier}']) log.debug('Pulling artman image.') shell.run(['docker', 'pull', f'googleapis/artman:{ARTMAN_VERSION}']) def _clone_googleapis(self): self.googleapis = git.clone(GOOGLEAPIS_URL, depth=1) try: self.googleapis_private = git.clone( GOOGLEAPIS_PRIVATE_URL, depth=1) except: log.warning( 'Could not clone googleapis-private, you will not be able to ' 'generate private API versions!')
Python
0.999999
@@ -4073,32 +4073,33 @@ ARTMAN_VERSION%7D' +, %0A '--
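Without the trailing comma, Python's implicit literal concatenation fused the image tag with the next flag, so artman received a single argument like googleapis/artman:latest--config. The fixed call:

result = shell.run([
    ARTMAN_VENV / 'bin' / 'artman',
    '--image', f'googleapis/artman:{ARTMAN_VERSION}',  # comma restored here
    '--config', config_path,
    'generate', gapic_arg],
    cwd=googleapis)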
732cd1fd8447e8e6ca7581f30df759351f7480f2
allow running server on windows for testing
ext.py
ext.py
# standard python imports import json import os import subprocess import datetime # external imports from flask import request, abort, current_app from flask_login import current_user, login_user from sqlalchemy.orm.exc import NoResultFound # internal imports from main.app import app from main.users.models import User, OrganizationUser from main.resources.models import Resource, ControllerStatus from main.extension import Extension # current global instance of this extension flow_extension = None # a server extension class class Flow(Extension): def __init__(self): super(Flow, self).__init__() def view(self, resource, parent): if resource.name == 'Flow': return self.view_flow_diagram(resource, parent) else: return None # view a resource; return None if extension does not provide a viewer for resource def view_flow_diagram(self, resource, parent): return flow_app() # fix(later): use the current controller # create an instance of the extension class def create(): global flow_extension flow_extension = Flow() return flow_extension # display the data flow app (a single page app) @app.route('/ext/flow') def flow_app(): controller_infos = get_controller_info() default_dev_enabled = current_app.config.get('FLOW_DEV', False) # # Default working dir is root of "rhizo-server" # E.g. /home/user/rhizo-server # rhizo_server_version = subprocess.check_output(['git', 'describe', '--tags', '--always' ]).rstrip() flow_dir = os.path.dirname(os.path.realpath(__file__)) flow_server_version = subprocess.check_output([ 'git', '-C', '%s' % (flow_dir), 'describe', '--tags', '--always' ]).rstrip() flow_user = None if current_user.is_authenticated: flow_user = { 'user_name': current_user.user_name, 'full_name': current_user.full_name, 'email_address': current_user.email_address, 'role': current_user.role, 'isAdmin': (current_user.role == User.SYSTEM_ADMIN) } admin_enabled = 0 if int(request.args.get('admin', 0)) == 1 and flow_user['isAdmin']: admin_enabled = 1 return flow_extension.render_template('flow-app.html', controllers_json = json.dumps(controller_infos), use_codap = (request.args.get('use_codap', 0) or request.args.get('codap', 0)), dev_enabled = int(request.args.get('dev', default_dev_enabled)), admin_enabled = admin_enabled, flow_user = json.dumps(flow_user), rhizo_server_version = rhizo_server_version, flow_server_version = flow_server_version ) # used for students to specify a controller without logging in first; # if the controller is found, log them in using a student account (if one exists for the controller's organization) @app.route('/ext/flow/select', methods=['POST']) def select_controller(): controller_name = request.form.get('controller_name') try: controller = Resource.query.filter(Resource.name == controller_name, Resource.type == Resource.CONTROLLER_FOLDER, Resource.deleted == False).one() organization = controller.parent if organization.type != Resource.ORGANIZATION_FOLDER: abort(403) user_name = organization.name + '-student' user = User.query.filter(User.user_name == user_name, User.deleted == False).one() login_user(user, remember = True) # log the user in as a student return json.dumps({ 'id': controller.id, 'name': controller.name, 'path': controller.path(), }) except NoResultFound: abort(403) # # API for obtaining controller info # @app.route('/ext/flow/controllers', methods=['POST', 'GET']) def controller_info(): if current_user.role != User.SYSTEM_ADMIN: abort(403) info = get_controller_info() return json.dumps(info) # # Obtain an array of controllers # def get_controller_info():
controller_infos = [] if current_user.is_authenticated: org_users = OrganizationUser.query.filter(OrganizationUser.user_id == current_user.id) for org_user in org_users: org_id = org_users[0].organization controllers = Resource.query.filter(Resource.parent_id == org_user.organization_id, Resource.deleted == False, Resource.type == Resource.CONTROLLER_FOLDER) for controller in controllers: try: controller_status = ControllerStatus.query.filter(ControllerStatus.id == controller.id).one() if controller_status.last_watchdog_timestamp: online = controller_status.last_watchdog_timestamp > datetime.datetime.utcnow() - datetime.timedelta(seconds=120) else: online = False controller_infos.append({ 'id': controller.id, 'name': controller.name, 'path': controller.path(), 'online': online, 'last_online': '%s' % (controller_status.last_watchdog_timestamp), 'status': json.loads(controller_status.attributes) if controller_status.attributes else {}, }) except NoResultFound: pass return controller_infos
Python
0
@@ -1436,24 +1436,284 @@ erver%0A #%0A + flow_dir = os.path.dirname(os.path.realpath(__file__))%0A if app.config.get('FLOW_WINDOWS', False): # allow testing on Windows, where commands below don't work by default%0A rhizo_server_version = 'X'%0A flow_server_version = 'X'%0A else:%0A rhizo_se @@ -1803,32 +1803,36 @@ + + 'describe',%0A @@ -1819,32 +1819,36 @@ 'describe',%0A + @@ -1937,32 +1937,36 @@ + '--always' %5D).r @@ -1977,73 +1977,16 @@ p()%0A -%0A -flow_dir = os.path.dirname(os.path.realpath(__file__))%0A%0A + - flow @@ -2032,24 +2032,28 @@ ut(%5B 'git',%0A + @@ -2150,16 +2150,20 @@ + '%25s' %25 ( @@ -2217,32 +2217,36 @@ + + 'describe',%0A @@ -2285,32 +2285,36 @@ + '--tags',%0A @@ -2299,32 +2299,36 @@ '--tags',%0A +
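Reconstructed from the hunk: a FLOW_WINDOWS config flag short-circuits the git describe shell-outs (which fail on a stock Windows setup) with placeholder version strings, and flow_dir moves above the branch:

flow_dir = os.path.dirname(os.path.realpath(__file__))
if app.config.get('FLOW_WINDOWS', False):  # allow testing on Windows, where commands below don't work by default
    rhizo_server_version = 'X'
    flow_server_version = 'X'
else:
    rhizo_server_version = subprocess.check_output(['git', 'describe', '--tags', '--always']).rstrip()
    flow_server_version = subprocess.check_output(['git', '-C', '%s' % (flow_dir), 'describe', '--tags', '--always']).rstrip()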
898128d2723ebcc40ff0fd6ba00351058216477b
Remove test_legacy_model_properties.
tardis/tests/test_tardis_full.py
tardis/tests/test_tardis_full.py
import pytest import numpy as np import tardis import numpy.testing as nptesting from astropy import units as u import os from tardis.io.util import yaml_load_config_file from tardis.simulation.base import Simulation from tardis.model import Radial1DModel from tardis.io.config_reader import Configuration from tardis.montecarlo.base import MontecarloRunner from tardis.plasma.standard_plasmas import LegacyPlasmaArray def data_path(fname): return os.path.join(tardis.__path__[0], 'tests', 'data', fname) @pytest.mark.skipif(not pytest.config.getvalue("atomic-dataset"), reason='--atomic_database was not specified') class TestSimpleRun(): """ Very simple run """ @classmethod @pytest.fixture(scope="class", autouse=True) def setup(self): self.atom_data_filename = os.path.expanduser(os.path.expandvars( pytest.config.getvalue('atomic-dataset'))) assert os.path.exists(self.atom_data_filename), ("{0} atomic datafiles" " does not seem to " "exist".format( self.atom_data_filename)) self.config_yaml = yaml_load_config_file( 'tardis/io/tests/data/tardis_configv1_verysimple.yml') self.config_yaml['atom_data'] = self.atom_data_filename tardis_config = Configuration.from_config_dict(self.config_yaml) self.model = Radial1DModel(tardis_config) self.simulation = Simulation(tardis_config) self.simulation.legacy_run_simulation(self.model) def test_j_blue_estimators(self): j_blue_estimator = np.load( data_path('simple_test_j_blue_estimator.npy')) np.testing.assert_allclose(self.model.runner.j_blue_estimator, j_blue_estimator) def test_spectrum(self): luminosity_density = np.load( data_path('simple_test_luminosity_density_lambda.npy')) luminosity_density = luminosity_density * u.Unit( 'erg / (Angstrom s)') np.testing.assert_allclose( self.model.runner.spectrum.luminosity_density_lambda,luminosity_density) def test_virtual_spectrum(self): virtual_luminosity_density = np.load( data_path('simple_test_virtual_luminosity_density_lambda.npy')) virtual_luminosity_density = virtual_luminosity_density * u.Unit( 'erg / (Angstrom s)') np.testing.assert_allclose( self.model.runner.spectrum_virtual.luminosity_density_lambda, virtual_luminosity_density) def test_plasma_properties(self): pass def test_runner_properties(self): """Tests whether a number of runner attributes exist and also verifies their types Currently, runner attributes needed to call the model routine to_hdf5 are checked. """ virt_type = np.ndarray props_required_by_modeltohdf5 = dict([ ("virt_packet_last_interaction_type", virt_type), ("virt_packet_last_line_interaction_in_id", virt_type), ("virt_packet_last_line_interaction_out_id", virt_type), ("virt_packet_last_interaction_in_nu", virt_type), ("virt_packet_nus", virt_type), ("virt_packet_energies", virt_type), ]) required_props = props_required_by_modeltohdf5.copy() for prop, prop_type in required_props.items(): assert type(getattr(self.model.runner, prop)) == prop_type, ("wrong type of attribute '{}': expected {}, found {}".format(prop, prop_type, type(getattr(self.model.runner, prop)))) def test_legacy_model_properties(self): """Tests whether a number of model attributes exist and also verifies their types Currently, model attributes needed to run the gui and to call the model routine to_hdf5 are checked. 
Notes ----- The list of properties may be incomplete """ props_required_by_gui = dict([ ("converged", bool), ("iterations_executed", int), ("iterations_max_requested", int), ("current_no_of_packets", int), ("no_of_packets", int), ("no_of_virtual_packets", int), ]) props_required_by_tohdf5 = dict([ ("runner", MontecarloRunner), ("plasma_array", LegacyPlasmaArray), ("last_line_interaction_in_id", np.ndarray), ("last_line_interaction_out_id", np.ndarray), ("last_line_interaction_shell_id", np.ndarray), ("last_line_interaction_in_id", np.ndarray), ("last_line_interaction_angstrom", u.quantity.Quantity), ]) required_props = props_required_by_gui.copy() required_props.update(props_required_by_tohdf5) for prop, prop_type in required_props.items(): assert type(getattr(self.model, prop)) == prop_type, ("wrong type of attribute '{}': expected {}, found {}".format(prop, prop_type, type(getattr(self.model, prop))))
Python
0
@@ -357,69 +357,8 @@ ner%0A -from tardis.plasma.standard_plasmas import LegacyPlasmaArray%0A %0A%0A%0Ad @@ -3668,1513 +3668,4 @@ )))%0A -%0A%0A def test_legacy_model_properties(self):%0A %22%22%22Tests whether a number of model attributes exist and also verifies%0A their types%0A%0A Currently, model attributes needed to run the gui and to call the model%0A routine to_hdf5 are checked.%0A%0A Notes%0A -----%0A The list of properties may be incomplete%0A%0A %22%22%22%0A%0A props_required_by_gui = dict(%5B%0A (%22converged%22, bool),%0A (%22iterations_executed%22, int),%0A (%22iterations_max_requested%22, int),%0A (%22current_no_of_packets%22, int),%0A (%22no_of_packets%22, int),%0A (%22no_of_virtual_packets%22, int),%0A %5D)%0A props_required_by_tohdf5 = dict(%5B%0A (%22runner%22, MontecarloRunner),%0A (%22plasma_array%22, LegacyPlasmaArray),%0A (%22last_line_interaction_in_id%22, np.ndarray),%0A (%22last_line_interaction_out_id%22, np.ndarray),%0A (%22last_line_interaction_shell_id%22, np.ndarray),%0A (%22last_line_interaction_in_id%22, np.ndarray),%0A (%22last_line_interaction_angstrom%22, u.quantity.Quantity),%0A %5D)%0A%0A required_props = props_required_by_gui.copy()%0A required_props.update(props_required_by_tohdf5)%0A%0A for prop, prop_type in required_props.items():%0A%0A assert type(getattr(self.model, prop)) == prop_type, (%22wrong type of attribute '%7B%7D': expected %7B%7D, found %7B%7D%22.format(prop, prop_type, type(getattr(self.model, prop))))%0A
94c30a0efe0c3597678c64f46735ca7cd9990ccd
Revert Settings.py, only added admin schema
templatesAndSettings/settings.py
templatesAndSettings/settings.py
""" Keep this file untracked """ # SECURITY WARNING: keep the secret key used in production secret! secret_key = 'random_secret_key_like_so_7472873649836' media_root = '/Users/lsetiawan/Desktop/shared_ubuntu/APL/ODM2/ODM2-Admin/ODM2CZOData/upfiles/' media_url = '/odm2testapp/upfiles/' # Application definition custom_template_path = '/admin/ODM2CZOData/' #admin_shortcuts_path = '/admin/' url_path = 'admin/' static_root = '/Users/lsetiawan/Desktop/shared_ubuntu/APL/ODM2/ODM2-Admin/static'#'C:/Users/leonmi/Google Drive/ODM2Djangoadmin/static' debug = True template_debug = True # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases ODM2_configs = { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'odm2', 'USER': 'lsetiawan', 'PASSWORD': '', 'HOST': 'localhost', #micro server '52.20.81.11' 'PORT': '5432', 'OPTIONS': { 'options': '-c search_path=admin,odm2,odm2extra' } } # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ static_url = '/static/' from templatesAndSettings.base import *
Python
0
@@ -161,32 +161,34 @@ dia_root = ' +C: /Users/l setiawan/Des @@ -179,54 +179,38 @@ rs/l -setiawan/Desktop/shared_ubuntu/APL/ODM2/ODM2-A +eonmi/Google Drive/ODM2Djangoa dmin @@ -410,76 +410,8 @@ t = -'/Users/lsetiawan/Desktop/shared_ubuntu/APL/ODM2/ODM2-Admin/static'# 'C:/ @@ -665,20 +665,17 @@ NAME': ' -odm2 +x ',%0A @@ -690,17 +690,9 @@ ': ' -lsetiawan +x ',%0A @@ -711,16 +711,17 @@ WORD': ' +x ',%0A @@ -736,17 +736,9 @@ ': ' -localhost +x ', # @@ -786,12 +786,9 @@ ': ' -5432 +x ',%0A @@ -994,16 +994,16 @@ tatic/'%0A - from tem @@ -1033,8 +1033,9 @@ import * +%0A
59367ba641fd33a78da38a42389d73d9f250dc36
Remove duplicate import in compat.py
tensorflow/python/util/compat.py
tensorflow/python/util/compat.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functions for Python 2 vs. 3 compatibility. ## Conversion routines In addition to the functions below, `as_str` converts an object to a `str`. @@as_bytes @@as_text @@as_str_any @@path_to_str ## Types The compatibility module also provides the following types: * `bytes_or_text_types` * `complex_types` * `integral_types` * `real_types` """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import numbers as _numbers import numpy as _np import six as _six from tensorflow.python.util.all_util import remove_undocumented from tensorflow.python.util.tf_export import tf_export from tensorflow.python.util.tf_export import tf_export def as_bytes(bytes_or_text, encoding='utf-8'): """Converts either bytes or unicode to `bytes`, using utf-8 encoding for text. Args: bytes_or_text: A `bytes`, `str`, or `unicode` object. encoding: A string indicating the charset for encoding unicode. Returns: A `bytes` object. Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ if isinstance(bytes_or_text, _six.text_type): return bytes_or_text.encode(encoding) elif isinstance(bytes_or_text, bytes): return bytes_or_text else: raise TypeError('Expected binary or unicode string, got %r' % (bytes_or_text,)) def as_text(bytes_or_text, encoding='utf-8'): """Returns the given argument as a unicode string. Args: bytes_or_text: A `bytes`, `str`, or `unicode` object. encoding: A string indicating the charset for decoding unicode. Returns: A `unicode` (Python 2) or `str` (Python 3) object. Raises: TypeError: If `bytes_or_text` is not a binary or unicode string. """ if isinstance(bytes_or_text, _six.text_type): return bytes_or_text elif isinstance(bytes_or_text, bytes): return bytes_or_text.decode(encoding) else: raise TypeError('Expected binary or unicode string, got %r' % bytes_or_text) # Convert an object to a `str` in both Python 2 and 3. if _six.PY2: as_str = as_bytes tf_export('compat.as_bytes', 'compat.as_str')(as_bytes) tf_export('compat.as_text')(as_text) else: as_str = as_text tf_export('compat.as_bytes')(as_bytes) tf_export('compat.as_text', 'compat.as_str')(as_text) @tf_export('compat.as_str_any') def as_str_any(value): """Converts to `str` as `str(value)`, but use `as_str` for `bytes`. Args: value: A object that can be converted to `str`. Returns: A `str` object. """ if isinstance(value, bytes): return as_str(value) else: return str(value) @tf_export('compat.path_to_str') def path_to_str(path): """Returns the file system path representation of a `PathLike` object, else as it is. Args: path: An object that can be converted to path representation. Returns: A `str` object. """ if hasattr(path, '__fspath__'): path = as_str_any(path.__fspath__()) return path # Numpy 1.8 scalars don't inherit from numbers.Integral in Python 3, so we # need to check them specifically. 
The same goes for Real and Complex. integral_types = (_numbers.Integral, _np.integer) tf_export('compat.integral_types').export_constant(__name__, 'integral_types') real_types = (_numbers.Real, _np.integer, _np.floating) tf_export('compat.real_types').export_constant(__name__, 'real_types') complex_types = (_numbers.Complex, _np.number) tf_export('compat.complex_types').export_constant(__name__, 'complex_types') # Either bytes or text. bytes_or_text_types = (bytes, _six.text_type) tf_export('compat.bytes_or_text_types').export_constant(__name__, 'bytes_or_text_types') _allowed_symbols = [ 'as_str', 'bytes_or_text_types', 'complex_types', 'integral_types', 'real_types', ] remove_undocumented(__name__, _allowed_symbols)
Python
0
@@ -1331,63 +1331,8 @@ ort%0A -from tensorflow.python.util.tf_export import tf_export%0A %0A%0Ade
6ad81047a282f0e67ceebe9e25d7f0201e636a84
Fix icu dependency checksum.
tensorflow_serving/workspace.bzl
tensorflow_serving/workspace.bzl
# TensorFlow Serving external dependencies that can be loaded in WORKSPACE # files. load("@org_tensorflow//third_party:repo.bzl", "tf_http_archive") load("@org_tensorflow//tensorflow:workspace.bzl", "tf_workspace") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") def tf_serving_workspace(): """All TensorFlow Serving external dependencies.""" tf_workspace(path_prefix = "", tf_repo_name = "org_tensorflow") # ===== gRPC dependencies ===== native.bind( name = "libssl", actual = "@boringssl//:ssl", ) # gRPC wants the existence of a cares dependence but its contents are not # actually important since we have set GRPC_ARES=0 in tools/bazel.rc native.bind( name = "cares", actual = "@grpc//third_party/nanopb:nanopb", ) # ===== RapidJSON (rapidjson.org) dependencies ===== http_archive( name = "com_github_tencent_rapidjson", urls = [ "https://github.com/Tencent/rapidjson/archive/v1.1.0.zip", ], sha256 = "8e00c38829d6785a2dfb951bb87c6974fa07dfe488aa5b25deec4b8bc0f6a3ab", strip_prefix = "rapidjson-1.1.0", build_file = "@//third_party/rapidjson:BUILD", ) # ===== libevent (libevent.org) dependencies ===== http_archive( name = "com_github_libevent_libevent", urls = [ "https://github.com/libevent/libevent/archive/release-2.1.8-stable.zip", ], sha256 = "70158101eab7ed44fd9cc34e7f247b3cae91a8e4490745d9d6eb7edc184e4d96", strip_prefix = "libevent-release-2.1.8-stable", build_file = "@//third_party/libevent:BUILD", ) # ===== Override TF & TF Text defined 'ICU'. (we need a version that contains all data). http_archive( name = "icu", strip_prefix = "icu-release-64-2", sha256 = "dfc62618aa4bd3ca14a3df548cd65fe393155edd213e49c39f3a30ccd618fc27", urls = [ "https://github.com/unicode-org/icu/archive/release-64-2.zip", ], build_file = "//third_party/icu:BUILD", ) # ===== Pin `com_google_absl` with the same version(and patch) with Tensorflow. 
tf_http_archive( name = "com_google_absl", build_file = str(Label("@org_tensorflow//third_party:com_google_absl.BUILD")), # TODO: Remove the patch when https://github.com/abseil/abseil-cpp/issues/326 is resolved # and when TensorFlow is build against CUDA 10.2 patch_file = str(Label("@org_tensorflow//third_party:com_google_absl_fix_mac_and_nvcc_build.patch")), sha256 = "f368a8476f4e2e0eccf8a7318b98dafbe30b2600f4e3cf52636e5eb145aba06a", # SHARED_ABSL_SHA strip_prefix = "abseil-cpp-df3ea785d8c30a9503321a3d35ee7d35808f190d", urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/abseil/abseil-cpp/archive/df3ea785d8c30a9503321a3d35ee7d35808f190d.tar.gz", "https://github.com/abseil/abseil-cpp/archive/df3ea785d8c30a9503321a3d35ee7d35808f190d.tar.gz", ], ) # ===== TF.Text dependencies # NOTE: Before updating this version, you must update the test model # and double check all custom ops have a test: # https://github.com/tensorflow/text/blob/master/oss_scripts/model_server/save_models.py http_archive( name = "org_tensorflow_text", sha256 = "f64647276f7288d1b1fe4c89581d51404d0ce4ae97f2bcc4c19bd667549adca8", strip_prefix = "text-2.2.0", urls = [ "https://github.com/tensorflow/text/archive/v2.2.0.zip", ], patches = ["@//third_party/tf_text:tftext.patch"], patch_args = ["-p1"], repo_mapping = {"@com_google_re2": "@com_googlesource_code_re2"}, ) http_archive( name = "com_google_sentencepiece", strip_prefix = "sentencepiece-1.0.0", sha256 = "c05901f30a1d0ed64cbcf40eba08e48894e1b0e985777217b7c9036cac631346", urls = [ "https://github.com/google/sentencepiece/archive/1.0.0.zip", ], ) http_archive( name = "com_google_glog", sha256 = "1ee310e5d0a19b9d584a855000434bb724aa744745d5b8ab1855c85bff8a8e21", strip_prefix = "glog-028d37889a1e80e8a07da1b8945ac706259e5fd8", urls = [ "https://mirror.bazel.build/github.com/google/glog/archive/028d37889a1e80e8a07da1b8945ac706259e5fd8.tar.gz", "https://github.com/google/glog/archive/028d37889a1e80e8a07da1b8945ac706259e5fd8.tar.gz", ], )
Python
0.99917
@@ -1850,72 +1850,72 @@ = %22 -dfc62618aa4bd3ca14a3df548cd65fe393155edd213e49c39f3a30ccd618fc27 +10cd92f1585c537d937ecbb587f6c3b36a5275c87feabe05d777a828677ec32f %22,%0A
5245e51aad4f7693f46b0621f2611c8a03c9ccb1
Change variable names for readability
gui.py
gui.py
#! python3 # -*- coding: utf-8 -*- import sys from main import supported_comics, verify_xpath from comic import Comic from PyQt5.QtWidgets import (QWidget, QLabel, QComboBox, QApplication, QPushButton, QTextEdit, QCheckBox, QLineEdit, QMessageBox) import click class Example(QWidget): def __init__(self): super().__init__() self.initUI() def initUI(self): self.url_defined_comic = QLabel("WebComicToCBZ", self) comic_list = QComboBox(self) for name in list(supported_comics.keys()): comic_list.addItem(name) self.make_cbz_checkbox = QCheckBox("Make a .cbz file", self) download_button = QPushButton("Download", self) download_button.clicked.connect(lambda: self.download(str(comic_list.currentText()), self.make_cbz_checkbox.isChecked())) self.dialog_box = QTextEdit(self) self.dialog_box.setReadOnly(True) self.name = QLineEdit(self) self.url = QLineEdit(self) self.next_page_xpath = QLineEdit(self) self.image_xpath = QLineEdit(self) self.name_label = QLabel("Name of the comic:", self) self.url_label = QLabel("First page URL:", self) self.next_page_xpath_label = QLabel("Next page XPath:", self) self.image_xpath_label = QLabel("Comic Image XPath:", self) comic_list.move(50, 50) download_button.move(50, 80) self.url_defined_comic.move(50, 150) self.dialog_box.move(50, 180) self.make_cbz_checkbox.move(275, 20) custom_button = QPushButton("Download", self) custom_button.clicked.connect(lambda: self.custom(self.name.text(), self.url.text(), self.next_page_xpath.text(), self.image_xpath.text(), self.make_cbz_checkbox.isChecked())) self.name_label.move(350, 50) self.name.move(475, 45) self.url_label.move(350, 80) self.url.move(475, 75) self.next_page_xpath_label.move(350, 110) self.next_page_xpath.move(475, 105) self.image_xpath_label.move(350, 140) self.image_xpath.move(475, 135) custom_button.move(475, 170) comic_list.activated[str].connect(self.onActivated) self.setFixedSize(700, 400) self.setWindowTitle('WebComicToCBZ') self.show() def onActivated(self, text): text = supported_comics[text][0] self.url_defined_comic.setText(text) self.url_defined_comic.adjustSize() def download(self, name, make_cbz): comic = Comic(*supported_comics[name]) comic.download(name) if make_cbz: Comic.make_cbz(name, name) def custom(self, comic_name, first_page_url, next_page_xpath, image_xpath, make_cbz): validation = verify_xpath(first_page_url, next_page_xpath, image_xpath) confirmation = QMessageBox() message = "".join(["Page {}: \nPage URL: {}\nImage URL: {}\n".format(i+1, validation[i][0], validation[i][1]) for i in range(3)]) confirmation.setText(message) confirmation.setInformativeText("Verify that the links above are correct before proceeding.") confirmation.setStandardButtons(QMessageBox.Ok | QMessageBox.Cancel) confirmation.buttonClicked.connect(lambda: self.custom_download(comic_name, first_page_url, next_page_xpath, image_xpath, make_cbz)) confirmation.setWindowTitle("Confirmation") confirmation.exec_() def custom_download(self, comic_name, first_page_url, next_page_xpath, image_xpath, make_cbz): comic = Comic(first_page_url, next_page_xpath, image_xpath) comic.download(comic_name) if make_cbz: Comic.make_cbz(comic_name, comic_name) def show_on_console(message): """ Displays usual message on GUI instead of console """ ex.dialog_box.append(str(message) + "n") QApplication.processEvents() click.echo = show_on_console if __name__ == '__main__': app = QApplication(sys.argv) ex = Example() sys.exit(app.exec_())
Python
0.000002
@@ -379,23 +379,19 @@ %0A%0Aclass -Example +GUI (QWidget @@ -2284,27 +2284,31 @@ ct(self. -onActivated +change_url_text )%0A%0A @@ -2417,19 +2417,23 @@ def -onActivated +change_url_text (sel @@ -3914,18 +3914,19 @@ %22%22%22%0A -ex +gui .dialog_ @@ -3952,14 +3952,8 @@ age) - + %22n%22 )%0A @@ -4083,20 +4083,17 @@ -ex = Example +gui = GUI ()%0A
2536526a383d1b2a921277970584ef5d3ba6073d
revert LIF base model
lif.py
lif.py
from numpy import * from pylab import * ## setup parameters and state variables T = 1000 # total time to simulate (msec) dt = 0.125 # simulation time step (msec) time = arange(0, T+dt, dt) # time array t_rest = 0 # initial refractory time ## LIF properties Vm = zeros(len(time)) # potential (V) trace over time Rm = 1 # resistance (kOhm) Cm = 10 # capacitance (uF) tau_m = Rm*Cm # time constant (msec) tau_ref = 4 # refractory period (msec) Vth = 1 # spike threshold (V) V_spike = 0.5 # spike delta (V) ## Input stimulus #I = 1.5 # input current (A) ## iterate over each time step for i, t in enumerate(time): if t > t_rest: I = (math.sin(t / 50) + 1) Vm[i] = Vm[i-1] + (-Vm[i-1] + I*Rm) / tau_m * dt if Vm[i] >= Vth: Vm[i] += V_spike t_rest = t + tau_ref ## plot membrane potential trace plot(time, Vm) title('Leaky Integrate-and-Fire Example') ylabel('Membrane Potential (V)') xlabel('Time (msec)') ylim([0,2]) show()
Python
0
@@ -84,18 +84,17 @@ = -10 +2 00 @@ -687,16 +687,23 @@ lta (V)%0A +I = 1.5 %0A## Inpu @@ -848,43 +848,8 @@ st:%0A - I = (math.sin(t / 50) + 1)%0A
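The revert shortens the run to T = 200 ms, restores a constant module-level drive I = 1.5, and drops the sinusoidal stimulus from the loop, leaving a plain forward-Euler step of the leaky integrate-and-fire equation tau_m * dVm/dt = -Vm + I*Rm:

T = 200       # total time to simulate (msec)
I = 1.5       # constant input current (A), restored after the spike-delta line

for i, t in enumerate(time):
    if t > t_rest:   # the sin-based stimulus assignment is removed here
        Vm[i] = Vm[i-1] + (-Vm[i-1] + I*Rm) / tau_m * dt  # Euler step
        if Vm[i] >= Vth:
            Vm[i] += V_spike
            t_rest = t + tau_ref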
e4e532027763b4e59ac94c5b78799632dafa3018
fix div
nju.py
nju.py
import RPi.GPIO as GPIO import time GPIO.setmode(GPIO.BCM) class Gfx(object): def __init__(self, width, height): self.width = width self.height = height self.pins = { 'A0': 17, 'E1': 22, 'E2': 21, 'D0': 23, 'D1': 24, 'D2': 25, 'D3': 12, 'D4': 16, 'D5': 20, 'D6': 26, 'D7': 19 } self.data_pins = [ 'D0', 'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7' ] for pin in self.pins: GPIO.setup(self.pins[pin], GPIO.OUT) GPIO.output(self.pins[pin], 0) def init(self): """initialize display""" GPIO.output(self.pins['A0'], 0) GPIO.output(self.pins['E1'], 1) GPIO.output(self.pins['E2'], 1) init_sequence = [0xae, 0xa4, 0xa9, 0xe2, 0xa0, 0xaf] for cmd in init_sequence: self.cmd(cmd, 0) self.cmd(cmd, 1) def cmd(self, char, enable): """send command""" GPIO.output(self.pins['A0'], 0) self.send(char, enable) def data(self, char, enable): """send data""" GPIO.output(self.pins['A0'], 1) self.send(char, enable) def send(self, data, enable): """Write to gpio""" GPIO.output(self.pins['E1'], 0) GPIO.output(self.pins['E2'], 0) for i in self.data_pins: value = data & 0x01 GPIO.output(self.pins[i], value) data >>= 1 GPIO.output(self.pins['E'+str(enable+1)], 1) time.sleep(0.00025) GPIO.output(self.pins['E1'], 0) GPIO.output(self.pins['E2'], 0) def set_xy(self, x, y): """set xy pos""" if x < self.width/2: self.cmd(0xB8 | y, 0) self.cmd(0x00 | x, 0) else: self.cmd(0xB8 | y, 1) self.cmd(0x00 | (x - self.width/2), 1) def draw_pixels(self, x, y, c=0xff): """draw a pixel /line""" self.set_xy(x, y/8) if x < self.width/2: self.data(c, 0) else: self.data(c, 1) def fill(self, c=0): for j in range(0, self.height/8): for i in range(0, self.width): self.set_xy(i, j) if i < self.width/2: self.data(c, 0) else: self.data(c, 1) g = Gfx(122, 32) g.init() g.fill(0) g.fill(255) g.draw_pixels(2, 0, 128) g.draw_pixels(3, 0, 128) g.draw_pixels(7, 0, 128) g.draw_pixels(8, 0, 128) g.draw_pixels(1, 9, 7) g.draw_pixels(9, 9, 7) g.draw_pixels(2, 9, 8) g.draw_pixels(3, 9, 16) g.draw_pixels(4, 9, 33) g.draw_pixels(5, 9, 66) g.draw_pixels(6, 9, 33) g.draw_pixels(7, 9, 16) g.draw_pixels(8, 9, 8) g.draw_pixels(15, 9, 127) g.draw_pixels(16, 9, 65) g.draw_pixels(17, 9, 65) g.draw_pixels(18, 9, 62) g.draw_pixels(20, 9, 38) g.draw_pixels(21, 9, 73) g.draw_pixels(22, 9, 73) g.draw_pixels(23, 9, 50) g.draw_pixels(25, 9, 127) g.draw_pixels(26, 9, 9) g.draw_pixels(27, 9, 9) g.draw_pixels(28, 9, 6) g.draw_pixels(30, 9, 98) g.draw_pixels(31, 9, 81) g.draw_pixels(32, 9, 73) g.draw_pixels(33, 9, 70) g.draw_pixels(35, 9, 62) g.draw_pixels(36, 9, 65) g.draw_pixels(37, 9, 65) g.draw_pixels(38, 9, 62) g.draw_pixels(40, 9, 4) g.draw_pixels(41, 9, 2+64) g.draw_pixels(42, 9, 127) g.draw_pixels(43, 9, 64) g.draw_pixels(40, 9, 4) g.draw_pixels(41, 9, 2+64) g.draw_pixels(42, 9, 127) g.draw_pixels(43, 9, 64) g.draw_pixels(45, 9, 97) g.draw_pixels(46, 9, 25) g.draw_pixels(47, 9, 5) g.draw_pixels(48, 9, 3) # g.fill(0) # g.draw_pixels(0, 0) # g.draw_pixels(70, 16, 129) # g.draw_pixels(120, 2, 153)
Python
0.000017
@@ -1937,15 +1937,16 @@ dth/ +/ 2), 1)%0A - %0A @@ -2041,16 +2041,17 @@ xy(x, y/ +/ 8)%0A @@ -2208,16 +2208,17 @@ .height/ +/ 8):%0A @@ -2458,16 +2458,18 @@ fill(0)%0A +# g.fill(2
ff7365e780624a1ef66c12a6d7b61448a3f9294c
fix flake8 warnings in zapwallettxes.py
test/functional/zapwallettxes.py
test/functional/zapwallettxes.py
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.

- start three bitcoind nodes
- create four transactions on node 0 - two are confirmed and two are unconfirmed.
- restart node 1 and verify that both the confirmed and the unconfirmed transactions are still available.
- restart node 0 and verify that the confirmed transactions are still available, but that the unconfirmed transaction has been zapped.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


class ZapWalletTXesTest (BitcoinTestFramework):
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 3

    def setup_network(self):
        super().setup_network()
        connect_nodes_bi(self.nodes,0,2)

    def run_test (self):
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        self.sync_all()
        self.nodes[1].generate(101)
        self.sync_all()

        assert_equal(self.nodes[0].getbalance(), 50)

        txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
        txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
        self.sync_all()
        self.nodes[0].generate(1)
        self.sync_all()

        txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
        txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)

        tx0 = self.nodes[0].gettransaction(txid0)
        assert_equal(tx0['txid'], txid0) #tx0 must be available (confirmed)

        tx1 = self.nodes[0].gettransaction(txid1)
        assert_equal(tx1['txid'], txid1) #tx1 must be available (confirmed)

        tx2 = self.nodes[0].gettransaction(txid2)
        assert_equal(tx2['txid'], txid2) #tx2 must be available (unconfirmed)

        tx3 = self.nodes[0].gettransaction(txid3)
        assert_equal(tx3['txid'], txid3) #tx3 must be available (unconfirmed)

        #restart bitcoind
        self.stop_node(0)
        self.nodes[0] = self.start_node(0,self.options.tmpdir)

        tx3 = self.nodes[0].gettransaction(txid3)
        assert_equal(tx3['txid'], txid3) #tx must be available (unconfirmed)

        self.stop_node(0)

        #restart bitcoind with zapwallettxes
        self.nodes[0] = self.start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])

        assert_raises(JSONRPCException, self.nodes[0].gettransaction, [txid3]) #there must be an exception because the unconfirmed wallettx0 must be gone by now

        tx0 = self.nodes[0].gettransaction(txid0)
        assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed

if __name__ == '__main__':
    ZapWalletTXesTest ().main ()
Python
0
@@ -710,10 +710,260 @@ ort -*%0A +(assert_equal,%0A assert_raises,%0A bitcoind_processes,%0A connect_nodes_bi,%0A JSONRPCException,%0A ) %0A%0Acl @@ -1223,18 +1223,20 @@ f.nodes, + 0, + 2)%0A%0A @@ -1247,17 +1247,16 @@ run_test - (self):%0A @@ -1411,32 +1411,24 @@ .sync_all()%0A - %0A ass @@ -1469,24 +1469,16 @@ (), 50)%0A - %0A @@ -1710,24 +1710,16 @@ c_all()%0A - %0A @@ -1869,24 +1869,16 @@ (), 10)%0A - %0A @@ -1957,25 +1957,27 @@ d'%5D, txid0) -# + # tx0 must be @@ -1994,32 +1994,24 @@ (confirmed)%0A - %0A tx1 @@ -2090,17 +2090,19 @@ txid1) -# + # tx1 must @@ -2123,32 +2123,24 @@ (confirmed)%0A - %0A tx2 @@ -2219,17 +2219,19 @@ txid2) -# + # tx2 must @@ -2254,32 +2254,24 @@ nconfirmed)%0A - %0A tx3 @@ -2350,17 +2350,19 @@ txid3) -# + # tx3 must @@ -2385,32 +2385,24 @@ nconfirmed)%0A - %0A #re @@ -2605,17 +2605,19 @@ txid3) -# + # tx must @@ -2643,24 +2643,16 @@ firmed)%0A - %0A @@ -2809,24 +2809,16 @@ es=1%22%5D)%0A - %0A @@ -2898,16 +2898,17 @@ # + there mu @@ -3072,17 +3072,19 @@ txid0) -# + # tx0 (con @@ -3194,16 +3194,14 @@ Test - ().main - ()%0A
bf2683749f7748447a679685a85068c5fb9aa7a5
add workaround for assert_called_once
test/parser/testlogfileparser.py
test/parser/testlogfileparser.py
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.

"""Unit tests for the logfileparser module."""

import io
import os
import sys
import tempfile
import unittest

from six import add_move, MovedModule
add_move(MovedModule('mock', 'mock', 'unittest.mock'))
from six.moves import mock
from six.moves.urllib.request import urlopen

import cclib


__filedir__ = os.path.dirname(__file__)
__filepath__ = os.path.realpath(__filedir__)
__datadir__ = os.path.join(__filepath__, "..", "..")


class FileWrapperTest(unittest.TestCase):

    def test_file_seek(self):
        """Can we seek anywhere in a file object?"""
        fpath = os.path.join(__datadir__,"data/ADF/basicADF2007.01/dvb_gopt.adfout")
        with open(fpath, 'r') as fobject:
            wrapper = cclib.parser.logfileparser.FileWrapper(fobject)

            wrapper.seek(0, 0)
            self.assertEqual(wrapper.pos, 0)

            wrapper.seek(10, 0)
            self.assertEqual(wrapper.pos, 10)

            wrapper.seek(0, 2)
            self.assertEqual(wrapper.pos, wrapper.size)

    def test_url_seek(self):
        """Can we seek only to the end of an url stream?"""
        url = "https://raw.githubusercontent.com/cclib/cclib/master/data/ADF/basicADF2007.01/dvb_gopt.adfout"
        stream = urlopen(url)
        wrapper = cclib.parser.logfileparser.FileWrapper(stream)

        # Unfortunately, the behavior of this wrapper differs between Python 2 and 3,
        # so we need to diverge the assertions. We should try to keep the code as
        # consistent as possible, but the Errors raised are actually different.
        wrapper.seek(0, 2)
        self.assertEqual(wrapper.pos, wrapper.size)
        if sys.version_info[0] == 2:
            self.assertRaises(AttributeError, wrapper.seek, 0, 0)
            self.assertRaises(AttributeError, wrapper.seek, 0, 1)
        else:
            self.assertRaises(io.UnsupportedOperation, wrapper.seek, 0, 0)
            self.assertRaises(io.UnsupportedOperation, wrapper.seek, 0, 1)

    def test_stdin_seek(self):
        """We shouldn't be able to seek anywhere in standard input."""
        wrapper = cclib.parser.logfileparser.FileWrapper(sys.stdin)
        self.assertRaises(IOError, wrapper.seek, 0, 0)
        self.assertRaises(IOError, wrapper.seek, 0, 1)

    def test_data_stdin(self):
        """Check that the same attributes are parsed when a file is piped through standard input."""
        logfiles = [
            "data/ADF/basicADF2007.01/dvb_gopt.adfout",
            "data/GAMESS/basicGAMESS-US2017/C_bigbasis.out",
        ]
        get_attributes = lambda data: [a for a in data._attrlist if hasattr(data, a)]
        for lf in logfiles:
            path = "%s/%s" % (__datadir__, lf)
            expected_attributes = get_attributes(cclib.io.ccread(path))
            with open(path) as handle:
                contents = handle.read()
            # This is fix strings not being unicode in Python2.
            try:
                stdin = io.StringIO(contents)
            except TypeError:
                stdin = io.StringIO(unicode(contents))
            stdin.seek = sys.stdin.seek
            data = cclib.io.ccread(stdin)
            self.assertEqual(get_attributes(data), expected_attributes)


class LogfileTest(unittest.TestCase):
    """Unit tests for the Logfile class."""

    def test_parse_check_values(self):
        """Are custom checks performed after parsing finishes?

        The purpose of this test is not to comprehensively cover all the checks,
        but rather to make sure the call and logging works. The unit tests for
        the data class should have comprehensive coverage.
        """
        _, path = tempfile.mkstemp()

        logfileclass = cclib.parser.logfileparser.Logfile
        logfileclass.__abstractmethods__ = set()
        parser = logfileclass(path)
        parser.extract = lambda self, inputfile, line: None
        parser.logger = mock.Mock()

        parser.etenergies = [1, -1]
        parser.parse()
        parser.logger.error.assert_called_once()


if __name__ == "__main__":
    unittest.main()
Python
0.000001
@@ -4148,16 +4148,33 @@ parse()%0A + try:%0A @@ -4214,16 +4214,265 @@ _once()%0A + except AttributeError: # assert_called_once is not availible until python 3.6%0A self.assertEqual(parser.logger.error.call_count, 1, %22Expected mock to have been called once. Called %7B%7D times.%22.format(parser.logger.error.call_count))%0A %0A%0Aif __n
74799081cd800cecd10e3b2248cf39c37ff42818
Move credentials so we can use them throughout the application
run.py
run.py
import json
import os

from google.cloud import pubsub
import google.auth

from micromanager import MicroManager
from micromanager.resources import Resource
from stackdriver import StackdriverParser

# Load configuration
project_id = os.environ.get('PROJECT_ID')
subscription_name = os.environ.get('SUBSCRIPTION_NAME')
opa_url = os.environ.get('OPA_URL')
enforce_policy = os.environ.get('ENFORCE', '').lower() == 'true'

# Instantiate our micromanager
mmconfig = {
    'policy_engines': [
        {
            'type': 'opa',
            'url': opa_url
        }
    ]
}
mm = MicroManager(mmconfig)

running_config = {
    'configured_policies': mm.get_configured_policies(),
    'policy_enforcement': "enabled" if enforce_policy else "disabled"
}
print(json.dumps(running_config))


def callback(pubsub_message):
    log = {}

    try:
        log_message = json.loads(pubsub_message.data)
    except (json.JSONDecodeError, AttributeError):
        # We can't parse the log message, nothing to do here
        pubsub_message.ack()
        return

    try:
        asset_info = StackdriverParser.get_asset(log_message)

        if asset_info is None:
            # We did not recognize any assets in this message
            pubsub_message.ack()
            return

        if asset_info.get('operation_type') != 'write':
            # No changes, no need to check anything
            pubsub_message.ack()
            return
    except Exception:
        # If we fail to get asset info from the message, the message must be
        # bad
        pubsub_message.ack()
        return

    try:
        log['asset_info'] = asset_info
        resource = Resource.factory('gcp', asset_info)

        v = mm.violations(resource)
        log['violation_count'] = len(v)
        log['remediation_count'] = 0

        if enforce_policy:
            for (engine, violation) in v:
                engine.remediate(resource, violation)
                log['remediation_count'] += 1

    except Exception as e:
        # Catch any other exceptions so we can acknowledge the message.
        # Otherwise they start to fill up the buffer of unacknowledged messages
        log['exception'] = str(e)
        pubsub_message.ack()

        # Now allow the thread to raise the exception
        raise e
    finally:
        print(json.dumps(log, separators=(',', ':')))
        pubsub_message.ack()


if __name__ == "__main__":

    # We're using the application default credentials, but defining them
    # explicitly so its easy to plug-in credentials using your own preferred
    # method
    app_creds, _ = google.auth.default()

    subscriber = pubsub.SubscriberClient(credentials=app_creds)
    subscription_path = 'projects/{project_id}/subscriptions/{sub}'.format(
        project_id=project_id,
        sub=subscription_name
    )

    future = subscriber.subscribe(
        subscription_path,
        callback=callback
    )

    print("Listening for pubsub messages on {}...".format(subscription_path))

    try:
        future.result()
    except Exception:
        future.cancel()
        raise
Python
0
@@ -415,16 +415,205 @@ 'true'%0A%0A +# We're using the application default credentials, but defining them%0A# explicitly so its easy to plug-in credentials using your own preferred%0A# method%0Aapp_creds, _ = google.auth.default()%0A%0A # Instan @@ -1871,16 +1871,39 @@ set_info +, credentials=app_creds )%0A%0A @@ -2624,213 +2624,8 @@ %22:%0A%0A - # We're using the application default credentials, but defining them%0A # explicitly so its easy to plug-in credentials using your own preferred%0A # method%0A app_creds, _ = google.auth.default()%0A%0A
3228b640d74dd1b06e9d96fb8265cc8c952074f6
solve Flatten layer issue
run.py
run.py
from keras.models import Model
from keras.layers import (Activation, Dropout, AveragePooling2D, Input,
                          Flatten, MaxPooling2D, Convolution2D)
from firemodule import FireModule
from keras.datasets import cifar10, mnist
from keras.optimizers import SGD
from keras.utils import np_utils

import numpy as np

datasets = {
    "mnist": mnist,
    "cifar": cifar10
}

(x_train, y_train), (x_test, y_test) = datasets["cifar"].load_data()

x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)




inputs = Input(x_train.shape[1:])
layer = Convolution2D(96, 7, 7)(inputs)
layer = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(layer)
layer = FireModule(16, 64)(layer)
layer = FireModule(16, 64)(layer)
layer = FireModule(32, 128)(layer)
layer = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(layer)
layer = FireModule(32, 128)(layer)
layer = FireModule(48, 192)(layer)
layer = FireModule(48, 192)(layer)
layer = FireModule(64, 256)(layer)
layer = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(layer)
layer = FireModule(64, 256)(layer)
layer = Dropout(0.5)(layer)
layer = Convolution2D(10, 1, 1)(layer)
layer = AveragePooling2D((10, 10))(layer)
layer = Flatten()(layer)
layer = Activation("softmax")(layer)

model = Model(input = inputs, output = layer)
model.compile(x_train, y_train, optimizer = SGD(0.01, momentum = 0.85))
model.predict(x_test, y_test)
model.save("squeezenet.dump")
Python
0.000002
@@ -23,17 +23,29 @@ rt Model +, Sequential %0A - from ker @@ -629,18 +629,16 @@ test)%0A%0A%0A -%0A%0A inputs = @@ -1270,14 +1270,12 @@ 2D(( -10, 10 +2, 2 ))(l @@ -1339,25 +1339,24 @@ ax%22)(layer)%0A -%0A model = Mode @@ -1408,64 +1408,130 @@ ile( -x_train, y_train, optimizer = SGD(0.01, momentum = 0.85) +loss = %22categorical_crossentropy%22,%0A optimizer = %22rmsprop%22, metrics = %5B%22accuracy%22%5D)%0Amodel.fit(x_train, y_train )%0Amo
454740f2657efa88efa16abdba93dc427bcf4d70
Add try/except to capture all the exceptions that might be raised anywhere. TODO: capture exceptions in specific places and re-raise them so the main catch can log them
run.py
run.py
from PdfProcessor import *
import argparse
from datetime import datetime
import ConfigParser
import ProcessLogger

parser = argparse.ArgumentParser(description='Processes the pdf and extracts the text')
parser.add_argument('-i','--infile', help='File path of the input pdf file.', required=True)
parser.add_argument('-o','--outdir', help='File name of the output csv file.', required=True)
results = parser.parse_args()

logger = ProcessLogger.getLogger('run')
logger.info("Processing started at %s ", str(datetime.now()))
logger.info("input: %s", results.infile)
logger.info("outdir: %s", results.outdir)


configParser = ConfigParser.RawConfigParser()
configParser.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'settings.config'))

pdfProcessor = PDFProcessor(results.infile, results.outdir)
pdfProcessor.setConfigParser(configParser)
if pdfProcessor.isStructured():
    pdfProcessor.extractTextFromStructuredDoc()
else:
    pdfProcessor.extractTextFromScannedDoc()
pdfProcessor.writeStats()

logger.info("Processing ended at %s ", str(datetime.now()));
Python
0.000002
@@ -106,16 +106,33 @@ ssLogger +%0Aimport traceback %0A%0Aparser @@ -427,24 +427,33 @@ rse_args()%0A%0A +try:%0A logger = Pro @@ -480,16 +480,20 @@ ('run')%0A + logger.i @@ -546,16 +546,20 @@ now()))%0A + logger.i @@ -591,16 +591,20 @@ infile)%0A + logger.i @@ -641,16 +641,20 @@ ir) %0A%0A%0A + configPa @@ -691,16 +691,20 @@ arser()%0A + configPa @@ -791,16 +791,20 @@ fig'))%0A%0A + pdfProce @@ -855,16 +855,20 @@ outdir)%0A + pdfProce @@ -902,16 +902,20 @@ Parser)%0A + if pdfPr @@ -938,24 +938,28 @@ ured():%0A + + pdfProcessor @@ -994,14 +994,22 @@ c()%0A + else:%0A + @@ -1049,16 +1049,20 @@ edDoc()%0A + pdfProce @@ -1079,16 +1079,299 @@ Stats()%0A +except OSError as e:%0A logger.error(%22OSError: %25s %5B%25s%5D in %25s%22, e.strerror, e.errno, e.filename);%0A logger.debug(traceback.format_exception(*sys.exc_info()))%0Aexcept Exception as e:%0A logger.error(%22Exception: %25s %22, e);%0A logger.debug(traceback.format_exception(*sys.exc_info())) %0Alogger.
ec09e3b35d431232feb0df1577b3fe6578b68704
Remove old SSL code from run.py
run.py
run.py
import logging
import os
import sys
import ssl
import json
import uvloop
import asyncio

from aiohttp import web
from setproctitle import setproctitle

from virtool.app import create_app
from virtool.app_init import get_args, configure

sys.dont_write_bytecode = True

logger = logging.getLogger("aiohttp.server")

setproctitle("virtool")

args = get_args()
configure(verbose=args.verbose)

asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
loop = asyncio.get_event_loop()

settings_path = os.path.join(sys.path[0], "settings.json")
skip_setup = os.path.isfile(settings_path)

try:
    with open(settings_path, "r") as handle:
        settings_temp = json.load(handle)
except FileNotFoundError:
    settings_temp = dict()

if __name__ == "__main__":
    ssl_context = None

    if settings_temp.get("use_ssl", False):
        cert_path = settings_temp.get("cert_path", None)
        key_path = settings_temp.get("key_path", None)

        if cert_path and key_path:
            ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
            ssl_context.load_cert_chain(cert_path, key_path)

    app = create_app(
        loop,
        skip_setup=skip_setup,
        force_version=args.force_version,
        no_sentry=args.no_sentry
    )

    host = args.host or settings_temp.get("server_host", "localhost")

    if args.port:
        port = int(args.port)
    else:
        port = settings_temp.get("server_port", 9950)

    web.run_app(app, host=host, port=port, ssl_context=ssl_context)
Python
0.000002
@@ -33,19 +33,8 @@ sys%0A -import ssl%0A impo @@ -41,16 +41,16 @@ rt json%0A + import u @@ -744,364 +744,8 @@ _%22:%0A - ssl_context = None%0A%0A if settings_temp.get(%22use_ssl%22, False):%0A cert_path = settings_temp.get(%22cert_path%22, None)%0A key_path = settings_temp.get(%22key_path%22, None)%0A%0A if cert_path and key_path:%0A ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)%0A ssl_context.load_cert_chain(cert_path, key_path)%0A%0A @@ -1073,16 +1073,16 @@ 9950)%0A%0A + web. @@ -1118,31 +1118,6 @@ port -, ssl_context=ssl_context )%0A
c15de13fa8dae840349463f6853f3edd3784ba6d
Update connect_db.py
connect_db.py
connect_db.py
from couchdb import Server
# server = Server() # connects to the local_server
# >>> remote_server = Server('http://example.com:5984/')
# >>> secure_remote_server = Server('https://username:password@example.com:5984/')


class db_server(object):

    def __init__(self,username,login):
        self.secure_server=Server('http://%s:%s@130.56.252.58:5984' %(username,login))
        self.db=self.secure_server["tweet"]

    def insert(self,data):
        try:
            doc_id,doc_rev=self.db.save(data)
        except Exception as e:
            with open('log','a') as f:
                f.write(str(e)+'\n')
                f.write((data['_id']+'\n'))
Python
0
@@ -1,8 +1,25 @@ +#!/usr/bin/python %0Afrom co
68510d03ba9d2a2dc5b2a471a92b5976306ffc39
Fix variable name
VMEncryption/main/oscrypto/OSEncryptionState.py
VMEncryption/main/oscrypto/OSEncryptionState.py
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#

import os.path
import re

from collections import namedtuple
from Common import *
from CommandExecutor import *
from BekUtil import *
from DiskUtil import *
from EncryptionConfig import *


class OSEncryptionState(object):
    def __init__(self, state_name, context):
        super(OSEncryptionState, self).__init__()

        self.state_name = state_name
        self.context = context
        self.state_executed = False
        self.state_marker = os.path.join(self.context.encryption_environment.os_encryption_markers_path,
                                         self.state_name)

        self.command_executor = CommandExecutor(self.context.logger)
        self.disk_util = DiskUtil(hutil=self.context.hutil,
                                  patching=self.context.distro_patcher,
                                  logger=self.context.logger,
                                  encryption_environment=self.context.encryption_environment)
        self.bek_util = BekUtil(disk_util=self.disk_util,
                                logger=self.context.logger)
        self.encryption_config = EncryptionConfig(encryption_environment=self.context.encryption_environment,
                                                  logger=self.context.logger)

        rootfs_mountpoint = '/'

        if self._is_in_memfs_root():
            rootfs_mountpoint = '/oldroot'

        rootfs_sdx_path = self._get_fs_partition(rootfs_mountpoint)[0]

        if rootfs_sdx_path == "none":
            self.context.logger.log("rootfs_sdx_path is none, parsing UUID from fstab")
            rootfs_uuid = self._parse_rootfs_uuid_from_fstab()
            self.context.logger.log("rootfs_uuid: {0}".format(rootfs_uuid))
            rootfs_sdx_path = self.disk_util.query_dev_sdx_path_by_uuid(rootfs_uuid)

        self.context.logger.log("rootfs_sdx_path: {0}".format(rootfs_sdx_path))
        self.rootfs_block_device = self.disk_util.query_dev_id_path_by_sdx_path(rootfs_sdx_path)

        if not self.rootfs_block_device.startswith('/dev'):
            distro_name = self.context.distro_patcher.distro_info[0]
            self.rootfs_block_device = '/dev/sda1' if distro_name == 'Ubuntu' else '/dev/sda2'

        self.context.logger.log("rootfs_block_device: {0}".format(self.rootfs_block_device))

    def should_enter(self):
        self.context.logger.log("OSEncryptionState.should_enter() called for {0}".format(self.state_name))

        if self.state_executed:
            self.context.logger.log("State {0} has already executed, not entering".format(self.state_name))
            return False

        if not os.path.exists(self.state_marker):
            self.context.logger.log("State marker {0} does not exist, state {1} can be entered".format(self.state_marker,
                                                                                                       self.state_name))
            return True
        else:
            self.context.logger.log("State marker {0} exists, state {1} has already executed".format(self.state_marker,
                                                                                                     self.state_name))
            return False

    def should_exit(self):
        self.context.logger.log("OSEncryptionState.should_exit() called for {0}".format(self.state_name))

        if not os.path.exists(self.state_marker):
            self.disk_util.make_sure_path_exists(self.context.encryption_environment.os_encryption_markers_path)
            self.context.logger.log("Creating state marker {0}".format(self.state_marker))
            self.disk_util.touch_file(self.state_marker)

        self.state_executed = True

        self.context.logger.log("state_executed for {0}: {1}".format(self.state_name,
                                                                     self.state_executed))

        return self.state_executed

    def _get_fs_partition(self, fs):
        result = None
        dev = os.lstat(fs).st_dev

        for line in file('/proc/mounts'):
            line = [s.decode('string_escape') for s in line.split()[:3]]

            if dev == os.lstat(line[1]).st_dev:
                result = tuple(line)

        return result

    def _is_in_memfs_root(self):
        mounts = file('/proc/mounts', 'r').read()
        return bool(re.search(r'/\s+tmpfs', mounts))

    def _parse_rootfs_uuid_from_fstab(self):
        contents = file('/etc/fstab', 'r').read()
        matches = re.findall(r'UUID=(.*?)\s+/\s+', s)

        if matches:
            return matches[0]


OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
                                      ['hutil',
                                       'distro_patcher',
                                       'logger',
                                       'encryption_environment'])
Python
0.99981
@@ -5036,16 +5036,23 @@ +/%5Cs+', +content s)%0A
dc53ec2c73b5ec18fe77ed7fdb12a1e50664952b
clean imports
courriers/views.py
courriers/views.py
# -*- coding: utf-8 -*-
from django.views.generic import ListView, DetailView, FormView, TemplateView
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django.views.generic.base import TemplateResponseMixin
from django.utils import translation

from .settings import PAGINATE_BY
from .models import Newsletter, NewsletterList
from .forms import SubscriptionForm, UnsubscribeForm
from .utils import ajaxify_template_var


class AJAXResponseMixin(TemplateResponseMixin):
    ajax_template_name = None

    def get_template_names(self):
        names = super(AJAXResponseMixin, self).get_template_names()

        if self.request.is_ajax():
            if self.ajax_template_name:
                names = [self.ajax_template_name] + names
            else:
                names = ajaxify_template_var(names) + names

        return names


class NewsletterListView(AJAXResponseMixin, ListView):
    model = Newsletter
    context_object_name = "newsletters"
    template_name = "courriers/newsletter_list.html"
    paginate_by = PAGINATE_BY

    def dispatch(self, *args, **kwargs):
        return super(NewsletterListView, self).dispatch(*args, **kwargs)

    @cached_property
    def newsletter_list(self):
        return get_object_or_404(
            NewsletterList.objects.all(), slug=self.kwargs.get("slug")
        )

    def get_queryset(self):
        lang = translation.get_language()

        qs = self.newsletter_list.newsletters.status_online()

        if lang:
            qs = qs.filter(newsletter_segment__lang=lang)

        qs = qs.order_by("-published_at")

        return qs

    def get_context_data(self, **kwargs):
        context = super(NewsletterListView, self).get_context_data(**kwargs)
        context["newsletter_list"] = self.newsletter_list

        return context


class NewsletterDetailView(AJAXResponseMixin, DetailView):
    model = Newsletter
    context_object_name = "newsletter"
    template_name = "courriers/newsletter_detail.html"

    def get_queryset(self):
        return self.model.objects.status_online()

    def get_context_data(self, **kwargs):
        context = super(NewsletterDetailView, self).get_context_data(**kwargs)
        context["newsletter_list"] = self.object.newsletter_list

        return context


class BaseNewsletterListFormView(AJAXResponseMixin, FormView):
    model = NewsletterList
    context_object_name = "newsletter_list"

    @cached_property
    def object(self):
        slug = self.kwargs.get("slug", None)

        if slug:
            return get_object_or_404(self.model, slug=slug)

        return None

    def post(self, request, *args, **kwargs):
        self.get_context_data(**kwargs)

        form_class = self.get_form_class()
        form = self.get_form(form_class)

        if form.is_valid():
            return self.form_valid(form)

        return self.form_invalid(form)

    def get_form_kwargs(self):
        kwargs = super(BaseNewsletterListFormView, self).get_form_kwargs()

        if self.object:
            kwargs["newsletter_list"] = self.object

        return kwargs

    def get_context_data(self, **kwargs):
        context = super(BaseNewsletterListFormView, self).get_context_data(**kwargs)

        if self.object:
            context[self.context_object_name] = self.object

        return context

    def form_valid(self, form):
        if self.request.user.is_authenticated():
            form.save(self.request.user)
        else:
            form.save()

        return HttpResponseRedirect(self.get_success_url())


class NewsletterListSubscribeView(BaseNewsletterListFormView):
    template_name = "courriers/newsletter_list_subscribe_form.html"
    form_class = SubscriptionForm

    def get_success_url(self):
        return reverse("newsletter_list_subscribe_done")


class NewsletterRawDetailView(AJAXResponseMixin, DetailView):
    model = Newsletter
    template_name = "courriers/newsletter_raw_detail.html"

    def get_context_data(self, **kwargs):
        context = super(NewsletterRawDetailView, self).get_context_data(**kwargs)
        context["items"] = self.object.items.all()

        for item in context["items"]:
            item.newsletter = self.object

        return context


class NewsletterListUnsubscribeView(BaseNewsletterListFormView):
    template_name = "courriers/newsletter_list_unsubscribe.html"

    def get_form_class(self):
        return UnsubscribeForm

    def get_initial(self):
        initial = super(NewsletterListUnsubscribeView, self).get_initial()

        email = self.request.GET.get("email", None)

        if email:
            initial["email"] = email

        return initial.copy()

    def get_success_url(self):
        if self.object:
            return reverse(
                "newsletter_list_unsubscribe_done", kwargs={"slug": self.object.slug}
            )

        return reverse("newsletter_list_unsubscribe_done")


class NewsletterListUnsubscribeDoneView(AJAXResponseMixin, TemplateView):
    template_name = "courriers/newsletter_list_unsubscribe_done.html"
    model = NewsletterList
    context_object_name = "newsletter_list"

    def get_context_data(self, **kwargs):
        context = super(NewsletterListUnsubscribeDoneView, self).get_context_data(
            **kwargs
        )

        slug = self.kwargs.get("slug", None)

        if slug:
            context[self.context_object_name] = get_object_or_404(self.model, slug=slug)

        return context


class NewsletterListSubscribeDoneView(AJAXResponseMixin, TemplateView):
    template_name = "courriers/newsletter_list_subscribe_done.html"
    model = NewsletterList
    context_object_name = "newsletter_list"
Python
0.000001
@@ -144,39 +144,8 @@ rse%0A -from django.db.models import Q%0A from
c1f11cbd217391dd51566e0b2a8377c5c13772f6
Remove unused imports
courriers/views.py
courriers/views.py
# -*- coding: utf-8 -*-
from django.views.generic import View, ListView, DetailView, FormView, TemplateView
from django.views.generic.edit import FormMixin
from django.views.generic.detail import SingleObjectMixin
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.utils.functional import cached_property

from .settings import PAGINATE_BY
from .models import Newsletter, NewsletterList
from .forms import SubscriptionForm, UnsubscribeForm, UnsubscribeAllForm


class NewsletterListView(ListView):
    model = Newsletter
    context_object_name = 'newsletters'
    template_name = 'courriers/newsletter_list.html'
    paginate_by = PAGINATE_BY

    @cached_property
    def newsletter_list(self):
        return get_object_or_404(NewsletterList, slug=self.kwargs.get('slug'))

    def get_queryset(self):
        qs = self.newsletter_list.newsletters.status_online().order_by('published_at')

        lang = self.kwargs.get('lang', None)

        if lang:
            qs = qs.filter(languages__contains=lang)

        return qs

    def get_context_data(self, **kwargs):
        context = super(NewsletterListView, self).get_context_data(**kwargs)
        context['newsletter_list'] = self.newsletter_list

        return context


class NewsletterDetailView(DetailView):
    model = Newsletter
    context_object_name = 'newsletter'
    template_name = 'courriers/newsletter_detail.html'

    def get_context_data(self, **kwargs):
        context = super(NewsletterDetailView, self).get_context_data(**kwargs)
        context['form'] = SubscriptionForm(user=self.request.user,
                                           newsletter_list=self.model.newsletter_list)

        return context


class NewsletterFormView(SingleObjectMixin, FormView):
    template_name = 'courriers/newsletter_list_subscribe_form.html'
    form_class = SubscriptionForm
    model = Newsletter
    context_object_name = 'newsletter'

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()

        return super(NewsletterFormView, self).post(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(NewsletterFormView, self).get_context_data(**kwargs)
        context.update(SingleObjectMixin.get_context_data(self, **kwargs))

        return context

    def get_form_kwargs(self):
        return dict(super(NewsletterFormView, self).get_form_kwargs(), **{
            'newsletter_list': self.object.newsletter_list
        })

    def form_valid(self, form):
        if self.request.user.is_authenticated():
            form.save(self.request.user)
        else:
            form.save()

        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        return reverse('newsletter_list_subscribe_done')


class NewsletterRawDetailView(DetailView):
    model = Newsletter
    template_name = 'courriers/newsletter_raw_detail.html'

    def get_context_data(self, **kwargs):
        context = super(NewsletterRawDetailView, self).get_context_data(**kwargs)
        context['items'] = self.object.items.all()

        return context


class NewsletterListUnsubscribeView(FormMixin, TemplateView):
    template_name = 'courriers/newsletter_list_unsubscribe.html'
    model = NewsletterList
    context_object_name = 'newsletter_list'

    def get_form_class(self):
        if not self.kwargs.get('slug', None):
            return UnsubscribeAllForm

        return UnsubscribeForm

    def get_initial(self):
        initial = super(NewsletterListUnsubscribeView, self).get_initial()

        email = self.request.GET.get('email', None)

        if email:
            initial['email'] = email

        return initial.copy()

    def get_form_kwargs(self):
        kwargs = super(NewsletterListUnsubscribeView, self).get_form_kwargs()

        if self.object:
            return dict(kwargs, **{
                'newsletter_list': self.object
            })

        return kwargs

    def get_context_data(self, **kwargs):
        context = super(NewsletterListUnsubscribeView, self).get_context_data(**kwargs)

        form_class = self.get_form_class()
        context['form'] = self.get_form(form_class)

        if self.object:
            context[self.context_object_name] = self.object

        return context

    @cached_property
    def object(self):
        slug = self.kwargs.get('slug', None)

        if slug:
            return get_object_or_404(self.model, slug=slug)

        return None

    def post(self, request, *args, **kwargs):
        self.get_context_data(**kwargs)

        form_class = self.get_form_class()
        form = self.get_form(form_class)

        if form.is_valid():
            return self.form_valid(form)

        return self.form_invalid(form)

    def form_valid(self, form):
        form.save()

        return super(NewsletterListUnsubscribeView, self).form_valid(form)

    def get_success_url(self):
        if self.object:
            return reverse('newsletter_list_unsubscribe_done', kwargs={'slug': self.object.slug})

        return reverse('newsletter_list_unsubscribe_done')


class NewsletterListUnsubscribeDoneView(TemplateView):
    template_name = "courriers/newsletter_list_unsubscribe_done.html"
    model = NewsletterList
    context_object_name = 'newsletter_list'

    def get_context_data(self, **kwargs):
        context = super(NewsletterListUnsubscribeDoneView, self).get_context_data(**kwargs)

        slug = self.kwargs.get('slug', None)

        if slug:
            context[self.context_object_name] = get_object_or_404(self.model, slug=slug)

        return context


class NewsletterListSubscribeDoneView(TemplateView):
    template_name = "courriers/newsletter_list_subscribe_done.html"
    model = NewsletterList
    context_object_name = 'newsletter_list'
Python
0.000001
@@ -53,14 +53,8 @@ port - View, Lis @@ -341,28 +341,8 @@ _404 -, render_to_response %0Afro
73f335371db10008a2d221b777350f0b584abde6
use new TimingGenerator
nexys_video.py
nexys_video.py
#!/usr/bin/env python3
from nexys_base import *

from litevideo.output.hdmi.s7 import S7HDMIOutClocking

from litevideo.output.hdmi.s7 import S7HDMIOutPHY

from litevideo.output.core import TimingGenerator


class VideoOutSoC(BaseSoC):
    def __init__(self, platform, *args, **kwargs):
        BaseSoC.__init__(self, platform, *args, **kwargs)

        pads = platform.request("hdmi_out")
        self.comb += pads.scl.eq(1)

        # # #

        self.submodules.vtg = ClockDomainsRenamer("pix")(TimingGenerator(1))
        self.comb += [
            self.vtg.timing.valid.eq(1),
            self.vtg.timing.hres.eq(1280),
            self.vtg.timing.hsync_start.eq(1390),
            self.vtg.timing.hsync_end.eq(1430),
            self.vtg.timing.hscan.eq(1650),
            self.vtg.timing.vres.eq(720),
            self.vtg.timing.vsync_start.eq(725),
            self.vtg.timing.vsync_end.eq(730),
            self.vtg.timing.vscan.eq(750),

            self.vtg.pixels.valid.eq(1)
        ]

        self.submodules.hdmi_clocking = S7HDMIOutClocking(self.crg.clk100, pads)

        self.submodules.hdmi_phy = S7HDMIOutPHY(pads)
        self.comb += [
            self.hdmi_phy.hsync.eq(self.vtg.phy.hsync),
            self.hdmi_phy.vsync.eq(self.vtg.phy.vsync),
            self.hdmi_phy.de.eq(self.vtg.phy.de),
            self.hdmi_phy.r.eq(0x00),
            self.hdmi_phy.g.eq(0x00),
            self.hdmi_phy.b.eq(0xff),

            self.vtg.phy.ready.eq(1)
        ]


def main():
    parser = argparse.ArgumentParser(description="Nexys LiteX SoC")
    builder_args(parser)
    soc_sdram_args(parser)
    parser.add_argument("--nocompile-gateware", action="store_true")
    args = parser.parse_args()

    platform = nexys.Platform()
    soc = VideoOutSoC(platform, **soc_sdram_argdict(args))
    builder = Builder(soc, output_dir="build",
                      compile_gateware=not args.nocompile_gateware,
                      csr_csv="test/csr.csv")
    vns = builder.build()


if __name__ == "__main__":
    main()
Python
0
@@ -98,17 +98,16 @@ locking%0A -%0A from lit @@ -148,17 +148,16 @@ IOutPHY%0A -%0A from lit @@ -507,17 +507,16 @@ nerator( -1 ))%0A @@ -554,22 +554,20 @@ elf.vtg. -timing +sink .valid.e @@ -594,22 +594,20 @@ elf.vtg. -timing +sink .hres.eq @@ -631,30 +631,28 @@ self.vtg. -timing +sink .hsync_start @@ -683,22 +683,20 @@ elf.vtg. -timing +sink .hsync_e @@ -729,22 +729,20 @@ elf.vtg. -timing +sink .hscan.e @@ -772,22 +772,20 @@ elf.vtg. -timing +sink .vres.eq @@ -808,30 +808,28 @@ self.vtg. -timing +sink .vsync_start @@ -859,22 +859,20 @@ elf.vtg. -timing +sink .vsync_e @@ -908,14 +908,12 @@ vtg. -timing +sink .vsc @@ -926,50 +926,8 @@ 750) -,%0A%0A self.vtg.pixels.valid.eq(1) %0A @@ -1136,19 +1136,22 @@ elf.vtg. -phy +source .hsync), @@ -1195,19 +1195,22 @@ elf.vtg. -phy +source .vsync), @@ -1251,19 +1251,22 @@ elf.vtg. -phy +source .de),%0A @@ -1398,19 +1398,22 @@ elf.vtg. -phy +source .ready.e
3e7d2c771d6335411eb602240914b5cd3e15c227
Maintain only one list instead of two
crawler/crawler.py
crawler/crawler.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import re

import requests
from lxml import html

import redis

redis_server = redis.StrictRedis(host='localhost', port=6379)


def extract_num(raw):
    """Extract num from the unicode string."""
    matched = re.search(r'\d+', raw).group()
    return matched


def update_toplist():
    """Upadate songlists in the toplist."""
    for songlist in redis_server.lrange('toplist', 0, -1):
        crawl_detailed_page(redis_server.hget(songlist, 'url'))


def crawl_detailed_page(url):
    """Get info from the songlist page."""
    response = requests.get(url)
    tree = html.fromstring(response.text)
    filter_num = int(redis_server.get('filter_num')) or 0
    played = extract_num(tree.cssselect('strong')[0].text)
    if played < filter_num:
        return
    key = 'wangyi:' + re.search(r'(?<=id=)\d+$', url).group()
    title = tree.cssselect('h2')[0].text
    comments = extract_num(tree.cssselect('.u-btni-cmmt i')[0].text)
    shares = extract_num(tree.cssselect('.u-btni-share i')[0].text)
    favourites = extract_num(tree.cssselect('.u-btni-fav i')[0].text)
    if tree.cssselect('.tags'):
        tags = ' '.join([item.text for item in tree.cssselect('.u-tag i')])
    else:
        tags = ''
    result = {"title": title, "url": url, "played": played,
              "comments": comments, "shares": shares,
              "favourites": favourites, "tags": tags}
    redis_server.rpush('songlists', key)
    redis_server.hmset(key, result)


def crawl_the_page(url):
    """Crawl all the songlists in one page."""
    base_url = 'http://music.163.com'
    tree = html.fromstring(requests.get(url).text)
    for item in tree.cssselect('.u-cover > a'):
        crawl_detailed_page(base_url + item.get('href'))
    next_page = tree.cssselect('.znxt')[0].get('href')
    if next_page != 'javascript:void(0)':
        crawl_the_page(base_url + next_page)
    redis_server.sort('songlists', start=0, num=400, by='*->played',
                      desc=True, store='toplist')
    filter_num = redis_server.hget(redis_server.lindex('toplist', -1), 'played')
    redis_server.set('filter_num', filter_num)
    for songlist in set.difference(
        set(redis_server.lrange('songlists', 0, -1)),
        set(redis_server.lrange('toplist', 0, -1))):
        redis_server.delete(songlist)
        redis_server.lrem('songlists', 0, songlist)
Python
0.00033
@@ -40,16 +40,74 @@ -8 -*-%0A%0A +from __future__ import absolute_import, unicode_literals%0A%0A import r @@ -1553,24 +1553,22 @@ ver. -r +l push(' -song +top list -s ', k @@ -2042,25 +2042,23 @@ r.sort(' -song +top list -s ', start @@ -2069,9 +2069,9 @@ num= -4 +3 00, @@ -2323,90 +2323,8 @@ in -set.difference(%0A set(redis_server.lrange('songlists', 0, -1)),%0A set( redi @@ -2354,16 +2354,16 @@ t', +30 0, -1) -)) :%0A @@ -2360,36 +2360,32 @@ 0, -1):%0A - - redis_server.del @@ -2398,24 +2398,16 @@ nglist)%0A - redi @@ -2420,34 +2420,28 @@ er.l -rem('song +trim('top list -s ', 0, -songlist +299 )%0A
3c31ce72856a651d5365c6dc3949f9ffe482cba9
removing a __name__ == '__main__' bit
crawler/threads.py
crawler/threads.py
# -*- coding: utf-8 -*-
from threading import Thread, Event
from Queue import Empty, Queue

import lxml.html as parser

from .functions import fetch_element_att, fetch_element_text, \
    fetch_links, fetch_url, parse_content, parse_headers
from .events import NewUrlEvent, NewURLDataEvent, NewNoteEvent, send_event
from .queues import URLQueue


class Dispatcher(Thread):
    """
    Sends and receives signals from all the other threads
    in the application.
    """
    name = 'dispatcher'
    daemon = True

    def __init__(self, timeout=600.0, fetchers=2, base=None, gui=None):
        Thread.__init__(self)
        self.killer = Event()
        self.base_url = base
        self.timeout = timeout
        self.fetchers = fetchers
        self.gui = gui
        # queues
        self.url_queue = URLQueue()
        self.content_queue = Queue()
        self.signal_queue = Queue()
        # other threads
        for i in xrange(0, self.fetchers):
            setattr(self, 'fetcher{}'.format(i),
                    Fetcher(self.url_queue, self.signal_queue, self.killer))
        self.parser = Parser(self.content_queue, self.signal_queue,
                             self.base_url, self.killer)

    def run(self):
        for i in xrange(0, self.fetchers):
            getattr(self, 'fetcher{}'.format(i)).start()
        self.parser.start()
        while not self.killer.is_set():
            try:
                action, val = self.signal_queue.get(True, self.timeout)
            except Empty:
                self.killer.set()
                continue
            else:
                self.handle_signal(action, val)
                self.signal_queue.task_done()
        while 1:
            try:
                action, val = self.signal_queue.get(True, 10)
            except Empty:
                break
            else:
                self.handle_signal(action, val)
                self.signal_queue.task_done()

    def handle_signal(self, action, val):
        if 'add_urls' == action:
            for url in val:
                new = self.url_queue.add_url(url)
                if new:
                    send_event(self.gui, NewUrlEvent(url))
        elif 'add_content' == action:
            self.content_queue.put(val)
        elif 'send_note' == action:
            url, e = val
            send_event(self.gui, NewNoteEvent(url, e))
        elif 'url_meta' == action:
            url, dict_ = val
            send_event(self.gui, NewURLDataEvent(url, dict_))
        elif 'stop' == action:
            self.killer.set()
        else:
            pass  # nothin'


class Fetcher(Thread):
    """
    Fetches URLs and sends an event to the application
    with their status codes. Also places the responses
    body into a Queue for further processing by ParserThread
    """
    name = 'fetcher'
    daemon = True

    def __init__(self, url_queue, signal_queue, killer):
        Thread.__init__(self)
        self.urls = url_queue
        self.signal = signal_queue
        self.killer = killer

    def run(self):
        while not self.killer.is_set():
            try:
                url = self.urls.get_nowait()
            except Empty:
                continue
            else:
                self.handle_url(url)
                self.urls.task_done()
        while 1:
            try:
                url = self.urls.get(True, 10)
            except Empty:
                break
            else:
                self.handle_url(url)
                self.urls.task_done()

    def handle_url(self, url):
        content, headers, status, notes = fetch_url(url)
        if status is None:
            self.signal.put(('send_note', (url, notes)))
            return
        if content is not None:
            self.signal.put(('add_content', (url, content)))
        if headers is not None:
            meta = parse_headers(headers)
            meta['status'] = status
            self.signal.put(('url_meta', (url, meta)))
        if notes is not None:
            self.signal.put(('send_note', (url, notes)))


class Parser(Thread):
    """
    Parses the HTML sent from FetcherThread.

    Finds the title, meta description
    meta keywords, and rel canonical
    """
    name = 'parser'
    daemon = True

    def __init__(self, content_queue, signal_queue, base_url, killer):
        Thread.__init__(self)
        self.content = content_queue
        self.signal = signal_queue
        self.url = base_url
        self.killer = killer

    def run(self):
        while not self.killer.is_set():
            try:
                url, to_parse = self.content.get_nowait()
            except Empty:
                continue
            else:
                self.parse_content(url, to_parse)
                self.content.task_done()
        while 1:
            try:
                url, to_parse = self.content.get(True, 10)
            except Empty:
                break
            else:
                self.parse_content(url, to_parse)
                self.content.task_done()

    def parse_content(self, url, to_parse):
        parsed = parse_content(to_parse)
        if parsed is None:
            self.signal.put(('send_note', (url, 'HTML parsing error')))
            return
        if not self.killer.is_set():
            links = fetch_links(parsed)
            if links is not None:
                out_links = set()
                for l in links:
                    if l.startswith(self.url):
                        out_links.add(l)
                self.signal.put(('add_urls', out_links))
        out = {
            'title': '--', 'desc': '--', 'kw': '--',
            'canonical': '--', 'h1': '--', 'h2': '--'
        }
        title = fetch_element_text(parsed, 'title')
        if title is not None:
            out['title'] = title
        desc = fetch_element_att(parsed, 'meta[name=description]', 'content')
        if desc is not None:
            out['desc'] = desc
        kw = fetch_element_att(parsed, 'meta[name=keywords]', 'content')
        if kw is not None:
            out['kw'] = kw
        canonical = fetch_element_att(parsed, 'link[rel=canonical]', 'href')
        if canonical is not None:
            out['canonical'] = canonical
        h1 = fetch_element_text(parsed, 'h1')
        if h1 is not None:
            out['h1'] = h1
        h2 = fetch_element_text(parsed, 'h2')
        if h2 is not None:
            out['h2'] = h2
        self.signal.put(('url_meta', (url, out)))


if __name__ == '__main__':
    urls = ['http://www.classicalguitar.org/about', 'http://www.classicalguitar.org']
    d = Dispatcher(base='http://www.classicalguitar.org/')
    d.signal_queue.put(('add_urls', urls))
    d.start()
Python
0.999027
@@ -6670,238 +6670,8 @@ t))) -%0A%0Aif __name__ == '__main__':%0A urls = %5B'http://www.classicalguitar.org/about', 'http://www.classicalguitar.org'%5D%0A d = Dispatcher(base='http://www.classicalguitar.org/')%0A d.signal_queue.put(('add_urls', urls))%0A d.start() %0A
a2f7b49c992eec2aa2d37b65c9e601850d3ba208
fix twitter/mastodon interface (really)
retweetbot.py
retweetbot.py
#!/usr/bin/env python

import twitter
import requests
import pytoml as toml
import trigger
from time import sleep
import traceback


class RetweetBot(object):
    """
    This bot retweets all tweets which
    1) mention him,
    2) contain at least one of the triggerwords provided.

    api: The api object, generated with your oAuth keys, responsible for
        communication with twitter rest API
    triggers: a list of words, one of them has to be in a tweet for it to be
        retweeted
    last_mention: the ID of the last tweet which mentioned you
    """

    def __init__(self, trigger, config, historypath="last_mention"):
        """
        Initializes the bot and loads all the necessary data.

        :param historypath: Path to the file with ID of the last retweeted
            Tweet
        """
        self.config = config
        keys = self.get_api_keys()
        self.api = twitter.Api(consumer_key=keys[0],
                               consumer_secret=keys[1],
                               access_token_key=keys[2],
                               access_token_secret=keys[3])
        self.historypath = historypath
        try:
            self.user_id = self.config['tapp']['shutdown_contact_userid']
            self.screen_name = \
                self.config['tapp']['shutdown_contact_screen_name']
        except KeyError:
            self.no_shutdown_contact = True
        self.last_mention = self.get_history(self.historypath)
        self.trigger = trigger

    def get_api_keys(self):
        """
        How to get these keys is described in doc/twitter_api.md

        After you received keys, store them in your ticketfrei.cfg like this:
        [tapp]
        consumer_key = "..."
        consumer_secret = "..."

        [tuser]
        access_token_key = "..."
        access_token_secret = "..."

        :return: keys: list of these 4 strings.
        """
        keys = []
        keys.append(self.config['tapp']['consumer_key'])
        keys.append(self.config['tapp']['consumer_secret'])
        keys.append(self.config['tuser']['access_token_key'])
        keys.append(self.config['tuser']['access_token_secret'])
        return keys

    def get_history(self, path):
        """
        This counter is needed to keep track of your mentions, so you
        don't double RT them

        :param path: string: contains path to the file where the ID of the
            last_mention is stored.
        :return: last_mention: ID of the last tweet which mentioned the bot
        """
        try:
            with open(path, "r+") as f:
                last_mention = f.read()
        except IOError:
            with open(path, "w+"):
                last_mention = ""
        return last_mention

    def format_mastodon(self, status):
        """
        Bridge your Retweets to mastodon.

        :todo vmann: add all the mastodon API magic.

        :param status: Object of a tweet.
        :return: toot: text tooted on mastodon, e.g. "_b3yond: There are
            uniformed controllers in the U2 at Opernhaus."
        """
        toot = status.user.name + ": " + status.text
        return toot

    def crawl_mentions(self):
        """
        crawls all Tweets which mention the bot from the twitter rest API.

        :return: list of Status objects
        """
        while 1:
            try:
                mentions = self.api.GetMentions(since_id=self.last_mention)
                return mentions
            except twitter.TwitterError:
                print("[ERROR] Rate Limit exceeded, trying again in a minute")
                sleep(60)
            except requests.exceptions.ConnectionError:
                print("[ERROR] Bad Connection.")
                sleep(10)

    def retweet(self, status):
        """
        Retweets a given tweet.

        :param status: A tweet object.
        :return: toot: string of the tweet, to toot on mastodon.
        """
        while 1:
            try:
                self.api.PostRetweet(status.id)
                return self.format_mastodon(status)
            # maybe one day we get rid of this error. If not, try to uncomment
            # these lines.
            except twitter.error.TwitterError:
                print("[ERROR] probably you already retweeted this tweet.")
                return ()
            except requests.exceptions.ConnectionError:
                print("[ERROR] Bad Connection.")
                sleep(10)

    def tweet(self, post):
        """
        Tweet a post.

        :param post: String with the text to tweet.
        """
        while 1:
            try:
                self.api.PostUpdate(status=post)
                return
            except requests.exceptions.ConnectionError:
                print("[ERROR] Bad Connection.")
                sleep(10)

    def flow(self, to_tweet=()):
        """
        The flow of crawling mentions and retweeting them.

        :param to_tweet: list of strings to tweet
        :return list of retweeted tweets, to toot on mastodon
        """
        # Tweet the toots the Retootbot gives to us
        for post in to_tweet:
            self.tweet(post)

        # Store all mentions in a list of Status Objects
        mentions = self.crawl_mentions()
        mastodon = []

        for status in mentions:
            # Is the Text of the Tweet in the triggerlist?
            if self.trigger.is_ok(status.text):
                # Retweet status
                mastodon.append(self.retweet(status))

            # save the id so it doesn't get crawled again
            self.last_mention = status.id

        # Return Retweets for tooting on mastodon
        return mastodon

    def shutdown(self):
        """
        If something breaks, it shuts down the bot and messages the owner.
        """
        print("[ERROR] Shit went wrong, closing down.")
        if self.no_shutdown_contact:
            return
        with open(self.historypath, "w") as f:
            f.write(str(self.last_mention))
        self.api.PostDirectMessage("Help! I broke down. restart me pls :$",
                                   self.user_id, self.screen_name)


if __name__ == "__main__":
    # create an Api object
    with open('ticketfrei.cfg') as configfile:
        config = toml.load(configfile)

    trigger = trigger.Trigger(config)

    bot = RetweetBot(trigger, config)
    try:
        while True:
            bot.flow()
            sleep(6)
    except:
        traceback.print_exc()
        bot.shutdown()
Python
0
@@ -4325,18 +4325,20 @@ return -() +None %0A @@ -5473,24 +5473,15 @@ -mastodon.append( +toot = self @@ -5496,16 +5496,82 @@ (status) +%0A if toot:%0A mastodon.append(toot )%0A%0A
cb8d69635d1cc40ed3748589142433208b089c4e
Update test/runner.py to allow running tests by name (#930)
rest-api/test/runner.py
rest-api/test/runner.py
#!/usr/bin/env python2

# Copyright 2015 Google Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""App Engine local test runner example.

This program handles properly importing the App Engine SDK so that test modules
can use google.appengine.* APIs and the Google App Engine testbed.

Example invocation:

    $ python runner.py ~/google-cloud-sdk
"""

import argparse
import os
import sys
import unittest


def fixup_paths(path):
    """Adds GAE SDK path to system path and appends it to the google path
    if that already exists."""
    # Not all Google packages are inside namespace packages, which means
    # there might be another non-namespace package named `google` already on
    # the path and simply appending the App Engine SDK to the path will not
    # work since the other package will get discovered and used first.
    # This emulates namespace packages by first searching if a `google` package
    # exists by importing it, and if so appending to its module search path.
    try:
        import google
        google.__path__.append("{0}/google".format(path))
    except ImportError:
        pass

    sys.path.insert(0, path)


def main(sdk_path, test_path, test_pattern):
    # If the SDK path points to a Google Cloud SDK installation
    # then we should alter it to point to the GAE platform location.
    if os.path.exists(os.path.join(sdk_path, 'platform/google_appengine')):
        sdk_path = os.path.join(sdk_path, 'platform/google_appengine')

    # Make sure google.appengine.* modules are importable.
    fixup_paths(sdk_path)

    # Make sure all bundled third-party packages are available.
    import dev_appserver
    dev_appserver.fix_sys_path()

    # Loading appengine_config from the current project ensures that any
    # changes to configuration there are available to all tests (e.g.
    # sys.path modifications, namespaces, etc.)
    try:
        import appengine_config
        (appengine_config)
    except ImportError:
        print 'Note: unable to import appengine_config.'

    # Discover and run tests.
    suite = unittest.loader.TestLoader().discover(test_path, test_pattern)
    return unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == '__main__':
    # Disables logging for the duration of the tests
    import logging
    logging.disable(logging.CRITICAL)

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        'sdk_path',
        help='The path to the Google App Engine SDK or the Google Cloud SDK.')
    parser.add_argument(
        '--test-path',
        help='The path to look for tests, defaults to the current directory.',
        default=os.getcwd())
    parser.add_argument(
        '--test-pattern',
        help='The file pattern for test modules, defaults to *_test.py.',
        default='*_test.py')
    parser.add_argument(
        '--no-coverage',
        dest='coverage',
        action='store_false',
        help='Turn off the coverage report'
    )

    args = parser.parse_args()

    if args.coverage:
        from coverage import Coverage
        cov = Coverage(omit=[
            '*/lib/*',
            '*/appengine-mapreduce/*',
            '*/site-packages/*',
            '*/google_appengine/*',
            '*/test/*',
        ])
        cov.start()

    result = main(args.sdk_path, args.test_path, args.test_pattern)

    if args.coverage:
        cov.stop()
        cov.save()
        cov.html_report()

    if not result.wasSuccessful():
        sys.exit(1)
Python
0
@@ -1655,19 +1655,36 @@ _pattern +, test_names=None ):%0A - # If t @@ -2492,16 +2492,115 @@ tests.%0A + if test_names:%0A suite = unittest.loader.TestLoader().loadTestsFromNames(test_names)%0A else:%0A suite @@ -2664,16 +2664,16 @@ attern)%0A - return @@ -3505,16 +3505,185 @@ ort'%0A ) +%0A parser.add_argument(%0A '--test-names',%0A dest='test_names',%0A nargs='+',%0A help='List of tests to run explicitly. Overrides --test-path & --test-pattern.'%0A ) %0A%0A args @@ -3935,16 +3935,16 @@ tart()%0A%0A - result @@ -3999,16 +3999,44 @@ _pattern +, test_names=args.test_names )%0A%0A if