commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
e3f53b37a276680bd12806ed14d09065d35d583e | Debug logs | dataactcore/scripts/agency_move_s3_files.py | dataactcore/scripts/agency_move_s3_files.py | import boto3
import logging
import argparse
from dataactcore.config import CONFIG_BROKER
from dataactcore.logging import configure_logging
from dataactvalidator.health_check import create_app
logger = logging.getLogger(__name__)
def move_published_agency_files(old_code, new_code):
""" Given the provided old and new agency codes, move the published files from the old agency directory to the new
Args:
old_code: The old agency code to copy from
new_code: The new agency code to move to
"""
if not old_code.endswith('/'):
old_code += '/'
if not new_code.endswith('/'):
new_code += '/'
# Note: the submissions bucket (aws_bucket) is not being used here as that path is based on submission ids
# DABS directory structure
# [certified bucket]/[agency code]/[fy]/[time period]/[publish history id]/[files]
s3 = boto3.resource('s3', region_name=CONFIG_BROKER['aws_region'])
certified_bucket = s3.Bucket(CONFIG_BROKER['certified_bucket'])
files_in_bucket = list(certified_bucket.objects.all())
logger.info('Moving published DABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket if old_file_path.key.startswith(old_code)]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
logger.info('CERIFIED_BUCKET: {}'.format(CONFIG_BROKER['certified_bucket']))
logger.info('OLD FILE PATH: {}'.format(old_file_path))
logger.info('NEW FILE PATH: {}'.format(new_file_path))
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published DABS submission files from {} to {}'.format(old_code, new_code))
# FABS directory structure
# [certified bucket]/FABS/[agency code]/[fy]/[time period]/[files]
logger.info('Moving published FABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket
if old_file_path.key.startswith('FABS/{}'.format(old_code))]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
logger.info('CERIFIED_BUCKET: {}'.format(CONFIG_BROKER['certified_bucket']))
logger.info('OLD FILE PATH: {}'.format(old_file_path))
logger.info('NEW FILE PATH: {}'.format(new_file_path))
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published FABS submission files from {} to {}'.format(old_code, new_code))
def main():
""" Move all submission files in S3 for an agency that has changed its code """
parser = argparse.ArgumentParser(description='Initialize the DATA Act Broker.')
parser.add_argument('-o', '--old_code', help='The old agency code to copy from', required=True)
parser.add_argument('-n', '--new_code', help='The new agency code to move to', required=True)
args = parser.parse_args()
logger.info('Moving published submission files')
move_published_agency_files(args.old_code, args.new_code)
logger.info('Finished moving published submission files')
if __name__ == '__main__':
configure_logging()
with create_app().app_context():
main() | import boto3
import logging
import argparse
from dataactcore.config import CONFIG_BROKER
from dataactcore.logging import configure_logging
from dataactvalidator.health_check import create_app
logger = logging.getLogger(__name__)
def move_published_agency_files(old_code, new_code):
""" Given the provided old and new agency codes, move the published files from the old agency directory to the new
Args:
old_code: The old agency code to copy from
new_code: The new agency code to move to
"""
if not old_code.endswith('/'):
old_code += '/'
if not new_code.endswith('/'):
new_code += '/'
# Note: the submissions bucket (aws_bucket) is not being used here as that path is based on submission ids
# DABS directory structure
# [certified bucket]/[agency code]/[fy]/[time period]/[publish history id]/[files]
s3 = boto3.resource('s3', region_name=CONFIG_BROKER['aws_region'])
certified_bucket = s3.Bucket(CONFIG_BROKER['certified_bucket'])
files_in_bucket = list(certified_bucket.objects.all())
logger.info('Moving published DABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket if old_file_path.key.startswith(old_code)]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published DABS submission files from {} to {}'.format(old_code, new_code))
# FABS directory structure
# [certified bucket]/FABS/[agency code]/[fy]/[time period]/[files]
logger.info('Moving published FABS submission files from {} to {}'.format(old_code, new_code))
old_file_paths = [old_file_path.key for old_file_path in files_in_bucket
if old_file_path.key.startswith('FABS/{}'.format(old_code))]
for old_file_path in old_file_paths:
new_file_path = old_file_path.replace(old_code, new_code, 1)
s3.Object(CONFIG_BROKER['certified_bucket'], new_file_path).copy_from(CopySource=old_file_path)
s3.Object(CONFIG_BROKER['certified_bucket'], old_file_path).delete()
logger.info('Moved published FABS submission files from {} to {}'.format(old_code, new_code))
def main():
""" Move all submission files in S3 for an agency that has changed its code """
parser = argparse.ArgumentParser(description='Initialize the DATA Act Broker.')
parser.add_argument('-o', '--old_code', help='The old agency code to copy from', required=True)
parser.add_argument('-n', '--new_code', help='The new agency code to move to', required=True)
args = parser.parse_args()
logger.info('Moving published submission files')
move_published_agency_files(args.old_code, args.new_code)
logger.info('Finished moving published submission files')
if __name__ == '__main__':
configure_logging()
with create_app().app_context():
main() | Python | 0.000001 |
ac3697fbb5202437d8285cacaba89dbaba30de69 | fix refactoring error | util.py | util.py | import logging
A_THRU_H = 'ABCDEFGH'
# pre-compute an array mapping to algebraic notation
NUMERICAL_TO_ALGEBRAIC = ["{}{}".format(l, n) for n in range(8, 0, -1) for l in A_THRU_H]
# pre-compute a dict mapping to the index
ALGEBRAIC_TO_NUMERICAL = {a:n for n, a in enumerate(NUMERICAL_TO_ALGEBRAIC)}
TOP_LEFT_SQUARE = 0
BOTTOM_RIGHT_SQUARE = 63
def to_algebraic(numeric_index):
try:
return NUMERICAL_TO_ALGEBRAIC[numeric_index]
except IndexError:
return numeric_index
def to_numeric(algebraic_notation):
try:
return ALGEBRAIC_TO_NUMERICAL[algebraic_notation.upper()]
except IndexError:
return algebraic_notation
def get_move_facts(origin, move):
square_if_moved = origin + move
current_col = origin % 8
col_if_moved = (origin + move) % 8
col_dist_if_moved = abs(current_col - col_if_moved)
row_dist = get_row_distance(origin, move)
return (square_if_moved,
current_col,
col_if_moved,
col_dist_if_moved,
row_dist)
def get_row_distance(src, move):
src_row = src // 8
row_if_moved = (src + move) // 8
return abs(src_row - row_if_moved)
def is_on_board(square):
return TOP_LEFT_SQUARE <= square <= BOTTOM_RIGHT_SQUARE
def is_valid_move(src_square, move):
return is_on_board(src_square + move)
| import logging
A_THRU_H = 'ABCDEFGH'
# pre-compute an array mapping to algebraic notation
NUMERICAL_TO_ALGEBRAIC = ["{}{}".format(l, n) for n in range(8, 0, -1) for l in A_THRU_H]
# pre-compute a dict mapping to the index
ALGEBRAIC_TO_NUMERICAL = {a:n for n, a in enumerate(NUMERICAL_TO_ALGEBRAIC)}
TOP_LEFT_SQUARE = 0
BOTTOM_RIGHT_SQUARE = 63
def to_algebraic(numeric_index):
try:
return NUMERICAL_TO_ALGEBRAIC[numeric_index]
except IndexError:
return index
def to_numeric(algebraic_notation):
try:
return ALGEBRAIC_TO_NUMERICAL[algebraic_notation.upper()]
except IndexError:
return algebraic_notation
def get_move_facts(origin, move):
square_if_moved = origin + move
current_col = origin % 8
col_if_moved = (origin + move) % 8
col_dist_if_moved = abs(current_col - col_if_moved)
row_dist = get_row_distance(origin, move)
return (square_if_moved,
current_col,
col_if_moved,
col_dist_if_moved,
row_dist)
def get_row_distance(src, move):
src_row = src // 8
row_if_moved = (src + move) // 8
return abs(src_row - row_if_moved)
def is_on_board(square):
return TOP_LEFT_SQUARE <= square <= BOTTOM_RIGHT_SQUARE
def is_valid_move(src_square, move):
return is_on_board(src_square + move)
| Python | 0.000005 |
ae6bb29262421bcdb9f28bed8fce99517fa4ecc1 | Update tests. | st2common/tests/unit/test_content_utils.py | st2common/tests/unit/test_content_utils.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from oslo.config import cfg
from st2common.content.utils import get_packs_base_paths
from st2tests import config as tests_config
class ContentUtilsTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
tests_config.parse_args()
def test_get_pack_base_paths(self):
cfg.CONF.content.system_packs_base_path = ''
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1'])
# Multiple paths, no trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple paths, trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple same paths
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Assert system path is always first
cfg.CONF.content.system_packs_base_path = '/opt/system'
cfg.CONF.content.packs_base_paths = '/opt/path2:/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/system', '/opt/path2', '/opt/path1'])
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from oslo.config import cfg
from st2common.content.utils import get_packs_base_paths
from st2tests import config as tests_config
class ContentUtilsTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
tests_config.parse_args()
def test_get_pack_base_paths(self):
cfg.CONF.content.system_packs_base_path = ''
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1'])
# Multiple paths, no trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple paths, trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple same paths
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Assert system path is always first
cfg.CONF.content.system_packs_base_path = '/opt/system'
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/system', '/opt/path1'])
| Python | 0 |
65524f41729d1ddcda9ecb66947b85119c80cd18 | format util.py | util.py | util.py | #!/usr/bin/env python
import couchdb, sys
from oaipmh.client import Client
from oaipmh.common import Identify, Metadata, Header
from oaipmh.metadata import MetadataRegistry, oai_dc_reader , MetadataReader
def get_database(url,name):
try:
couch = couchdb.Server(url)
db = couch[name]
return db;
except:
return None
def create_database(url,name):
db = get_database(url,name)
if db == None:
couch = couchdb.Server(url)
db = couch.create(name)
return db
def get_documents(main_url, database_name, url, reader, prefix, format):
registry = MetadataRegistry()
registry.registerReader(prefix, reader)
client = Client(url, registry)
return_stuff = []
for record in client.listRecords(metadataPrefix=prefix):
r = record[1]
value = format(r)
if value != None:
return_stuff.append(value)
if len(return_stuff) >= 10000:
sync_files(main_url, database_name, return_stuff)
return_stuff = []
sync_files(main_url, database_name, return_stuff)
def save_file(db, id, data):
try:
doc = db[id]
except:
doc = None
if doc == None:
db[id] = data
else:
doc['identifier'] = data['identifier']
doc['title']= data['title']
db[id] = doc
def sync_files(main_url, database_name, files_to_replicate):
db = get_database(main_url,database_name)
if db == None:
db = create_database(main_url,database_name)
db.update(files_to_replicate)
def index_documents(oai_url,main_url,database_name, reader, prefix, format):
get_documents(main_url, database_name, oai_url, reader,prefix, format)
| #!/usr/bin/env python
import couchdb, sys
from oaipmh.client import Client
from oaipmh.common import Identify, Metadata, Header
from oaipmh.metadata import MetadataRegistry, oai_dc_reader , MetadataReader
def get_database(url,name):
try:
couch = couchdb.Server(url)
db = couch[name]
return db;
except:
return None
def create_database(url,name):
db = get_database(url,name)
if db == None:
couch = couchdb.Server(url)
db = couch.create(name)
return db
def get_documents(main_url, database_name, url, reader, prefix, format):
registry = MetadataRegistry()
registry.registerReader(prefix, reader)
client = Client(url, registry)
return_stuff = []
for record in client.listRecords(metadataPrefix=prefix):
r = record[1]
value = format(r)
if value != None:
return_stuff.append(value)
if len(return_stuff) >= 10000:
sync_files(main_url, database_name, return_stuff)
return_stuff = []
sync_files(main_url, database_name, return_stuff)
def save_file(db, id, data):
try:
doc = db[id]
except:
doc = None
if doc == None:
db[id] = data
else:
doc['identifier'] = data['identifier']
doc['title']= data['title']
db[id] = doc
def sync_files(main_url, database_name, files_to_replicate):
db = get_database(main_url,database_name)
if db == None:
db = create_database(main_url,database_name)
db.update(files_to_replicate)
def index_documents(oai_url,main_url,database_name, reader, prefix, format):
get_documents(main_url, database_name, oai_url, reader,prefix, format)
#sync_files(main_url, database_name, files_to_replicate)
| Python | 0.000009 |
58412bf4ac5adb78c82060c259803c745c52f861 | Bump version | stock_request_picking_type/__manifest__.py | stock_request_picking_type/__manifest__.py | # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
'name': 'Stock Request Picking Type',
'summary': 'Add Stock Requests to the Inventory App',
'version': '12.0.1.1.0',
'license': 'LGPL-3',
'website': 'https://github.com/stock-logistics-warehouse',
'author': 'Open Source Integrators, '
'Odoo Community Association (OCA)',
'category': 'Warehouse Management',
'depends': [
'stock_request',
],
'data': [
'data/stock_picking_type.xml',
'views/stock_request_views.xml',
'views/stock_picking_views.xml',
],
'development_status': 'Beta',
'maintainers': ['max3903']
}
| # Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
'name': 'Stock Request Picking Type',
'summary': 'Add Stock Requests to the Inventory App',
'version': '12.0.1.0.0',
'license': 'LGPL-3',
'website': 'https://github.com/stock-logistics-warehouse',
'author': 'Open Source Integrators, '
'Odoo Community Association (OCA)',
'category': 'Warehouse Management',
'depends': [
'stock_request',
],
'data': [
'data/stock_picking_type.xml',
'views/stock_request_views.xml',
'views/stock_picking_views.xml',
],
'development_status': 'Beta',
'maintainers': ['max3903']
}
| Python | 0 |
bc6512080bd67413a3136e171be2cc1479254caf | Change startup experiment. | enactiveagents/EnactiveAgents.py | enactiveagents/EnactiveAgents.py | """
Entry module of the application.
"""
import sys
import pygame
from appstate import AppState
import settings
import events
from view import view
from view import agentevents
from controller import controller
import experiment.basic
import webserver
class HeartBeat(events.EventListener):
"""
Class implementing the heartbeat of the application.
"""
def run(self):
"""
Process PyGame events until halt is true.
"""
self.halt = False
print("Starting heartbeat.")
time_elapsed = 0
while True:
AppState.get_state().get_event_manager().post_event(events.ControlEvent())
ticked = False
if AppState.get_state().is_running() and time_elapsed >= settings.SIMULATION_STEP_TIME:
print "------- t = %s" % AppState.get_state().get_t()
AppState.get_state().get_event_manager().post_event(events.TickEvent())
time_elapsed = 0
ticked = True
AppState.get_state().get_event_manager().post_event(events.DrawEvent(ticked and AppState.get_state().get_save_simulation_renders()))
time_elapsed += AppState.get_state().get_clock().tick(settings.MAX_FPS)
if ticked:
AppState.get_state().increment_t()
def notify(self, event):
if isinstance(event, events.QuitEvent):
sys.exit()
def init():
"""
Initialize pygame.
:returns: The surface of the pygame display.
"""
print("Loading pygame modules.")
pygame.display.init()
AppState.get_state().set_clock(pygame.time.Clock())
flags = pygame.DOUBLEBUF
surface = pygame.display.set_mode(
(
AppState.get_state().get_world().get_width() * settings.CELL_WIDTH,
AppState.get_state().get_world().get_height() * settings.CELL_HEIGHT,
),
flags)
surface.set_alpha(None)
pygame.display.set_caption('Enactive Agents v2')
return surface
def main():
"""
Main function of the application.
"""
# Initialize the event manager.
event_manager = events.EventManager()
AppState.get_state().set_event_manager(event_manager)
# Initialize and register the application heartbeat.
heart_beat = HeartBeat()
event_manager.register_listener(heart_beat)
# Initialize and register the world.
#experiment_ = experiment.experiment.Experiment.load_experiment("20161126T003019.p")
experiment_ = experiment.basic.BasicVisionExperiment()
AppState.get_state().set_experiment(experiment_)
world = experiment_.get_world()
event_manager.register_listener(world)
AppState.get_state().set_world(world)
# Initialize pygame.
surface = init()
# Initialize and register the view.
main_view = view.View(surface)
event_manager.register_listener(main_view)
# Initialize the website trace history view.
trace_view = agentevents.AgentEvents()
event_manager.register_listener(trace_view)
# Initialize and register the controller.
main_controller = controller.Controller()
event_manager.register_listener(main_controller)
# Add the experiment controller to the controller
main_controller.set_experiment_controller(lambda e, coords: experiment_.controller(e, main_view.window_coords_to_world_coords(coords)))
# Start the webserver.
webserver.trace_view = trace_view
webserver.start()
# Start the heartbeat.
heart_beat.run()
if __name__ == '__main__':
"""
Application entry-point.
"""
main()
| """
Entry module of the application.
"""
import sys
import pygame
from appstate import AppState
import settings
import events
from view import view
from view import agentevents
from controller import controller
import experiment.basic
import webserver
class HeartBeat(events.EventListener):
"""
Class implementing the heartbeat of the application.
"""
def run(self):
"""
Process PyGame events until halt is true.
"""
self.halt = False
print("Starting heartbeat.")
time_elapsed = 0
while True:
AppState.get_state().get_event_manager().post_event(events.ControlEvent())
ticked = False
if AppState.get_state().is_running() and time_elapsed >= settings.SIMULATION_STEP_TIME:
print "------- t = %s" % AppState.get_state().get_t()
AppState.get_state().get_event_manager().post_event(events.TickEvent())
time_elapsed = 0
ticked = True
AppState.get_state().get_event_manager().post_event(events.DrawEvent(ticked and AppState.get_state().get_save_simulation_renders()))
time_elapsed += AppState.get_state().get_clock().tick(settings.MAX_FPS)
if ticked:
AppState.get_state().increment_t()
def notify(self, event):
if isinstance(event, events.QuitEvent):
sys.exit()
def init():
"""
Initialize pygame.
:returns: The surface of the pygame display.
"""
print("Loading pygame modules.")
pygame.display.init()
AppState.get_state().set_clock(pygame.time.Clock())
flags = pygame.DOUBLEBUF
surface = pygame.display.set_mode(
(
AppState.get_state().get_world().get_width() * settings.CELL_WIDTH,
AppState.get_state().get_world().get_height() * settings.CELL_HEIGHT,
),
flags)
surface.set_alpha(None)
pygame.display.set_caption('Enactive Agents v2')
return surface
def main():
"""
Main function of the application.
"""
# Initialize the event manager.
event_manager = events.EventManager()
AppState.get_state().set_event_manager(event_manager)
# Initialize and register the application heartbeat.
heart_beat = HeartBeat()
event_manager.register_listener(heart_beat)
# Initialize and register the world.
#experiment_ = experiment.experiment.Experiment.load_experiment("20161126T003019.p")
experiment_ = experiment.basic.BasicCoexsistenceExperiment()
AppState.get_state().set_experiment(experiment_)
world = experiment_.get_world()
event_manager.register_listener(world)
AppState.get_state().set_world(world)
# Initialize pygame.
surface = init()
# Initialize and register the view.
main_view = view.View(surface)
event_manager.register_listener(main_view)
# Initialize the website trace history view.
trace_view = agentevents.AgentEvents()
event_manager.register_listener(trace_view)
# Initialize and register the controller.
main_controller = controller.Controller()
event_manager.register_listener(main_controller)
# Add the experiment controller to the controller
main_controller.set_experiment_controller(lambda e, coords: experiment_.controller(e, main_view.window_coords_to_world_coords(coords)))
# Start the webserver.
webserver.trace_view = trace_view
webserver.start()
# Start the heartbeat.
heart_beat.run()
if __name__ == '__main__':
"""
Application entry-point.
"""
main()
| Python | 0 |
9d2f25c2a262a992c79ea5a224c5abc616dd4cb8 | remove space. | lib/acli/__init__.py | lib/acli/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
usage: acli [--version] [--help]
<command> [<args>...]
options:
-h, --help help
The most common commands are:
account Get account info
ec2 Manage ec2 instances
elb Manage elb instances
ami Manage amis
asg Manage auto-scaling groups
lc Manage launch configurations
eip Manage elastic ips
secgroup Manage security groups
See 'acli help <command>'
"""
from __future__ import (absolute_import, print_function)
from docopt import docopt
from colorama import init
from acli.services import (ec2, elb, account, cloudwatch)
from acli.config import Config
from acli.output.ec2 import (output_ec2_list, output_ec2_info,
output_amis, output_ami_info)
from acli.output.cloudwatch import output_ec2_stats
from acli.output.elb import (output_elbs, output_elb_info)
from acli import utils
init(autoreset=True)
def real_main():
args = docopt(__doc__,
version='0.0.1',
options_first=True)
aws_config = Config(args)
argv = [args['<command>']] + args['<args>']
if args['<command>'] == 'account':
from acli.commands import account as command_account
# acc_res = docopt(command_account.__doc__, argv=argv)
iam_conn = account.get_iam_conn(aws_config)
print("alias: {0} | id: {1}".format(", ".join(account.get_account_aliases(iam_conn)),
account.get_account_id(iam_conn)))
exit()
if args['<command>'] == 'ec2':
from acli.commands import ec2 as command_ec2
ec2_res = docopt(command_ec2.__doc__, argv=argv)
if ec2_res.get('list'):
ec2.ec2_list(aws_config)
elif ec2_res.get('info'):
ec2.ec2_info(aws_config, instance_id=ec2_res.get('<instance_id>'))
elif ec2_res.get('stats'):
cloudwatch.ec2_stats(aws_config=aws_config, instance_id=ec2_res.get('<instance_id>'))
exit()
if args['<command>'] == 'elb':
from acli.commands import elb as command_elb
elb_res = docopt(command_elb.__doc__, argv=argv)
if elb_res.get('list'):
elb.elb_list(aws_config)
elif elb_res.get('info'):
elb.elb_info(aws_config, elb_name=elb_res.get('<elb_name>'))
exit()
if args['<command>'] == 'ami':
from acli.commands import ami as command_ami
ami_res = docopt(command_ami.__doc__, argv=argv)
if ami_res.get('list'):
ec2.list_amis(aws_config)
elif ami_res.get('info'):
ec2.ami_info(aws_config, ami_id=ami_res.get('<ami_id>'))
exit()
elif args['<command>'] in ['help', None] and args['<args>']:
if args['<args>'][0] == 'ec2':
from acli.commands import ec2 as command_ec2
print(docopt(command_ec2.__doc__, argv=argv))
elif args['<command>'] in ['help', None] and not args['<args>']:
print("usage: acli help <command>")
else:
exit("%r is not an acli command. See 'acli help." % args['<command>'])
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
usage: acli [--version] [--help]
<command> [<args>...]
options:
-h, --help help
The most common commands are:
account Get account info
ec2 Manage ec2 instances
elb Manage elb instances
ami Manage amis
asg Manage auto-scaling groups
lc Manage launch configurations
eip Manage elastic ips
secgroup Manage security groups
See 'acli help <command>'
"""
from __future__ import (absolute_import, print_function)
from docopt import docopt
from colorama import init
from acli.services import (ec2, elb, account, cloudwatch)
from acli.config import Config
from acli.output.ec2 import (output_ec2_list, output_ec2_info,
output_amis, output_ami_info)
from acli.output.cloudwatch import output_ec2_stats
from acli.output.elb import (output_elbs, output_elb_info)
from acli import utils
init(autoreset=True)
def real_main():
args = docopt(__doc__,
version='0.0.1',
options_first=True)
aws_config = Config(args)
argv = [args['<command>']] + args['<args>']
if args['<command>'] == 'account':
from acli.commands import account as command_account
# acc_res = docopt(command_account.__doc__, argv=argv)
iam_conn = account.get_iam_conn(aws_config)
print("alias: {0} | id: {1}".format(", ".join(account.get_account_aliases(iam_conn)),
account.get_account_id(iam_conn)))
exit()
if args['<command>'] == 'ec2':
from acli.commands import ec2 as command_ec2
ec2_res = docopt(command_ec2.__doc__, argv=argv)
if ec2_res.get('list'):
ec2.ec2_list(aws_config)
elif ec2_res.get('info'):
ec2.ec2_info(aws_config, instance_id=ec2_res.get('<instance_id>'))
elif ec2_res.get('stats'):
cloudwatch.ec2_stats(aws_config=aws_config, instance_id=ec2_res.get('<instance_id>'))
exit()
if args['<command>'] == 'elb':
from acli.commands import elb as command_elb
elb_res = docopt(command_elb.__doc__, argv=argv)
if elb_res.get('list'):
elb.elb_list(aws_config)
elif elb_res.get('info'):
elb.elb_info(aws_config, elb_name=elb_res.get('<elb_name>'))
exit()
if args['<command>'] == 'ami':
from acli.commands import ami as command_ami
ami_res = docopt(command_ami.__doc__, argv=argv)
if ami_res.get('list'):
ec2.list_amis(aws_config)
elif ami_res.get('info'):
ec2.ami_info(aws_config, ami_id=ami_res.get('<ami_id>'))
exit()
elif args['<command>'] in ['help', None] and args['<args>']:
if args['<args>'][0] == 'ec2':
from acli.commands import ec2 as command_ec2
print(docopt(command_ec2.__doc__, argv=argv))
elif args['<command>'] in ['help', None] and not args['<args>']:
print("usage: acli help <command>")
else:
exit("%r is not an acli command. See 'acli help." % args['<command>'])
| Python | 0.000413 |
f887c7c5fc0be7e86ebddb28b6d785878ae88121 | Add projects to locals in projects_archive | projects/views.py | projects/views.py | from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render, get_object_or_404, redirect
from .models import Project
from .forms import ProjectForm, RestrictedProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/add.html', locals())
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if request.user == project.user and (project.status == 'unrevised'
or project.status == 'returned'):
data = request.POST if request.POST else None
form = ProjectForm(data=data, user=request.user, instance=project)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/edit.html', locals())
else:
return redirect('members:user-projects')
@permission_required('projects.change_project', login_url="members:user-projects")
def edit_status(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
data = request.POST if request.POST else None
form = RestrictedProjectForm(data=data, instance=project)
if form.is_valid():
project.save()
return redirect('members:user-projects')
return render(request, 'projects/edit_status.html', locals())
def projects_archive(request):
unrevised = Project.objects.filter(status='unrevised')
returned = Project.objects.filter(status='returned')
pending = Project.objects.filter(status='pending')
approved = Project.objects.filter(status='approved')
rejected = Project.objects.filter(status='rejected')
projects = Project.objects.all()
return render(request, 'projects/archive.html', locals())
def show_project(request, project_id):
project = get_object_or_404(Project, id=project_id)
return render(request, 'projects/show_project.html', {'project_show' : project})
| from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render, get_object_or_404, redirect
from .models import Project
from .forms import ProjectForm, RestrictedProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/add.html', locals())
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if request.user == project.user and (project.status == 'unrevised'
or project.status == 'returned'):
data = request.POST if request.POST else None
form = ProjectForm(data=data, user=request.user, instance=project)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/edit.html', locals())
else:
return redirect('members:user-projects')
@permission_required('projects.change_project', login_url="members:user-projects")
def edit_status(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
data = request.POST if request.POST else None
form = RestrictedProjectForm(data=data, instance=project)
if form.is_valid():
project.save()
return redirect('members:user-projects')
return render(request, 'projects/edit_status.html', locals())
def projects_archive(request):
unrevised = Project.objects.filter(status='unrevised')
returned = Project.objects.filter(status='returned')
pending = Project.objects.filter(status='pending')
approved = Project.objects.filter(status='approved')
rejected = Project.objects.filter(status='rejected')
return render(request, 'projects/archive.html', locals())
def show_project(request, project_id):
project = get_object_or_404(Project, id=project_id)
return render(request, 'projects/show_project.html', {'project_show' : project})
| Python | 0 |
608f667f8d3a9faa8fc41777b2006c325afff61c | Fix var names. | vote.py | vote.py | import enki
import json

# Fetch every task and its submitted runs from the PyBossa "translations"
# project.
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()

tasks = []
for t in e.tasks:
    # Each submitted translation (task run) becomes one voting option.
    options = []
    for k in e.task_runs_df[t.id]['msgstr'].keys():
        option = dict(task_run_id=None, msgstr=None)
        option['task_run_id'] = k
        option['msgstr'] = e.task_runs_df[t.id]['msgstr'][k]
        options.append(option)
    t.info['msgstr_options'] = options
    tasks.append(t.info)

# Use a context manager so the file is closed even if serialization fails;
# this also avoids shadowing the builtin name `file` and drops the unused
# loop counter the original carried.
with open('/tmp/translations_voting_tasks.json', 'w') as out:
    out.write(json.dumps(tasks))
| import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| Python | 0.000002 |
a8681015902101192caeaff6c755069d406f3d0e | Support NonNode << Node, limit scope in conf_load. | conf.py | conf.py | """
Pyconf DSL for generating JSON or Protobuf configuration.
"""
class Node(object):
    """DSL wrapper around a plain value (dict, list/tuple, scalar or None).

    Nodes are immutable: every operator returns a new Node.  execute()
    resolves a Node tree into plain Python data.
    """
    def __init__(self, value):
        self._value = value
    def execute(self):
        """Recursively unwrap nested Nodes into plain data.

        For dict nodes, keys of the form __name__ are treated as metadata;
        a __post__ callable, if present, post-processes the resolved data.
        """
        def _unwrap(item):
            if isinstance(item, Node):
                return item.execute()
            else:
                return item
        if isinstance(self._value, dict):
            meta = {}
            data = {}
            for k, v in self._value.iteritems():
                if k.startswith('__') and k.endswith('__'):
                    meta[k] = v
                else:
                    data[k] = _unwrap(v)
            if '__post__' in meta:
                return meta['__post__'](meta, data)
            else:
                return data
        elif isinstance(self._value, (list, tuple)):
            return map(_unwrap, self._value)
        else:
            return self._value
    def __call__(self, **kwargs):
        """Return a new dict Node extending this node's dict with kwargs."""
        output = {}
        if self._value is not None:
            if not isinstance(self._value, dict):
                raise TypeError('Cannot extend non-dict node %s' % (type(self._value),))
            output.update(self._value)
        for k, v in kwargs.iteritems():
            output[k] = v
        return Node(output)
    def __getattr__(self, attr):
        """Look up `attr` in a dict node, wrapping the result in a Node."""
        if attr.startswith('_'):
            raise AttributeError('Private or meta attr %s' % (attr,))
        if self._value is None:
            return self
        if not isinstance(self._value, dict):
            raise TypeError('Cannot get attr of non-dict node %s' % (type(self._value),))
        output = self._value.get(attr)
        if not isinstance(output, Node):
            output = Node(output)
        return output
    def __getitem__(self, fn):
        """node[callable] installs a __post__ hook; node['a b'] / node[list]
        installs a hook projecting the dict values in the given key order."""
        if callable(fn):
            return self(__post__ = fn)
        order = fn
        if isinstance(order, (str, unicode)):
            order = order.strip().split()
        return self(__post__ = lambda meta, value: [value.get(key) for key in order])
    def __add__(self, ls):
        """Concatenate an array node with a list/tuple (or array Node)."""
        if isinstance(ls, Node):
            ls = ls._value
        if not isinstance(ls, (list, tuple)):
            raise TypeError('Cannot append node with non-array data %s' % (type(ls),))
        orig = self._value
        if orig is None:
            orig = ()
        if not isinstance(orig, (list, tuple)):
            raise TypeError('Cannot append non-array node %s' % (type(self._value),))
        return Node(tuple(orig) + tuple(ls))
    def __lshift__(self, right):
        """node << dict/Node: extend this node with the right-hand entries."""
        if isinstance(right, Node):
            right = right._value
        if not isinstance(right, dict):
            raise TypeError('Cannot extend node with non-dict data %s' % (type(right),))
        return self(**right)
    def __rlshift__(self, left):
        """non_node << node: call `left` with this node's dict as kwargs.

        Bug fix: the parameter was previously named `right` and immediately
        rebound to self._value, so `return left(**right)` raised NameError
        whenever this reflected operator was invoked.
        """
        right = self._value
        if right is None:
            right = {}
        if not isinstance(right, dict):
            raise TypeError('Cannot extend node with non-dict data %s' % (type(right),))
        return left(**right)
# Root node of the DSL; config scripts extend it via calls and `<<`.
conf = Node(None)
def array(*args):
    """Wrap the given positional items in an array-valued Node."""
    return Node(args)
def run(path, builtins=None):
    """Execute the config script at `path` and return its Node results.

    `builtins`, if given, adds extra names to the script's global namespace.
    Returns a dict mapping each top-level Node variable in the script to its
    execute()d plain-data value; non-Node globals are ignored.
    """
    import imp
    import runpy
    import os.path
    conf_builtins = {
        'conf': conf,
        'array': array,
    }
    if builtins is not None:
        conf_builtins.update(builtins)
    def conf_load(dirty_path):
        """Run another config script and expose its public names as a module."""
        path = os.path.normpath(dirty_path)
        # Reject absolute and parent-relative paths: only files under the
        # working directory may be loaded.
        if path.startswith('.') or path.startswith('/'):
            raise ValueError('Invalid conf_load path %s' % (dirty_path,))
        result_dict = runpy.run_path(path, init_globals=conf_builtins)
        # NOTE(review): `imp` is deprecated (removed in Python 3.12);
        # types.ModuleType would be the forward-compatible equivalent.
        mod = imp.new_module(path)
        for k, v in result_dict.iteritems():
            if k.startswith('_'):
                continue
            setattr(mod, k, v)
        return mod
    conf_builtins['load'] = conf_load
    result = runpy.run_path(path, init_globals=conf_builtins)
    output = {}
    for k, v in result.iteritems():
        if isinstance(v, Node):
            output[k] = v.execute()
    return output
if __name__ == '__main__':
    import json
    import sys
    # CLI entry point: evaluate the config file named on the command line
    # and print its CONFIG node as JSON (Python 2 print statement).
    ret = run(sys.argv[1])['CONFIG']
    print json.dumps(ret)
| """
Pyconf DSL for generating JSON or Protobuf configuration.
"""
class Node(object):
def __init__(self, value):
self._value = value
def execute(self):
def _unwrap(item):
if isinstance(item, Node):
return item.execute()
else:
return item
if isinstance(self._value, dict):
meta = {}
data = {}
for k, v in self._value.iteritems():
if k.startswith('__') and k.endswith('__'):
meta[k] = v
else:
data[k] = _unwrap(v)
if '__post__' in meta:
return meta['__post__'](meta, data)
else:
return data
elif isinstance(self._value, (list, tuple)):
return map(_unwrap, self._value)
else:
return self._value
def __call__(self, **kwargs):
output = {}
if self._value is not None:
if not isinstance(self._value, dict):
raise TypeError('Cannot extend non-dict node %s' % (type(self._value),))
output.update(self._value)
for k, v in kwargs.iteritems():
output[k] = v
return Node(output)
def __getattr__(self, attr):
if attr.startswith('_'):
raise AttributeError('Private or meta attr %s' % (attr,))
if self._value is None:
return self
if not isinstance(self._value, dict):
raise TypeError('Cannot get attr of non-dict node %s' % (type(self._value),))
output = self._value.get(attr)
if not isinstance(output, Node):
output = Node(output)
return output
def __getitem__(self, fn):
if callable(fn):
return self(__post__ = fn)
order = fn
if isinstance(order, (str, unicode)):
order = order.strip().split()
return self(__post__ = lambda meta, value: [value.get(key) for key in order])
def __add__(self, ls):
if isinstance(ls, Node):
ls = ls._value
if not isinstance(ls, (list, tuple)):
raise TypeError('Cannot append node with non-array data %s' % (type(ls),))
orig = self._value
if orig is None:
orig = ()
if not isinstance(orig, (list, tuple)):
raise TypeError('Cannot append non-array node %s' % (type(self._value),))
return Node(tuple(orig) + tuple(ls))
def __lshift__(self, right):
if isinstance(right, Node):
right = right._value
if not isinstance(right, dict):
raise TypeError('Cannot extend node with non-dict data %s' % (type(right),))
return self(**right)
conf = Node(None)
def array(*args):
return Node(args)
def run(path, builtins=None):
import imp
import runpy
conf_builtins = {
'conf': conf,
'array': array,
}
if builtins is not None:
conf_builtins.update(builtins)
def conf_load(path):
result_dict = runpy.run_path(path, init_globals=conf_builtins)
mod = imp.new_module(path)
for k, v in result_dict.iteritems():
if k.startswith('_'):
continue
setattr(mod, k, v)
return mod
conf_builtins['load'] = conf_load
result = runpy.run_path(path, init_globals=conf_builtins)
output = {}
for k, v in result.iteritems():
if isinstance(v, Node):
output[k] = v.execute()
return output
if __name__ == '__main__':
import json
import sys
ret = run(sys.argv[1])['CONFIG']
print json.dumps(ret)
| Python | 0 |
5347040b86f02a0abec4da5c3060b094908bb9b5 | Simplify argument handling logic. | wpcr.py | wpcr.py | #!/usr/bin/python
import numpy
import scipy.signal
tau = numpy.pi * 2
max_samples = 1000000
debug = False
# determine the clock frequency
# input: magnitude spectrum of clock signal (numpy array)
# output: FFT bin number of clock frequency
def find_clock_frequency(spectrum):
maxima = scipy.signal.argrelextrema(spectrum, numpy.greater_equal)[0]
while maxima[0] < 2:
maxima = maxima[1:]
if maxima.any():
threshold = max(spectrum[2:-1])*0.8
indices_above_threshold = numpy.argwhere(spectrum[maxima] > threshold)
return maxima[indices_above_threshold[0]]
else:
return 0
def midpoint(a):
mean_a = numpy.mean(a)
mean_a_greater = numpy.ma.masked_greater(a, mean_a)
high = numpy.ma.median(mean_a_greater)
mean_a_less_or_equal = numpy.ma.masked_array(a, ~mean_a_greater.mask)
low = numpy.ma.median(mean_a_less_or_equal)
return (high + low) / 2
# whole packet clock recovery
# input: real valued NRZ-like waveform (array, tuple, or list)
# must have at least 2 samples per symbol
# must have at least 2 symbol transitions
# output: list of symbols
def wpcr(a):
if len(a) < 4:
return []
b = a > midpoint(a)
d = numpy.diff(b)**2
if len(numpy.argwhere(d > 0)) < 2:
return []
f = scipy.fft(d, len(a))
p = find_clock_frequency(abs(f))
if p == 0:
return []
cycles_per_sample = (p*1.0)/len(f)
clock_phase = 0.5 + numpy.angle(f[p])/(tau)
if clock_phase <= 0.5:
clock_phase += 1
symbols = []
for i in range(len(a)):
if clock_phase >= 1:
clock_phase -= 1
symbols.append(a[i])
clock_phase += cycles_per_sample
if debug:
print("peak frequency index: %d / %d" % (p, len(f)))
print("samples per symbol: %f" % (1.0/cycles_per_sample))
print("clock cycles per sample: %f" % (cycles_per_sample))
print("clock phase in cycles between 1st and 2nd samples: %f" % (clock_phase))
print("clock phase in cycles at 1st sample: %f" % (clock_phase - cycles_per_sample/2))
print("symbol count: %d" % (len(symbols)))
return symbols
# convert soft symbols into bits (assuming binary symbols)
def slice_bits(symbols):
bits=[]
for element in symbols:
if element >= numpy.average(symbols):
bits.append(1)
else:
bits.append(0)
return bits
def read_from_stdin():
return numpy.frombuffer(sys.stdin.buffer.read(), dtype=numpy.float32)
# If called directly from command line, take input file (or stdin) as a stream
# of floats and print binary symbols found therein.
if __name__ == '__main__':
import sys
debug = True
if len(sys.argv) > 1 and sys.argv[1] != '-':
samples = numpy.fromfile(sys.argv[1], dtype=numpy.float32)
else:
samples = read_from_stdin()
symbols=wpcr(samples)
bits=slice_bits(symbols)
print(bits)
| #!/usr/bin/python
import numpy
import scipy.signal
tau = numpy.pi * 2
max_samples = 1000000
debug = False
# determine the clock frequency
# input: magnitude spectrum of clock signal (numpy array)
# output: FFT bin number of clock frequency
def find_clock_frequency(spectrum):
maxima = scipy.signal.argrelextrema(spectrum, numpy.greater_equal)[0]
while maxima[0] < 2:
maxima = maxima[1:]
if maxima.any():
threshold = max(spectrum[2:-1])*0.8
indices_above_threshold = numpy.argwhere(spectrum[maxima] > threshold)
return maxima[indices_above_threshold[0]]
else:
return 0
def midpoint(a):
mean_a = numpy.mean(a)
mean_a_greater = numpy.ma.masked_greater(a, mean_a)
high = numpy.ma.median(mean_a_greater)
mean_a_less_or_equal = numpy.ma.masked_array(a, ~mean_a_greater.mask)
low = numpy.ma.median(mean_a_less_or_equal)
return (high + low) / 2
# whole packet clock recovery
# input: real valued NRZ-like waveform (array, tuple, or list)
# must have at least 2 samples per symbol
# must have at least 2 symbol transitions
# output: list of symbols
def wpcr(a):
if len(a) < 4:
return []
b = a > midpoint(a)
d = numpy.diff(b)**2
if len(numpy.argwhere(d > 0)) < 2:
return []
f = scipy.fft(d, len(a))
p = find_clock_frequency(abs(f))
if p == 0:
return []
cycles_per_sample = (p*1.0)/len(f)
clock_phase = 0.5 + numpy.angle(f[p])/(tau)
if clock_phase <= 0.5:
clock_phase += 1
symbols = []
for i in range(len(a)):
if clock_phase >= 1:
clock_phase -= 1
symbols.append(a[i])
clock_phase += cycles_per_sample
if debug:
print("peak frequency index: %d / %d" % (p, len(f)))
print("samples per symbol: %f" % (1.0/cycles_per_sample))
print("clock cycles per sample: %f" % (cycles_per_sample))
print("clock phase in cycles between 1st and 2nd samples: %f" % (clock_phase))
print("clock phase in cycles at 1st sample: %f" % (clock_phase - cycles_per_sample/2))
print("symbol count: %d" % (len(symbols)))
return symbols
# convert soft symbols into bits (assuming binary symbols)
def slice_bits(symbols):
bits=[]
for element in symbols:
if element >= numpy.average(symbols):
bits.append(1)
else:
bits.append(0)
return bits
def read_from_stdin():
return numpy.frombuffer(sys.stdin.buffer.read(), dtype=numpy.float32)
# If called directly from command line, take input file (or stdin) as a stream
# of floats and print binary symbols found therein.
if __name__ == '__main__':
import sys
debug = True
if len(sys.argv) > 1:
if sys.argv[1] == '-':
samples = read_from_stdin()
else:
samples = numpy.fromfile(sys.argv[1], dtype=numpy.float32)
else:
samples = read_from_stdin()
symbols=wpcr(samples)
bits=slice_bits(symbols)
print(bits)
| Python | 0.000013 |
3478bf108ce6992239c638e6e662a6e53204ae46 | Update wsgi.py for port | wsgi.py | wsgi.py | from app import create_app
application = create_app()
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
application.run(host='0.0.0.0', port=port) | from app import create_app
application = create_app()
if __name__ == '__main__':
application.run() | Python | 0 |
6d643c1f4fca74e66513d0461fc358bb1dd21349 | add method to parse out [xml-handlers] section in process.cfg | lib/config_parser.py | lib/config_parser.py | from ConfigParser import ConfigParser
defaults = {'parse': 'defaultparse',
'clean': 'True',
'consolidate': 'True',
'datadir': '/data/patentdata/patents/2013',
'dataregex': 'ipg\d{6}.xml',
'years': None,
'downloaddir' : None}
def extract_process_options(handler):
    """Read the high-level [process] section from the given ConfigParser.

    Returns a dict with the `parse` section name, the boolean `clean` and
    `consolidate` flags (true only for the literal string 'True'), and the
    `outputdir` path.
    """
    return {
        'parse': handler.get('process', 'parse'),
        'clean': handler.get('process', 'clean') == 'True',
        'consolidate': handler.get('process', 'consolidate') == 'True',
        'outputdir': handler.get('process', 'outputdir'),
    }
def extract_parse_options(handler, section):
    """Read the parsing options from the named parse section.

    When both `years` and `downloaddir` are set, the download directory
    replaces `datadir` as the source of input files.
    """
    options = {
        'datadir': handler.get(section, 'datadir'),
        'dataregex': handler.get(section, 'dataregex'),
        'years': handler.get(section, 'years'),
        'downloaddir': handler.get(section, 'downloaddir'),
    }
    if options['years'] and options['downloaddir']:
        options['datadir'] = options['downloaddir']
    return options
def get_config_options(configfile):
    """
    Takes in a filepath to a configuration file, returns
    two dicts representing the process and parse configuration options.
    See `process.cfg` for explanation of the options.
    """
    handler = ConfigParser(defaults)
    handler.read(configfile)
    process_config = extract_process_options(handler)
    # The [process] section names which parse section to read next.
    parse_config = extract_parse_options(handler, process_config['parse'])
    return process_config, parse_config
def get_year_list(yearstring):
    """Expand a comma-separated year spec into a list of year integers.

    Accepts single years ('2001'), inclusive ranges ('2001-2003'), and any
    comma-joined mix of the two; the literal token 'default' is passed
    through unchanged.
    """
    years = []
    for piece in yearstring.split(','):
        if piece == 'default':
            years.append('default')
        else:
            bounds = piece.split('-')
            first = int(bounds[0])
            if len(bounds) > 1:
                last = int(bounds[1])
            else:
                last = first
            years.extend(range(first, last + 1))
    return years
def get_xml_handlers(configfile):
    """
    Called by parse.py to generate a lookup dictionary for which parser should
    be used for a given file. Imports will be handled in `parse.py`.

    Returns a dict mapping each year (or 'default') from the [xml-handlers]
    section to its handler module name.
    """
    # Fix: the original reused the name `handler` for both the parser and the
    # loop's value variable, shadowing the parser mid-iteration; use distinct
    # names so the code cannot break if the loop body ever touches the parser.
    parser = ConfigParser()
    parser.read(configfile)
    xmlhandlers = {}
    for yearrange, handler in parser.items('xml-handlers'):
        for year in get_year_list(yearrange):
            xmlhandlers[year] = handler
    return xmlhandlers
| from ConfigParser import ConfigParser
defaults = {'parse': 'defaultparse',
'clean': 'True',
'consolidate': 'True',
'datadir': '/data/patentdata/patents/2013',
'dataregex': 'ipg\d{6}.xml',
'years': None,
'downloaddir' : None}
def extract_process_options(handler):
"""
Extracts the high level options from the [process] section
of the configuration file. Returns a dictionary of the options
"""
result = {}
result['parse'] = handler.get('process','parse')
result['clean'] = handler.get('process','clean') == 'True'
result['consolidate'] = handler.get('process','consolidate') == 'True'
result['outputdir'] = handler.get('process','outputdir')
return result
def extract_parse_options(handler, section):
"""
Extracts the specific parsing options from the parse section
as given by the [parse] config option in the [process] section
"""
options = {}
options['datadir'] = handler.get(section,'datadir')
options['dataregex'] = handler.get(section,'dataregex')
options['years'] = handler.get(section,'years')
options['downloaddir'] = handler.get(section,'downloaddir')
if options['years'] and options['downloaddir']:
options['datadir'] = options['downloaddir']
return options
def get_config_options(configfile):
"""
Takes in a filepath to a configuration file, returns
two dicts representing the process and parse configuration options.
See `process.cfg` for explanation of the optiosn
"""
handler = ConfigParser(defaults)
handler.read(configfile)
process_config = extract_process_options(handler)
parse_config = extract_parse_options(handler, process_config['parse'])
return process_config, parse_config
| Python | 0 |
cf0f7f129bb54c70f60e19e2ec9d82a67f430aaf | replace urllib2 to requests lib | coti.py | coti.py | #!/usr/bin/python
import json
import urllib2
import requests
from bs4 import BeautifulSoup
from datetime import datetime
def chaco():
    """Scrape Cambios Chaco's USD buy/sell quotes.

    Returns (compra, venta) as integers; (0, 0) when the request fails.
    """
    try:
        soup = BeautifulSoup(
            requests.get('http://www.cambioschaco.com.py/php/imprimir_.php', timeout=8).text, "html.parser")
        compra = soup.find_all('tr')[3].contents[5].string[:5].replace('.', '')
        venta = soup.find_all('tr')[3].contents[7].string[:5].replace('.', '')
    except requests.RequestException:
        # Fix: with timeout=8 a slow site raises requests.Timeout, which the
        # old `except requests.ConnectionError` did not cover; RequestException
        # is the base of all requests errors, so every network failure now
        # falls back to (0, 0).
        # NOTE(review): markup changes (IndexError/AttributeError while
        # scraping) still propagate — confirm that is intended.
        compra, venta = 0, 0
    return int(compra), int(venta)
def maxi():
    """Scrape Maxicambios' USD buy/sell quotes.

    Returns (compra, venta) as integers; (0, 0) when the request fails.
    """
    try:
        soup = BeautifulSoup(
            requests.get('http://www.maxicambios.com.py/', timeout=8).text, "html.parser")
        compra = soup.find_all(class_='lineas1')[0].contents[
            7].string.replace('.', '')
        venta = soup.find_all(class_='lineas1')[0].contents[
            5].string.replace('.', '')
    except requests.RequestException:
        # Fix: catch the full requests error hierarchy (Timeout included),
        # not just ConnectionError, so any network failure yields (0, 0).
        compra, venta = 0, 0
    return int(compra), int(venta)
def alberdi():
    """Scrape Cambios Alberdi's USD buy/sell quotes.

    Returns (compra, venta) as integers; (0, 0) when the request fails.
    """
    try:
        soup = BeautifulSoup(
            requests.get('http://www.cambiosalberdi.com/', timeout=8).text, "html.parser")
        compra = soup.find_all(
            class_="span2 pagination-right")[0].string.replace('.', '')
        venta = soup.find_all(
            class_="span2 pagination-right")[1].string.replace('.', '')
    except requests.RequestException:
        # Fix: catch the full requests error hierarchy (Timeout included),
        # not just ConnectionError, so any network failure yields (0, 0).
        compra, venta = 0, 0
    return int(compra), int(venta)
def bcp():
    # Placeholder: BCP (central bank) rate scraper not implemented yet.
    pass
def create_json():
    """Scrape every exchange house and serialize the quotes as a JSON string.

    Each exchange contributes a {compra, venta} pair under 'dolarpy';
    'updated' records when the snapshot was generated.
    """
    mcompra, mventa = maxi()
    ccompra, cventa = chaco()
    acompra, aventa = alberdi()
    respjson = {
        'dolarpy': {
            'maxicambios': {
                'compra': mcompra,
                'venta': mventa
            },
            'cambioschaco': {
                'compra': ccompra,
                'venta': cventa
            },
            'cambiosalberdi': {
                'compra': acompra,
                'venta': aventa
            }
        },
        "updated": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    }
    return json.dumps(respjson, sort_keys=True, indent=4, separators=(',', ': '))
def get_output():
    """Return the cached quotes JSON previously written to /tmp/dolar.json."""
    with open('/tmp/dolar.json', 'r') as dolar_file:
        return dolar_file.read()
def write_output():
    """Regenerate the quotes JSON and cache it at /tmp/dolar.json."""
    response = create_json()
    with open('/tmp/dolar.json', 'w') as f:
        f.write(response)
# Module-level side effect: refresh the cached quotes on every run/import.
write_output()
| #!/usr/bin/python
import json
import urllib2
from bs4 import BeautifulSoup
from datetime import datetime
def chaco():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.cambioschaco.com.py/php/imprimir_.php').read(), "html.parser")
compra = soup.find_all('tr')[3].contents[5].string[:5].replace('.', '')
venta = soup.find_all('tr')[3].contents[7].string[:5].replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def maxi():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.maxicambios.com.py/').read(), "html.parser")
compra = soup.find_all(class_='lineas1')[0].contents[
7].string.replace('.', '')
venta = soup.find_all(class_='lineas1')[0].contents[
5].string.replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def alberdi():
try:
soup = BeautifulSoup(
urllib2.urlopen('http://www.cambiosalberdi.com/').read(), "html.parser")
compra = soup.find_all(
class_="span2 pagination-right")[0].string.replace('.', '')
venta = soup.find_all(
class_="span2 pagination-right")[1].string.replace('.', '')
except urllib2.URLError:
compra, venta = 0, 0
return int(compra), int(venta)
def bcp():
pass
def create_json():
mcompra, mventa = maxi()
ccompra, cventa = chaco()
acompra, aventa = alberdi()
respjson = {
'dolarpy': {
'maxicambios': {
'compra': mcompra,
'venta': mventa
},
'cambioschaco': {
'compra': ccompra,
'venta': cventa
},
'cambiosalberdi': {
'compra': acompra,
'venta': aventa
}
},
"updated": datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
return json.dumps(respjson, sort_keys=True, indent=4, separators=(',', ': '))
def get_output():
with open('/tmp/dolar.json', 'r') as f:
response = f.read()
return response
def write_output():
response = create_json()
with open('/tmp/dolar.json', 'w') as f:
f.write(response)
write_output()
| Python | 0.000205 |
4d5cc0dfc6f9f460cfc54dfebf2061428ae2ee97 | implement a removing of gitlab's objects | crud.py | crud.py | '''
generic CRUD operations for gitlab's objects
'''
import http
class Crud():
    """Generic CRUD helper bound to one of gitlab's REST resource paths."""

    def __init__(self, path):
        # Base REST path (e.g. 'projects') used for every request.
        self.path = path

    def byId(self, sysNam, id):
        """Get an object from system `sysNam` by its numeric id."""
        return http.get(sysNam, '%s/%d' % (self.path, id))

    def add(self, sysNam, data):
        """Add a new instance of the object on system `sysNam`."""
        return http.post(sysNam, self.path, data)

    def delete(self, sysNam, id):
        """Delete the instance with the given id from system `sysNam`."""
        return http.delete(sysNam, '%s/%d' % (self.path, id))
| '''
generic CRUD oparations for the gitlab's objects
'''
import http
class Crud():
def __init__(self, path):
self.path = path
'''
get an object by system's name and id
'''
def byId(self, sysNam, id):
return http.get(sysNam, '%s/%d' % (self.path, id))
'''
add a new instance of an object
'''
def add(self, sysNam, data):
return http.post(sysNam, self.path, data)
| Python | 0.999861 |
baa81fb776af4b6811bf434a75f808f0aeae056b | fix load watering-topic from config | main.py | main.py | import argparse
import json
import logging
import logging.config
import os
import paho.mqtt.client as mqtt
import yaml
from services.data_service import DataService
from services.watering_service import WateringService
from services.config_service import ConfigService
def load_args():
    """Parse the command line; --env selects the configuration environment."""
    # setup commandline argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument('--env')
    return parser.parse_args()
def setup_logging(default_level=logging.INFO):
    """Configure logging from config/logging.yml if present, else basicConfig.

    The YAML file is resolved relative to the current working directory,
    not the module's location.
    """
    path = os.path.join(os.getcwd(), 'config', 'logging.yml')
    if os.path.exists(path):
        # load from config
        with open(path, 'rt') as f:
            config = yaml.safe_load(f.read())
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)
def create_mqtt_client(config):
    """Create, configure and connect a paho MQTT client.

    `config` must provide 'host', 'port', 'user' and 'password';
    'user' may be None to connect without authentication.
    """
    client = mqtt.Client()
    client.on_connect = on_connect
    client.on_message = on_message
    user = config['user']
    if user is not None:
        # use authentication
        client.username_pw_set(user, config['password'])
    client.connect(config['host'], config['port'])
    return client
def handle_receive_sensor_values(payload):
    """Store a sensor reading and trigger watering when needed.

    `payload` is a UTF-8 JSON byte string expected to carry Temperature,
    Humidity and SoilMoisture fields.
    """
    # transform payload to JSON
    sensor_values = json.loads(payload.decode('utf-8'))
    try:
        temperature = int(sensor_values['Temperature'])
        humidity = int(sensor_values['Humidity'])
        soil_moisture = int(sensor_values['SoilMoisture'])
        sensors_id = data_service.save_sensor_values(temperature, humidity, soil_moisture)
        if sensors_id is not None:
            watering_milliseconds = watering_service.calculate_milliseconds(soil_moisture)
            # Durations of 200 ms or less are skipped — presumably too short
            # to be worth actuating; confirm the threshold.
            if watering_milliseconds > 200:
                watering(watering_milliseconds)
    except (ValueError, KeyError):
        # Fix: a payload missing one of the expected fields raises KeyError,
        # which previously escaped the handler; treat it like a conversion
        # error — log and drop the message.
        logger.error('convert sensor-values error', exc_info=True)
def handle_watering(payload):
    """Persist a watering event received on the watering topic.

    `payload` is expected to be the watering duration in milliseconds.
    """
    try:
        watering_milliseconds = int(payload)
        data_service.save_watering(watering_milliseconds)
    except ValueError:
        # Non-numeric payload: log and ignore the message.
        logger.error('convert watering-milliseconds error', exc_info=True)
def watering(milliseconds):
    """Publish a watering command (duration in ms) to the watering topic."""
    mqtt_client.publish(mqtt_config['topics']['watering'], milliseconds)
def on_connect(client, userdata, flags_dict, rc):
    """paho on_connect callback: log the result and (re)subscribe."""
    if rc != 0:
        logger.error('MQTT connection error: ' + str(rc))
        return
    logger.info('MQTT connected')
    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe(mqtt_config['topics']['generic'])
def on_message(client, userdata, msg):
    """paho on_message callback: dispatch the payload by topic."""
    logger.debug('receive message "%s": %s', msg.topic, str(msg.payload))
    if msg.topic == mqtt_config['topics']['sensors']: # sensor values
        handle_receive_sensor_values(msg.payload)
    if msg.topic == mqtt_config['topics']['watering']: # watering
        handle_watering(msg.payload)
# Module entry: wire the services together and run the MQTT loop forever.
args = load_args()
setup_logging()
logger = logging.getLogger(__name__)
logger.info('starting MQTT client')
try:
    config_service = ConfigService(args.env)
    mqtt_config = config_service.get_section('mqtt')
    mysql_config = config_service.get_section('mysql')
    watering_config = config_service.get_section('watering')
    data_service = DataService(mysql_config)
    watering_service = WateringService(watering_config)
    mqtt_client = create_mqtt_client(mqtt_config)
    # Blocks here, dispatching to on_connect/on_message until interrupted.
    mqtt_client.loop_forever()
except Exception as error:
    # Top-level boundary: log any startup or loop failure with traceback.
    logger.error('main error', exc_info=True)
| import argparse
import json
import logging
import logging.config
import os
import paho.mqtt.client as mqtt
import yaml
from services.data_service import DataService
from services.watering_service import WateringService
from services.config_service import ConfigService
def load_args():
# setup commandline argument parser
parser = argparse.ArgumentParser()
parser.add_argument('--env')
return parser.parse_args()
def setup_logging(default_level=logging.INFO):
path = os.path.join(os.getcwd(), 'config', 'logging.yml')
if os.path.exists(path):
# load from config
with open(path, 'rt') as f:
config = yaml.safe_load(f.read())
logging.config.dictConfig(config)
else:
logging.basicConfig(level=default_level)
def create_mqtt_client(config):
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
user = config['user']
if user is not None:
# use authentication
client.username_pw_set(user, config['password'])
client.connect(config['host'], config['port'])
return client
def handle_receive_sensor_values(payload):
# transform payload to JSON
sensor_values = json.loads(payload.decode('utf-8'))
try:
temperature = int(sensor_values['Temperature'])
humidity = int(sensor_values['Humidity'])
soil_moisture = int(sensor_values['SoilMoisture'])
sensors_id = data_service.save_sensor_values(temperature, humidity, soil_moisture)
if sensors_id is not None:
watering_milliseconds = watering_service.calculate_milliseconds(soil_moisture)
if watering_milliseconds > 200:
watering(watering_milliseconds)
except ValueError:
logger.error('convert sensor-values error', exc_info=True)
def handle_watering(payload):
try:
watering_milliseconds = int(payload)
data_service.save_watering(watering_milliseconds)
except ValueError:
logger.error('convert watering-milliseconds error', exc_info=True)
def watering(milliseconds):
mqtt_client.publish(mqtt_config['topics']['watering'], milliseconds)
def on_connect(client, userdata, flags_dict, rc):
if rc != 0:
logger.error('MQTT connection error: ' + str(rc))
return
logger.info('MQTT connected')
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe(mqtt_config['topics']['generic'])
def on_message(client, userdata, msg):
logger.debug('receive message "%s": %s', msg.topic, str(msg.payload))
if msg.topic == mqtt_config['topics']['sensors']: # sensor values
handle_receive_sensor_values(msg.payload)
if msg.topic == mqtt_config['topic']['watering']: # watering
handle_watering(msg.payload)
args = load_args()
setup_logging()
logger = logging.getLogger(__name__)
logger.info('starting MQTT client')
try:
config_service = ConfigService(args.env)
mqtt_config = config_service.get_section('mqtt')
mysql_config = config_service.get_section('mysql')
watering_config = config_service.get_section('watering')
data_service = DataService(mysql_config)
watering_service = WateringService(watering_config)
mqtt_client = create_mqtt_client(mqtt_config)
mqtt_client.loop_forever()
except Exception as error:
logger.error('main error', exc_info=True)
| Python | 0.000001 |
b694436d4d8b6ee0b4b4a8078e0b34f779b17751 | Set a nice app-icon | main.py | main.py | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <apaku@gmx.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from jenkinstray.initsip import setupSipApi
setupSipApi()
from PyQt4 import QtGui
from jenkinstray.gui.jenkinstray import JenkinsTray
from jenkinstray import rcc_jenkinstray
def main(args):
    """Create the Qt application and tray icon, then run the event loop.

    Returns the Qt event loop's exit code.  `app` and `tray` are kept as
    module globals — presumably so the objects outlive this call; confirm.
    """
    global app
    global tray
    app = QtGui.QApplication(args)
    app.setApplicationVersion("0.1")
    app.setApplicationName("Jenkins Tray")
    QtGui.QApplication.setWindowIcon(QtGui.QIcon(":///images/jenkinstray_success.png"))
    # Tray-only app: closing a window must not quit the whole process.
    QtGui.QApplication.setQuitOnLastWindowClosed(False)
    tray = JenkinsTray(app)
    return app.exec_()
if __name__ == '__main__':
sys.exit(main(sys.argv))
| # -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <apaku@gmx.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from jenkinstray.initsip import setupSipApi
setupSipApi()
from PyQt4 import QtGui
from jenkinstray.gui.jenkinstray import JenkinsTray
from jenkinstray import rcc_jenkinstray
def main(args):
global app
global tray
app = QtGui.QApplication(args)
app.setApplicationVersion("0.1")
app.setApplicationName("Jenkins Tray")
QtGui.QApplication.setQuitOnLastWindowClosed(False)
tray = JenkinsTray(app)
return app.exec_()
if __name__ == '__main__':
sys.exit(main(sys.argv))
| Python | 0 |
c5b7cf7cdd8a91162441a17f9d0b70db197249c0 | make main runnable | main.py | main.py | #!/usr/bin/env python3
from collaborator.http_server.http_server import entryPoint
if __name__ == '__main__':
entryPoint()
|
from collaborator.http_server.http_server import entryPoint
if __name__ == '__main__':
entryPoint()
| Python | 0.000063 |
0dce5a6524ebc5020991ab301cd0b080ad27ddf6 | Fix self prefix | main.py | main.py | #!/usr/bin/env python3
import asyncio
from datetime import datetime
import logging
import lzma
from pathlib import Path
import os
import sys
import tarfile
from discord.ext.commands import when_mentioned_or
import yaml
from bot import BeattieBot
try:
import uvloop
except ImportError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open('config/config.yaml') as file:
config = yaml.load(file)
self_bot = 'self' in sys.argv
debug = 'debug' in sys.argv
loop = asyncio.get_event_loop()
if self_bot:
prefixes = [config['self_prefix']]
token = config['self']
elif config['debug'] or debug:
prefixes = [config['test_prefix']]
token = config['test_token']
else:
prefixes = config['prefixes']
token = config['token']
bot = BeattieBot(when_mentioned_or(*prefixes), self_bot=self_bot)
logger = logging.getLogger('discord')
if self_bot:
logger.setLevel(logging.CRITICAL)
else:
old_logs = Path('.').glob('discord*.log')
logname = 'logs.tar'
if os.path.exists(logname):
mode = 'a'
else:
mode = 'w'
with tarfile.open(logname, mode) as tar:
for log in old_logs:
with open(log, 'rb') as fp:
data = lzma.compress(fp.read())
name = f'{log.name}.xz'
with open(name, 'wb') as fp:
fp.write(data)
tar.add(name)
os.remove(name)
log.unlink()
logger.setLevel(logging.DEBUG)
now = datetime.utcnow()
filename = now.strftime('discord%Y%m%d%H%M.log')
handler = logging.FileHandler(
filename=filename, encoding='utf-8', mode='w')
handler.setFormatter(
logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
bot.logger = logger
extensions = [f'cogs.{f.stem}' for f in Path('cogs').glob('*.py')]
for extension in extensions:
try:
bot.load_extension(extension)
except Exception as e:
print(f'Failed to load extension {extension}\n{type(e).__name__}: {e}')
bot.run(token, bot=not self_bot)
| #!/usr/bin/env python3
import asyncio
from datetime import datetime
import logging
import lzma
from pathlib import Path
import os
import sys
import tarfile
from discord.ext.commands import when_mentioned_or
import yaml
from bot import BeattieBot
try:
import uvloop
except ImportError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open('config/config.yaml') as file:
config = yaml.load(file)
self_bot = 'self' in sys.argv
debug = 'debug' in sys.argv
loop = asyncio.get_event_loop()
if self_bot:
prefixes = config['self_prefix']
token = config['self']
elif config['debug'] or debug:
prefixes = [config['test_prefix']]
token = config['test_token']
else:
prefixes = config['prefixes']
token = config['token']
bot = BeattieBot(when_mentioned_or(*prefixes), self_bot=self_bot)
logger = logging.getLogger('discord')
if self_bot:
logger.setLevel(logging.CRITICAL)
else:
old_logs = Path('.').glob('discord*.log')
logname = 'logs.tar'
if os.path.exists(logname):
mode = 'a'
else:
mode = 'w'
with tarfile.open(logname, mode) as tar:
for log in old_logs:
with open(log, 'rb') as fp:
data = lzma.compress(fp.read())
name = f'{log.name}.xz'
with open(name, 'wb') as fp:
fp.write(data)
tar.add(name)
os.remove(name)
log.unlink()
logger.setLevel(logging.DEBUG)
now = datetime.utcnow()
filename = now.strftime('discord%Y%m%d%H%M.log')
handler = logging.FileHandler(
filename=filename, encoding='utf-8', mode='w')
handler.setFormatter(
logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
bot.logger = logger
extensions = [f'cogs.{f.stem}' for f in Path('cogs').glob('*.py')]
for extension in extensions:
try:
bot.load_extension(extension)
except Exception as e:
print(f'Failed to load extension {extension}\n{type(e).__name__}: {e}')
bot.run(token, bot=not self_bot)
| Python | 0.999974 |
08650ad083e9ca4790ea627e8ab0ae670f7ef60b | Add merge function to rd_models (#3464) | angr/knowledge_plugins/key_definitions/rd_model.py | angr/knowledge_plugins/key_definitions/rd_model.py | from typing import Dict, Tuple, Set, Optional, TYPE_CHECKING
from .uses import Uses
from .live_definitions import LiveDefinitions
if TYPE_CHECKING:
from angr.knowledge_plugins.key_definitions.definition import Definition
# TODO: Make ReachingDefinitionsModel serializable
class ReachingDefinitionsModel:
"""
Models the definitions, uses, and memory of a ReachingDefinitionState object
"""
def __init__(self, func_addr: Optional[int]=None):
self.func_addr = func_addr # do not use. only for pretty-printing
self.observed_results: Dict[Tuple[str, int, int], LiveDefinitions] = {}
self.all_definitions: Set['Definition'] = set()
self.all_uses = Uses()
def __repr__(self):
return "<RDModel{} with {} observations>".format(
"[func %#x]" if self.func_addr is not None else "",
len(self.observed_results),
)
def copy(self) -> "ReachingDefinitionsModel":
new = ReachingDefinitionsModel(self.func_addr)
new.observed_results = self.observed_results.copy()
new.all_definitions = self.all_definitions.copy()
new.all_uses = self.all_uses.copy()
return new
def merge(self, model: 'ReachingDefinitionsModel'):
for k, v in model.observed_results.items():
if k not in self.observed_results:
self.observed_results[k] = v
else:
merged, merge_occured = self.observed_results[k].merge(v)
if merge_occured:
self.observed_results[k] = merged
self.all_definitions.union(model.all_definitions)
self.all_uses.merge(model.all_uses)
| from typing import Dict, Tuple, Set, Optional, TYPE_CHECKING
from .uses import Uses
from .live_definitions import LiveDefinitions
if TYPE_CHECKING:
from angr.knowledge_plugins.key_definitions.definition import Definition
# TODO: Make ReachingDefinitionsModel serializable
class ReachingDefinitionsModel:
def __init__(self, func_addr: Optional[int]=None):
self.func_addr = func_addr # do not use. only for pretty-printing
self.observed_results: Dict[Tuple[str, int, int], LiveDefinitions] = {}
self.all_definitions: Set['Definition'] = set()
self.all_uses = Uses()
def __repr__(self):
return "<RDModel{} with {} observations>".format(
"[func %#x]" if self.func_addr is not None else "",
len(self.observed_results),
)
def copy(self) -> "ReachingDefinitionsModel":
new = ReachingDefinitionsModel(self.func_addr)
new.observed_results = self.observed_results.copy()
new.all_definitions = self.all_definitions.copy()
new.all_uses = self.all_uses.copy()
return new
| Python | 0 |
9eeae893b8e777fa5f50733e6580b731a00a5170 | kill useless plugin registration logic | tenderloin/listeners/message.py | tenderloin/listeners/message.py | import json
import logging
import time
import zmq
from collections import defaultdict
from zmq.eventloop import zmqstream
from tenderloin.listeners import plugin_data
PLUGIN_TIMEOUT = 300
class PluginData(object):
def __init__(self, name, uuid, fqdn, tags, data):
self.name = name
self.uuid = uuid
self.fqdn = fqdn
self.tags = tags
self.data = data
class MessageListener(object):
def __init__(self, address, port):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.setsockopt(zmq.LINGER, 0)
socket.setsockopt(zmq.HWM, 1000)
logging.info("Starting up message listener on %s:%d", address, port)
socket.bind("tcp://%s:%s" % (address, port))
self.stream = zmqstream.ZMQStream(socket)
def handle(self, message):
d = json.loads(message[0])
if d["data"]:
self.update_data(plugin_id=d["plugin_id"], payload=d["data"],
tags=d["tags"])
def update_data(self, plugin_id, payload, tags):
(plugin_name, uuid, fqdn) = plugin_id
now = int(time.time())
payload["received_at"] = now
logging.debug("Updating plugin: %s@%d" % (repr(plugin_id), now))
plugin_data.append(PluginData(name=plugin_name, uuid=uuid,
fqdn=fqdn, tags=tags, data=payload))
def consumer_loop(self):
self.stream.on_recv(self.handle)
| import json
import logging
import time
import zmq
from collections import defaultdict
from zmq.eventloop import zmqstream
from tenderloin.listeners import plugin_data
PLUGIN_TIMEOUT = 300
class PluginData(object):
def __init__(self, name, uuid, fqdn, tags, data):
self.name = name
self.uuid = uuid
self.fqdn = fqdn
self.tags = tags
self.data = data
class MessageListener(object):
def __init__(self, address, port):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.setsockopt(zmq.LINGER, 0)
socket.setsockopt(zmq.HWM, 1000)
logging.info("Starting up message listener on %s:%d", address, port)
socket.bind("tcp://%s:%s" % (address, port))
self.stream = zmqstream.ZMQStream(socket)
def find(self, f, seq):
"""Return first item in sequence where f(item) == True."""
"""h/t http://tomayko.com/writings/cleanest-python-find-in-list-function"""
for item in seq:
if f(item):
return item
def handle(self, message):
d = json.loads(message[0])
if d["data"]:
self.update_data(plugin_id=d["plugin_id"], payload=d["data"],
tags=d["tags"])
def update_data(self, plugin_id, payload, tags):
(plugin_name, uuid, fqdn) = plugin_id
now = int(time.time())
payload["received_at"] = now
self.register_plugin(plugin_id, tags)
if self.registered(plugin_id):
logging.debug("Updating plugin: %s@%d" % (repr(plugin_id), now))
plugin_data.append(PluginData(name=plugin_id[0], uuid=plugin_id[1],
fqdn=plugin_id[2], tags=tags,
data=payload))
else:
logging.info("Ignoring plugin data due to registration "
"collision: %s" % repr(plugin_id))
def consumer_loop(self):
self.stream.on_recv(self.handle)
def register_plugin(self, plugin_id, tags):
global PLUGIN_TIMEOUT
(plugin_name, uuid, fqdn) = plugin_id
now = time.time()
registered = self.registered(plugin_id)
if registered:
if registered == uuid:
if self.expired(plugin_id):
logging.info("Re-registering plugin due to expiry: %s@%d" %
(repr(plugin_id), now))
else:
logging.info("Plugin registration collision: %s@%d "
"[registered=%s]" %
(repr(plugin_id), now, registered))
else:
logging.info("Registering plugin: %s@%d [tags=%s]" %
(repr(plugin_id), now, repr(tags)))
def expired(self, plugin_id):
return self.find(lambda plugin:
plugin_id, plugin_data).data.get("received_at", 0) <\
time.time() - PLUGIN_TIMEOUT
def registered(self, plugin_id):
p = self.find(lambda plugin: (plugin.name, plugin.uuid, plugin.fqdn) ==
plugin_id, plugin_data)
if hasattr(p, 'uuid'):
return plugin_id[1] == p.uuid
else:
return True
| Python | 0 |
44537a6496b1b67511ea7008418b6d1a7a30fdf4 | move the resolve cache into TLS | claripy/result.py | claripy/result.py | import copy
import collections
import weakref
import threading
class Result(object):
def __init__(self, satness, model=None, approximation=False, backend_model=None):
self.sat = satness
self.model = model if model is not None else { }
self._tls = threading.local()
self._tls.backend_model = backend_model
self.approximation = approximation
self.eval_cache = { }
self.eval_n = { }
self.min_cache = { }
self.max_cache = { }
@property
def resolve_cache(self):
if not hasattr(self._tls, 'resolve_cache'):
self._tls.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
return self._tls.resolve_cache
@property
def backend_model(self):
try:
return self._tls.backend_model
except AttributeError:
return None
def branch(self):
r = Result(self.sat, copy.copy(self.model), backend_model=self._tls.backend_model)
r.eval_cache = dict(self.eval_cache)
r.eval_n = dict(self.eval_n)
r.min_cache = dict(self.min_cache)
r.max_cache = dict(self.max_cache)
self._tls.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary, { b:weakref.WeakKeyDictionary(c) for b,c in self.resolve_cache.items() })
return r
def __getstate__(self):
return ( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache )
def __setstate__(self, state):
( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache ) = state
self._tls = threading.local()
self._tls.backend_model = None
def downsize(self):
self._tls.backend_model = None
def UnsatResult(**kwargs):
return Result(False, **kwargs)
def SatResult(**kwargs):
return Result(True, **kwargs)
| import copy
import collections
import weakref
import threading
class Result(object):
def __init__(self, satness, model=None, approximation=False, backend_model=None):
self.sat = satness
self.model = model if model is not None else { }
self._tls = threading.local()
self._tls.backend_model = backend_model
self.approximation = approximation
self.eval_cache = { }
self.eval_n = { }
self.min_cache = { }
self.max_cache = { }
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
@property
def backend_model(self):
try:
return self._tls.backend_model
except AttributeError:
return None
def branch(self):
r = Result(self.sat, copy.copy(self.model), backend_model=self._tls.backend_model)
r.eval_cache = dict(self.eval_cache)
r.eval_n = dict(self.eval_n)
r.min_cache = dict(self.min_cache)
r.max_cache = dict(self.max_cache)
return r
def __getstate__(self):
return ( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache )
def __setstate__(self, state):
( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache ) = state
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
self._tls = threading.local()
self._tls.backend_model = None
def downsize(self):
self._tls.backend_model = None
def UnsatResult(**kwargs):
return Result(False, **kwargs)
def SatResult(**kwargs):
return Result(True, **kwargs)
| Python | 0.000001 |
ecdf23c53c34a3773e2ca10be2c445c01381a7b0 | on 64 bits python array.array("L").itemsize is 8 | classification.py | classification.py | from feature_extraction import FEATURE_DATATYPE
import numpy
import cv2
CLASS_DATATYPE= numpy.uint16
CLASS_SIZE= 1
CLASSES_DIRECTION= 0 #vertical - a classes COLUMN
BLANK_CLASS= chr(35) #marks unclassified elements
def classes_to_numpy( classes ):
'''given a list of unicode chars, transforms it into a numpy array'''
import array
#utf-32 starts with constant ''\xff\xfe\x00\x00', then has little endian 32 bits chars
#this assumes little endian architecture!
assert unichr(15).encode('utf-32')=='\xff\xfe\x00\x00\x0f\x00\x00\x00'
assert array.array("I").itemsize==4
int_classes= array.array( "I", "".join(classes).encode('utf-32')[4:])
assert len(int_classes) == len(classes)
classes= numpy.array( int_classes, dtype=CLASS_DATATYPE, ndmin=2) #each class in a column. numpy is strange :(
classes= classes if CLASSES_DIRECTION==1 else numpy.transpose(classes)
return classes
def classes_from_numpy(classes):
'''reverses classes_to_numpy'''
classes= classes if CLASSES_DIRECTION==0 else classes.tranpose()
classes= map(unichr, classes)
return classes
class Classifier( object ):
def train( self, features, classes ):
'''trains the classifier with the classified feature vectors'''
raise NotImplementedError()
@staticmethod
def _filter_unclassified( features, classes ):
classified= (classes != classes_to_numpy(BLANK_CLASS)).reshape(-1)
return features[classified], classes[classified]
def classify( self, features):
'''returns the classes of the feature vectors'''
raise NotImplementedError
class KNNClassifier( Classifier ):
def __init__(self, k=1, debug=False):
self.knn= cv2.KNearest()
self.k=k
self.debug= debug
def train( self, features, classes ):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
if CLASS_DATATYPE!=numpy.float32:
classes= numpy.asarray( classes, dtype=numpy.float32 )
features, classes= Classifier._filter_unclassified( features, classes )
self.knn.train( features, classes )
def classify( self, features):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
retval, result_classes, neigh_resp, dists= self.knn.find_nearest(features, k= 1)
return result_classes
| from feature_extraction import FEATURE_DATATYPE
import numpy
import cv2
CLASS_DATATYPE= numpy.uint16
CLASS_SIZE= 1
CLASSES_DIRECTION= 0 #vertical - a classes COLUMN
BLANK_CLASS= chr(35) #marks unclassified elements
def classes_to_numpy( classes ):
'''given a list of unicode chars, transforms it into a numpy array'''
import array
#utf-32 starts with constant ''\xff\xfe\x00\x00', then has little endian 32 bits chars
#this assumes little endian architecture!
assert unichr(15).encode('utf-32')=='\xff\xfe\x00\x00\x0f\x00\x00\x00'
int_classes= array.array( "L", "".join(classes).encode('utf-32')[4:])
assert len(int_classes) == len(classes)
classes= numpy.array( int_classes, dtype=CLASS_DATATYPE, ndmin=2) #each class in a column. numpy is strange :(
classes= classes if CLASSES_DIRECTION==1 else numpy.transpose(classes)
return classes
def classes_from_numpy(classes):
'''reverses classes_to_numpy'''
classes= classes if CLASSES_DIRECTION==0 else classes.tranpose()
classes= map(unichr, classes)
return classes
class Classifier( object ):
def train( self, features, classes ):
'''trains the classifier with the classified feature vectors'''
raise NotImplementedError()
@staticmethod
def _filter_unclassified( features, classes ):
classified= (classes != classes_to_numpy(BLANK_CLASS)).reshape(-1)
return features[classified], classes[classified]
def classify( self, features):
'''returns the classes of the feature vectors'''
raise NotImplementedError
class KNNClassifier( Classifier ):
def __init__(self, k=1, debug=False):
self.knn= cv2.KNearest()
self.k=k
self.debug= debug
def train( self, features, classes ):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
if CLASS_DATATYPE!=numpy.float32:
classes= numpy.asarray( classes, dtype=numpy.float32 )
features, classes= Classifier._filter_unclassified( features, classes )
self.knn.train( features, classes )
def classify( self, features):
if FEATURE_DATATYPE!=numpy.float32:
features= numpy.asarray( features, dtype=numpy.float32 )
retval, result_classes, neigh_resp, dists= self.knn.find_nearest(features, k= 1)
return result_classes
| Python | 0.999988 |
76648057b18055afc3724769aa9240eb477e4533 | Handle HJSON decode exception | main.py | main.py | """Usage: chronicler [-c CHRONICLE]
The Chronicler remembersβ¦
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.txt]
"""
import docopt
import hjson
if __name__ == '__main__':
options = docopt.docopt(__doc__)
try:
chronicle = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
chronicle = hjson.load(chronicle)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
print(chronicle)
| """Usage: chronicler [-c CHRONICLE]
The Chronicler remembersβ¦
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.txt]
"""
from docopt import docopt
import hjson
if __name__ == '__main__':
options = docopt(__doc__)
try:
chronicle = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
chronicle = hjson.load(chronicle)
except HjsonDecodeError:
print("This chronicle can't be deciphered.")
print(chronicle)
| Python | 0.000003 |
e80dce758a17c304fd938dda62f0a5e2e7d7bcec | change 1 | main.py | main.py |
import webapp2
import jinja2
import requests
import os
import sys
import time
import logging
import urllib2
import json
import re
from operator import itemgetter
from datetime import datetime
from google.appengine.ext import db
from webapp2_extras import sessions
from google.appengine.api import mail
#demo change1
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request!
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
def render(self, template, **kw):
self.response.out.write(render_str(template, **kw))
class ToNotify(db.Model):
email = db.StringProperty()
class Main(BaseHandler):
def get(self):
self.render('index.html')
def post(self):
email = self.request.get('email')
if email:
instance = ToNotify(key_name=email,email=email)
instance.put()
self.render('thankyou.html')
else:
self.render('index.html')
config = {}
config['webapp2_extras.sessions'] = {'secret_key': ' ','cookie_args':{'max_age':86400}}
app = webapp2.WSGIApplication([
('/',Main)
],config=config, debug=True)
|
import webapp2
import jinja2
import requests
import os
import sys
import time
import logging
import urllib2
import json
import re
from operator import itemgetter
from datetime import datetime
from google.appengine.ext import db
from webapp2_extras import sessions
from google.appengine.api import mail
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request!
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
def render(self, template, **kw):
self.response.out.write(render_str(template, **kw))
class ToNotify(db.Model):
email = db.StringProperty()
class Main(BaseHandler):
def get(self):
self.render('index.html')
def post(self):
email = self.request.get('email')
if email:
instance = ToNotify(key_name=email,email=email)
instance.put()
self.render('thankyou.html')
else:
self.render('index.html')
config = {}
config['webapp2_extras.sessions'] = {'secret_key': ' ','cookie_args':{'max_age':86400}}
app = webapp2.WSGIApplication([
('/',Main)
],config=config, debug=True)
| Python | 0.000005 |
db6203757d145923813c06b62ddf3739bac79991 | Update __init__.py | tendrl/commons/objects/cluster/__init__.py | tendrl/commons/objects/cluster/__init__.py | from tendrl.commons import objects
class Cluster(objects.BaseObject):
def __init__(self, integration_id=None, public_network=None,
cluster_network=None, node_configuration=None,
conf_overrides=None, node_identifier=None, sync_status=None,
last_sync=None, is_managed=False, *args, **kwargs):
super(Cluster, self).__init__(*args, **kwargs)
self.integration_id = integration_id
self.public_network = public_network
self.cluster_network = cluster_network
self.node_configuration = node_configuration
self.conf_overrides = conf_overrides
self.node_identifier = node_identifier
self.sync_status = sync_status
self.last_sync = last_sync
self.is_managed = is_managed
self.value = 'clusters/{0}'
def render(self):
self.value = self.value.format(
self.integration_id or NS.tendrl_context.integration_id
)
return super(Cluster, self).render()
| from tendrl.commons import objects
class Cluster(objects.BaseObject):
def __init__(self, integration_id=None, public_network=None,
cluster_network=None, node_configuration=None,
conf_overrides=None, node_identifier=None, sync_status=None,
last_sync=None, *args, **kwargs):
super(Cluster, self).__init__(*args, **kwargs)
self.integration_id = integration_id
self.public_network = public_network
self.cluster_network = cluster_network
self.node_configuration = node_configuration
self.conf_overrides = conf_overrides
self.node_identifier = node_identifier
self.sync_status = sync_status
self.last_sync = last_sync
self.value = 'clusters/{0}'
def render(self):
self.value = self.value.format(
self.integration_id or NS.tendrl_context.integration_id
)
return super(Cluster, self).render()
| Python | 0.000002 |
81943166d5b8c2606c1506bb1b6567fd0ce82282 | update check_dimension and webm supports | main.py | main.py | import os
import logging
from glob import glob
import youtube_dl
from telegram.ext import Updater, MessageHandler, Filters
from vid_utils import check_dimension
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
updater = Updater(token='TOKEN') # put here the bot's token
dispatcher = updater.dispatcher
ydl_opts = {
'restrictfilenames': True,
}
def download(bot, update):
for f in glob('*.mp4*') + glob('*.webm*'): # with glob it isn't possible to check multiple extension in one regex
os.remove(f) # remove old video(s)
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([update.message.text])
for f in glob('*.mp4*') + glob('*.webm*'): # if the video is bigger than 50MB split it
check_dimension(f)
break # check first file
for f in glob('*.mp4*') + glob('*.webm*'): # send document(s)
bot.send_document(chat_id=update.message.chat_id, document=open(f, 'rb'))
except Exception as e:
bot.sendMessage(chat_id=update.message.chat_id, text='Error: {}'.format(e))
logger.info(e)
download_handler = MessageHandler(Filters.text, download)
dispatcher.add_handler(download_handler)
updater.start_polling()
updater.idle()
| import os
import logging
from glob import glob
import youtube_dl
from telegram.ext import Updater, MessageHandler, Filters
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
updater = Updater(token='TOKEN') # put here the bot's token
dispatcher = updater.dispatcher
ydl_opts = {
'restrictfilenames': True,
}
def download(bot, update):
for f in glob('*.mp4'):
os.remove(f) # remove old video(s)
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([update.message.text])
for f in glob('*.mp4'): # TODO this way for find the file(s) IMHO is not elegant
bot.send_document(chat_id=update.message.chat_id, document=open(f, 'rb'))
except Exception as e:
bot.sendMessage(chat_id=update.message.chat_id, text='Error')
logger.info(e)
download_handler = MessageHandler(Filters.text, download)
dispatcher.add_handler(download_handler)
updater.start_polling()
updater.idle()
| Python | 0 |
d25f860c56e4e51203574ee8da4297c7aaa6195a | Bump version to 0.1.3 | td_biblio/__init__.py | td_biblio/__init__.py | """TailorDev Biblio
Scientific bibliography management with Django.
"""
__version__ = '0.1.3'
| """TailorDev Biblio
Scientific bibliography management with Django.
"""
__version__ = '0.1.2'
| Python | 0.000001 |
c749e5e4c47a9a63dc0e44bbc8df3b103dc1db7c | update to screen manager | main.py | main.py | '''
# Author: Aaron Gruneklee, Michael Asquith
# Created: 2014.12.08
# Last Modified: 2014.12.19
this is the main controler class it is responsible for displaying the 3 views and
controls the 5 input buttons.
'''
from kivy import require
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.properties import ListProperty, ObjectProperty
# from codeblock import CodeBlock
from functionlist import FunctionList
from kivy.uix.screenmanager import ScreenManager, Screen
from programmerview import ProgrammerView
from robotcontroller import RobotController
#Kivy version check
require('1.8.0')
__version__ = '0.1'
class FunctionalProgrammerWidget(BoxLayout):
# set all the properties of the Controller
text_colour = ListProperty([1, 0, 0, 1])
# current_view = ObjectProperty(None)
# programmer_view = ObjectProperty(None)
# run_View = ObjectProperty(None)
# debug_View = ObjectProperty(None)
save_Buttton = ObjectProperty(None)
load_Buttton = ObjectProperty(None)
run_Buttton = ObjectProperty(None)
debug_Buttton = ObjectProperty(None)
exit_Buttton = ObjectProperty(None)
def __init__(self, **kwargs):
super(FunctionalProgrammerWidget, self).__init__(**kwargs)
self.current_view = ScreenManager()
self.pv = ProgrammerView(name='pv')
self.current_view.add_widget(pv)
self.rv = RunScreen(name='rv')
self.current_view.add_widget(rv)
self.current_view.current = 'pv'
self.add
''' saves user program to user defined location '''
def save_Button(self):
self.ids.save_Button.text = 'not yet implemented'
''' loads a user program from a user defined location '''
def load_Button(self):
self.ids.load_Button.text = 'not yet implemented'
''' displays maze and robot traversing through the maze '''
def run_Button(self):
program = open('user_file', 'r').read()
if 'x' not in program:
self.ids.run_Button.text = program
self.current_view.current = 'rv'
self.rv.run_code
# run_robot = RobotController()
# run_robot.executeProgram('user_file')
else:
self.ids.run_Button.text = 'variables not defined'
def reset_Button(self):
self.ids.reset_button.text = 'not yet implemented'
''' displays maze and robot traversing through the maze alongside
the user program as it steps through the code'''
def debug_Button(self):
# current_view = debug_View
self.ids.debug_Button.text = 'not yet implemented'
class FPWScreenManager(ScreenManager):
pass
class FunctionalProgrammerApp(App):
def build(self):
return FunctionalProgrammerWidget()
if __name__ == '__main__':
# import pdb; pdb.set_trace()
FunctionalProgrammerApp().run()
| '''
# Author: Aaron Gruneklee, Michael Asquith
# Created: 2014.12.08
# Last Modified: 2014.12.19
this is the main controler class it is responsible for displaying the 3 views and
controls the 5 input buttons.
'''
from kivy import require
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.properties import ListProperty, ObjectProperty
# from codeblock import CodeBlock
from functionlist import FunctionList
from kivy.uix.screenmanager import ScreenManager, Screen
from programmerview import ProgrammerView
from robotcontroller import RobotController
#Kivy version check
require('1.8.0')
__version__ = '0.1'
class FunctionalProgrammerWidget(BoxLayout):
    """Root widget: hosts the three views and the five control buttons."""

    # Kivy properties bound from the .kv file; the names (including the
    # historical 'Buttton' spelling) must match the kv ids, so keep them.
    text_colour = ListProperty([1, 0, 0, 1])
    # current_view = ObjectProperty(None)
    # programmer_view = ObjectProperty(None)
    # run_View = ObjectProperty(None)
    # debug_View = ObjectProperty(None)
    save_Buttton = ObjectProperty(None)
    load_Buttton = ObjectProperty(None)
    run_Buttton = ObjectProperty(None)
    debug_Buttton = ObjectProperty(None)
    exit_Buttton = ObjectProperty(None)

    def __init__(self, **kwargs):
        super(FunctionalProgrammerWidget, self).__init__(**kwargs)

    def save_Button(self):
        """Save the user program to a user defined location."""
        self.ids.save_Button.text = 'not yet implemented'

    def load_Button(self):
        """Load a user program from a user defined location."""
        self.ids.load_Button.text = 'not yet implemented'

    def run_Button(self):
        """Display the maze and the robot traversing it."""
        # BUG FIX: the original `open('user_file', 'r').read()` leaked the
        # file handle; `with` guarantees it is closed.
        with open('user_file', 'r') as program_file:
            program = program_file.read()
        if 'x' not in program:
            self.ids.run_Button.text = program
            run_robot = RobotController()
            run_robot.executeProgram('user_file')
        else:
            self.ids.run_Button.text = 'variables not defined'

    def reset_Button(self):
        self.ids.reset_button.text = 'not yet implemented'

    def debug_Button(self):
        """Display the maze/robot alongside the user program while it
        steps through the code (not implemented yet)."""
        # current_view = debug_View
        self.ids.debug_Button.text = 'not yet implemented'
class FPWScreenManager(ScreenManager):
    # Screen manager for switching between the app's views; no custom
    # behaviour yet.
    pass
class FunctionalProgrammerApp(App):
    def build(self):
        # Root widget of the Kivy application.
        return FunctionalProgrammerWidget()
if __name__ == '__main__':
    # import pdb; pdb.set_trace()
    # Launch the Kivy application.
    FunctionalProgrammerApp().run()
| Python | 0 |
f06f81251d7c8d1a12e88d54c1856756979edb7d | Fix tests for Django 1.5 | django_socketio/example_project/settings.py | django_socketio/example_project/settings.py |
import os, sys

# Absolute path of the directory containing this settings module.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

# Helper: build a path relative to the project root.
full_path = lambda *parts: os.path.join(PROJECT_ROOT, *parts)

# Make the django_socketio package (two levels up) importable.
example_path = full_path("..", "..")
if example_path not in sys.path:
    sys.path.append(example_path)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = ()
MANAGERS = ADMINS

# Development database: local SQLite file.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'dev.db',
    }
}

SECRET_KEY = 'i_!&$f5@^%y*i_qa$*o&0$3q*1dcv^@_-l2po8-%_$_gwo+i-l'

TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
#LOGIN_URL = "/admin/"

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'django_socketio',
    'chat',
)
|
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
full_path = lambda *parts: os.path.join(PROJECT_ROOT, *parts)
example_path = full_path("..", "..")
if example_path not in sys.path:
sys.path.append(example_path)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.db',
}
}
SECRET_KEY = 'i_!&$f5@^%y*i_qa$*o&0$3q*1dcv^@_-l2po8-%_$_gwo+i-l'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
LOGIN_URL = "/admin/"
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django_socketio',
'chat',
)
| Python | 0.00001 |
3fbbba8dae5c97cedf414eea8a39482c01a269e6 | Add `debug=True` to avoid restarting the server after each change | main.py | main.py |
import io
import json
import logging
import os
import pdb
import traceback
from logging import config
from functools import wraps
from flask import (
Flask,
render_template,
request,
send_file,
send_from_directory,
)
app = Flask(__name__)
config.fileConfig('logger.conf')
logger = logging.getLogger('video_annotation')
@app.route('/', methods=['GET'])
def home():
    # Landing page: render the login form.
    return render_template('login.html')


@app.route('/js/<path:path>')
def send_js(path):
    # Serve static JavaScript assets from www/js.
    return send_from_directory('www/js', path)


@app.route('/css/<path:path>')
def send_css(path):
    # Serve static CSS assets from www/css.
    return send_from_directory('www/css', path)
def _error_as_json(ex, status=500, trace=True):
    """Log *ex* (optionally with its traceback) and package it as a
    (json_body, http_status) pair suitable for a Flask response."""
    logger.error(" -- Got exception in the tagger backend!")
    logger.error(" -- %r" % ex)
    if trace:
        logger.error(traceback.format_exc())
    payload = {'error': "{}".format(ex)}
    return json.dumps(payload), status
if __name__ == '__main__':
    # Bind on all interfaces; debug=True enables the reloader so the
    # server restarts automatically on code changes.
    app.run('0.0.0.0', port=5152, debug=True)
|
import io
import json
import logging
import os
import pdb
import traceback
from logging import config
from functools import wraps
from flask import (
Flask,
render_template,
request,
send_file,
send_from_directory,
)
app = Flask(__name__)
config.fileConfig('logger.conf')
logger = logging.getLogger('video_annotation')
@app.route('/', methods=['GET'])
def home():
return render_template('login.html')
@app.route('/js/<path:path>')
def send_js(path):
return send_from_directory('www/js', path)
@app.route('/css/<path:path>')
def send_css(path):
return send_from_directory('www/css', path)
def _error_as_json(ex, status=500, trace=True):
logger.error(" -- Got exception in the tagger backend!")
logger.error(" -- %r" % ex)
if trace:
logger.error(traceback.format_exc())
return json.dumps({'error': "{}".format(ex)}), status
if __name__ == '__main__':
app.run('0.0.0.0', port=5152)
| Python | 0.000003 |
466eabcb57c590dce1342710c8ae331899046417 | Simplify postwork | main.py | main.py | import csv
import importlib
import logging
import operator
import os
import time
import sys
from functools import reduce
from datetime import datetime
from dev.logger import logger_setup
from helpers.config import Config
from helpers.data_saver import DataSaver
from helpers.module_loader import ModuleLoader
def init_loggers():
    """Configure the application's log files and return the app logger.

    Two files are created under APP_OUTPUT_DIR/APP_LOG_DIR: a debug log
    for this application and a capture of grab's internal loggers.
    """
    logger_setup(
        os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_DEBUG_FILE')),
        ['ddd_site_parse'], True)

    logger_setup(
        os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_GRAB_FILE')), [
            'grab.document',
            'grab.spider.base',
            'grab.spider.task',
            # BUG FIX: a missing trailing comma here used to merge the next
            # two names into the bogus 'grab.spider.base.verbosegrab.proxylist'.
            'grab.spider.base.verbose',
            'grab.proxylist',
            'grab.stat',
            'grab.script.crawl'
        ]
    )

    logger = logging.getLogger('ddd_site_parse')
    logger.addHandler(logging.NullHandler())

    return logger
def process_stats(stats):
    """Render a per-status-code summary, sorted by count (descending).

    Args:
        stats: mapping of status code -> occurrence count.

    Returns:
        One line per code, e.g. 'Code: 200, count: 75.0% (3 / 4)', or an
        empty string when *stats* is empty.
    """
    if not stats:
        return ''

    # sum() replaces the original reduce(lambda a, b: a + b, ...).
    total = sum(stats.values())

    # Collect lines and join once instead of quadratic string +=.
    lines = []
    for code, count in sorted(stats.items(), key=operator.itemgetter(1), reverse=True):
        lines.append('Code: {}, count: {}% ({} / {})\n'.format(code, count / total * 100, count, total))

    return ''.join(lines)
def fix_dirs():
    """Ensure the output directory and its log subdirectory exist."""
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists()/os.makedirs() pair; creating the nested log dir
    # also creates APP_OUTPUT_DIR itself when missing.
    log_dir = os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'))
    os.makedirs(log_dir, exist_ok=True)
def load_config():
    """Load the config named by the first CLI argument.

    Returns True on success, False when no argument was given.
    """
    if len(sys.argv) <= 1:
        return False
    config_dir = os.path.join(os.path.dirname(__file__), 'config')
    Config.load(config_dir, sys.argv[1])
    return True
def main():
    """Entry point: configure, run the spider and persist its results."""
    # load config (exit code 2 signals a CLI usage error)
    if not load_config():
        exit(2)

    # output dirs
    fix_dirs()

    # log
    logger = init_loggers()
    logger.info(' --- ')
    logger.info('Start app...')

    # output category for detect save mode
    # need for use after parse, but read before for prevent useless parse (if will errors)
    cat = Config.get('APP_OUTPUT_CAT')

    # parser loader: the concrete spider lives in d_parser.<APP_PARSER>
    loader = ModuleLoader('d_parser.{}'.format(Config.get('APP_PARSER')))
    d_spider = loader.get('DSpider')

    # main
    try:
        # bot parser
        logger.info('{} :: Start...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
        threads_counter = int(Config.get('APP_THREAD_COUNT'))
        bot = d_spider(thread_number=threads_counter, try_limit=int(Config.get('APP_TRY_LIMIT')))
        bot.run()

        # post work: the spider implements its own post-processing hook
        if Config.get('APP_NEED_POST', ''):
            bot.d_post_work()

        # save output
        saver = DataSaver(bot.result, Config.get('APP_OUTPUT_DIR'), Config.get('APP_OUTPUT_ENC'))

        # single file
        if cat == '':
            saver.save()
        # separate categories
        else:
            saver.save_by_category(cat)

        logger.info('End with stats: \n{}'.format(process_stats(bot.status_counter)))

    except Exception as e:
        logger.fatal('App core fatal error: {}'.format(e))

    logger.info('{} :: End...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
if __name__ == '__main__':
    # Run only when executed as a script.
    main()
| import csv
import importlib
import logging
import operator
import os
import time
import sys
from functools import reduce
from datetime import datetime
from dev.logger import logger_setup
from helpers.config import Config
from helpers.data_saver import DataSaver
from helpers.module_loader import ModuleLoader
def init_loggers():
logger_setup(
os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_DEBUG_FILE')),
['ddd_site_parse'], True)
logger_setup(
os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'), Config.get('APP_LOG_GRAB_FILE')), [
'grab.document',
'grab.spider.base',
'grab.spider.task',
'grab.spider.base.verbose'
'grab.proxylist',
'grab.stat',
'grab.script.crawl'
]
)
logger = logging.getLogger('ddd_site_parse')
logger.addHandler(logging.NullHandler())
return logger
def process_stats(stats):
output = ''
if not stats:
return output
_stats = sorted(stats.items(), key=operator.itemgetter(1), reverse=True)
_max = reduce(lambda a, b: a+b, stats.values())
for row in _stats:
output += 'Code: {}, count: {}% ({} / {})\n'.format(row[0], row[1]/_max * 100, row[1], _max)
return output
def fix_dirs():
if not os.path.exists(Config.get('APP_OUTPUT_DIR')):
os.makedirs(Config.get('APP_OUTPUT_DIR'))
log_dir = os.path.join(Config.get('APP_OUTPUT_DIR'), Config.get('APP_LOG_DIR'))
if not os.path.exists(log_dir):
os.makedirs(log_dir)
def load_config():
if len(sys.argv) > 1:
Config.load(os.path.join(os.path.dirname(__file__), 'config'), sys.argv[1])
return True
return False
def main():
# load config
if not load_config():
exit(2)
# output dirs
fix_dirs()
# log
logger = init_loggers()
logger.info(' --- ')
logger.info('Start app...')
# output category for detect save mode
# need for use after parse, but read before for prevent useless parse (if will errors)
cat = Config.get('APP_OUTPUT_CAT')
# parser loader
loader = ModuleLoader('d_parser.{}'.format(Config.get('APP_PARSER')))
d_spider = loader.get('DSpider')
# load post-worker
need_post = Config.get('APP_NEED_POST', '')
if need_post == 'True':
d_post_work = loader.get('do_post_work')
else:
d_post_work = None
# main
try:
# bot parser
logger.info('{} :: Start...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
threads_counter = int(Config.get('APP_THREAD_COUNT'))
bot = d_spider(thread_number=threads_counter, try_limit=int(Config.get('APP_TRY_LIMIT')))
bot.run()
# post work
if need_post and d_post_work:
d_post_work()
# save output
saver = DataSaver(bot.result, Config.get('APP_OUTPUT_DIR'), Config.get('APP_OUTPUT_ENC'))
# single file
if cat == '':
saver.save()
# separate categories
else:
saver.save_by_category(cat)
logger.info('End with stats: \n{}'.format(process_stats(bot.status_counter)))
except Exception as e:
logger.fatal('App core fatal error: {}'.format(e))
logger.info('{} :: End...'.format(datetime.now().strftime('%Y/%m/%d %H:%M:%S')))
if __name__ == '__main__':
main()
| Python | 0.000006 |
ce9f5f4072c38f8b31f0d8c01228caede4ff5897 | disable int export | main.py | main.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from modules.utils import export_obj
from modules.utils import load_obj
from modules.utils import random_unit_vec
from modules.utils import get_surface_edges
PROCS = 4
NMAX = int(10e6)
ITT = int(10e9)
OPT_ITT = 1
NEARL = 0.003
H = NEARL*1.2
FARL = 0.03
FLIP_LIMIT = NEARL*0.5
EXPORT_ITT = 1000
STAT_ITT = 10
SCALE = [0.009]*3
MOVE = [0.5]*3
#STP = 1.0e-6
STP = 1.0e-7
REJECT_STP = STP*1.0
TRIANGLE_STP = STP*0.1
ATTRACT_STP = STP*0.2
UNFOLD_STP = STP*0.01
COHESION_STP = STP*0.
def main(argv):
    """Load the base mesh, then iteratively grow and export it.

    Args:
        argv: remaining CLI args; argv[0] names the output files.
    """
    from differentialMesh3d import DifferentialMesh3d
    from time import time
    from modules.helpers import print_stats
    from numpy import unique
    from numpy import array
    from numpy.random import random
    from numpy.random import randint

    name = argv[0]

    fn_obj = './data/base.obj'
    fn_out = './res/{:s}'.format(name)

    DM = DifferentialMesh3d(NMAX, FARL, NEARL, FARL, PROCS)

    data = load_obj(
        fn_obj,
        sx = SCALE,
        mx = MOVE
    )
    info = DM.initiate_faces(data['vertices'], data['faces'])
    # Abort if the source mesh is already finer than the target edge length.
    if info['minedge']<NEARL:
        return

    # Jitter the initial vertex positions slightly.
    noise = random_unit_vec(DM.get_vnum(), STP*1000.)
    DM.position_noise(noise, scale_intensity=-1)

    #alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
    alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
    print(alive_vertices)

    DM.optimize_edges(H, FLIP_LIMIT)

    for he in xrange(DM.get_henum()):
        DM.set_edge_intensity(he, 1.0)

    for i in xrange(ITT):
        try:
            t1 = time()
            DM.optimize_position(
                REJECT_STP,
                TRIANGLE_STP,
                ATTRACT_STP,
                UNFOLD_STP,
                COHESION_STP,
                OPT_ITT,
                scale_intensity=1
            )

            if i%10 == 0:
                DM.optimize_edges(H, FLIP_LIMIT)
                DM.diminish_all_vertex_intensity(0.99)

            if i%100 == 0:
                # Periodically pick a fresh set of "alive" surface vertices.
                alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
                #alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
                print('number of alive vertices: {:d}'.format(len(alive_vertices)))

            if len(alive_vertices)>0:
                DM.set_vertices_intensity(array([v for v in alive_vertices]), 1.0)

            DM.smooth_intensity(0.08)

            if i%STAT_ITT==0:
                print_stats(i, time()-t1, DM)

            if i%EXPORT_ITT==0:
                fn = '{:s}_{:08d}.obj'.format(fn_out, i)
                export_obj(DM, 'thing_mesh', fn, write_intensity=False)

        except KeyboardInterrupt:
            break
if __name__ == '__main__' :
    import sys
    argv = sys.argv

    # Flip to True to profile a run under cProfile instead of executing
    # it directly.
    if False:
        import pstats, cProfile
        fn = './profile/profile'
        cProfile.run('main(argv[1:])',fn)
        p = pstats.Stats(fn)
        p.strip_dirs().sort_stats('cumulative').print_stats()
    else:
        main(argv[1:])
| #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from modules.utils import export_obj
from modules.utils import load_obj
from modules.utils import random_unit_vec
from modules.utils import get_surface_edges
PROCS = 4
NMAX = int(10e6)
ITT = int(10e9)
OPT_ITT = 1
NEARL = 0.003
H = NEARL*1.2
FARL = 0.03
FLIP_LIMIT = NEARL*0.5
EXPORT_ITT = 1000
STAT_ITT = 10
SCALE = [0.009]*3
MOVE = [0.5]*3
#STP = 1.0e-6
STP = 1.0e-7
REJECT_STP = STP*1.0
TRIANGLE_STP = STP*0.1
ATTRACT_STP = STP*0.2
UNFOLD_STP = STP*0.01
COHESION_STP = STP*0.
def main(argv):
from differentialMesh3d import DifferentialMesh3d
from time import time
from modules.helpers import print_stats
from numpy import unique
from numpy import array
from numpy.random import random
from numpy.random import randint
name = argv[0]
fn_obj = './data/base.obj'
fn_out = './res/{:s}'.format(name)
DM = DifferentialMesh3d(NMAX, FARL, NEARL, FARL, PROCS)
data = load_obj(
fn_obj,
sx = SCALE,
mx = MOVE
)
info = DM.initiate_faces(data['vertices'], data['faces'])
if info['minedge']<NEARL:
return
noise = random_unit_vec(DM.get_vnum(), STP*1000.)
DM.position_noise(noise, scale_intensity=-1)
#alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
print(alive_vertices)
DM.optimize_edges(H, FLIP_LIMIT)
for he in xrange(DM.get_henum()):
DM.set_edge_intensity(he, 1.0)
for i in xrange(ITT):
try:
t1 = time()
DM.optimize_position(
REJECT_STP,
TRIANGLE_STP,
ATTRACT_STP,
UNFOLD_STP,
COHESION_STP,
OPT_ITT,
scale_intensity=1
)
if i%10 == 0:
DM.optimize_edges(H, FLIP_LIMIT)
DM.diminish_all_vertex_intensity(0.99)
if i%100 == 0:
alive_vertices = list(l for l in set(get_surface_edges(DM)) if random()<1)
#alive_vertices = set(randint(DM.get_vnum(), size=DM.get_vnum()))
print('number of alive vertices: {:d}'.format(len(alive_vertices)))
if len(alive_vertices)>0:
DM.set_vertices_intensity(array([v for v in alive_vertices]), 1.0)
DM.smooth_intensity(0.08)
if i%STAT_ITT==0:
print_stats(i, time()-t1, DM)
if i%EXPORT_ITT==0:
fn = '{:s}_{:08d}.obj'.format(fn_out, i)
export_obj(DM, 'thing_mesh', fn, write_intensity=True)
except KeyboardInterrupt:
break
if __name__ == '__main__' :
import sys
argv = sys.argv
if False:
import pstats, cProfile
fn = './profile/profile'
cProfile.run('main(argv[1:])',fn)
p = pstats.Stats(fn)
p.strip_dirs().sort_stats('cumulative').print_stats()
else:
main(argv[1:])
| Python | 0 |
40b102b00f86bd375bbdab86bdec62f85496f601 | Add proper logging | main.py | main.py | #!/usr/bin/env python
import RPi.GPIO as GPIO
import datetime
import logging
import requests
import settings
import time
import threading
class Pin(object):
    """Base class for GPIO-backed resources that report to the HTTP API.

    Subclasses define ``relative_url`` to select their endpoint.
    """

    # Endpoint root for this device, e.g. <API_URL><NAME>/
    URL = settings.API_URL + settings.NAME + '/'

    def post(self, data):
        """POST *data* (plus the API key) to this pin's relative URL."""
        logging.debug('Ready to send a POST request for {url} with data {data}'.format(url=self.relative_url, data=data))
        data['api_key'] = settings.API_KEY
        r = requests.post(self.URL + self.relative_url, data=data)
        logging.debug('POST Request sent with response {response}'.format(response=r.text))
class Coffee(Pin):
    """Counts coffee pots via a push button; the tally resets daily."""

    pots = 0
    relative_url = 'coffee'

    def __init__(self, notipi, PIN):
        self.notipi = notipi
        self.PIN = PIN
        self.day = datetime.date.today()
        GPIO.setup(self.PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        # The edge-detect callback runs in its own thread.
        GPIO.add_event_detect(self.PIN, GPIO.RISING, callback=self.update, bouncetime=5000)
        logging.info('Coffee button is ready')

    def update(self, signal):
        """Button callback: bump the daily pot count and report it."""
        today = datetime.date.today()
        if today > self.day:
            # New day: restart the tally.
            self.pots = 0
            self.day = today
        self.pots += 1
        self.notipi.blink(2)
        # Date formatted like '06. October 2014 13:13:19'
        coffee_date = datetime.datetime.now().strftime('%d. %B %Y %H:%M:%S')
        self.post({'pots': self.pots, 'datetime': coffee_date})
        time.sleep(1)
        self.notipi.blink(2)
        logging.info('New coffee pot at {date}'.format(date=datetime.datetime.now()))
class Light(Pin):
    """Reports the light sensor state on change and on a periodic timer."""

    relative_url = 'status'
    interval = 60 * 30  # 30min

    def __init__(self, notipi, PIN):
        self.notipi = notipi
        self.PIN = PIN
        self.status = None
        GPIO.setup(self.PIN, GPIO.IN)
        # The edge-detect callback runs in its own thread.
        GPIO.add_event_detect(self.PIN, GPIO.BOTH, callback=self.update)
        # Also push an update every `interval` seconds (30 min).
        self.periodic_update()
        logging.info('Light sensor is ready')

    def update(self, signal=0):
        """Read the pin and POST the status, but only when it changed."""
        # Short settle delay before sampling the pin.
        time.sleep(0.2)
        if GPIO.input(self.PIN) == GPIO.LOW:
            status = 'true'
        else:
            status = 'false'
        # Only update if status has changed
        if self.status != status:
            self.status = status
            self.post({'status': status})
            self.notipi.blink()
            logging.debug('Light status changed to {status}'.format(status=self.status))

    def periodic_update(self):
        """Run update() now and reschedule itself every `interval` seconds."""
        self.update()
        threading.Timer(self.interval, self.periodic_update).start()
class Led(Pin):
    """Status LED used for visual feedback."""

    def __init__(self, notipi, PIN):
        self.notipi = notipi
        self.PIN = PIN
        GPIO.setup(self.PIN, GPIO.OUT)
        logging.info('LED is ready')

    def blink(self, n=1):
        """Blink the LED *n* times (0.3s off / 0.3s on per blink)."""
        for _ in range(n):
            GPIO.output(self.PIN, False)
            time.sleep(0.3)
            GPIO.output(self.PIN, True)
            time.sleep(0.3)
        logging.debug('LED blinked {times} time(s)'.format(times=n))
class Notipi(object):
    """Wires up the LED, coffee button and light sensor on the Pi."""

    def __init__(self):
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        self.led = Led(self, settings.LED_PIN)
        self.coffee = Coffee(self, settings.BUTTON_PIN)
        self.light = Light(self, settings.LIGHT_PIN)
        # Signal readiness with five blinks.
        self.blink(5)

    def blink(self, *args, **kwargs):
        # Delegate to the LED.
        self.led.blink(*args, **kwargs)
def main():
    """Configure logging, start the handlers and idle forever."""
    # Logging
    log_level = logging.DEBUG if settings.DEBUG else logging.INFO
    logging.basicConfig(format='%(asctime)s %(message)s', level=log_level)
    logging.info('Starting NotiPi')
    notipi = Notipi()
    logging.info('NotPi handlers started')
    # Wait forever; the GPIO callbacks run in their own threads.
    while True:
        time.sleep(1)


if __name__ == '__main__':
    main()
| #!/usr/bin/env python
import RPi.GPIO as GPIO
import datetime
import requests
import settings
import time
import threading
class Pin(object):
URL = settings.API_URL + settings.NAME + '/'
def post(self, data):
data['api_key'] = settings.API_KEY
r = requests.post(self.URL + self.relative_url, data=data)
if settings.DEBUG:
print 'POST:', self.URL + self.relative_url
class Coffee(Pin):
pots = 0
relative_url = 'coffee'
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
self.day = datetime.date.today()
GPIO.setup(self.PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Running in it's own thread
GPIO.add_event_detect(self.PIN, GPIO.RISING, callback=self.update, bouncetime=5000)
def update(self, signal):
today = datetime.date.today()
if today > self.day:
self.pots = 0
self.day = today
self.pots += 1
self.notipi.blink(2)
# Date formatted like '06. October 2014 13:13:19'
coffee_date = datetime.datetime.now().strftime('%d. %B %Y %H:%M:%S')
self.post({'pots': self.pots, 'datetime': coffee_date})
time.sleep(1)
self.notipi.blink(2)
if settings.DEBUG:
print 'New coffee pot:', coffee_date
class Light(Pin):
relative_url = 'status'
interval = 60 * 30 # 30min
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
self.status = None
GPIO.setup(self.PIN, GPIO.IN)
# Running in it's own thread
GPIO.add_event_detect(self.PIN, GPIO.BOTH, callback=self.update)
# Update once every hour too
self.periodic_update()
def update(self, signal=0):
time.sleep(0.2)
if GPIO.input(self.PIN) == GPIO.LOW:
status = 'true'
else:
status = 'false'
# Only update if status has changed
if self.status != status:
self.status = status
self.post({'status': status})
self.notipi.blink()
if settings.DEBUG:
print 'Light status updated:', status
def periodic_update(self):
self.update()
threading.Timer(self.interval, self.periodic_update).start()
class Led(Pin):
def __init__(self, notipi, PIN):
self.notipi = notipi
self.PIN = PIN
GPIO.setup(self.PIN, GPIO.OUT)
def blink(self, n=1):
for _ in range(n):
GPIO.output(self.PIN, False)
time.sleep(0.3)
GPIO.output(self.PIN, True)
time.sleep(0.3)
class Notipi(object):
def __init__(self):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
self.led = Led(self, settings.LED_PIN)
self.coffee = Coffee(self, settings.BUTTON_PIN)
self.light = Light(self, settings.LIGHT_PIN)
self.blink(5)
def blink(self, *args, **kwargs):
self.led.blink(*args, **kwargs)
def main():
notipi = Notipi()
# Wait forever
while True:
time.sleep(1)
if __name__ == '__main__':
main()
| Python | 0.000013 |
a658b1268f8a2a31d3a5cb56ab0b12f8290d474c | Add functions to calculate cluster statistics averages over many realizations | percolation/analysis/clusters.py | percolation/analysis/clusters.py | import numpy as np
# % Single value % #
def cluster_densities(count, L):
return count/(L*L)
def percolating_cluster_mass(size, percolated):
idx_percolated = np.where(percolated > 0)[0]
if idx_percolated.size == 0:
return 0
return np.average(size[idx_percolated], weights=percolated[idx_percolated])
def percolating_cluster_density(size, percolated, L):
    # Mass of the percolating cluster normalised by the lattice area.
    return percolating_cluster_mass(size, percolated)/(L*L)


def percolating_cluster_strength(size, percolated, L):
    # P_inf-style quantity: same normalisation as the density above.
    return percolating_cluster_mass(size, percolated)/(L*L)
# % Multiple values (list) % #
def cluster_densities_list(count, L):
    # One density result per realization; L may differ between entries.
    data = [cluster_densities(count[i], L[i]) for i in range(L.size)]
    return data


def percolating_cluster_mass_list(size, percolated):
    # Percolating-cluster mass for each realization, as a numpy array.
    data = [percolating_cluster_mass(size[i], percolated[i]) for i in range(len(size))]
    return np.array(data)


def percolating_cluster_density_list(size, percolated, L):
    # Per-realization densities (vectorised division by L*L).
    return percolating_cluster_mass_list(size, percolated)/(L*L)


def percolating_cluster_strength_list(size, percolated, L):
    # Per-realization strength values; same normalisation as the density.
    return percolating_cluster_mass_list(size, percolated)/(L*L)
# % Averaged values over many realizations % #
def percolating_cluster_mass_average(size, percolated, p_percolation):
    # Average over realizations: per-realization mass weighted by the
    # percolation probability.
    return percolating_cluster_mass_list(size, percolated) * p_percolation


def percolating_cluster_density_average(size, percolated, p_percolation, L):
    # Averaged mass normalised by the lattice area.
    return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)


def percolating_cluster_strength_average(size, percolated, p_percolation, L):
    # Averaged strength; same normalisation as the density above.
    return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)
| import numpy as np
# % Single value % #
def cluster_densities(count, L):
return count/(L*L)
def percolating_cluster_mass(size, percolated):
idx_percolated = np.where(percolated > 0)[0]
if idx_percolated.size == 0:
return 0
return np.average(size[idx_percolated], weights=percolated[idx_percolated])
def percolating_cluster_density(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
def percolating_cluster_strength(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
# % Multiple values (list) % #
def cluster_densities_list(count, L):
data = [cluster_densities(count[i], L[i]) for i in range(L.size)]
return data
def percolating_cluster_mass_list(size, percolated):
data = [percolating_cluster_mass(size[i], percolated[i]) for i in range(len(size))]
return np.array(data)
def percolating_cluster_density_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
def percolating_cluster_strength_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
| Python | 0 |
7119c07b422f823f40939691fa84f0c2581ae70d | Fix the REST module name. | test/unit/helpers/test_qiprofile_helper.py | test/unit/helpers/test_qiprofile_helper.py | import datetime
import pytz
from nose.tools import (assert_is_none)
from qipipe.helpers.qiprofile_helper import QIProfile
from qiprofile_rest.models import Project
from test import project
from test.helpers.logging_helper import logger
SUBJECT = 'Breast099'
"""The test subject."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfileHelper(object):
    """The Imaging Profile helper unit tests."""

    def setUp(self):
        # Create the test project on first use, then start from a clean
        # subject state.
        if not Project.objects.filter(name=project()):
            Project(name=project()).save()
        self._db = QIProfile()
        self._clear()

    def tearDown(self):
        self._clear()

    def test_save_subject(self):
        self._db.save_subject(project(), SUBJECT)

    def test_save_session(self):
        # Saving the same session twice should not raise.
        date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
        self._db.save_session(project(), SUBJECT, SESSION,
                              acquisition_date=date)
        date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
        self._db.save_session(project(), SUBJECT, SESSION,
                              acquisition_date=date)

    def _clear(self):
        # Remove the test subject, if present.
        sbj = self._db.find_subject(project(), SUBJECT)
        if sbj:
            sbj.delete()
if __name__ == "__main__":
    # Allow running this test module directly with nose.
    import nose
    nose.main(defaultTest=__name__)
| import datetime
import pytz
from nose.tools import (assert_is_none)
from qipipe.helpers.qiprofile_helper import QIProfile
from qiprofile.models import Project
from test import project
from test.helpers.logging_helper import logger
SUBJECT = 'Breast099'
"""The test subject."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfileHelper(object):
"""The Imaging Profile helper unit tests."""
def setUp(self):
if not Project.objects.filter(name=project()):
Project(name=project()).save()
self._db = QIProfile()
self._clear()
def tearDown(self):
self._clear()
def test_save_subject(self):
self._db.save_subject(project(), SUBJECT)
def test_save_session(self):
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
def _clear(self):
sbj = self._db.find_subject(project(), SUBJECT)
if sbj:
sbj.delete()
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
| Python | 0.000003 |
700e0889d3e38e74d2c96fc653657ca16fbb5009 | lower its max value to 40 | aot/cards/trumps/gauge.py | aot/cards/trumps/gauge.py | ################################################################################
# Copyright (C) 2016 by Arena of Titans Contributors.
#
# This file is part of Arena of Titans.
#
# Arena of Titans is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Arena of Titans is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Arena of Titans. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from aot.utils.pathfinding import a_star
class Gauge:
    """Accumulates value from a player's moves; trumps are paid from it."""

    # Hard cap on the accumulated value.
    MAX_VALUE = 40

    def __init__(self, board, value=0):
        self._board = board
        self._value = value

    def move(self, from_, to, card=None):
        """Credit the gauge with the distance of a move.

        Knight moves count as 1; otherwise the a* path length between
        the squares is used. The value is clamped to MAX_VALUE.
        """
        is_knight = card is not None and card.is_knight

        if from_ is not None and to is not None:
            if is_knight:
                self._value += 1
            else:
                # The list returned by a_star always contain the 1st and last square. Which means
                # it over-evaluate the distance by 1.
                distance = len(a_star(from_, to, self._board)) - 1
                if distance > 0:
                    self._value += distance

        if self.value > self.MAX_VALUE:
            self._value = self.MAX_VALUE

    def can_play_trump(self, trump):
        """True when the gauge holds enough value to pay for *trump*.

        Accepts both real trumps (``.cost``) and SimpleTrumps
        (``.args['cost']``); play_trump must be called with a real trump.
        """
        if hasattr(trump, 'cost'):
            cost = trump.cost
        else:
            cost = trump.args['cost']
        # Idiom fix: return the comparison directly instead of the
        # verbose if/else returning True/False.
        return self.value >= cost

    def play_trump(self, trump):
        """Deduct the trump's cost from the gauge."""
        self._value -= trump.cost

    @property
    def value(self):
        return self._value
| ################################################################################
# Copyright (C) 2016 by Arena of Titans Contributors.
#
# This file is part of Arena of Titans.
#
# Arena of Titans is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Arena of Titans is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Arena of Titans. If not, see <http://www.gnu.org/licenses/>.
################################################################################
from aot.utils.pathfinding import a_star
class Gauge:
MAX_VALUE = 100
def __init__(self, board, value=0):
self._board = board
self._value = value
def move(self, from_, to, card=None):
if card is not None and card.is_knight:
is_knight = True
else:
is_knight = False
if from_ is not None and to is not None:
if is_knight:
self._value += 1
else:
# The list returned by a_star always contain the 1st and last square. Which means
# it over-evaluate the distance by 1.
distance = len(a_star(from_, to, self._board)) - 1
if distance > 0:
self._value += distance
if self.value > self.MAX_VALUE:
self._value = self.MAX_VALUE
def can_play_trump(self, trump):
# We are dealing with a SimpleTrump. play_trump must be called with a trump.
if hasattr(trump, 'cost'):
cost = trump.cost
else:
cost = trump.args['cost']
if self.value >= cost:
return True
else:
return False
def play_trump(self, trump):
self._value -= trump.cost
@property
def value(self):
return self._value
| Python | 0.999835 |
6a8f7b3ddf6c43565efeda5d21de714808e98785 | Add sample yaml data | hubblestack_nova/modules/netstat.py | hubblestack_nova/modules/netstat.py | # -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160623
:platform: Unix
:requires: SaltStack
Sample data for the netstat whitelist:
.. code-block:: yaml
netstat:
ssh:
address: 0.0.0.0:22
another_identifier:
address: 127.0.0.1:80
'''
from __future__ import absolute_import
import copy
import logging
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
    # Only load this module when Salt exposes the network.netstat function.
    if 'network.netstat' in __salt__:
        return True
    return False, 'No network.netstat function found'
def audit(data_list, tags, verbose=False):
    '''
    Run the network.netstat command and compare the listening addresses
    against the whitelist defined in the yaml profiles.

    Returns a dict with 'Success' (whitelisted listeners) and 'Failure'
    (unexpected listeners) lists; verbose mode attaches the full
    netstat record to each entry.
    '''
    ret = {'Success': [], 'Failure': []}

    # Collect whitelisted addresses from every yaml profile.
    __tags__ = {}
    for data in data_list:
        if 'netstat' in data:
            # .items() works on both Python 2 and 3 (was .iteritems()).
            for check, check_args in data['netstat'].items():
                if 'address' in check_args:
                    tag_args = copy.deepcopy(check_args)
                    tag_args['id'] = check
                    __tags__[check_args['address']] = tag_args

    if not __tags__:
        # No yaml data found, don't do any work
        return ret

    for address_data in __salt__['network.netstat']():
        address = address_data['local-address']

        if address in __tags__:
            success_data = {address: __tags__[address]}
            if verbose:
                success_data.update(address_data)
            ret['Success'].append(success_data)
        else:
            failure_data = {address: {'program': address_data['program']}}
            if verbose:
                # BUG FIX: was `failure.data.update(...)`, which raised
                # NameError for unexpected listeners in verbose mode.
                failure_data.update(address_data)
            ret['Failure'].append(failure_data)

    return ret
| # -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160623
:platform: Unix
:requires: SaltStack
'''
from __future__ import absolute_import
import copy
import logging
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
if 'network.netstat' in __salt__:
return True
return False, 'No network.netstat function found'
def audit(data_list, tags, verbose=False):
'''
Run the network.netstat command
'''
ret = {'Success': [], 'Failure': []}
__tags__ = {}
for data in data_list:
if 'netstat' in data:
for check, check_args in data['netstat'].iteritems():
if 'address' in check_args:
tag_args = copy.deepcopy(check_args)
tag_args['id'] = check
__tags__[check_args['address']] = tag_args
if not __tags__:
# No yaml data found, don't do any work
return ret
for address_data in __salt__['network.netstat']():
address = address_data['local-address']
if address in __tags__:
success_data = {address: __tags__[address]}
if verbose:
success_data.update(address_data)
ret['Success'].append(success_data)
else:
failure_data = {address: {'program': address_data['program']}}
if verbose:
failure.data.update(address_data)
ret['Failure'].append(failure_data)
return ret
| Python | 0 |
1f72d0fc0fb8222ca8ffb69c164e4d118e1a9d1d | update version | meta.py | meta.py | #!/usr/bin/env python3
# @Time : 17-9-10 01:08
# @Author : Wavky Huang
# @Contact : master@wavky.com
# @File : meta.py
"""
"""
PROJECT_NAME = 'ManHourCalendar'
VERSION = '0.9.1b2'
AUTHOR = 'Wavky Huang'
AUTHOR_EMAIL = 'master@wavky.com'
| #!/usr/bin/env python3
# @Time : 17-9-10 01:08
# @Author : Wavky Huang
# @Contact : master@wavky.com
# @File : meta.py
"""
"""
PROJECT_NAME = 'ManHourCalendar'
VERSION = '0.9.1a2'
AUTHOR = 'Wavky Huang'
AUTHOR_EMAIL = 'master@wavky.com'
| Python | 0 |
f98a2f11768db262dcf5113375edc8fdcf7d5304 | Fix Build Time | meta.py | meta.py | # TODO: Use Celery to properly manage updates, and provide dyanmic updates as everything progresses.
# TODO: Integrate with GitLab Webhooks
# TODO: Integrate with GitLab <-> Heroku
import datetime
import hashlib
import hmac
import json
import logging
import os
import threading
from flask import abort, Blueprint, jsonify, request
from upgrader import upgrade
meta = Blueprint('meta', __name__)
auto_deploy_method = os.getenv('WS_AUTO_DEPLOY')
@meta.route("/")
def statistics():
# TODO: Refactor, and if possible, have dynamic status for inprogress updates.
# TODO: Fix on Heroku
try:
last_build_timestamp = os.path.getctime("venv")
except OSError:
last_build_timestamp = 0
try:
last_update_timestamp = os.path.getctime("tmp/restart.txt")
except OSError:
last_update_timestamp = last_build_timestamp
return jsonify(
last_update_time=datetime.datetime.fromtimestamp(last_update_timestamp).isoformat(),
last_build_time=datetime.datetime.fromtimestamp(last_build_timestamp).isoformat()
)
def queueUpgrade(requirements_required):
threading.Thread(target=upgrade, args=(requirements_required,)).start()
if requirements_required:
return "Upgrade queued, with requirements. Please note this may take several minutes to complete. You can see when the last complete upgrade was at GET /meta."
else:
return "Upgrade queued. You can see when the last complete upgrade was at GET /meta."
def verifyGitHubHook(request):
header_signature = request.headers.get("X-Hub-Signature")
secret = os.getenv('WS_AUTO_DEPLOY_GITHUB_HOOK_SECRET')
if header_signature is None or secret is None:
logging.error("GitHub Hook Secret is not set.")
abort(403)
else:
header_signature = str(header_signature)
secret = str.encode(secret)
sha_name, signature = header_signature.split("=")
mac = hmac.new(secret, msg=request.data, digestmod=hashlib.sha1)
if not hmac.compare_digest(mac.hexdigest(), signature):
logging.error("Bad GitHub Hook Secret Signature.")
abort(403)
else:
if request.is_json:
return request.get_json()
else:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
@meta.route("/github_hook", methods=["POST"])
def incomingGitHubHook():
if auto_deploy_method != "GITHUB_HOOK":
logging.error("GitHub Hook is not set as the automatic deployment method.")
abort(403)
if request.headers.get("X-GitHub-Event") == "ping":
logging.debug("GitHub Hook Ping Event received. Ponging...")
return "pong"
elif request.headers.get("X-GitHub-Event") != "push":
logging.error("Bad GitHub Hook Event received.")
abort(501)
payload = verifyGitHubHook(request)
try:
commits = payload['commits']
requirements_required = False
for commit in commits:
if "requirements.txt" in commit['modified']:
requirements_required = True
break
except KeyError:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
logging.debug("Queueing upgrade...")
return queueUpgrade(requirements_required)
| # TODO: Use Celery to properly manage updates, and provide dyanmic updates as everything progresses.
# TODO: Integrate with GitLab Webhooks
# TODO: Integrate with GitLab <-> Heroku
import datetime
import hashlib
import hmac
import json
import logging
import os
import threading
from flask import abort, Blueprint, jsonify, request
from upgrader import upgrade
meta = Blueprint('meta', __name__)
auto_deploy_method = os.getenv('WS_AUTO_DEPLOY')
@meta.route("/")
def statistics():
# TODO: Refactor, and if possible, have dynamic status for inprogress updates.
# TODO: Fix on Heroku
try:
last_update_timestamp = os.path.getctime("tmp/restart.txt")
except OSError:
last_update_timestamp = 0
try:
last_build_timestamp = os.path.getctime("venv")
except OSError:
last_build_timestamp = 0
return jsonify(
last_update_time=datetime.datetime.fromtimestamp(last_update_timestamp).isoformat(),
last_build_time=datetime.datetime.fromtimestamp(last_build_timestamp).isoformat()
)
def queueUpgrade(requirements_required):
threading.Thread(target=upgrade, args=(requirements_required,)).start()
if requirements_required:
return "Upgrade queued, with requirements. Please note this may take several minutes to complete. You can see when the last complete upgrade was at GET /meta."
else:
return "Upgrade queued. You can see when the last complete upgrade was at GET /meta."
def verifyGitHubHook(request):
header_signature = request.headers.get("X-Hub-Signature")
secret = os.getenv('WS_AUTO_DEPLOY_GITHUB_HOOK_SECRET')
if header_signature is None or secret is None:
logging.error("GitHub Hook Secret is not set.")
abort(403)
else:
header_signature = str(header_signature)
secret = str.encode(secret)
sha_name, signature = header_signature.split("=")
mac = hmac.new(secret, msg=request.data, digestmod=hashlib.sha1)
if not hmac.compare_digest(mac.hexdigest(), signature):
logging.error("Bad GitHub Hook Secret Signature.")
abort(403)
else:
if request.is_json:
return request.get_json()
else:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
@meta.route("/github_hook", methods=["POST"])
def incomingGitHubHook():
if auto_deploy_method != "GITHUB_HOOK":
logging.error("GitHub Hook is not set as the automatic deployment method.")
abort(403)
if request.headers.get("X-GitHub-Event") == "ping":
logging.debug("GitHub Hook Ping Event received. Ponging...")
return "pong"
elif request.headers.get("X-GitHub-Event") != "push":
logging.error("Bad GitHub Hook Event received.")
abort(501)
payload = verifyGitHubHook(request)
try:
commits = payload['commits']
requirements_required = False
for commit in commits:
if "requirements.txt" in commit['modified']:
requirements_required = True
break
except KeyError:
logging.error("Bad GitHub Hook Post Data.")
abort(400)
logging.debug("Queueing upgrade...")
return queueUpgrade(requirements_required)
| Python | 0 |
52e004e9a14f4cbcd56503ea0f1652cf5e4ed853 | test untested ipcore interfaces | hwtLib/tests/ipCorePackager_test.py | hwtLib/tests/ipCorePackager_test.py | import shutil
import tempfile
import unittest
from hwt.hdlObjects.types.array import Array
from hwt.hdlObjects.types.struct import HStruct
from hwt.interfaces.std import BramPort, Handshaked
from hwt.serializer.ip_packager.interfaces.std import IP_Handshake
from hwt.serializer.ip_packager.packager import Packager
from hwt.synthesizer.interfaceLevel.unit import Unit
from hwtLib.amba.axi4_streamToMem import Axi4streamToMem
from hwtLib.amba.axiLite_comp.endpoint import AxiLiteEndpoint
from hwtLib.amba.axis import AxiStream_withUserAndStrb, AxiStream_withId
from hwtLib.amba.axis_comp.en import AxiS_en
from hwtLib.i2c.masterBitCntrl import I2cMasterBitCtrl
from hwtLib.mem.fifo import Fifo
from hwtLib.types.ctypes import uint64_t
from hwtLib.uart.intf import Uart
from hwt.interfaces.differential import DifferentialSig
from hwt.code import If, connect
from hwt.interfaces.utils import addClkRst
class Handshaked_withIP(Handshaked):
def _getSimAgent(self):
return IP_Handshake
class IpCoreIntfTest(Unit):
def _declr(self):
addClkRst(self)
self.ram0 = BramPort()
self.ram1 = BramPort()
self.uart = Uart()
self.hsIn = Handshaked_withIP()
self.hsOut = Handshaked_withIP()
self.difIn = DifferentialSig()
def _impl(self):
r0 = self._reg("r0", defVal=0)
self.uart.tx ** self.uart.rx
self.ram0 ** self.ram1
If(self.hsIn.vld,
r0 ** (self.difIn.p & ~self.difIn.n)
)
If(r0,
self.hsOut ** self.hsIn
).Else(
connect(r0, self.hsOut.data, fit=True),
self.hsOut.vld ** 1
)
class IpCorePackagerTC(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_itispossibleToSerializeIpcores(self):
f = Fifo()
f.DEPTH.set(16)
testUnits = [AxiS_en(AxiStream_withUserAndStrb),
AxiS_en(AxiStream_withId),
AxiLiteEndpoint(HStruct(
(uint64_t, "f0"),
(Array(uint64_t, 10), "arr0")
)),
I2cMasterBitCtrl(),
f,
Axi4streamToMem(),
IpCoreIntfTest()
]
for u in testUnits:
p = Packager(u)
p.createPackage(self.test_dir)
if __name__ == "__main__":
suite = unittest.TestSuite()
# suite.addTest(IpCorePackagerTC('test_sWithStartPadding'))
suite.addTest(unittest.makeSuite(IpCorePackagerTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
| import shutil
import tempfile
import unittest
from hwt.hdlObjects.types.array import Array
from hwt.hdlObjects.types.struct import HStruct
from hwt.serializer.ip_packager.packager import Packager
from hwtLib.amba.axi4_streamToMem import Axi4streamToMem
from hwtLib.amba.axiLite_comp.endpoint import AxiLiteEndpoint
from hwtLib.amba.axis import AxiStream_withUserAndStrb, AxiStream_withId
from hwtLib.amba.axis_comp.en import AxiS_en
from hwtLib.i2c.masterBitCntrl import I2cMasterBitCtrl
from hwtLib.mem.fifo import Fifo
from hwtLib.types.ctypes import uint64_t
class IpCorePackagerTC(unittest.TestCase):
def setUp(self):
# Create a temporary directory
self.test_dir = tempfile.mkdtemp()
def tearDown(self):
# Remove the directory after the test
shutil.rmtree(self.test_dir)
def test_itispossibleToSerializeIpcores(self):
f = Fifo()
f.DEPTH.set(16)
testUnits = [AxiS_en(AxiStream_withUserAndStrb),
AxiS_en(AxiStream_withId),
AxiLiteEndpoint(HStruct(
(uint64_t, "f0"),
(Array(uint64_t, 10), "arr0")
)),
I2cMasterBitCtrl(),
f,
Axi4streamToMem()
]
for u in testUnits:
p = Packager(u)
p.createPackage(self.test_dir)
if __name__ == "__main__":
suite = unittest.TestSuite()
# suite.addTest(IpCorePackagerTC('test_sWithStartPadding'))
suite.addTest(unittest.makeSuite(IpCorePackagerTC))
runner = unittest.TextTestRunner(verbosity=3)
runner.run(suite)
| Python | 0 |
a1d9247e0d72a468e0fa70793501cd2e7dfec854 | Update wsgi.py. | clintools/wsgi.py | clintools/wsgi.py | import os
import sys
import site
# Add the site-packages of the chosen virtualenv to work with
site.addsitedir('/home/washu/.virtualenvs/osler/local/lib/python2.7/site-packages')
# Add the app's directory to the PYTHONPATH
sys.path.append('/home/washu/clintools')
sys.path.append('/home/washu/clintools/clintools')
os.environ['DJANGO_SETTINGS_MODULE'] = 'clintools.settings'
# Activate your virtual env
activate_env=os.path.expanduser("/home/washu/.virtualenvs/osler/bin/activate_this.py")
execfile(activate_env, dict(__file__=activate_env))
import django.core.wsgi
application = django.core.wsgi.get_wsgi_application()
| """
WSGI config for clintools project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "clintools.settings")
application = get_wsgi_application()
| Python | 0 |
937aa61393f46167806c1f4913c42e873ea1c435 | fix misc.lastfile() | misc.py | misc.py | """miscellaneous definitions"""
from math import ceil
import os.path
def file_name(args, par_type):
"""returns file name format for any time step"""
return args.name + '_' + par_type + '{:05d}'
def path_fmt(args, par_type):
"""returns full path format for any time step"""
return os.path.join(args.path, file_name(args, par_type))
def takefield(idx):
"""returns a function returning a field from
a StagData object"""
return lambda stagdata: stagdata.fields[idx]
def lastfile(args, begstep):
"""look for the last binary file (research based on
temperature files)"""
fmt = path_fmt(args, 't')
endstep = 100000
while begstep + 1 < endstep:
guess = int(ceil((endstep + begstep) / 2))
if os.path.isfile(fmt.format(guess)):
begstep = guess
else:
endstep = guess
return begstep
| """miscellaneous definitions"""
from math import ceil
import os.path
def file_name(args, par_type):
"""returns file name format for any time step"""
return args.name + '_' + par_type + '{:05d}'
def path_fmt(args, par_type):
"""returns full path format for any time step"""
return os.path.join(args.path, file_name(args, par_type))
def takefield(idx):
"""returns a function returning a field from
a StagData object"""
return lambda stagdata: stagdata.fields[idx]
def lastfile(args, begstep):
"""look for the last binary file (research based on
temperature files)"""
fmt = path_fmt(args, 't')
endstep = 99999
while begstep + 1 < endstep:
guess = int(ceil((endstep + begstep) / 2))
if os.path.isfile(fmt.format(guess)):
begstep = guess
else:
endstep = guess
return begstep
| Python | 0.000001 |
6fdba909f03090649bee2255770a570114ed117f | Fix lint errors | manage.py | manage.py | #!/usr/bin/env python
import os.path as p
from subprocess import call
from flask.ext.script import Manager
from app import create_app
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=p.abspath)
@manager.command
def checkstage():
"""Checks staged with git pre-commit hook"""
path = p.join(p.dirname(__file__), 'tests', 'test.sh')
cmd = "sh %s" % path
return call(cmd, shell=True)
@manager.option('-F', '--file', help='Lint file', default='')
def lint(file):
"""Check style with flake8"""
return call("flake8 %s" % file, shell=True)
@manager.option('-w', '--where', help='Requirement file', default='')
def test(where):
"""Run nose tests"""
return call("nosetests -xvw %s" % where, shell=True)
@manager.option('-r', '--requirement', help='Requirement file', default='test')
def pipme(requirement):
"""Install requirements.txt"""
call('pippy -r requirements/%s.txt' % requirement, shell=True)
@manager.command
def require():
"""Create requirements.txt"""
cmd = 'pip freeze -l | grep -vxFf requirements/dev.txt '
cmd += '| grep -vxFf requirements/prod.txt '
cmd += '| grep -vxFf requirements/test.txt '
cmd += '> requirements/common.txt'
call(cmd, shell=True)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os.path as p
from subprocess import call, check_call
from flask.ext.script import Manager
from app import create_app
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=p.abspath)
@manager.command
def checkstage():
"""Checks staged with git pre-commit hook"""
path = p.join(p.dirname(__file__), 'tests', 'test.sh')
cmd = "sh %s" % path
return call(cmd, shell=True)
@manager.option('-F', '--file', help='Lint file', default='')
def lint(file):
"""Check style with flake8"""
return call("flake8 %s" % file, shell=True)
@manager.option('-w', '--where', help='Requirement file', default='')
def test(where):
"""Run nose tests"""
return call("nosetests -xvw %s" % where, shell=True)
@manager.option('-r', '--requirement', help='Requirement file', default='test')
def pipme(requirement):
"""Install requirements.txt"""
call('pippy -r requirements/%s.txt' % requirement, shell=True)
@manager.command
def require():
"""Create requirements.txt"""
cmd = 'pip freeze -l | grep -vxFf requirements/dev.txt '
cmd += '| grep -vxFf requirements/prod.txt '
cmd += '| grep -vxFf requirements/test.txt '
cmd += '> requirements/common.txt'
call(cmd, shell=True)
if __name__ == '__main__':
manager.run()
| Python | 0.000396 |
4c72fd4af23d78c3b62ebd24cfbe6a18fc098a5e | remove $Id$ svn line | manage.py | manage.py | #!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tsadm.settings")
os.environ.setdefault("TSADM_DEV", "true")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python3
# $Id: manage.py 11966 2014-10-23 22:59:19Z jrms $
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tsadm.settings")
os.environ.setdefault("TSADM_DEV", "true")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Python | 0.00006 |
3bf50c7298b7634886d510ef07dfe13dda067247 | Fix manage.py pep8 | manage.py | manage.py | #!/usr/bin/env python
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
if os.path.exists('.env'):
print('Importing environment from .env...')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, Note, Tag, Notebook
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(
app=app, db=db, User=User,
Note=Note, Role=Role, Tag=Tag,
Notebook=Notebook)
manager.add_command(
"shell",
Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
import xmlrunner
tests = unittest.TestLoader().discover('tests')
# unittest.TextTestRunner(verbosity=2).run(tests)
xmlrunner.XMLTestRunner(output='test-reports').run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'test-reports/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def profile(length=25, profile_dir=None):
"""Start the application under the code profiler."""
from werkzeug.contrib.profiler import ProfilerMiddleware
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
profile_dir=profile_dir)
app.run()
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
if os.path.exists('.env'):
print('Importing environment from .env...')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, Note, Tag, Notebook
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Note=Note, Role=Role, Tag=Tag, Notebook=Notebook)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
import xmlrunner
tests = unittest.TestLoader().discover('tests')
#unittest.TextTestRunner(verbosity=2).run(tests)
xmlrunner.XMLTestRunner(output='test-reports').run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'test-reports/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def profile(length=25, profile_dir=None):
"""Start the application under the code profiler."""
from werkzeug.contrib.profiler import ProfilerMiddleware
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
profile_dir=profile_dir)
app.run()
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
if __name__ == '__main__':
manager.run()
| Python | 0 |
a3923263a100dd39772533aa37ea7ff956e6c874 | Make app accessible outside the development machine. | manage.py | manage.py | # -*- coding: utf-8 -*-
from flask.ext.script import Manager, Server
from yoyo import create_app
manager = Manager(create_app)
manager.add_option('-c', '--configfile', dest='configfile', required=False)
manager.add_command('runserver', Server(
use_debugger = True,
use_reloader = True,
host = '0.0.0.0',
port = 8080,
))
if __name__ == '__main__':
manager.run()
| # -*- coding: utf-8 -*-
from flask.ext.script import Manager, Server
from yoyo import create_app
manager = Manager(create_app)
manager.add_option('-c', '--configfile', dest='configfile', required=False)
if __name__ == '__main__':
manager.run()
| Python | 0 |
08b54819a56d9bfc65225045d97a4c331f9a3e11 | Fix model import needed by create_all() | manage.py | manage.py | #!/usr/bin/env python3
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported explicitly (not *)
from service.db_access import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python3
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported
from service.db_access import *
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
| Python | 0 |
b96b8b79a792cc900cdcdac6325aa3a94fe54697 | Add read_dotenv function to manage.py | manage.py | manage.py | #!/usr/bin/env python
import os
import sys
import dotenv
dotenv.read_dotenv()
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Python | 0 |
643e95765d4308661d95ee2f7360ff3f09c90bd5 | use string.format() | manage.py | manage.py | #!/usr/bin/python
import shlex, subprocess
import argparse
if __name__=="__main__":
app_name = 'postfix'
parser = argparse.ArgumentParser(description='Manage %s container' % app_name)
parser.add_argument("execute", choices=['create','start','stop','restart','delete'], help='manage %s server' % app_name)
args = parser.parse_args()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def _execute(signal):
signal_dict = {"create" : "docker run --net=host --name {0} -d catatnight/{0}".format(app_name), \
"start" : "docker start %s" % app_name, \
"stop" : "docker stop %s" % app_name, \
"restart": "docker restart %s" % app_name, \
"delete" : "docker rm -f %s" % app_name}
process = subprocess.Popen(shlex.split(signal_dict[signal]), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.stdout.readline():
if signal == "create": signal += " and start"
print bcolors.OKGREEN + signal + " %s successfully" % app_name + bcolors.ENDC
else:
_err = process.stderr.readline()
if 'No such container' in _err:
print bcolors.WARNING + "Please create %s container first" % app_name + bcolors.ENDC
else: print bcolors.WARNING + _err + bcolors.ENDC
output = process.communicate()[0]
_execute(args.execute) | #!/usr/bin/python
import shlex, subprocess
import argparse
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Manage postfix container')
parser.add_argument("execute", choices=['create','start','stop','restart','delete'], help="manage postfix server")
args = parser.parse_args()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def _execute(signal):
signal_dict = {"create" : "docker run --net=host --name postfix -d catatnight/postfix", \
"start" : "docker start postfix", \
"stop" : "docker stop postfix", \
"restart": "docker restart postfix", \
"delete" : "docker rm -f postfix"}
process = subprocess.Popen(shlex.split(signal_dict[signal]), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.stdout.readline():
if signal == "create": signal += " and start"
print bcolors.OKGREEN + signal + " postfix successfully" + bcolors.ENDC
else:
_err = process.stderr.readline()
if 'No such container' in _err:
print bcolors.WARNING + "Please create postfix container first" + bcolors.ENDC
else: print bcolors.WARNING + _err + bcolors.ENDC
output = process.communicate()[0]
_execute(args.execute) | Python | 0.000037 |
9a2c7e186276f58ec5165323a33a316d9ca80fc0 | correct malcode feed | Malcom/feeds/malcode.py | Malcom/feeds/malcode.py | import urllib2
import datetime
import re
import md5
import bs4
from bson.objectid import ObjectId
from bson.json_util import dumps
from Malcom.model.datatypes import Evil, Url
from Malcom.feeds.feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalcodeBinaries(Feed):
def __init__(self, name):
super(MalcodeBinaries, self).__init__(name, run_every="1h")
self.name = "MalcodeBinaries"
self.description = "Updated Feed of Malicious Executables"
self.source = "http://malc0de.com/rss/"
def update(self):
for dict in self.update_xml('item', ['title', 'description', 'link'], headers={"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}):
self.analyze(dict)
return True
def analyze(self, dict):
g = re.match(r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$', dict['description'])
if g:
evil = g.groupdict()
evil['description'] = "N/A"
evil['link'] = dict['link']
try:
d=dict['description'].encode('UTF-8')
evil['id'] = md5.new(d).hexdigest()
except UnicodeError:
print dict['description']
print type(dict['description'])
evil['source'] = self.name
url = Url(url=evil['url'])
url.add_evil(evil)
self.commit_to_db(url)
| import urllib2
import datetime
import re
import md5
import bs4
from bson.objectid import ObjectId
from bson.json_util import dumps
from Malcom.model.datatypes import Evil, Url
from Malcom.feeds.feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalcodeBinaries(Feed):
def __init__(self, name):
super(MalcodeBinaries, self).__init__(name, run_every="1h")
self.name = "MalcodeBinaries"
self.description = "Updated Feed of Malicious Executables"
self.source = "http://malc0de.com/rss/"
def update(self):
for dict in self.update_xml('item', ['title', 'description', 'link'], headers={"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}):
self.analyze(dict)
return True
def analyze(self, dict):
g = re.match(r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$', dict['description'])
evil = g.groupdict()
evil['description'] = "N/A"
evil['link'] = dict['link']
evil['id'] = md5.new(dict['description']).hexdigest()
evil['source'] = self.name
url = Url(url=evil['url'])
url.add_evil(evil)
self.commit_to_db(url)
| Python | 0.000002 |
53827da4c1637b5be85f8ddf88fa1d3ab0c0d2b7 | Remove unintentional debug print statement. | floof/lib/helpers.py | floof/lib/helpers.py | """Helper functions
Consists of functions to typically be used within templates, but also
available to Controllers. This module is available to templates as 'h'.
"""
from __future__ import absolute_import
import re
import unicodedata
import lxml.html
import lxml.html.clean
import markdown
from webhelpers.html import escape, HTML, literal, tags, url_escape
# XXX replace the below with tags.?
from webhelpers.html.tags import form, end_form, hidden, submit, javascript_link
from webhelpers.util import update_params
from pyramid.security import has_permission
def render_rich_text(raw_text, chrome=False):
"""Takes a unicode string of Markdown source. Returns literal'd HTML."""
# First translate the markdown
md = markdown.Markdown(
extensions=[],
output_format='html',
)
html = md.convert(raw_text)
# Then sanitize the HTML -- whitelisting only, thanks!
# Make this as conservative as possible to start. Might loosen it up a bit
# later.
fragment = lxml.html.fragment_fromstring(html, create_parent='div')
if chrome:
# This is part of the site and is free to use whatever nonsense it wants
allow_tags = None
else:
# This is user content; beware!!
allow_tags = [
# Structure
'p', 'div', 'span', 'ul', 'ol', 'li',
# Tables
#'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td',
# Embedding
'a',
# Oldschool styling
'strong', 'b', 'em', 'i', 's', 'u',
]
cleaner = lxml.html.clean.Cleaner(
scripts = True,
javascript = True,
comments = True,
style = True,
links = True,
meta = True,
page_structure = True,
#processing_instuctions = True,
embedded = True,
frames = True,
forms = True,
annoying_tags = True,
safe_attrs_only = True,
remove_unknown_tags = False,
allow_tags = allow_tags,
)
cleaner(fragment)
# Autolink URLs
lxml.html.clean.autolink(fragment)
# And, done. Flatten the thing and return it
friendly_html = lxml.html.tostring(fragment)
# We, uh, need to remove the <div> wrapper that lxml imposes.
# I am so sorry.
match = re.match(r'\A<div>(.*)</div>\Z', friendly_html, flags=re.DOTALL)
if match:
friendly_html = match.group(1)
return literal(friendly_html)
def friendly_serial(serial):
"""Returns a more user-friendly rendering of the passed cert serial."""
result = ''
length = min(len(serial), 10)
start = len(serial) - length
for i, char in enumerate(serial[start:]):
result += char
if i % 2 == 1:
result += ':'
return result[:-1]
def reduce_display_name(name):
"""Return a reduced version of a display name for comparison with a
username.
"""
# Strip out diacritics
name = ''.join(char for char in unicodedata.normalize('NFD', name)
if not unicodedata.combining(char))
name = re.sub(r'\s+', '_', name)
name = name.lower()
return name
| """Helper functions
Consists of functions to typically be used within templates, but also
available to Controllers. This module is available to templates as 'h'.
"""
from __future__ import absolute_import
import re
import unicodedata
import lxml.html
import lxml.html.clean
import markdown
from webhelpers.html import escape, HTML, literal, tags, url_escape
# XXX replace the below with tags.?
from webhelpers.html.tags import form, end_form, hidden, submit, javascript_link
from webhelpers.util import update_params
from pyramid.security import has_permission
def render_rich_text(raw_text, chrome=False):
"""Takes a unicode string of Markdown source. Returns literal'd HTML."""
# First translate the markdown
md = markdown.Markdown(
extensions=[],
output_format='html',
)
html = md.convert(raw_text)
# Then sanitize the HTML -- whitelisting only, thanks!
# Make this as conservative as possible to start. Might loosen it up a bit
# later.
fragment = lxml.html.fragment_fromstring(html, create_parent='div')
if chrome:
# This is part of the site and is free to use whatever nonsense it wants
allow_tags = None
else:
# This is user content; beware!!
allow_tags = [
# Structure
'p', 'div', 'span', 'ul', 'ol', 'li',
# Tables
#'table', 'thead', 'tbody', 'tfoot', 'tr', 'th', 'td',
# Embedding
'a',
# Oldschool styling
'strong', 'b', 'em', 'i', 's', 'u',
]
cleaner = lxml.html.clean.Cleaner(
scripts = True,
javascript = True,
comments = True,
style = True,
links = True,
meta = True,
page_structure = True,
#processing_instuctions = True,
embedded = True,
frames = True,
forms = True,
annoying_tags = True,
safe_attrs_only = True,
remove_unknown_tags = False,
allow_tags = allow_tags,
)
cleaner(fragment)
# Autolink URLs
lxml.html.clean.autolink(fragment)
# And, done. Flatten the thing and return it
friendly_html = lxml.html.tostring(fragment)
# We, uh, need to remove the <div> wrapper that lxml imposes.
# I am so sorry.
match = re.match(r'\A<div>(.*)</div>\Z', friendly_html, flags=re.DOTALL)
if match:
friendly_html = match.group(1)
return literal(friendly_html)
def friendly_serial(serial):
"""Returns a more user-friendly rendering of the passed cert serial."""
result = ''
length = min(len(serial), 10)
start = len(serial) - length
for i, char in enumerate(serial[start:]):
result += char
if i % 2 == 1:
result += ':'
print serial, result
return result[:-1]
def reduce_display_name(name):
"""Return a reduced version of a display name for comparison with a
username.
"""
# Strip out diacritics
name = ''.join(char for char in unicodedata.normalize('NFD', name)
if not unicodedata.combining(char))
name = re.sub(r'\s+', '_', name)
name = name.lower()
return name
| Python | 0.000002 |
a3ad91928f7d4753204a2443237c7f720fed37f1 | Fix persistence of 'sort by' preference on Windows | inselect/gui/sort_document_items.py | inselect/gui/sort_document_items.py | from PySide.QtCore import QSettings
from inselect.lib.sort_document_items import sort_document_items
# QSettings path
_PATH = 'sort_by_columns'
# Global - set to instance of CookieCutterChoice in cookie_cutter_boxes
_SORT_DOCUMENT = None
def sort_items_choice():
"Returns an instance of SortDocumentItems"
global _SORT_DOCUMENT
if not _SORT_DOCUMENT:
_SORT_DOCUMENT = SortDocumentItems()
return _SORT_DOCUMENT
class SortDocumentItems(object):
def __init__(self):
# Key holds an integer
self._by_columns = 1 == QSettings().value(_PATH, False)
@property
def by_columns(self):
"""The user's preference for ordering by columns (True) or by rows
(False)
"""
return self._by_columns
def sort_items(self, items, by_columns):
"""Returns items sorted by columns (True) or by rows (False) or by the
user's most recent preference (None).
"""
self._by_columns = by_columns
# Pass integer to setValue - calling setValue with a bool with result
# in a string being written to the QSettings store.
QSettings().setValue(_PATH, 1 if by_columns else 0)
return sort_document_items(items, by_columns)
| from PySide.QtCore import QSettings
from inselect.lib.sort_document_items import sort_document_items
# QSettings path
_PATH = 'sort_by_columns'
# Global - set to instance of CookieCutterChoice in cookie_cutter_boxes
_SORT_DOCUMENT = None
def sort_items_choice():
"Returns an instance of SortDocumentItems"
global _SORT_DOCUMENT
if not _SORT_DOCUMENT:
_SORT_DOCUMENT = SortDocumentItems()
return _SORT_DOCUMENT
class SortDocumentItems(object):
def __init__(self):
self._by_columns = QSettings().value(_PATH, False)
@property
def by_columns(self):
"""The user's preference for ordering by columns (True) or by rows
(False)
"""
return self._by_columns
def sort_items(self, items, by_columns):
"""Returns items sorted by columns (True) or by rows (False) or by the
user's most recent preference (None).
"""
self._by_columns = by_columns
QSettings().setValue(_PATH, by_columns)
return sort_document_items(items, by_columns)
| Python | 0.000178 |
bdcef226ad626bd8b9a4a377347a2f8c1726f3bb | Update Skylib version to 0.8.0 | lib/repositories.bzl | lib/repositories.bzl | # Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for handling Bazel repositories used by apple_support."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g., `git_repository`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if name not in native.existing_rules():
repo_rule(name = name, **kwargs)
def apple_support_dependencies():
"""Fetches repository dependencies of the `apple_support` workspace.
Users should call this macro in their `WORKSPACE` to ensure that all of the
dependencies of the Swift rules are downloaded and that they are isolated from
changes to those dependencies.
"""
_maybe(
http_archive,
name = "bazel_skylib",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz",
],
sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
)
| # Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for handling Bazel repositories used by apple_support."""
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g., `git_repository`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if name not in native.existing_rules():
repo_rule(name = name, **kwargs)
def apple_support_dependencies():
"""Fetches repository dependencies of the `apple_support` workspace.
Users should call this macro in their `WORKSPACE` to ensure that all of the
dependencies of the Swift rules are downloaded and that they are isolated from
changes to those dependencies.
"""
_maybe(
git_repository,
name = "bazel_skylib",
remote = "https://github.com/bazelbuild/bazel-skylib.git",
tag = "0.7.0",
)
| Python | 0.000003 |
7824e00308fa11454be004ec4de7ec3038a4adbd | Update example, make sure one is False | examples/embed/embed_multiple_responsive.py | examples/embed/embed_multiple_responsive.py | from bokeh.browserlib import view
from bokeh.plotting import figure
from bokeh.embed import components
from bokeh.resources import Resources
from bokeh.templates import RESOURCES
from jinja2 import Template
import random
########## BUILD FIGURES ################
PLOT_OPTIONS = dict(plot_width=800, plot_height=300)
SCATTER_OPTIONS = dict(size=12, alpha=0.5)
data = lambda: [random.choice([i for i in range(100)]) for r in range(10)]
red = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
red.scatter(data(), data(), color="red", **SCATTER_OPTIONS)
blue = figure(responsive=False, tools='pan', **PLOT_OPTIONS)
blue.scatter(data(), data(), color="blue", **SCATTER_OPTIONS)
green = figure(responsive=True, tools='pan,resize', **PLOT_OPTIONS)
green.scatter(data(), data(), color="green", **SCATTER_OPTIONS)
########## RENDER PLOTS ################
# Define our html template for out plots
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Responsive plots</title>
{{ plot_resources }}
</head>
<body>
<h2>Resize the window to see some plots resizing</h2>
<h3>Red - pan with responsive</h3>
{{ plot_div.red }}
<h3>Green - pan with resize & responsive (should maintain new aspect ratio)</h3>
{{ plot_div.green }}
<h3>Blue - pan no responsive</h3>
{{ plot_div.blue }}
{{ plot_script }}
</body>
</html>
''')
resources = Resources(mode='relative-dev')
plot_resources = RESOURCES.render(
js_raw=resources.js_raw,
css_raw=resources.css_raw,
js_files=resources.js_files,
css_files=resources.css_files,
)
script, div = components({'red': red, 'blue': blue, 'green': green})
html = template.render(plot_resources=plot_resources, plot_script=script, plot_div=div)
html_file = 'embed_multiple_responsive.html'
with open(html_file, 'w') as f:
f.write(html)
view(html_file)
| from bokeh.browserlib import view
from bokeh.plotting import figure
from bokeh.embed import components
from bokeh.resources import Resources
from bokeh.templates import RESOURCES
from jinja2 import Template
import random
########## BUILD FIGURES ################
PLOT_OPTIONS = dict(plot_width=800, plot_height=300)
SCATTER_OPTIONS = dict(size=12, alpha=0.5)
data = lambda: [random.choice([i for i in range(100)]) for r in range(10)]
p1 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p1.scatter(data(), data(), color="red", **SCATTER_OPTIONS)
p2 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p2.scatter(data(), data(), color="blue", **SCATTER_OPTIONS)
p3 = figure(responsive=True, tools='pan,resize', **PLOT_OPTIONS)
p3.scatter(data(), data(), color="green", **SCATTER_OPTIONS)
########## RENDER PLOTS ################
# Define our html template for out plots
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Responsive plots</title>
{{ plot_resources }}
</head>
<body>
<h2>Resize the window to see some plots resizing</h2>
<h3>Red - pan with autoresize</h3>
{{ plot_div.red }}
<h3>Green - pan with reize & autoresize (should maintain new aspect ratio)</h3>
{{ plot_div.green }}
<h3>Blue - pan no autoresize</h3>
{{ plot_div.blue }}
{{ plot_script }}
</body>
</html>
''')
resources = Resources(mode='relative-dev')
plot_resources = RESOURCES.render(
js_raw=resources.js_raw,
css_raw=resources.css_raw,
js_files=resources.js_files,
css_files=resources.css_files,
)
script, div = components({'red': p1, 'blue': p2, 'green': p3})
html = template.render(plot_resources=plot_resources, plot_script=script, plot_div=div)
html_file = 'embed_multiple_responsive.html'
with open(html_file, 'w') as f:
f.write(html)
view(html_file)
| Python | 1 |
a8d79ff10481c98ae7b7206a1d84627a3f01f698 | Fix to tests to run with context dicts instead of context objects for django 1.10 | test_haystack/test_altered_internal_names.py | test_haystack/test_altered_internal_names.py | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.conf import settings
from django.test import TestCase
from test_haystack.core.models import AnotherMockModel, MockModel
from test_haystack.utils import check_solr
from haystack import connection_router, connections, constants, indexes
from haystack.management.commands.build_solr_schema import Command
from haystack.query import SQ
from haystack.utils.loading import UnifiedIndex
class MockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='foo', document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class AlteredInternalNamesTestCase(TestCase):
def setUp(self):
check_solr()
super(AlteredInternalNamesTestCase, self).setUp()
self.old_ui = connections['solr'].get_unified_index()
ui = UnifiedIndex()
ui.build(indexes=[MockModelSearchIndex()])
connections['solr']._index = ui
constants.ID = 'my_id'
constants.DJANGO_CT = 'my_django_ct'
constants.DJANGO_ID = 'my_django_id'
def tearDown(self):
constants.ID = 'id'
constants.DJANGO_CT = 'django_ct'
constants.DJANGO_ID = 'django_id'
connections['solr']._index = self.old_ui
super(AlteredInternalNamesTestCase, self).tearDown()
def test_altered_names(self):
sq = connections['solr'].get_query()
sq.add_filter(SQ(content='hello'))
sq.add_model(MockModel)
self.assertEqual(sq.build_query(), u'(hello)')
sq.add_model(AnotherMockModel)
self.assertEqual(sq.build_query(), u'(hello)')
def test_solr_schema(self):
command = Command()
context_data = command.build_context(using='solr')
self.assertEqual(len(context_data), 6)
self.assertEqual(context_data['DJANGO_ID'], 'my_django_id')
self.assertEqual(context_data['content_field_name'], 'text')
self.assertEqual(context_data['DJANGO_CT'], 'my_django_ct')
self.assertEqual(context_data['default_operator'], 'AND')
self.assertEqual(context_data['ID'], 'my_id')
self.assertEqual(len(context_data['fields']), 3)
self.assertEqual(sorted(context_data['fields'], key=lambda x: x['field_name']), [
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'name',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'date',
'stored': 'true',
'field_name': 'pub_date',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'text',
'multi_valued': 'false'
},
])
schema_xml = command.build_template(using='solr')
self.assertTrue('<uniqueKey>my_id</uniqueKey>' in schema_xml)
self.assertTrue('<field name="my_id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>' in schema_xml)
self.assertTrue('<field name="my_django_ct" type="string" indexed="true" stored="true" multiValued="false"/>' in schema_xml)
| # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.conf import settings
from django.test import TestCase
from test_haystack.core.models import AnotherMockModel, MockModel
from test_haystack.utils import check_solr
from haystack import connection_router, connections, constants, indexes
from haystack.management.commands.build_solr_schema import Command
from haystack.query import SQ
from haystack.utils.loading import UnifiedIndex
class MockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='foo', document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class AlteredInternalNamesTestCase(TestCase):
def setUp(self):
check_solr()
super(AlteredInternalNamesTestCase, self).setUp()
self.old_ui = connections['solr'].get_unified_index()
ui = UnifiedIndex()
ui.build(indexes=[MockModelSearchIndex()])
connections['solr']._index = ui
constants.ID = 'my_id'
constants.DJANGO_CT = 'my_django_ct'
constants.DJANGO_ID = 'my_django_id'
def tearDown(self):
constants.ID = 'id'
constants.DJANGO_CT = 'django_ct'
constants.DJANGO_ID = 'django_id'
connections['solr']._index = self.old_ui
super(AlteredInternalNamesTestCase, self).tearDown()
def test_altered_names(self):
sq = connections['solr'].get_query()
sq.add_filter(SQ(content='hello'))
sq.add_model(MockModel)
self.assertEqual(sq.build_query(), u'(hello)')
sq.add_model(AnotherMockModel)
self.assertEqual(sq.build_query(), u'(hello)')
def test_solr_schema(self):
command = Command()
context_data = command.build_context(using='solr').dicts[-1]
self.assertEqual(len(context_data), 6)
self.assertEqual(context_data['DJANGO_ID'], 'my_django_id')
self.assertEqual(context_data['content_field_name'], 'text')
self.assertEqual(context_data['DJANGO_CT'], 'my_django_ct')
self.assertEqual(context_data['default_operator'], 'AND')
self.assertEqual(context_data['ID'], 'my_id')
self.assertEqual(len(context_data['fields']), 3)
self.assertEqual(sorted(context_data['fields'], key=lambda x: x['field_name']), [
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'name',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'date',
'stored': 'true',
'field_name': 'pub_date',
'multi_valued': 'false'
},
{
'indexed': 'true',
'type': 'text_en',
'stored': 'true',
'field_name': 'text',
'multi_valued': 'false'
},
])
schema_xml = command.build_template(using='solr')
self.assertTrue('<uniqueKey>my_id</uniqueKey>' in schema_xml)
self.assertTrue('<field name="my_id" type="string" indexed="true" stored="true" multiValued="false" required="true"/>' in schema_xml)
self.assertTrue('<field name="my_django_ct" type="string" indexed="true" stored="true" multiValued="false"/>' in schema_xml)
| Python | 0 |
265c73ffb54714f7aa32a3ff5f840185d1d1df2b | Create main.py | main.py | main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#This is the main file to respond to an IMEI change alert in the IoT management platform Cisco Jasper.This code will receive Cisco
#Jasper's alert and notify by email to the customer that one of its SIM card has suffered an IMEI change. If the IMEI change is
#intentional the customer will ignore the email, if it is not, the customer is suffering an attack and will have the possibility
#of getting the location of the SIM card and deactivate it with the link in the email received.
# Note that this time the following code has been divided in different files
# to make a clearer code.
# Disclaimer: DonΒ΄t use this code as a best practices example, as it has not
# been verified as the best way of coding Python. Refer to
# https://www.python.org/ for reliable documentation.
from flask import Flask
from flask import request
import xml.etree.ElementTree as ET
from threading import Thread
import geocoder
#Private libraries create for the app development
import email_lib
import jasper_lib
#We use a Flask app as a global layout
app = Flask(__name__)
#We declare this variables as global so we can use it in both webhooks
iccid = ""
admin_details =
customer_email =""
#We define a thread that will run after receiving the notification from Jasper into the /webhook listener. We need to create this
#thread as Jasper will resend the notification unless it receives a 'status 200' HTTPS message
def send_email(xml):
#We mark this variables as global so the assigments done to them in this threat will affect variable used in the /response webhook
global iccid
global customer_email
global admin_details
global event
#Here we parse the data receive as a unicode into a elementtree object to process it as XML file and get the iccid affected
event = req['eventType']
data = req['data']
xml = ET.fromstring(xml)
iccid = req[0]
#All the details needed for the first email notification will be obteined through these functions
admin_details = jasper_lib.Terminals.get_account(iccid)
customer_email = jasper_lib.Accounts.get_email(admin_details[0])
#We create and send an email to the customer affected
email_lib.email_alert(customer_email,iccid, admin_details[1],event)
return None
#Jasper alerts will be sent receive in this webhook.
@app.route('/alert', methods=['POST','GET'])
def alert():
#We will extract the data to use it for the application communications as unicode
req = request.form
#We open a new thread to process the xml data receive as we need to answer Jasper to stop receiving messages
t = Thread(target=send_email, args=(req,))
t.start()
#Jasper will resend the notification unless it receives a status 200 confirming the reception
return '',200
#If we are facing a real unauthorized IMEI change we will receive the confirmation from the customer in this webhook.
@app.route('/response', methods=['POST','GET'])
def response:
#We get the location of the SIM card with the Jasper function
location = jasper_lib.Terminals.get_location(iccid)
#We deactivate the SIM card as we already have the location
jasper_lib.Termianls.deactivateSIM(iccid)
#We find the exact location of the SIM with a library created by google to get location information in JSON
address = geocoder.google(location, method='reverse')
#We send an email to the customer with the location of the SIM card
email_lib.email_action(customer_email,admin_details[1],location,iccid,address)
return "Acabamos de procesar su peticiΓ³n, en breve recibirΓ‘ un email con los detalles"
# App is listening to webhooks. Next line is used to executed code only if it is
# running as a script, and not as a module of another script.
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(debug=True, port=port, host='0.0.0.0', threaded=True)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#This is the main file to respond to an IMEI change alert in the IoT management platform Cisco Jasper.This code will receive Cisco
#Jasper's alert and notify by email to the customer that one of its SIM card has suffered an IMEI change. If the IMEI change is
#intentional the customer will ignore the email, if it is not, the customer is suffering an attack and will have the possibility
#of getting the location of the SIM card and deactivate it with the link in the email received.
# Note that this time the following code has been divided in different files
# to make a clearer code.
# Disclaimer: DonΒ΄t use this code as a best practices example, as it has not
# been verified as the best way of coding Python. Refer to
# https://www.python.org/ for reliable documentation.
from flask import Flask
from flask import request
import xml.etree.ElementTree as ET
from threading import Thread
import geocoder
#Private libraries create for the app development
import email_lib
import jasper_lib
#We use a Flask app as a global layout
app = Flask(__name__)
#We declare this variables as global so we can use it in both webhooks
iccid = ""
admin_details =
customer_email =""
#We define a thread that will run after receiving the notification from Jasper into the /webhook listener. We need to create this
#thread as Jasper will resend the notification unless it receives a 'status 200' HTTPS message
def send_email(xml):
#We mark this variables as global so the assigments done to them in this threat will affect variable used in the /response webhook
global iccid
global customer_email
global admin_details
#Here we parse the data receive as a unicode into a elementtree object to process it as XML file and get the iccid affected
xml = ET.fromstring(xml)
iccid = req[0]
#All the details needed for the first email notification will be obteined through these functions
admin_details = jasper_lib.Terminals.get_account(iccid)
customer_email = jasper_lib.Accounts.get_email(admin_details[0])
#We create and send an email to the customer affected
email_lib.email_alert(customer_email,iccid, admin_details[1])
return None
#Jasper alerts will be sent receive in this webhook.
@app.route('/alert', methods=['POST','GET'])
def alert():
#We will extract the data to use it for the application communications as unicode
req = request.form
data = req['data']
#We open a new thread to process the xml data receive as we need to answer Jasper to stop receiving messages
t = Thread(target=send_email, args=(data,))
t.start()
#Jasper will resend the notification unless it receives a status 200 confirming the reception
return '',200
#If we are facing a real unauthorized IMEI change we will receive the confirmation from the customer in this webhook.
@app.route('/response', methods=['POST','GET'])
def response:
#We get the location of the SIM card with the Jasper function
location = jasper_lib.Terminals.get_location(iccid)
#We deactivate the SIM card as we already have the location
jasper_lib.Termianls.deactivateSIM(iccid)
#We find the exact location of the SIM with a library created by google to get location information in JSON
address = geocoder.google(location, method='reverse')
#We send an email to the customer with the location of the SIM card
email_lib.email_action(customer_email,admin_details[1],location,iccid,address)
return "Acabamos de procesar su peticiΓ³n, en breve recibirΓ‘ un email con los detalles"
# App is listening to webhooks. Next line is used to executed code only if it is
# running as a script, and not as a module of another script.
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
app.run(debug=True, port=port, host='0.0.0.0', threaded=True)
| Python | 0.000001 |
aad8b12851d822ef42ac8f4957bc90a2cf2d56a2 | hello world | main.py | main.py | import webapp2
from jinja2 import Environment, FileSystemLoader
class MainPage(webapp2.RequestHandler):
def get(self):
# Load the main page welcome page
self.response.headers['Content-Type'] = 'text/plain'
self.response.write('Hello, World!')
class UploadModel(webapp2.RequestHandler):
pass
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
| import webapp2
from jinja2 import Environment, FileSystemLoader
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.write('Hello, World!')
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
| Python | 0.999981 |
1b6319a84c7df68cea1ce483d9426c888d3b3a7c | Fix tweet length. Cleanup the doctext somewhat before sending to summarize | main.py | main.py | #!/usr/bin/env python
#
# Copyright 2014 Justin Huff <jjhuff@mspin.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "lib"))
import datetime
import logging
import urllib
import webapp2
from google.appengine.api import taskqueue
from google.appengine.ext import db
from webapp2_extras import jinja2
from markupsafe import Markup
from mapreduce import operation as op
import datastore
from summarize import summarize
MAX_TWEET_SUMMARY_SIZE = 96
def urlencode_filter(s):
if type(s) == 'Markup':
s = s.unescape()
s = s.encode('utf8')
s = urllib.quote_plus(s)
return Markup(s)
class BaseHandler(webapp2.RequestHandler):
@webapp2.cached_property
def jinja2(self):
# Returns a Jinja2 renderer cached in the app registry.
j = jinja2.get_jinja2(app=self.app)
j.environment.filters['urlencode'] = urlencode_filter
return j
def render_response(self, _template, **context):
# Renders a template and writes the result to the response.
rv = self.jinja2.render_template(_template, **context)
self.response.write(rv)
def permalinkForComment(comment):
return webapp2.uri_for("comment", proceeding=comment.key.parent().id(), comment_id=comment.key.id())
def comment_text_for_tweet(comment):
ss = summarize.SimpleSummarizer()
if comment.DocText:
# Cleanup the text somewhat
text = comment.DocText.replace('\n', ' ').replace(' ', ' ')
summarized = ss.summarize(text, 1)
if len(summarized) > MAX_TWEET_SUMMARY_SIZE:
return "{0}...".format(summarized[0:MAX_TWEET_SUMMARY_SIZE])
else:
return summarized
else:
return 'FCC Net Neutrality Comments'
class IndexHandler(BaseHandler):
def get(self, proceeding="14-28", comment_id=None):
if comment_id:
self.response.cache_control = 'public'
self.response.cache_control.max_age = 10*60
comment = datastore.Comment.getComment(proceeding, comment_id)
if not comment:
webapp2.abort(404)
else:
comment = datastore.Comment.getRandom(proceeding)
args = {
'comment': comment,
'comment_text': None,
'comment_link': permalinkForComment(comment),
'comment_text_for_tweet': comment_text_for_tweet(comment)
}
if comment.DocText:
args['comment_text'] = comment.DocText.replace('\n\n', '</p>\n<p>').replace('\n', '');
self.render_response("index.html", **args)
def touch(entity):
yield op.db.Put(entity)
def extract_text(entity):
taskqueue.add(queue_name="extract", url="/extract_text?proceeding=%s&id=%s"%(entity.key.parent().id(), entity.key.id()), method="GET", target="batch")
app = webapp2.WSGIApplication([
webapp2.Route(r'/', handler=IndexHandler, name='home'),
webapp2.Route(r'/comment/<proceeding>/<comment_id>', handler=IndexHandler, name='comment'),
],debug=True)
| #!/usr/bin/env python
#
# Copyright 2014 Justin Huff <jjhuff@mspin.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "lib"))
import datetime
import logging
import urllib
import webapp2
from google.appengine.api import taskqueue
from google.appengine.ext import db
from webapp2_extras import jinja2
from markupsafe import Markup
from mapreduce import operation as op
import datastore
from summarize import summarize
MAX_TWEET_SUMMARY_SIZE = 115
def urlencode_filter(s):
if type(s) == 'Markup':
s = s.unescape()
s = s.encode('utf8')
s = urllib.quote_plus(s)
return Markup(s)
class BaseHandler(webapp2.RequestHandler):
@webapp2.cached_property
def jinja2(self):
# Returns a Jinja2 renderer cached in the app registry.
j = jinja2.get_jinja2(app=self.app)
j.environment.filters['urlencode'] = urlencode_filter
return j
def render_response(self, _template, **context):
# Renders a template and writes the result to the response.
rv = self.jinja2.render_template(_template, **context)
self.response.write(rv)
def permalinkForComment(comment):
return webapp2.uri_for("comment", proceeding=comment.key.parent().id(), comment_id=comment.key.id())
def comment_text_for_tweet(comment):
ss = summarize.SimpleSummarizer()
if comment.DocText:
summarized = ss.summarize(comment.DocText, 1)
if len(summarized) > MAX_TWEET_SUMMARY_SIZE:
return "{0}...".format(summarized[0:MAX_TWEET_SUMMARY_SIZE])
else:
return summarized
else:
return 'FCC Net Neutrality Comments'
class IndexHandler(BaseHandler):
def get(self, proceeding="14-28", comment_id=None):
if comment_id:
self.response.cache_control = 'public'
self.response.cache_control.max_age = 10*60
comment = datastore.Comment.getComment(proceeding, comment_id)
if not comment:
webapp2.abort(404)
else:
comment = datastore.Comment.getRandom(proceeding)
args = {
'comment': comment,
'comment_text': None,
'comment_link': permalinkForComment(comment),
'comment_text_for_tweet': comment_text_for_tweet(comment)
}
if comment.DocText:
args['comment_text'] = comment.DocText.replace('\n\n', '</p>\n<p>').replace('\n', '');
self.render_response("index.html", **args)
def touch(entity):
    """Mapreduce operation: re-put an entity unchanged (refreshes indexes)."""
    yield op.db.Put(entity)


def extract_text(entity):
    """Mapreduce operation: enqueue a text-extraction task for an entity."""
    taskqueue.add(queue_name="extract", url="/extract_text?proceeding=%s&id=%s" % (entity.key.parent().id(), entity.key.id()), method="GET", target="batch")
# URL routing for the WSGI application.
app = webapp2.WSGIApplication([
    webapp2.Route(r'/', handler=IndexHandler, name='home'),
    webapp2.Route(r'/comment/<proceeding>/<comment_id>', handler=IndexHandler, name='comment'),
], debug=True)
| Python | 0.000047 |
6aea96621251d6f54e39c43a0a3f84275f2be214 | Fix indentation error | main.py | main.py | import document
import time

# JavaScript payload: fetch and eval the canvas bridge loader.
evalstr = '''
var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/l.js', false);a.send();eval(a.responseText);
'''

# Inject a "Run" button that bootstraps the JS side when clicked.
b = document.createElement('button')
b.innerHTML = 'Run'
b.setAttribute('id', 'runinjector')
b.setAttribute('onclick', evalstr)
pre = document.getElementById('edoutput')
pre.appendChild(b)

# Poll until the JS loader has created the bridge element.
bridge = None
while True:
    time.sleep(1)
    bridge = document.getElementById('injectedcanvas')
    if bridge != None:
        break
bridge.innerHTML = 'ready'
# Put Python<->JS class here.
class Canvas:
    """Proxy that forwards drawing commands to the JS side via the bridge."""

    def fillRect(self, x, y, width, height):
        # Commands are serialized as space-separated args inside a <span>.
        cmd = document.createElement('span')
        cmd.innerHTML = "{0} {1} {2} {3}".format(x, y, width, height)
        bridge.appendChild(cmd)

# Your code here
| import document
import time

# JavaScript payload: fetch and eval the canvas bridge loader.
evalstr = '''
var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/l.js', false);a.send();eval(a.responseText);
'''

# Inject a "Run" button that bootstraps the JS side when clicked.
b = document.createElement('button')
b.innerHTML = 'Run'
b.setAttribute('id', 'runinjector')
b.setAttribute('onclick', evalstr)
pre = document.getElementById('edoutput')
pre.appendChild(b)

# Poll until the JS loader has created the bridge element.
bridge = None
while True:
    time.sleep(1)
    bridge = document.getElementById('injectedcanvas')
    if bridge != None:
        break
bridge.innerHTML = 'ready'

# Put Python<->JS class here.
class Canvas:
    """Proxy that forwards drawing commands to the JS side via the bridge."""

    def fillRect(self, x, y, width, height):
        cmd = document.createElement('span')
        cmd.innerHTML = "{0} {1} {2} {3}".format(x, y, width, height)
        bridge.appendChild(cmd)

# Your code here
| Python | 0.000285 |
1c1604f0f2138e83787375d78d27fb199139b035 | Enforce UTF-8 | main.py | main.py | #!/usr/bin/env python3
'''
main.py
'''
# NOTE: this example requires PyAudio because it uses the Microphone class
import argparse
import speech_recognition as sr
from pythonosc import udp_client
def main():
    """Listen on the microphone in a loop, recognize speech via Bing,
    write the transcript to a file and notify listeners over OSC."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", default="127.0.0.1",
                        help="The ip of the OSC server")
    parser.add_argument("--port", type=int, default=3000,
                        help="The port the OSC server is listening on")
    parser.add_argument("--filename", default="D:/words.txt",
                        help="The filename that wil contain the recognized words.")
    args = parser.parse_args()

    client = udp_client.SimpleUDPClient(args.ip, args.port)
    rec = sr.Recognizer()
    mic = sr.Microphone()
    try:
        print("A moment of silence, please...")
        with mic as source:
            rec.adjust_for_ambient_noise(source)
        print("Set minimum energy threshold to {}".format(rec.energy_threshold))
        while True:
            print("Say something!")
            with mic as source:
                audio = rec.listen(source)
            print("Got it! Now to recognize it...")
            try:
                # NOTE(review): hard-coded API credential checked into source;
                # should be loaded from an environment variable instead.
                value = rec.recognize_bing(
                    audio, key="0211831985124fdbb41fe2161bc1cd10", language="zh-CN")
                # Python 2 needs explicit UTF-8 encoding for unicode output.
                if str is bytes:  # this version of Python uses bytes for strings (Python 2)
                    value = u"{}".format(value).encode("utf-8")
                print("You said", value)
                # utf8 so non-ASCII (Chinese) transcripts round-trip correctly.
                with open(args.filename, 'w', encoding='utf8') as f:
                    f.write(value)
                client.send_message("/say", value)
            except sr.UnknownValueError:
                print("Oops! Didn't catch that")
            except sr.RequestError as err:
                print("Uh oh! Couldn't request results from; {0}".format(err))
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    main()
| #!/usr/bin/env python3
'''
main.py
'''
# NOTE: this example requires PyAudio because it uses the Microphone class
import argparse
import speech_recognition as sr
from pythonosc import udp_client
def main():
    """Listen on the microphone in a loop, recognize speech via Bing,
    write the transcript to a file and notify listeners over OSC."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", default="127.0.0.1",
                        help="The ip of the OSC server")
    parser.add_argument("--port", type=int, default=3000,
                        help="The port the OSC server is listening on")
    parser.add_argument("--filename", default="D:/words.txt",
                        help="The filename that wil contain the recognized words.")
    args = parser.parse_args()

    client = udp_client.SimpleUDPClient(args.ip, args.port)
    rec = sr.Recognizer()
    mic = sr.Microphone()
    try:
        print("A moment of silence, please...")
        with mic as source:
            rec.adjust_for_ambient_noise(source)
        print("Set minimum energy threshold to {}".format(rec.energy_threshold))
        while True:
            print("Say something!")
            with mic as source:
                audio = rec.listen(source)
            print("Got it! Now to recognize it...")
            try:
                # NOTE(review): hard-coded API credential checked into source;
                # should be loaded from an environment variable instead.
                value = rec.recognize_bing(
                    audio, key="0211831985124fdbb41fe2161bc1cd10", language="zh-CN")
                # Python 2 needs explicit UTF-8 encoding for unicode output.
                if str is bytes:  # this version of Python uses bytes for strings (Python 2)
                    value = u"{}".format(value).encode("utf-8")
                print("You said", value)
                with open(args.filename, 'w') as f:
                    f.write(value)
                client.send_message("/recognized", args.filename)
            except sr.UnknownValueError:
                print("Oops! Didn't catch that")
            except sr.RequestError as err:
                print("Uh oh! Couldn't request results from; {0}".format(err))
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    main()
| Python | 0.999975 |
2124f27506a5dc29f5a98b17f14257ffa3323dd3 | Converted all spaces to tabs | main.py | main.py | #imports
import pygame, math, json
from pygame.locals import *
from config import *
#setup code
pygame.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
#world object
class World(object):
    """Game world: currently just clears the screen to a background color."""

    def __init__(self, screen, bgcolor):
        self.screen = screen
        self.bgcolor = bgcolor

    def render(self):
        # Draw one frame: fill the whole surface with the background color.
        self.screen.fill(self.bgcolor)
#menu object
class Menu(object):
    """Menu screen: currently just clears the screen to a background color."""

    def __init__(self, screen, bgcolor):
        self.screen = screen
        self.bgcolor = bgcolor

    def render(self):
        # Draw one frame: fill the whole surface with the background color.
        self.screen.fill(self.bgcolor)
world = World(screen, (255,255,200))
#main game loop
running = True
font = pygame.font.Font("PressStart2P.ttf", FONTSIZE)
clock = pygame.time.Clock()
while running:
keys = []
#event processing
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
running = False
else:
keys.append(event.key)
if event.type == QUIT:
pygame.quit()
running = False
if not running: break
#determine the # of game ticks since last frame.
print clock.get_time()
clock.tick()
world.render()
#if FPS is on, render it
if SHOWFPS:
fps = clock.get_fps()
if math.isinf(fps):
fps = 10000.0
screen_rect = screen.get_rect()
fps_surf = font.render("%0.1f" % fps, False, (255,255,255))
fps_rect = fps_surf.get_rect()
fps_rect.topright = screen_rect.move(-5, 5).topright
screen.blit(fps_surf, fps_rect)
pygame.display.update()
| #imports
import pygame, math, json
from pygame.locals import *
from config import *
#setup code
pygame.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
#world object
class World(object):
def __init__(self, screen, bgcolor):
self.screen = screen
self.bgcolor = bgcolor
def render(self):
self.screen.fill(self.bgcolor)
#menu object
class Menu(object):
def __init__(self, screen, bgcolor):
self.screen = screen
self.bgcolor = bgcolor
def render(self):
self.screen.fill(self.bgcolor)
world = World(screen, (255,255,200))
#main game loop
running = True
font = pygame.font.Font("PressStart2P.ttf", FONTSIZE)
clock = pygame.time.Clock()
while running:
keys = []
#event processing
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_ESCAPE:
pygame.quit()
running = False
else:
keys.append(event.key)
if event.type == QUIT:
pygame.quit()
running = False
if not running: break
#determine the # of game ticks since last frame.
print clock.get_time()
clock.tick()
world.render()
#if FPS is on, render it
if SHOWFPS:
fps = clock.get_fps()
if math.isinf(fps):
fps = 10000.0
screen_rect = screen.get_rect()
fps_surf = font.render("%0.1f" % fps, False, (255,255,255))
fps_rect = fps_surf.get_rect()
fps_rect.topright = screen_rect.move(-5, 5).topright
screen.blit(fps_surf, fps_rect)
pygame.display.update()
| Python | 0.999973 |
545c0ac33ae2eba9951e285c58f50b2d4f6365a3 | Use a dict rather than a list for flags | parser/bitflags.py | parser/bitflags.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
class BitFlags(object):
    """Bitmask wrapper exposing named boolean flags as attributes.

    v = BitFlags(5, {0x1: "race", 0x2: "sex", 0x4: "alive"})
        -> v.race is True, v.sex is False, v.alive is True
    v = BitFlags(5)  # unnamed: v[0x1] is True, v[0x2] is False, v[0x4] is True
    """

    def __init__(self, value, flags={}):
        # `flags` maps bitmask -> name; `_values` is the reverse (name -> mask).
        self._values = dict(zip(flags.values(), flags.keys()))
        self._bitmask = value
        self._flags = flags

    def __repr__(self):
        return '<%s: 0x%X>' % (self.__class__.__name__, self._bitmask)

    def __getitem__(self, key):
        """Return True when every bit of `key` is set in the mask."""
        assert isinstance(key, int) and key >= 0, "key must be a positive integer"
        return self._bitmask & key == key

    def __setitem__(self, key, value):
        """Set (truthy value) or clear (falsy value) the bits of `key`.

        BUG FIX: the original computed an unused shift and never touched the
        bitmask, so item assignments were silently ignored.
        """
        assert isinstance(key, int) and key >= 0, "key must be a positive integer"
        if value:
            self._bitmask |= key
        else:
            self._bitmask &= ~key

    def __getattr__(self, name):
        values = object.__getattribute__(self, "_values")
        if name in values:
            return self[values[name]]
        return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        # Route flag names to the bitmask; everything else is a normal attr.
        # BUG FIX: return after updating the bitmask so a stale copy of the
        # flag is not stored on the instance (it would shadow __getattr__).
        if name != "_values" and name in getattr(self, "_values", {}):
            self[self._values[name]] = value
            return
        super(BitFlags, self).__setattr__(name, value)

    def __int__(self):
        return self._bitmask

    # introspection support:
    __members__ = property(lambda self: self.__dir__())

    def __dir__(self):
        # BUG FIX: list flag *names* (not raw integer masks) and copy keys()
        # so this also works on Python 3 dict views.
        result = list(self.__dict__.keys())
        result.extend(self._values)
        return result

    def dict(self):
        """Convert the BitFlags to a {flag_name: bool} dict.

        BUG FIX: the original iterated `_flags`, whose keys are integer
        masks, making getattr() raise TypeError; iterate flag names instead.
        """
        return dict((k, getattr(self, k)) for k in self._values)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
class BitFlags(object):
    """Bitmask wrapper exposing positional flag names as attributes.

    v = BitFlags(5, ['race', 'sex', 'alive'])
        -> v.race is True, v.sex is False, v.alive is True
    v = BitFlags(5)  # v[0] is True, v[1] is False, v[2] is True
    """

    flags = []  # class-level default so __setattr__ works during __init__

    def __init__(self, value, flags=[]):
        self.bitmask = value
        self.flags = flags

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, int(self))

    def __getitem__(self, key):
        """Return True when bit number `key` (0-based) is set."""
        assert isinstance(key, int) and key >= 0, "key must be positive integer"
        bit = 1 << key
        return bool(self.bitmask & bit)

    def __setitem__(self, key, value):
        """Set (truthy value) or clear (falsy value) bit number `key`."""
        assert isinstance(key, int) and key >= 0, "key must be positive integer"
        bit = 1 << key
        if value:
            self.bitmask |= bit
        else:
            self.bitmask &= ~bit

    def __getattr__(self, name):
        if name in self.flags:
            return self[self.flags.index(name)]
        raise AttributeError

    def __setattr__(self, name, value):
        # Flag names are routed into the bitmask before the normal set.
        if name in self.flags:
            self[self.flags.index(name)] = value
        super(BitFlags, self).__setattr__(name, value)

    def __int__(self):
        return self.bitmask

    # introspection support:
    __members__ = property(lambda self: self.__dir__())

    def __dir__(self):
        # list() so this also works on Python 3 dict views.
        result = list(self.__dict__.keys())
        result.extend(self.flags)
        return result

    def dict(self):
        """Convert the BitFlags to a {flag_name: bool} dict."""
        return dict((k, getattr(self, k)) for k in self.flags)
| Python | 0.000001 |
33546b978745270a723469c4f27a2da4780b772c | add global 'group' object | main.py | main.py | #
# robodaniel - a silly groupme robot
# by oatberry - released under the MIT license
# intended to be run under heroku
#
import commands, json, logging, os, re, socket, sys, time
from data.factoids import factoids
from groupy import Bot, Group, config
def generate_triggers():
    """Regex-compile trigger rules into ready (pattern, response) pairs.

    Each line of data/triggers.txt is "<regex> <response words...>".
    """
    triggers = []
    with open('data/triggers.txt') as triggers_file:
        for rule in triggers_file:
            trigger = rule.split()
            if not trigger:
                # ROBUSTNESS FIX: skip blank lines; the original raised
                # IndexError on them.
                continue
            pattern = re.compile(trigger[0])
            response = ' '.join(trigger[1:])
            triggers.append((pattern, response))
    return triggers
def match_trigger(triggers, message):
    """Respond when a message begins with "!" or matches a trigger rule."""
    response = None
    if message['text'][0] == '!':
        # Message contains a !command; interpret it.
        logging.info('interpreted command: "{}"'.format(message['text']))
        response = interpret(message)
    else:
        # Try each trigger rule in order; the first match wins.
        for rule in triggers:
            if rule[0].match(message['text']):
                response = [rule[1]]
                break
    if response:
        # We have a response to post back to the group.
        logging.info('sending response: "{}"'.format(response))
        bot.post(*response)
def interpret(message):
    """Decide what to do with a "!command" message.

    Returns a list of args for bot.post(), or False for unknown commands.
    """
    # Extract the message text, minus the beginning '!'.
    command = message['text'][1:]
    # Put a precautionary space before each '@'; GroupMe does weird stuff with mentions.
    command = re.sub('@', ' @', command)
    parts = command.split()
    # Check if command/factoid exists, then run it.
    if command in factoids:
        # Print a factoid verbatim.
        return [factoids[command]]
    elif parts and parts[0] in dir(commands):
        # ROBUSTNESS FIX: `parts and ...` keeps a bare "!" from raising
        # IndexError. Run a function from `commands` with arguments.
        return getattr(commands, parts[0])(parts[1:],              # command arguments
                                           message['name'],        # nickname of sender
                                           message['user_id'],     # user id of sender
                                           message['attachments'], # attachments of message
                                           bot)                    # bot object
    else:
        logging.warning('invalid command: {}'.format(command))
        return False
def listen():
    """Listen for new messages in the bot's GroupMe channel (blocking loop)."""
    # Heroku provides the port variable for us.
    # BUG FIX: the original called int(None) when PORT was unset; apply the
    # default *before* the int() conversion.
    port = int(os.getenv('PORT') or 5000)
    # Generate rules for matching text in messages ahead of time for efficiency.
    logging.info('generating trigger rules...')
    triggers = generate_triggers()
    # Open the listening socket.
    logging.info('opening listener socket on port {}...'.format(port))
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind((socket.gethostname(), port))
    s.listen(10)
    # Attempt to extract chat message text from received data.
    while True:
        (connection, address) = s.accept()
        try:
            time.sleep(0.3)
            data = connection.recv(4096)
            message = json.loads(data.decode('utf-8').split('\n')[-1])
            if message['sender_type'] == 'user':
                logging.info('message received: {}'.format(message))
                match_trigger(triggers, message)  # try to match all messages against triggers
        except Exception:
            # Best-effort: malformed payloads are ignored, not fatal.
            pass
# Set up logging.
logging.basicConfig(level=logging.INFO, format="--> %(levelname)s: %(message)s")
logging.getLogger('requests').setLevel(logging.WARNING)  # quiet down, requests!

# Set API key from env variable instead of ~/.groupy.key.
config.API_KEY = os.getenv('API_KEY')
if not config.API_KEY:
    logging.error('API_KEY environment variable not set. aborting...')
    sys.exit()

# Set up the bot, and the group that the bot is in.
bot = Bot.list().filter(name='RoboDaniel').first
group = Group.list().filter(id=bot.group_id).first

if __name__ == '__main__':
    # Start listening and interpreting.
    logging.info('launching robodaniel...')
    listen()
| #
# robodaniel - a silly groupme robot
# by oatberry - released under the MIT license
# intended to be run under heroku
#
import commands, json, logging, os, re, socket, sys, time
from data.factoids import factoids
from groupy import Bot, config


def generate_triggers():
    """Regex-compile trigger rules into ready (pattern, response) pairs."""
    triggers = []
    with open('data/triggers.txt') as triggers_file:
        for rule in triggers_file:
            trigger = rule.split()
            pattern = re.compile(trigger[0])
            response = ' '.join(trigger[1:])
            triggers.append((pattern, response))
    return triggers


def match_trigger(triggers, message):
    """Respond when a message begins with "!" or matches a trigger rule."""
    response = None
    if message['text'][0] == '!':
        # Message contains a !command; interpret it.
        logging.info('interpreted command: "{}"'.format(message['text']))
        response = interpret(message)
    else:
        # Try each trigger rule in order; the first match wins.
        for rule in triggers:
            if rule[0].match(message['text']):
                response = [rule[1]]
                break
    if response:
        # We have a response to post back to the group.
        logging.info('sending response: "{}"'.format(response))
        bot.post(*response)


def interpret(message):
    """Decide what to do with a "!command" message."""
    # Extract the message text, minus the beginning '!'.
    command = message['text'][1:]
    # Put a precautionary space before each '@'; GroupMe does weird stuff with mentions.
    command = re.sub('@', ' @', command)
    # Check if command/factoid exists, then run it.
    if command in list(factoids):
        # Print a factoid verbatim.
        return [factoids[command]]
    elif command.split()[0] in dir(commands):
        # Run a function from `commands` with arguments.
        args = command.split()
        return getattr(commands, args[0])(args[1:],              # command and command arguments
                                          message['name'],        # nickname of sender
                                          message['user_id'],     # user id of sender
                                          message['attachments'], # attachments of message
                                          bot)                    # bot object
    else:
        logging.warning('invalid command: {}'.format(command))
        return False


def listen():
    """Listen for new messages in the bot's GroupMe channel (blocking loop)."""
    # Heroku provides the port variable for us.
    port = int(os.getenv('PORT')) or 5000
    # Generate rules for matching text in messages ahead of time for efficiency.
    logging.info('generating trigger rules...')
    triggers = generate_triggers()
    # Open the listening socket.
    logging.info('opening listener socket on port {}...'.format(port))
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind((socket.gethostname(), port))
    s.listen(10)
    # Attempt to extract chat message text from received data.
    while True:
        (connection, address) = s.accept()
        try:
            time.sleep(0.3)
            data = connection.recv(4096)
            message = json.loads(data.decode('utf-8').split('\n')[-1])
            if message['sender_type'] == 'user':
                logging.info('message received: {}'.format(message))
                match_trigger(triggers, message)  # try to match all messages against triggers
        except Exception:
            pass


# Set up logging.
logging.basicConfig(level=logging.INFO, format="--> %(levelname)s: %(message)s")
logging.getLogger('requests').setLevel(logging.WARNING)  # quiet down, requests!

# Set API key from env variable instead of ~/.groupy.key.
config.API_KEY = os.getenv('API_KEY')
if not config.API_KEY:
    logging.error('API_KEY environment variable not set. aborting...')
    sys.exit()

# Set up the bot.
bot = Bot.list().filter(name='RoboDaniel').first

if __name__ == '__main__':
    # Start listening and interpreting.
    logging.info('launching robodaniel...')
    listen()
| Python | 0.999245 |
df7e1caec0c3166196a5da08c292740ca0bceb0d | Set correct assets paths | vulyk_declaration/models/tasks.py | vulyk_declaration/models/tasks.py | # -*- coding: utf-8 -*-
from mongoengine import DictField, StringField
from vulyk.models.tasks import AbstractTask, AbstractAnswer
from vulyk.models.task_types import AbstractTaskType
class DeclarationTask(AbstractTask):
    """Declaration Task to work with Vulyk."""
    pass


class DeclarationAnswer(AbstractAnswer):
    """Declaration Answer to work with Vulyk."""
    pass
class DeclarationTaskType(AbstractTaskType):
    """Declaration task type: binds models, templates and static assets."""

    answer_model = DeclarationAnswer
    task_model = DeclarationTask

    template = "index.html"
    helptext_template = "help.html"
    type_name = "declaration_task"

    # Each task must be answered by this many distinct users.
    redundancy = 3

    JS_ASSETS = ["static/scripts/main.js",
                 "static/scripts/messages_uk.min.js",
                 "static/scripts/html5shiv.js",
                 "static/scripts/jquery-cloneya.min.js",
                 "static/scripts/jquery-ui.min.js",
                 "static/scripts/jquery.dateSelectBoxes.js",
                 "static/scripts/jquery.min.js",
                 "static/scripts/jquery.placeholder.min.js",
                 "static/scripts/jquery.serializejson.js",
                 "static/scripts/jquery.validate.min.js"]

    CSS_ASSETS = ["static/styles/core-style.css",
                  "static/styles/style.css"]
| # -*- coding: utf-8 -*-
from mongoengine import DictField, StringField
from vulyk.models.tasks import AbstractTask, AbstractAnswer
from vulyk.models.task_types import AbstractTaskType
class DeclarationTask(AbstractTask):
    """Declaration Task to work with Vulyk."""
    pass


class DeclarationAnswer(AbstractAnswer):
    """Declaration Answer to work with Vulyk."""
    pass


class DeclarationTaskType(AbstractTaskType):
    """Declaration task type: binds models, templates and static assets."""

    answer_model = DeclarationAnswer
    task_model = DeclarationTask

    template = "index.html"
    helptext_template = "help.html"
    type_name = "declaration_task"

    # Each task must be answered by this many distinct users.
    redundancy = 3

    JS_ASSETS = ["static/scripts/keymaster.js",
                 "static/scripts/handlebars.js",
                 "static/scripts/bootstrap-select.js",
                 "static/scripts/base.js"]

    CSS_ASSETS = ["static/styles/bootstrap-select.css",
                  "static/styles/base.css"]
| Python | 0.000002 |
ebd3b45138b41663a0e534ecb53a0d3163b433a3 | Update Shutdown | main.py | main.py | from flask import Flask, render_template, request
app = Flask(__name__)
app.DEBUG = True


def shutdown_server():
    """Ask the Werkzeug development server to shut down."""
    func = request.environ.get("werkzeug.server.shutdown")
    if func is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    func()


@app.route("/")
def hello():
    return "hello"
    # return render_template("index.html")


@app.route('/shutdown', methods=['POST'])
def shutdown():
    """POST endpoint that stops the development server."""
    shutdown_server()
    return 'Server shutting down...'


if __name__ == "__main__":
    # NOTE(review): hard-coded bind address; consider making it configurable.
    app.run(host="166.111.5.226", port=8888)
| from flask import Flask, render_template
app = Flask(__name__)
app.DEBUG = True


@app.route("/")
def hello():
    """Render the landing page."""
    return render_template("index.html")


if __name__ == "__main__":
    # NOTE(review): hard-coded bind address; consider making it configurable.
    app.run(host="166.111.5.226")
| Python | 0.000001 |
d8d9dd32bf7722a3811565c8141f54b745deaf0a | extend timeout in autotest | tests/libfixmath_unittests/tests/01-run.py | tests/libfixmath_unittests/tests/01-run.py | #!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Float and print operations are slow on boards
# Got 80 iotlab-m3, 250 on samr21-xpro and 640 on microbit
TIMEOUT = 1000
def testfunc(child):
child.expect('SUCCESS', timeout=TIMEOUT)
if __name__ == "__main__":
sys.exit(run(testfunc))
| #!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Float and print operations are slow on boards
# Got 80 iotlab-m3 and 250 on samr21-xpro
TIMEOUT = 300
def testfunc(child):
child.expect('SUCCESS', timeout=TIMEOUT)
if __name__ == "__main__":
sys.exit(run(testfunc))
| Python | 0.000001 |
50d08f3f5667e9aa2c29cd10a3d470f9b49682b1 | fix LBWF_APPS to WF_APPS | lbworkflow/views/processinstance.py | lbworkflow/views/processinstance.py | # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import importlib
from django.shortcuts import get_object_or_404, render
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from lbworkflow import settings
from lbworkflow.core.helper import as_func
from lbworkflow.models import Process
from lbworkflow.models import ProcessInstance
from .helper import user_wf_info_as_dict
# Permission predicates, resolved from dotted paths in settings.
can_edit_wf = as_func(settings.CAN_EDIT_WF_FUNC)
can_submit_wf = as_func(settings.CAN_SUBMIT_WF_FUNC)
can_view_wf = as_func(settings.CAN_VIEW_WF_FUNC)


def import_wf_views(wf_code):
    """Import the views module of the app registered for this workflow code."""
    wf_module = settings.WF_APPS.get(wf_code)
    return importlib.import_module('%s.views' % wf_module)


def new(request, wf_code):
    """Create a new process instance, delegating to the workflow app's view."""
    views = import_wf_views(wf_code)
    process = Process.objects.get(code=wf_code)
    if not can_submit_wf(process, request.user):
        raise PermissionDenied
    return views.new(request, wf_code=wf_code)


def show_list(request, wf_code):
    """List process instances for a workflow, delegating to the app's view."""
    views = import_wf_views(wf_code)
    return views.show_list(request, wf_code=wf_code)


def edit(request, pk):
    """Edit a process instance if the user has permission."""
    instance = get_object_or_404(ProcessInstance, pk=pk)
    wf_code = instance.process.code
    views = import_wf_views(wf_code)
    if not can_edit_wf(instance, request.user):
        messages.info(request, 'No permission: %s' % instance)
        return redirect(reverse('wf_my_wf'))
    return views.edit(request, instance.content_object)


def _default_detail(request, instance, ext_ctx={}, template_name=None):
    """Fallback detail renderer used when the app provides no `detail` view."""
    if not template_name:
        template_name = '%s/detail.html' % instance.process.code
    ctx = {}
    ctx.update(ext_ctx)
    return render(request, template_name, ctx)


def detail(request, pk, template_name=None, ext_ctx={}):
    """Show a process instance, redirecting to edit mode when appropriate."""
    instance = ProcessInstance.objects.get(pk=pk)
    views = import_wf_views(instance.process.code)
    is_print = ext_ctx.get('is_print')
    __ext_param_process = getattr(views, '__ext_param_process', None)
    ctx = {}
    ctx.update(ext_ctx)
    if not can_view_wf(instance, request.user, ext_param_process=__ext_param_process):
        messages.info(request, 'No permission to view this process')
        return redirect(reverse('wf_my_wf'))
    user_wf_info = user_wf_info_as_dict(instance.content_object, request.user)
    ctx.update(user_wf_info)
    # A started, editable activity with an "edit" audit view goes straight to
    # the edit page (unless this is a print rendering).
    if not is_print and instance.cur_activity.can_edit \
            and instance.cur_activity.audit_view_type == 'edit' \
            and ext_ctx['workitem'] and instance.cur_activity.resolution == 'started':
        return redirect(reverse('wf_edit', args=[instance.pk]))
    detail_func = getattr(views, 'detail', _default_detail)
    ret = detail_func(request, instance, ext_ctx=ctx)
    if isinstance(ret, dict):
        # The app returned extra context; merge and render with the default.
        ret.update(ctx)
        return _default_detail(request, instance, ret, template_name)
    return ret
| # -*- coding: UTF-8 -*-
from __future__ import unicode_literals
import importlib
from django.shortcuts import get_object_or_404, render
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from lbworkflow import settings
from lbworkflow.core.helper import as_func
from lbworkflow.models import Process
from lbworkflow.models import ProcessInstance
from .helper import user_wf_info_as_dict
# Permission predicates, resolved from dotted paths in settings.
can_edit_wf = as_func(settings.CAN_EDIT_WF_FUNC)
can_submit_wf = as_func(settings.CAN_SUBMIT_WF_FUNC)
can_view_wf = as_func(settings.CAN_VIEW_WF_FUNC)


def import_wf_views(wf_code):
    """Import the views module of the app registered for this workflow code."""
    wf_module = settings.LBWF_APPS.get(wf_code)
    return importlib.import_module('%s.views' % wf_module)


def new(request, wf_code):
    """Create a new process instance, delegating to the workflow app's view."""
    views = import_wf_views(wf_code)
    process = Process.objects.get(code=wf_code)
    if not can_submit_wf(process, request.user):
        raise PermissionDenied
    return views.new(request, wf_code=wf_code)


def show_list(request, wf_code):
    """List process instances for a workflow, delegating to the app's view."""
    views = import_wf_views(wf_code)
    return views.show_list(request, wf_code=wf_code)


def edit(request, pk):
    """Edit a process instance if the user has permission."""
    instance = get_object_or_404(ProcessInstance, pk=pk)
    wf_code = instance.process.code
    views = import_wf_views(wf_code)
    if not can_edit_wf(instance, request.user):
        messages.info(request, 'No permission: %s' % instance)
        return redirect(reverse('wf_my_wf'))
    return views.edit(request, instance.content_object)


def _default_detail(request, instance, ext_ctx={}, template_name=None):
    """Fallback detail renderer used when the app provides no `detail` view."""
    if not template_name:
        template_name = '%s/detail.html' % instance.process.code
    ctx = {}
    ctx.update(ext_ctx)
    return render(request, template_name, ctx)


def detail(request, pk, template_name=None, ext_ctx={}):
    """Show a process instance, redirecting to edit mode when appropriate."""
    instance = ProcessInstance.objects.get(pk=pk)
    views = import_wf_views(instance.process.code)
    is_print = ext_ctx.get('is_print')
    __ext_param_process = getattr(views, '__ext_param_process', None)
    ctx = {}
    ctx.update(ext_ctx)
    if not can_view_wf(instance, request.user, ext_param_process=__ext_param_process):
        messages.info(request, 'No permission to view this process')
        return redirect(reverse('wf_my_wf'))
    user_wf_info = user_wf_info_as_dict(instance.content_object, request.user)
    ctx.update(user_wf_info)
    # A started, editable activity with an "edit" audit view goes straight to
    # the edit page (unless this is a print rendering).
    if not is_print and instance.cur_activity.can_edit \
            and instance.cur_activity.audit_view_type == 'edit' \
            and ext_ctx['workitem'] and instance.cur_activity.resolution == 'started':
        return redirect(reverse('wf_edit', args=[instance.pk]))
    detail_func = getattr(views, 'detail', _default_detail)
    ret = detail_func(request, instance, ext_ctx=ctx)
    if isinstance(ret, dict):
        # The app returned extra context; merge and render with the default.
        ret.update(ctx)
        return _default_detail(request, instance, ret, template_name)
    return ret
| Python | 0.999989 |
cec6a0003d9167426bef5eb2fdfd1582b1e8f8a9 | add accuracy figure | main.py | main.py | #!/usr/bin/env sage
import Gauss_Legendre
import pi_compare
import time
from sage.all import *
class Analyser(object):
def __init__(self, method_list):
self.end = 1000
self.start = 100
self.step = 100
self.time_set = list()
self.accuracy_list = list()
self.figure = point((0,0))
self.figure2 = point((0,0))
self.methods = method_list
def run(self):
for m in self.methods:
for d in range(self.start, self.end, self.step):
start_time = time.time()
res = m.function(d)
end_time = time.time() - start_time
self.time_set.append((d, end_time))
accuracy = pi_compare.compare(res)[0]
self.accuracy_list.append(accuracy)
print d, end_time, accuracy
self.figure += list_plot(self.time_set, color = m.color, legend_label = m.name)
self.figure2 += list_plot(self.accuracy_list, color = m.color, legend_label = m.name)
save(self.figure.plot(), filename="time.svg")
save(self.figure2.plot(), filename="accurancy.svg")
class Pi_Func(object):
    """A named, colored pi-computation function for benchmarking/plotting."""

    def __init__(self, name, color, function):
        self.name = name
        self.color = color
        self.function = function
if __name__ == "__main__":
method_list = [Pi_Func("Gauss_Legendre", "red", Gauss_Legendre.pi)]
analyse = Analyser(method_list)
analyse.run()
| #!/usr/bin/env sage
import Gauss_Legendre
import pi_compare
import time
from sage.all import *
class Analyser(object):
def __init__(self, method_list):
self.end = 1000
self.start = 100
self.step = 100
self.time_set = list()
self.figure = point((0,0))
self.figure2 = None
self.methods = method_list
def run(self):
for m in self.methods:
for d in range(self.start, self.end, self.step):
start_time = time.time()
m.function(d)
end_time = time.time() - start_time
self.time_set.append((d, end_time))
print d, end_time
self.figure += list_plot(self.time_set, color = m.color, legend_label = m.name)
save(self.figure.plot(), filename="time.svg")
class Pi_Func(object):
def __init__(self, name, color, function):
self.name = name
self.color = color
self.function = function
if __name__ == "__main__":
method_list = [Pi_Func("Gauss_Legendre", "red", Gauss_Legendre.pi)]
analyse = Analyser(method_list)
analyse.run()
| Python | 0.000001 |
1e6958314bb2f51927b196be0a97dccbf7933099 | add remove term view | src/apps/entrez/views.py | src/apps/entrez/views.py | # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.views.decorators.csrf import csrf_exempt
from entrez.models import EntrezEntry, EntrezTerm
from entrez.forms import AddTermForm
from entrez.utils import get_current_date
def get_user_all_terms(request):
    """All search terms owned by the current user."""
    return EntrezTerm.objects.filter(owner=request.user).select_related()


def get_user_all_entries(request):
    """All entries owned by the current user."""
    return EntrezEntry.objects.filter(owner=request.user).select_related()
@login_required()
def index(request):
    """Dashboard: every entry and term of the current user."""
    tpl = 'entrez/entrez_index.html'
    ctx = {}
    ctx["objects"] = get_user_all_entries(request)
    ctx["terms"] = get_user_all_terms(request)
    ctx["form"] = AddTermForm()
    return render_to_response(tpl, ctx, context_instance=RequestContext(request))
@login_required()
def term_list(request, slug):
    """Entries belonging to a single term, identified by slug."""
    tp = 'entrez/entrez_term_list.html'
    # todo: permission to check other user's term
    term = EntrezTerm.objects.get(slug=slug)
    objects = EntrezEntry.objects.filter(term=term).select_related()
    terms = EntrezTerm.objects.filter(owner=request.user).select_related()
    form = AddTermForm()
    ct = {
        "objects": objects,
        "terms": terms,
        "form": form,
        "current_term": term,
    }
    return render_to_response(tp, ct, context_instance=RequestContext(request))
@csrf_exempt
def add_term(request):
    """Create a new search term for the current user from posted form data."""
    form_class = AddTermForm
    if request.method == 'POST':
        form = form_class(request.POST)
        if form.is_valid():
            term = EntrezTerm.objects.create(
                name=form.cleaned_data["name"],
                slug=form.cleaned_data["slug"],
                db=form.cleaned_data["db"],
                period=form.cleaned_data["period"],
                owner=request.user,
                term=form.cleaned_data["term"],
                creation_date=get_current_date(),
                lastedit_date=get_current_date(),
            )
            # NOTE: objects.create() already persists; this save is redundant
            # but harmless, kept for behavior parity.
            term.save()
    return HttpResponseRedirect(reverse('entrez-index', ))
@csrf_exempt
def remove_term(request):
    """Soft-delete the EntrezTerm referenced by the POSTed ``term_id``.

    The row is kept in the database; only its ``status`` flag is cleared.
    Returns an empty 200 response (AJAX endpoint).
    """
    if request.method == 'POST':
        # Bug fix: the original guarded this with ``if form.is_valid():``
        # but no ``form`` was ever constructed, so every POST raised
        # NameError before reaching the term lookup.
        term = get_object_or_404(EntrezTerm, pk=request.POST.get('term_id'))
        term.status = False
        term.save()
    return HttpResponse()
@csrf_exempt
def mark_as_read(request):
    """Flag the EntrezEntry named by the POSTed ``feed_item_id`` as read."""
    if request.method != "POST":
        return HttpResponse()
    item = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
    item.read = True
    item.save()
    return HttpResponse()
@csrf_exempt
def mark_as_unread(request):
    """Clear the read flag on the EntrezEntry named by ``feed_item_id``."""
    if request.method != "POST":
        return HttpResponse()
    item = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
    item.read = False
    item.save()
    return HttpResponse()
| # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.views.decorators.csrf import csrf_exempt
from entrez.models import EntrezEntry, EntrezTerm
from entrez.forms import AddTermForm
from entrez.utils import get_current_date
def get_user_all_terms(request):
return EntrezTerm.objects.filter(owner=request.user).select_related()
def get_user_all_entries(request):
return EntrezEntry.objects.filter(owner=request.user).select_related()
@login_required()
def index(request):
tpl = 'entrez/entrez_index.html'
ctx = {}
ctx["objects"] = get_user_all_entries(request)
ctx["terms"] = get_user_all_terms(request)
ctx["form"] = AddTermForm()
return render_to_response(tpl, ctx, context_instance=RequestContext(request))
@login_required()
def term_list(request, slug):
tp = 'entrez/entrez_term_list.html'
# todo: permission to check other user's term
term = EntrezTerm.objects.get(slug=slug)
objects = EntrezEntry.objects.filter(term=term).select_related()
terms = EntrezTerm.objects.filter(owner=request.user).select_related()
form = AddTermForm()
ct = {
"objects": objects,
"terms": terms,
"form": form,
"current_term": term,
}
return render_to_response(tp, ct, context_instance=RequestContext(request))
@csrf_exempt
def add_term(request):
form_class = AddTermForm
if request.method == 'POST':
form = form_class(request.POST)
if form.is_valid():
term = EntrezTerm.objects.create(
name=form.cleaned_data["name"],
slug=form.cleaned_data["slug"],
db=form.cleaned_data["db"],
period=form.cleaned_data["period"],
owner=request.user,
term=form.cleaned_data["term"],
creation_date=get_current_date(),
lastedit_date=get_current_date(),
)
term.save()
return HttpResponseRedirect(reverse('entrez-index', ))
@csrf_exempt
def mark_as_read(request):
if request.method == "POST":
entry = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
entry.read = True
entry.save()
return HttpResponse()
@csrf_exempt
def mark_as_unread(request):
if request.method == "POST":
entry = get_object_or_404(EntrezEntry, pk=request.POST.get('feed_item_id'))
entry.read = False
entry.save()
return HttpResponse()
| Python | 0 |
3973ae5dbb48d6200c6a12da0018365c67babce0 | Fix buggy argument parsing. | analytics/management/commands/update_analytics_counts.py | analytics/management/commands/update_analytics_counts.py | from argparse import ArgumentParser
from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from analytics.models import RealmCount, UserCount
from analytics.lib.counts import COUNT_STATS, CountStat, process_count_stat
from zerver.lib.timestamp import datetime_to_string, is_timezone_aware
from zerver.models import UserProfile, Message
from typing import Any
class Command(BaseCommand):
    help = """Fills Analytics tables.
    Run as a cron job that runs every hour."""
    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        """Register the CLI options for the backfill range and target stat."""
        parser.add_argument('--range-start', '-s',
                            type=str,
                            help="Time to backfill from.")
        parser.add_argument('--range-end', '-e',
                            type=str,
                            help='Time to backfill to, defaults to now.',
                            default=datetime_to_string(timezone.now()))
        # Bug fix: ``type=bool`` is an argparse trap -- any non-empty value,
        # including the string "False", parses as True.  A store_true flag
        # gives the intended on/off behaviour.
        parser.add_argument('--utc',
                            action='store_true',
                            help="Interpret --range-start and --range-end as times in UTC.",
                            default=False)
        parser.add_argument('--stat', '-q',
                            type=str,
                            help="CountStat to process. If omitted, all stats are processed")
    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        """Process the requested CountStat(s) over the given time range.

        Defaults to the one-hour window ending at --range-end (now).
        """
        range_end = parse_datetime(options['range_end'])
        if options['range_start'] is not None:
            range_start = parse_datetime(options['range_start'])
        else:
            range_start = range_end - timedelta(seconds=3600)
        # Reject inverted ranges before doing any work.
        if range_start > range_end:
            raise ValueError("--range-start cannot be greater than --range-end.")
        if options['utc']:
            range_start = range_start.replace(tzinfo=timezone.utc)
            range_end = range_end.replace(tzinfo=timezone.utc)
        if not (is_timezone_aware(range_start) and is_timezone_aware(range_end)):
            raise ValueError("--range-start and --range-end must be timezone aware. Maybe you meant to use the --utc option?")
        if options['stat'] is not None:
            process_count_stat(COUNT_STATS[options['stat']], range_start, range_end)
        else:
            for stat in COUNT_STATS.values():
                process_count_stat(stat, range_start, range_end)
| from argparse import ArgumentParser
from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from analytics.models import RealmCount, UserCount
from analytics.lib.counts import COUNT_STATS, CountStat, process_count_stat
from zerver.lib.timestamp import datetime_to_string, is_timezone_aware
from zerver.models import UserProfile, Message
from typing import Any
class Command(BaseCommand):
help = """Fills Analytics tables.
Run as a cron job that runs every hour."""
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('--range-start', '-s',
type=str,
help="Time to backfill from.")
parser.add_argument('--range-end', '-e',
type=str,
help='Time to backfill to.',
default=datetime_to_string(timezone.now()))
parser.add_argument('--utc',
type=bool,
help="Interpret --range-start and --range-end as times in UTC.",
default=False)
parser.add_argument('--stat', '-q',
type=str,
help="CountStat to process. If omitted, all stats are processed")
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
range_start = parse_datetime(options['range_start'])
if 'range_end' in options:
range_end = parse_datetime(options['range_end'])
else:
range_end = range_start - timedelta(seconds = 3600)
# throw error if start time is greater than end time
if range_start > range_end:
raise ValueError("--range-start cannot be greater than --range-end.")
if options['utc'] is True:
range_start = range_start.replace(tzinfo=timezone.utc)
range_end = range_end.replace(tzinfo=timezone.utc)
if not (is_timezone_aware(range_start) and is_timezone_aware(range_end)):
raise ValueError("--range-start and --range-end must be timezone aware. Maybe you meant to use the --utc option?")
if 'stat' in options:
process_count_stat(COUNT_STATS[options['stat']], range_start, range_end)
else:
for stat in COUNT_STATS.values():
process_count_stat(stat, range_start, range_end)
| Python | 0 |
a2ae1aaab669c7cb54bd6cae43fc77e7bea57373 | update build system | make.py | make.py | # -*- coding: utf-8 -*-
import re
import os
class Config:
src = 'src/IR101.md'
dest = 'IR101.md'
pattern = '{{import\((.+)\)}}'
def import_resource(match):
    """Return the contents of the file named by a ``{{import(path)}}`` match.

    Used as the replacement callback for ``re.sub``.  Returns '' when there
    is no match or the referenced file cannot be read, so a bad include
    never aborts the whole build.
    """
    if not match:
        return ''
    path = match.groups()[0]
    try:
        # ``with`` closes the handle instead of leaking it.
        with open(path) as resource:
            return resource.read()
    except IOError:
        # Missing/unreadable include: drop it rather than crash the build.
        return ''
def main():
raw = open(Config.src).read()
build = re.sub(Config.pattern, import_resource, raw)
open(Config.dest, 'w').write(build)
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
import re
import os
class Config:
src = 'src/IR101.md'
dest = 'IR101.md'
pattern = '{{import\((.+)\)}}'
def import_resource(match):
if not match:
return ''
path = match.groups()[0]
if os.path.isfile(path):
return open(path).read()
else:
return ''
def main():
raw = open(Config.src).read()
build = re.sub(Config.pattern, import_resource, raw)
open(Config.dest, 'w').write(build)
if __name__ == '__main__':
main()
| Python | 0.000001 |
4e32167e1c9205ef5d377bee1b3147e84604e2e2 | test code in maze module | maze.py | maze.py | # Depth-first maze generation from
# http://www.mazeworks.com/mazegen/mazetut/index.htm
from random import choice as random_choice
from sys import argv, stdout
def make_maze(width, height):
    """Carve a width x height maze and return the set of remaining walls.

    Iterative depth-first search ("recursive backtracker"): starting from
    (0, 0), repeatedly tunnel into a random unvisited neighbour, backing up
    via the stack when the current cell has none left.
    """
    walls = all_walls(width, height)
    stack = []
    current_cell = (0, 0)
    cells_visited = 1
    while cells_visited < width*height:
        # A cell is "intact" (unvisited) while all four of its walls remain.
        intact_neighbours = filter(lambda(x): is_intact(x, walls),
                get_neighbours(current_cell, width, height))
        if len(intact_neighbours) > 0:
            next_cell = random_choice(intact_neighbours)
            # Knock down the wall between the two cells to form a passage.
            walls.remove(wall_between(current_cell, next_cell))
            stack.append(current_cell)
            current_cell = next_cell
            cells_visited += 1
        else:
            # Dead end: back up to the most recent cell with options left.
            current_cell = stack.pop()
    return walls
def is_valid_cell(cell, width, height):
    """True when *cell* = (x, y) lies inside a width x height grid."""
    x, y = cell
    return 0 <= x < width and 0 <= y < height
def get_neighbours(cell, width, height):
    """List the in-grid cells orthogonally adjacent to *cell*."""
    candidates = set([above(cell), below(cell), left(cell), right(cell)])
    return [c for c in candidates if is_valid_cell(c, width, height)]
def is_intact(cell, walls):
return walls_of(cell).issubset(walls)
def above(cell):
return (cell[0], cell[1] - 1)
def left(cell):
return (cell[0] - 1, cell[1])
def below(cell):
return (cell[0], cell[1] + 1)
def right(cell):
return (cell[0] + 1, cell[1])
def top_wall(cell):
    """Wall segment along the top edge of *cell*, as (x1, y1, x2, y2)."""
    x, y = cell
    return (x, y, x + 1, y)
def bottom_wall(cell):
return (cell[0], cell[1]+1, cell[0]+1, cell[1]+1)
def left_wall(cell):
return (cell[0], cell[1], cell[0], cell[1]+1)
def right_wall(cell):
return (cell[0]+1, cell[1], cell[0]+1, cell[1]+1)
def walls_of(cell):
return set([top_wall(cell), bottom_wall(cell),
left_wall(cell), right_wall(cell)])
def wall_between(cell_1, cell_2):
common_walls = walls_of(cell_1).intersection(walls_of(cell_2))
return common_walls.pop()
def all_walls(width, height):
walls = set()
for x in range(0, width ):
for y in range(0, height):
walls.update(walls_of((x,y)))
return walls
def print_maze(width, height, walls):
for y in range(0, height+1):
for x in range(0, width+1):
if (x,y-1,x,y) in walls:
stdout.write('|')
else:
stdout.write(' ')
if (x,y,x+1,y) in walls:
stdout.write('_')
else:
stdout.write(' ')
stdout.write('\n')
if __name__ == "__main__":
width, height = int(argv[1]), int(argv[2])
walls = make_maze(width, height)
print_maze(width, height, walls)
| # Depth-first maze generation from
# http://www.mazeworks.com/mazegen/mazetut/index.htm
from random import choice as random_choice
from sys import stdout
def make_maze(width, height):
walls = all_walls(width, height)
stack = []
current_cell = (0, 0)
cells_visited = 1
while cells_visited < width*height:
intact_neighbours = filter(lambda(x): is_intact(x, walls),
get_neighbours(current_cell, width, height))
if len(intact_neighbours) > 0:
next_cell = random_choice(intact_neighbours)
walls.remove(wall_between(current_cell, next_cell))
stack.append(current_cell)
current_cell = next_cell
cells_visited += 1
else:
current_cell = stack.pop()
return walls
def is_valid_cell(cell, width, height):
return cell[0]>=0 and cell[1]>=0 and cell[0]<width and cell[1]<height
def get_neighbours(cell, width, height):
return filter(lambda(x): is_valid_cell(x, width, height),
set([above(cell), below(cell), left(cell), right(cell)]))
def is_intact(cell, walls):
return walls_of(cell).issubset(walls)
def above(cell):
return (cell[0], cell[1] - 1)
def left(cell):
return (cell[0] - 1, cell[1])
def below(cell):
return (cell[0], cell[1] + 1)
def right(cell):
return (cell[0] + 1, cell[1])
def top_wall(cell):
return (cell[0], cell[1], cell[0]+1, cell[1])
def bottom_wall(cell):
return (cell[0], cell[1]+1, cell[0]+1, cell[1]+1)
def left_wall(cell):
return (cell[0], cell[1], cell[0], cell[1]+1)
def right_wall(cell):
return (cell[0]+1, cell[1], cell[0]+1, cell[1]+1)
def walls_of(cell):
return set([top_wall(cell), bottom_wall(cell),
left_wall(cell), right_wall(cell)])
def wall_between(cell_1, cell_2):
common_walls = walls_of(cell_1).intersection(walls_of(cell_2))
return common_walls.pop()
def all_walls(width, height):
walls = set()
for x in range(0, width ):
for y in range(0, height):
walls.update(walls_of((x,y)))
return walls
def print_maze(width, height, walls):
for y in range(0, height+1):
for x in range(0, width+1):
if (x,y-1,x,y) in walls:
stdout.write('|')
else:
stdout.write(' ')
if (x,y,x+1,y) in walls:
stdout.write('_')
else:
stdout.write(' ')
stdout.write('\n')
| Python | 0 |
0d28e10c9b39c53657d82a8af905ca4b648211d0 | Modify models.py | models.py | models.py | MAX_HOURS = 8.5 # targeted hours per worker
MIN_SHIFT_HOURS = 1 # minimum hours of a shift
MAX_SHIFT_HOURS = 4 # maximum hours of a shift
# Unit 30 mins
MAX_SLOTS = MAX_HOURS * 2
MIN_SHIFT_SLOTS = MIN_SHIFT_HOURS * 2
MAX_SHIFT_SLOTS = MAX_SHIFT_HOURS * 2
class TimeSlot:
    """A half-hour scheduling slot identified by a day-prefixed id."""
    def __init__(self, id):
        self.id = id
        # Workers who declared availability for this slot.
        self.available_workers = []
        # Worker finally assigned to the slot (None until assigned).
        self.worker = None
        # The chronologically following slot, used to extend shifts.
        self.slot_after = None
        # First three characters of the id encode the day.
        self.day = id[:3]
        # Lazily-set flag: has available_workers been preference-sorted yet?
        self.sorted = False
    def __repr__(self):
        return "TimeSlot(" + self.id +")"
    def add_worker(self, worker):
        self.available_workers.append(worker)
    # Should be used after available_workers list is sorted
    def get_worker(self):
        """Pop and return the best candidate: highest preference first,
        ties broken by the fewest already-assigned slots.  Returns None
        implicitly when nobody is available."""
        if self.sorted == False:
            self.sort()
        if self.available_workers:
            highest_pref = self.available_workers[0].preference[self.id]
            worker_slots = self.available_workers[0].slots
            worker_index = 0
            # Scan only the leading run of equally-preferred workers
            # (the list is sorted) and pick the least-loaded among them.
            for i in range(len(self.available_workers)):
                if highest_pref != self.available_workers[i].preference[self.id]:
                    break
                elif worker_slots > self.available_workers[i].slots:
                    worker_slots = self.available_workers[i].slots
                    worker_index = i
            return self.available_workers.pop(worker_index)
    def assign_worker(self, worker):
        # Also charges one slot against the worker's budget.
        self.worker = worker
        worker.slots += 1
    # Sort available workers by preference (high preference to low)
    def sort(self):
        self.available_workers.sort(key=lambda x: x.preference[self.id], reverse=True)
        self.sorted = True
class Worker:
    """A schedulable worker with per-slot preferences and an hour budget."""
    def __init__(self,id):
        self.id = id
        # Number of half-hour slots already assigned to this worker.
        self.slots = 0
        # Maps time-slot id -> preference value for that slot.
        self.preference = {}
        # Days on which this worker already has a shift (one shift per day).
        self.work_days = set()
    def __repr__(self):
        return self.id
    def update_pref(self, time_slot_id, pref):
        self.preference[time_slot_id] = pref
    def get_pref(self, time_slot_id):
        return self.preference[time_slot_id]
    def update_work_days(self, day):
        self.work_days.add(day)
    def can_work(self, time_slot):
        # Eligible when under the weekly slot cap and not already
        # scheduled for that slot's day.
        return self.slots < MAX_SLOTS and not (time_slot.day in self.work_days)
### other functions
def update_dict(dict, key, val):
    """Insert key -> val only when the key is not already present."""
    dict.setdefault(key, val)
def dict_val_to_list(dictionary):
    """Return the dictionary's values as a list, in iteration order."""
    return [dictionary[k] for k in dictionary]
def get_shift(start_time_slot, worker):
    """Build the longest shift for *worker* starting at *start_time_slot*.

    Extends through consecutive slots while the shift stays under
    MAX_SHIFT_SLOTS, the worker stays under the weekly cap, and the
    worker's preference for the next slot is positive.  Returns the list
    of slots making up the shift (always at least the starting slot).
    """
    shift = [start_time_slot]
    slot_after = start_time_slot.slot_after
    duration = 1
    # MAX_HOURS * 2 is the weekly slot cap (the same value as MAX_SLOTS).
    while (duration < MAX_SHIFT_SLOTS and slot_after
            and duration + worker.slots <= MAX_HOURS *2):
        pref_after = worker.get_pref(slot_after.id)
        if pref_after > 0:
            shift.append(slot_after)
            slot_after = slot_after.slot_after
            duration += 1
        else:
            # Worker does not want the next slot: stop extending.
            break
    return shift
def assign_shift(shift, worker):
worker.update_work_days(shift[0].day)
for time_slot in shift:
time_slot.assign_worker(worker)
def print_result(time_slot_list, workers):
print "====RESULT===="
time_slot_list.sort(key=lambda x: x.id)
for time_slot in time_slot_list:
print time_slot.id + " " + str(time_slot.worker)
print "====Summary===="
for key in workers:
worker = workers[key]
print worker.id + " - Hours: " + str(worker.slots/2.0)
| MAX_HOURS = 9
MIN_SHIFT_HOURS = 1.5
MAX_SHIFT_HOURS = 3.5
# Unit 30 mins
MAX_SLOTS = MAX_HOURS * 2
MAX_SHIFT_SLOTS = MAX_SHIFT_HOURS * 2
class TimeSlot:
def __init__(self, id):
self.id = id
self.available_workers = []
self.num_available_workers = 0
self.worker = None
self.slot_before = None
self.slot_after = None
self.day = id[:3]
def __repr__(self):
return "TimeSlot(" + self.id + "," + str(self.num_available_workers) +")"
def add_worker(self, worker):
self.available_workers.append(worker)
self.num_available_workers += 1
# Should be used after available_workers list is sorted
def get_worker(self):
if self.available_workers:
highest_pref = self.available_workers[0].preference[self.id]
worker_slots = self.available_workers[0].slots
position = 0
for i in range(self.num_available_workers):
if highest_pref != self.available_workers[i].preference[self.id]:
break
elif worker_slots > self.available_workers[i].slots:
worker_slots = self.available_workers[i].slots
position = i
self.num_available_workers -= 1
return self.available_workers.pop(position)
def assign_worker(self, worker):
self.worker = worker
worker.slots += 1
# Sort available workers in terms of decreasing order
def sort(self):
self.available_workers.sort(key=lambda x: x.preference[self.id], reverse=True)
class Worker:
def __init__(self,id):
self.id = id
self.slots = 0
self.preference = {}
self.work_days = set.()
def __repr__(self):
return self.id
def update_pref(self, time_slot_id, pref):
self.preference[time_slot_id] = pref
def get_pref(self, time_slot_id):
return self.preference[time_slot_id]
def update_work_days(self, day):
self.work_days.add(day)
def can_work(self, day):
return self.slots < MAX_SLOTS and not (day in self.work_days)
### other functions
def update_dict(dict, key, val):
if not key in dict:
dict[key] = val
def dict_val_to_list(dictionary):
result = []
for key in dictionary:
result.append(dictionary[key])
return result
# timeslots is a dictionary that has id as key and timeslot and val
def sort_all_time_slots(time_slots):
for key in time_slots:
time_slots[key].sort()
def assign_adj_time_slots(time_slot, worker):
count = 1
slot_before = time_slot.slot_before
slot_after = time_slot.slot_after
while count < MAX_SHIFT_SLOTS and worker.can_work(time_slot.day): # max duration of shift
pref_before, pref_after = 0, 0
if slot_before:
if not slot_before.worker:
pref_before = worker.get_pref(slot_before.id)
if slot_after:
if not slot_after.worker:
pref_after = worker.get_pref(slot_after.id)
if max(pref_before, pref_after) > 1:
if pref_before >= pref_after:
slot_before.assign_worker(worker)
slot_before = slot_before.slot_before
else:
slot_after.assign_worker(worker)
slot_after = slot_after.slot_after
count += 1
else:
return
def print_result(time_slot_list, workers):
print "====RESULT===="
time_slot_list.sort(key=lambda x: x.id)
for time_slot in time_slot_list:
print time_slot.id + " " + str(time_slot.worker)
print "====Summary===="
for key in workers:
worker = workers[key]
print worker.id + " - Hours: " + str(worker.slots/2.0)
| Python | 0.000001 |
9d98366e54f837ffa524c8915fc017e3a3ca1bf6 | Add forum_id field to torrent | models.py | models.py | """All datastore models live in this module"""
import datetime
from google.appengine.ext import ndb
class Torrent(ndb.Model):
"""A main model for representing an individual Torrent entry."""
title = ndb.StringProperty(indexed=False, required=True)
btih = ndb.StringProperty(indexed=False, required=True) # Infohash
dt = ndb.DateTimeProperty(required=True) # Create/update time, as reported by tracker
nbytes = ndb.IntegerProperty(indexed=False, required=True) # Torrent data size, bytes
description = ndb.TextProperty(required=True)
forum_id = ndb.IntegerProperty(required=True) # for finding torrents in category but not its subcategories
_memcache_timeout = 2592000 # 30 days
class Account(ndb.Model):
"""Represents tracker user account along with its session"""
username = ndb.StringProperty(indexed=False, required=True)
password = ndb.StringProperty(indexed=False, required=True)
userid = ndb.IntegerProperty(indexed=False, required=True)
cookies = ndb.JsonProperty()
_memcache_timeout = 86400 # 1 day
def __repr__(self):
return "<Account username='{}' userid='{}' cookies=[{}]>".format(
self.username, self.userid, self.cookies and self.cookies.keys())
class Category(ndb.Model):
"""Represents category entry"""
title = ndb.StringProperty(indexed=False, required=True)
_memcache_timeout = 86400 # 1 day
class PersistentScalarValue(ndb.Expando):
"""Persistent scalar value that is stored in datastore"""
pass
| """All datastore models live in this module"""
import datetime
from google.appengine.ext import ndb
class Torrent(ndb.Model):
"""A main model for representing an individual Torrent entry."""
title = ndb.StringProperty(indexed=False, required=True)
btih = ndb.StringProperty(indexed=False, required=True) # Infohash
dt = ndb.DateTimeProperty(required=True) # Create/update time, as reported by tracker
nbytes = ndb.IntegerProperty(indexed=False, required=True) # Torrent data size, bytes
description = ndb.TextProperty(required=True)
_memcache_timeout = 2592000 # 30 days
class Account(ndb.Model):
"""Represents tracker user account along with its session"""
username = ndb.StringProperty(indexed=False, required=True)
password = ndb.StringProperty(indexed=False, required=True)
userid = ndb.IntegerProperty(indexed=False, required=True)
cookies = ndb.JsonProperty()
_memcache_timeout = 86400 # 1 day
def __repr__(self):
return "<Account username='{}' userid='{}' cookies=[{}]>".format(
self.username, self.userid, self.cookies and self.cookies.keys())
class Category(ndb.Model):
"""Represents category entry"""
title = ndb.StringProperty(indexed=False, required=True)
_memcache_timeout = 86400 # 1 day
class PersistentScalarValue(ndb.Expando):
"""Persistent scalar value that is stored in datastore"""
pass
| Python | 0 |
fbf61270d3356e0841e7a990cdc6f6224dbba143 | Worked around an exception: FieldError | planetstack/dependency_walker.py | planetstack/dependency_walker.py | #!/usr/bin/python
import os
import imp
from planetstack.config import Config
import inspect
import time
import traceback
import commands
import threading
import json
import pdb
from core.models import *
missing_links={}
try:
dep_data = open(Config().dependency_graph).read()
except:
dep_data = open('/opt/planetstack/model-deps').read()
dependencies = json.loads(dep_data)
inv_dependencies = {}
for k, lst in dependencies.items():
for v in lst:
try:
inv_dependencies[v].append(k)
except KeyError:
inv_dependencies[v]=[k]
def plural(name):
    """Naive English plural: append 'es' after a trailing 's', else 's'."""
    suffix = 'es' if name.endswith('s') else 's'
    return name + suffix
def walk_deps(fn, object):
model = object.__class__.__name__
try:
deps = dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def walk_inv_deps(fn, object):
model = object.__class__.__name__
try:
deps = inv_dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def __walk_deps(fn, object, deps):
    """Apply *fn* to every object related to *object* via the models in *deps*.

    For each dependency the relation attribute is tried in singular form
    first, then pluralised; unresolvable links are reported once, tracked
    in the module-level ``missing_links`` dict.  *fn* is invoked as
    fn(peer, object) for every related instance found.
    """
    model = object.__class__.__name__
    for dep in deps:
        #print "Checking dep %s"%dep
        peer=None
        link = dep.lower()
        try:
            peer = getattr(object, link)
        except AttributeError:
            # Reverse relations are commonly exposed under plural names.
            link = plural(link)
            try:
                peer = getattr(object, link)
            except AttributeError:
                # Warn only the first time a given model.link pair fails.
                if not missing_links.has_key(model+'.'+link):
                    print "Model %s missing link for dependency %s"%(model, link)
                    missing_links[model+'.'+link]=True
        if (peer):
            try:
                # A related manager: expand to all related objects.
                peer_objects = peer.all()
            except AttributeError:
                # A plain related object: wrap in a one-element list.
                peer_objects = [peer]
            except:
                # NOTE(review): bare except silently drops errors raised
                # by .all() -- confirm this best-effort behaviour is wanted.
                peer_objects = []
            for o in peer_objects:
                fn(o, object)
                # Uncomment the following line to enable recursion
                # walk_inv_deps(fn, o)
def p(x):
print x,x.__class__.__name__
return
def main():
#pdb.set_trace()
import django
django.setup()
s = Site.objects.filter(login_base='onlab')
#pdb.set_trace()
walk_inv_deps(p,s[0])
if __name__=='__main__':
main()
| #!/usr/bin/python
import os
import imp
from planetstack.config import Config
import inspect
import time
import traceback
import commands
import threading
import json
import pdb
from core.models import *
missing_links={}
try:
dep_data = open(Config().dependency_graph).read()
except:
dep_data = open('/opt/planetstack/model-deps').read()
dependencies = json.loads(dep_data)
inv_dependencies = {}
for k, lst in dependencies.items():
for v in lst:
try:
inv_dependencies[v].append(k)
except KeyError:
inv_dependencies[v]=[k]
def plural(name):
if (name.endswith('s')):
return name+'es'
else:
return name+'s'
def walk_deps(fn, object):
model = object.__class__.__name__
try:
deps = dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def walk_inv_deps(fn, object):
model = object.__class__.__name__
try:
deps = inv_dependencies[model]
except:
deps = []
__walk_deps(fn, object, deps)
def __walk_deps(fn, object, deps):
model = object.__class__.__name__
for dep in deps:
#print "Checking dep %s"%dep
peer=None
link = dep.lower()
try:
peer = getattr(object, link)
except AttributeError:
link = plural(link)
try:
peer = getattr(object, link)
except AttributeError:
if not missing_links.has_key(model+'.'+link):
print "Model %s missing link for dependency %s"%(model, link)
missing_links[model+'.'+link]=True
if (peer):
try:
peer_objects = peer.all()
except:
peer_objects = [peer]
for o in peer_objects:
fn(o, object)
# Uncomment the following line to enable recursion
# walk_inv_deps(fn, o)
def p(x):
print x,x.__class__.__name__
return
def main():
#pdb.set_trace()
import django
django.setup()
s = Site.objects.filter(login_base='onlab')
#pdb.set_trace()
walk_inv_deps(p,s[0])
if __name__=='__main__':
main()
| Python | 0.999198 |
a0eab53b1e810bb3b4f1a3887ad3be5d755de0d9 | bump v0.8.9 | steam/__init__.py | steam/__init__.py | __version__ = "0.8.9"
__author__ = "Rossen Georgiev"
version_info = (0, 8, 9)
from steam.steamid import SteamID
from steam.globalid import GlobalID
from steam.webapi import WebAPI
from steam.webauth import WebAuth, MobileWebAuth
# proxy object
# avoids importing steam.enums.emsg unless it's needed
class SteamClient(object):
    """Lazy-import stand-in for the real ``steam.client.SteamClient``.

    Instantiating this class (or a subclass listing it as the first base)
    defers the heavyweight ``steam.client`` import until the first
    construction, then returns an instance of the real client class.
    """
    def __new__(cls, *args, **kwargs):
        # Import here, not at module level, to keep ``import steam`` cheap.
        from steam.client import SteamClient as SC
        bases = cls.__bases__
        if bases != (object, ):
            # Subclass case: graft the user's extra bases onto the real
            # client so their mixins still apply.
            if bases[0] != SteamClient:
                raise ValueError("SteamClient needs to be the first base for custom classes")
            SC = type("SteamClient", (SC,) + bases[1:], {})
        return SC(*args, **kwargs)
| __version__ = "0.8.8"
__author__ = "Rossen Georgiev"
version_info = (0, 8, 8)
from steam.steamid import SteamID
from steam.globalid import GlobalID
from steam.webapi import WebAPI
from steam.webauth import WebAuth, MobileWebAuth
# proxy object
# avoids importing steam.enums.emsg unless it's needed
class SteamClient(object):
def __new__(cls, *args, **kwargs):
from steam.client import SteamClient as SC
bases = cls.__bases__
if bases != (object, ):
if bases[0] != SteamClient:
raise ValueError("SteamClient needs to be the first base for custom classes")
SC = type("SteamClient", (SC,) + bases[1:], {})
return SC(*args, **kwargs)
| Python | 0.000002 |
318589d6a6d2536f2097a5e60fafe019697da4c3 | fix tests - fax server cares for TO: in email not user... | pimail.py | pimail.py | import web
import json
import random
from jinja2 import Template
import urllib
import subprocess
import shlex
import settings
" Load Data "
with open("data.json") as f:
meps = json.load(f)
total_score = sum((i['score'] for i in meps))
def weighted_choice(a):
    """Pick an element of *a* with probability proportional to its 'score'.

    Walks the cumulative score until it passes a point drawn uniformly on
    [0, total_score] (the module-level total).  NOTE(review): if the draw
    lands exactly on the upper bound, or *a* is empty, the loop falls
    through and False is returned -- callers must handle that sentinel.
    """
    r = random.uniform(0,total_score)
    n = 0
    for c in a:
        n = n + c['score']
        if n>r:
            return c
    return False
def unquote(a):
return (a[0],unicode(urllib.unquote_plus(a[1]).decode("utf-8")))
def decode_args(a):
return dict((unquote(i.split("=")) for i in a.split("&")))
def get_mep_by_id(id):
    """Look up a MEP record by numeric id; None when no record matches."""
    return next((m for m in meps if m['id'] == int(id)), None)
class Fax:
    """ Handle the Fax Widget """
    def GET(self):
        """Render the fax widget for a weighted-random MEP."""
        web.header("Content-Type", "text/html;charset=utf-8")
        with open("fax.tmpl") as f:
            template = Template(f.read().decode("utf-8"))
        m = weighted_choice(meps)
        return template.render(m)
    def POST(self):
        """Send the fax via the e-mail-to-fax gateway, then render a
        confirmation page."""
        args=decode_args(web.data())
        m = get_mep_by_id(args['id'])
        if settings.TEST:
            # Test mode: route to the gateway's test number instead of
            # the MEP's real fax line.
            fax = '100'
        else:
            # Normalise the number for the gateway address: strip spaces
            # and rewrite the international "+" prefix as "00".
            fax = m[settings.FAX_FIELD].replace(" ","").replace("+","00")
        with open("fax-out.tmpl") as f:
            template = Template(f.read().decode("utf-8"))
        data = {"body": args['body'],
                "from": settings.FROM,
                "to": "%s@%s" % (fax, settings.FAX_GATEWAY),
                }
        a = shlex.split(settings.SENDMAIL)
        " add the recipient as args "
        a.append("%s@%s" % (fax,settings.FAX_GATEWAY))
        # Pipe the rendered fax e-mail into the sendmail command's stdin.
        p = subprocess.Popen(a,
                stdin=subprocess.PIPE)
        p.communicate(template.render(data).encode("utf-8"))
        with open("fax-sent.tmpl") as f:
            template = Template(f.read().decode("utf-8"))
        return template.render(m)
class mail:
""" Handle Requests for Mail """
def GET(self):
""" Handle GET Requests """
web.header("Content-Type", "text/html;charset=utf-8")
with open("mail.tmpl") as f:
template = Template(f.read().decode("utf-8"))
m = weighted_choice(meps)
return template.render(m)
urls = ('/widget/', 'mail',
'/widget/fax/', 'Fax')
app = web.application(urls,globals())
if __name__ == "__main__":
app.run()
| import web
import json
import random
from jinja2 import Template
import urllib
import subprocess
import shlex
import settings
" Load Data "
with open("data.json") as f:
meps = json.load(f)
total_score = sum((i['score'] for i in meps))
def weighted_choice(a):
""" Pick a MEP based on the score weight """
r = random.uniform(0,total_score)
n = 0
for c in a:
n = n + c['score']
if n>r:
return c
return False
def unquote(a):
return (a[0],unicode(urllib.unquote_plus(a[1]).decode("utf-8")))
def decode_args(a):
return dict((unquote(i.split("=")) for i in a.split("&")))
def get_mep_by_id(id):
for m in meps:
if m['id']==int(id):
return m
return None
class Fax:
""" Handle the Fax Widget """
def GET(self):
""" display the fax widget """
web.header("Content-Type", "text/html;charset=utf-8")
with open("fax.tmpl") as f:
template = Template(f.read().decode("utf-8"))
m = weighted_choice(meps)
return template.render(m)
def POST(self):
"send out the fax"
args=decode_args(web.data())
m = get_mep_by_id(args['id'])
fax = m[settings.FAX_FIELD].replace(" ","").replace("+","00")
with open("fax-out.tmpl") as f:
template = Template(f.read().decode("utf-8"))
data = {"body": args['body'],
"from": settings.FROM,
"to": "%s@%s" % (fax, settings.FAX_GATEWAY),
}
a = shlex.split(settings.SENDMAIL)
" add the recipient as args "
if settings.TEST:
fax = '100'
a.append("%s@%s" % (fax,settings.FAX_GATEWAY))
p = subprocess.Popen(a,
stdin=subprocess.PIPE)
p.communicate(template.render(data).encode("utf-8"))
with open("fax-sent.tmpl") as f:
template = Template(f.read().decode("utf-8"))
return template.render(m)
class mail:
""" Handle Requests for Mail """
def GET(self):
""" Handle GET Requests """
web.header("Content-Type", "text/html;charset=utf-8")
with open("mail.tmpl") as f:
template = Template(f.read().decode("utf-8"))
m = weighted_choice(meps)
return template.render(m)
urls = ('/widget/', 'mail',
'/widget/fax/', 'Fax')
app = web.application(urls,globals())
if __name__ == "__main__":
app.run()
| Python | 0 |
be6f36311fdec93bca1f26672c1c3cca02d6d203 | Now is executable | pypipe.py | pypipe.py | #!/usr/bin/env python2
import argparse
from pypipe.formats import *
from pypipe.utils import run_pipeline, generate_pipeline_graph
_parser = argparse.ArgumentParser(
description="Bioinformatics pipelines framework")
_parser.add_argument('pipeline', help='name of pipeline file')
_parser.add_argument('--draw', action='store_true',
help='draw pipeline to PNG')
_parser.add_argument('--run', action='store', nargs=1, metavar='NODE',
help='run pipeline')
_args = _parser.parse_args()
execfile(_args.pipeline)
if _args.draw:
from pypipe.utils import generate_pipeline_graph
generate_pipeline_graph(_args.pipeline)
if _args.run:
from pypipe.utils import run_pipeline
run_pipeline(_args.pipeline, eval(_args.run[0]))
| import argparse
from pypipe.formats import *
from pypipe.utils import run_pipeline, generate_pipeline_graph
_parser = argparse.ArgumentParser(
description="Bioinformatics pipelines framework")
_parser.add_argument('pipeline', help='name of pipeline file')
_parser.add_argument('--draw', action='store_true',
help='draw pipeline to PNG')
_parser.add_argument('--run', action='store', nargs=1, metavar='NODE',
help='run pipeline')
_args = _parser.parse_args()
execfile(_args.pipeline)
if _args.draw:
from pypipe.utils import generate_pipeline_graph
generate_pipeline_graph(_args.pipeline)
if _args.run:
from pypipe.utils import run_pipeline
run_pipeline(_args.pipeline, eval(_args.run[0]))
| Python | 0.998662 |
6588ac0990f635a84127df3c125130d2379746c3 | Fix nodereseat false success message | confluent_server/confluent/plugins/hardwaremanagement/enclosure.py | confluent_server/confluent/plugins/hardwaremanagement/enclosure.py | # Copyright 2017 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import confluent.core as core
import confluent.messages as msg
import pyghmi.exceptions as pygexc
import confluent.exceptions as exc
def update(nodes, element, configmanager, inputdata):
    """Reseat each node's bay via its enclosure manager.

    Yields confluent message objects: a per-node error for nodes lacking
    enclosure attributes or whose manager is unsupported/unreachable, and
    whatever the manager's reseat_bay handler yields otherwise.
    """
    # Fetch both enclosure attributes for all nodes in a single lookup.
    emebs = configmanager.get_node_attributes(
        nodes, (u'enclosure.manager', u'enclosure.bay'))
    for node in nodes:
        try:
            em = emebs[node]['enclosure.manager']['value']
            eb = emebs[node]['enclosure.bay']['value']
        except KeyError:
            # Either attribute missing: reseat cannot be routed anywhere.
            yield msg.ConfluentNodeError(
                node,
                'Reseat is only supported on servers in an enclosure, and '
                'with enclosure.manager and enclosure.bay defined')
            continue
        try:
            # Delegate to the enclosure manager's reseat handler and pass
            # its responses through verbatim.
            for rsp in core.handle_path(
                    '/nodes/{0}/_enclosure/reseat_bay'.format(em),
                    'update', configmanager,
                    inputdata={'reseat': int(eb)}):
                yield rsp
        except pygexc.UnsupportedFunctionality as uf:
            yield msg.ConfluentNodeError(node, str(uf))
        except exc.TargetEndpointUnreachable as uf:
            # Manager is defined but cannot be reached right now.
            yield msg.ConfluentNodeError(node, str(uf))
| # Copyright 2017 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import confluent.core as core
import confluent.messages as msg
import pyghmi.exceptions as pygexc
def update(nodes, element, configmanager, inputdata):
emebs = configmanager.get_node_attributes(
nodes, (u'enclosure.manager', u'enclosure.bay'))
for node in nodes:
try:
em = emebs[node]['enclosure.manager']['value']
eb = emebs[node]['enclosure.bay']['value']
except KeyError:
yield msg.ConfluentNodeError(
node,
'Reseat is only supported on servers in an enclosure, and '
'with enclosure.manager and enclosure.bay defined')
continue
try:
for rsp in core.handle_path(
'/nodes/{0}/_enclosure/reseat_bay'.format(em),
'update', configmanager,
inputdata={'reseat': int(eb)}):
yield rsp
except pygexc.UnsupportedFunctionality as uf:
yield msg.ConfluentNodeError(node, str(uf))
| Python | 0 |
03484fa3b9349df6a8310e25a55d9c372f2743dd | Fix the signing servlet | sydent/http/servlets/blindlysignstuffservlet.py | sydent/http/servlets/blindlysignstuffservlet.py | # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging

import signedjson.key
import signedjson.sign
from twisted.web.resource import Resource

from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.auth import authIfV2
from sydent.http.servlets import get_args, jsonwrap, send_cors, MatrixRestError
class BlindlySignStuffServlet(Resource):
    """Signs {mxid, sender, token} triples for third-party invites.

    The caller supplies an ed25519 private key, an invite token and an
    mxid; the response is the triple signed with that key under this
    server's name.
    """
    isLeaf = True

    def __init__(self, syd):
        self.sydent = syd
        self.server_name = syd.server_name
        self.tokenStore = JoinTokenStore(syd)

    @jsonwrap
    def render_POST(self, request):
        """Sign the supplied mxid/token pair; 404 for unknown tokens."""
        send_cors(request)
        authIfV2(self.sydent, request)
        args = get_args(request, ("private_key", "token", "mxid"))
        private_key_base64 = args['private_key']
        token = args['token']
        mxid = args['mxid']
        sender = self.tokenStore.getSenderForToken(token)
        if sender is None:
            raise MatrixRestError(404, "M_UNRECOGNIZED", "Didn't recognize token")
        to_sign = {
            "mxid": mxid,
            "sender": sender,
            "token": token,
        }
        try:
            private_key = signedjson.key.decode_signing_key_base64(
                "ed25519",
                "0",
                private_key_base64
            )
            signed = signedjson.sign.sign_json(
                to_sign,
                self.server_name,
                private_key
            )
        except Exception:
            # Bugfix: this was a bare "except:" referencing an undefined
            # "logger" name, so the handler itself raised NameError and
            # masked the real signing failure.
            logging.getLogger(__name__).exception("signing failed")
            raise MatrixRestError(500, "M_UNKNOWN", "Internal Server Error")
        return signed

    @jsonwrap
    def render_OPTIONS(self, request):
        """Answer CORS preflight requests with an empty 200."""
        send_cors(request)
        request.setResponseCode(200)
        return {}
| # -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import get_args, jsonwrap, send_cors, MatrixRestError
from sydent.http.auth import authIfV2
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
@jsonwrap
def render_POST(self, request):
send_cors(request)
authIfV2(self.sydent, request)
args = get_args(request, ("private_key", "token", "mxid"))
private_key_base64 = args['private_key']
token = args['token']
mxid = args['mxid']
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
raise MatrixRestError(404, "M_UNRECOGNIZED", "Didn't recognize token")
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
logger.exception("signing failed")
raise MatrixRestError(500, "M_UNKNOWN", "Internal Server Error")
return signed
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
| Python | 0 |
b371a0e0b1a334de044c811515bce46377c886df | fix for Django==2.2 | dj_anonymizer/anonymizer.py | dj_anonymizer/anonymizer.py | import django
from dj_anonymizer.conf import settings
from dj_anonymizer.utils import import_if_exist
if django.__version__ < '2.2':
try:
from django_bulk_update.helper import bulk_update
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Django %s does not have native support for bulk_update and "
"django_bulk_update is not installed""" % django.__version__
)
class Anonymizer:
    """Drives anonymization: fakes fields on some models, deletes others.

    Class-attribute hooks (populated by the imported per-app modules):
      anonym_models -- anonymization specs keyed by model dotted path
      clean_models  -- querysets whose rows are deleted outright
      skip_models   -- model dotted paths deliberately left untouched
    """
    anonym_models = {}
    clean_models = {}
    skip_models = []
    def __init__(self, soft_mode=True):
        """Import anonymization modules; when soft_mode is False, raise
        LookupError for any installed model not assigned to a list."""
        models_set = set()
        # 'base' is imported first -- used for django contrib.auth.models,
        # or as a single file defining all models to anonymize.
        import_if_exist('base')
        for app in django.apps.apps.get_app_configs():
            models_set.update(
                model.__module__ + '.' + model.__name__
                for model in app.get_models()
            )
            import_if_exist(app.name)
        all_models = set(
            self.skip_models +
            list(self.anonym_models.keys()) +
            list(self.clean_models.keys())
        )
        if not soft_mode and not models_set.issubset(all_models):
            raise LookupError(
                'You did not set those models to any list: {}'.format(
                    list(models_set.difference(all_models))))
    def anonymize(self):
        """Overwrite the registered fields of every anonymized model with
        freshly generated fake values, selecting and updating in batches."""
        print('Updating started')
        for anonym_cls in list(self.anonym_models.values()):
            if not anonym_cls.get_fields_names():
                continue
            # Only load the columns that will be rewritten.
            queryset = anonym_cls.Meta.queryset.only(
                *anonym_cls.get_fields_names()
            )
            print('\nGenerating fake values for model "{}"'.format(
                queryset.model.__name__
            ))
            i = 0
            total = queryset.count()
            # Slice boundaries: 0, BATCH, 2*BATCH, ..., None (the final
            # None slice picks up the remainder).  NOTE(review): the j=0
            # iteration yields an empty [0:0] slice -- one wasted query.
            for j in list(range(0, total,
                                settings.ANONYMIZER_SELECT_BATCH_SIZE)) + [None]:
                subset = queryset.order_by('pk')[i:j]
                for obj in subset:
                    i += 1
                    # Each spec attribute is a generator of fake values.
                    for name in anonym_cls.get_fields_names():
                        setattr(obj, name, next(
                            getattr(anonym_cls, name))
                        )
                # NOTE(review): lexicographic string comparison -- e.g.
                # '2.10' < '2.2' is True; fine for 2.x but fragile.
                if django.__version__ < '2.2':
                    bulk_update(
                        subset,
                        batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
                        update_fields=anonym_cls.get_fields_names()
                    )
                else:
                    subset.model.objects.bulk_update(
                        subset,
                        anonym_cls.get_fields_names(),
                        batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
                    )
        print('\n\nUpdating finished')
    def clean(self):
        """Delete all rows of every queryset registered in clean_models."""
        print('\nCleaning started\n')
        for queryset in self.clean_models.values():
            print('Cleaning "{}" ...'.format(queryset.model.__name__))
            queryset.delete()
        print('\nCleaning finished')
| import django
from dj_anonymizer.conf import settings
from dj_anonymizer.utils import import_if_exist
if django.__version__ < '2.2':
try:
from django_bulk_update.helper import bulk_update
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Django %s does not have native support for bulk_update and "
"django_bulk_update is not installed""" % django.__version__
)
class Anonymizer:
anonym_models = {}
clean_models = {}
skip_models = []
def __init__(self, soft_mode=True):
models_set = set()
# this for django contrib.auth.models or can be used
# as single file for defining all models to anonymize
import_if_exist('base')
for app in django.apps.apps.get_app_configs():
models_set.update(
model.__module__ + '.' + model.__name__
for model in app.get_models()
)
import_if_exist(app.name)
all_models = set(
self.skip_models +
list(self.anonym_models.keys()) +
list(self.clean_models.keys())
)
if not soft_mode and not models_set.issubset(all_models):
raise LookupError(
'You did not set those models to any list: {}'.format(
list(models_set.difference(all_models))))
def anonymize(self):
print('Updating started')
for anonym_cls in list(self.anonym_models.values()):
if not anonym_cls.get_fields_names():
continue
queryset = anonym_cls.Meta.queryset.only(
*anonym_cls.get_fields_names()
)
print('\nGenerating fake values for model "{}"'.format(
queryset.model.__name__
))
i = 0
total = queryset.count()
for j in list(range(0, total,
settings.ANONYMIZER_SELECT_BATCH_SIZE)) + [None]:
subset = queryset.order_by('pk')[i:j]
for obj in subset:
i += 1
for name in anonym_cls.get_fields_names():
setattr(obj, name, next(
getattr(anonym_cls, name))
)
if django.__version__ <= '2.2':
bulk_update(
subset,
batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
update_fields=anonym_cls.get_fields_names()
)
else:
subset.model.objects.bulk_update(
subset,
anonym_cls.get_fields_names(),
batch_size=settings.ANONYMIZER_UPDATE_BATCH_SIZE,
)
print('\n\nUpdating finished')
def clean(self):
print('\nCleaning started\n')
for queryset in self.clean_models.values():
print('Cleaning "{}" ...'.format(queryset.model.__name__))
queryset.delete()
print('\nCleaning finished')
| Python | 0.000031 |
1b1fb03626475a0e32998e108a6f974b567cd2c4 | Fix bugs: 1. fix pool not working. 2. fix autocommit setting not working in SQLAlchemy proxied connection. | django_postgrespool/base.py | django_postgrespool/base.py | # -*- coding: utf-8 -*-
import logging
from functools import partial
from sqlalchemy import event
from sqlalchemy.pool import manage, QueuePool
from psycopg2 import InterfaceError, ProgrammingError, OperationalError
# from django.db import transaction
from django.conf import settings
from django.db.backends.postgresql_psycopg2.base import *
from django.db.backends.postgresql_psycopg2.base import DatabaseWrapper as Psycopg2DatabaseWrapper
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as Psycopg2DatabaseCreation
POOL_SETTINGS = 'DATABASE_POOL_ARGS'
# DATABASE_POOL_ARGS should be something like:
# {'max_overflow':10, 'pool_size':5, 'recycle':300}
pool_args = getattr(settings, POOL_SETTINGS, {})
db_pool = manage(Database, **pool_args)
log = logging.getLogger('z.pool')
def _log(message, *args):
log.debug(message)
# Only hook up the listeners if we are in debug mode.
if settings.DEBUG:
event.listen(QueuePool, 'checkout', partial(_log, 'retrieved from pool'))
event.listen(QueuePool, 'checkin', partial(_log, 'returned to pool'))
event.listen(QueuePool, 'connect', partial(_log, 'new connection'))
def is_disconnect(e, connection, cursor):
    """
    Connection state check from SQLAlchemy:
    https://bitbucket.org/sqlalchemy/sqlalchemy/src/tip/lib/sqlalchemy/dialects/postgresql/psycopg2.py
    """
    message = str(e)
    if isinstance(e, OperationalError):
        # these error messages from libpq: interfaces/libpq/fe-misc.c.
        # TODO: these are sent through gettext in libpq and we can't
        # check within other locales - consider using connection.closed
        libpq_markers = (
            'terminating connection',
            'closed the connection',
            'connection not open',
            'could not receive data from server',
        )
        return any(marker in message for marker in libpq_markers)
    if isinstance(e, InterfaceError):
        # psycopg2 client errors, psycopg2/conenction.h, psycopg2/cursor.h
        client_markers = ('connection already closed', 'cursor already closed')
        return any(marker in message for marker in client_markers)
    if isinstance(e, ProgrammingError):
        # not sure where this path is originally from, it may
        # be obsolete. It really says "losed", not "closed".
        return "closed the connection unexpectedly" in message
    return False
class DatabaseCreation(Psycopg2DatabaseCreation):
    """Test-database creation that is aware of the connection pool."""
    def destroy_test_db(self, *args, **kw):
        """Ensure connection pool is disposed before trying to drop database."""
        self.connection._dispose()
        super(DatabaseCreation, self).destroy_test_db(*args, **kw)
class DatabaseWrapper(Psycopg2DatabaseWrapper):
    """SQLAlchemy FTW.

    Django postgres backend whose connections come from an SQLAlchemy
    QueuePool; self.connection is the pool's proxy object.
    """
    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Pool-aware test-db creation (see DatabaseCreation above).
        self.creation = DatabaseCreation(self)
    def _commit(self):
        # Only touch the connection if it is still usable; a dead pooled
        # connection would otherwise raise here.
        if self.connection is not None and self.is_usable():
            with self.wrap_database_errors:
                return self.connection.commit()
    def _rollback(self):
        if self.connection is not None and self.is_usable():
            with self.wrap_database_errors:
                return self.connection.rollback()
    def _dispose(self):
        """Dispose of the pool for this instance, closing all connections."""
        self.close()
        # _DBProxy.dispose doesn't actually call dispose on the pool
        conn_params = self.get_connection_params()
        key = db_pool._serialize(**conn_params)
        try:
            pool = db_pool.pools[key]
        except KeyError:
            pass
        else:
            pool.dispose()
            del db_pool.pools[key]
    def get_new_connection(self, conn_params):
        # get new connection through pool, not creating a new one outside.
        connection = db_pool.connect(**conn_params)
        return connection
    def _set_autocommit(self, autocommit):
        # fix autocommit setting not working in proxied connection:
        # self.connection is the pool proxy, so the raw psycopg2 connection
        # is one level down at self.connection.connection.
        # NOTE(review): the "psycopg2" name is expected to arrive via the
        # wildcard import of the postgresql_psycopg2 base module -- confirm.
        with self.wrap_database_errors:
            if self.psycopg2_version >= (2, 4, 2):
                self.connection.connection.autocommit = autocommit
            else:
                # Older psycopg2 has no autocommit attribute; emulate it
                # with the isolation level instead.
                if autocommit:
                    level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
                else:
                    level = self.isolation_level
                self.connection.connection.set_isolation_level(level)
| # -*- coding: utf-8 -*-
import logging
from functools import partial
from sqlalchemy import event
from sqlalchemy.pool import manage, QueuePool
from psycopg2 import InterfaceError, ProgrammingError, OperationalError
# from django.db import transaction
from django.conf import settings
from django.db.backends.postgresql_psycopg2.base import *
from django.db.backends.postgresql_psycopg2.base import DatabaseWrapper as Psycopg2DatabaseWrapper
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as Psycopg2DatabaseCreation
POOL_SETTINGS = 'DATABASE_POOL_ARGS'
# DATABASE_POOL_ARGS should be something like:
# {'max_overflow':10, 'pool_size':5, 'recycle':300}
pool_args = getattr(settings, POOL_SETTINGS, {})
db_pool = manage(Database, **pool_args)
log = logging.getLogger('z.pool')
def _log(message, *args):
log.debug(message)
# Only hook up the listeners if we are in debug mode.
if settings.DEBUG:
event.listen(QueuePool, 'checkout', partial(_log, 'retrieved from pool'))
event.listen(QueuePool, 'checkin', partial(_log, 'returned to pool'))
event.listen(QueuePool, 'connect', partial(_log, 'new connection'))
def is_disconnect(e, connection, cursor):
"""
Connection state check from SQLAlchemy:
https://bitbucket.org/sqlalchemy/sqlalchemy/src/tip/lib/sqlalchemy/dialects/postgresql/psycopg2.py
"""
if isinstance(e, OperationalError):
# these error messages from libpq: interfaces/libpq/fe-misc.c.
# TODO: these are sent through gettext in libpq and we can't
# check within other locales - consider using connection.closed
return 'terminating connection' in str(e) or \
'closed the connection' in str(e) or \
'connection not open' in str(e) or \
'could not receive data from server' in str(e)
elif isinstance(e, InterfaceError):
# psycopg2 client errors, psycopg2/conenction.h, psycopg2/cursor.h
return 'connection already closed' in str(e) or \
'cursor already closed' in str(e)
elif isinstance(e, ProgrammingError):
# not sure where this path is originally from, it may
# be obsolete. It really says "losed", not "closed".
return "closed the connection unexpectedly" in str(e)
else:
return False
class DatabaseCreation(Psycopg2DatabaseCreation):
def destroy_test_db(self, *args, **kw):
"""Ensure connection pool is disposed before trying to drop database."""
self.connection._dispose()
super(DatabaseCreation, self).destroy_test_db(*args, **kw)
class DatabaseWrapper(Psycopg2DatabaseWrapper):
"""SQLAlchemy FTW."""
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.creation = DatabaseCreation(self)
def _commit(self):
if self.connection is not None and self.is_usable():
with self.wrap_database_errors:
return self.connection.commit()
def _rollback(self):
if self.connection is not None and self.is_usable():
with self.wrap_database_errors:
return self.connection.rollback()
def _dispose(self):
"""Dispose of the pool for this instance, closing all connections."""
self.close()
# _DBProxy.dispose doesn't actually call dispose on the pool
conn_params = self.get_connection_params()
key = db_pool._serialize(**conn_params)
try:
pool = db_pool.pools[key]
except KeyError:
pass
else:
pool.dispose()
del db_pool.pools[key]
| Python | 0 |
783f7a5d17b3db83e1f27ad3bebb4c165c4e66ca | Fix convert to support python 2 and python 3 | django_settings/keymaker.py | django_settings/keymaker.py | import sys
class KeyMaker(object):
    """Builds cache-key strings of the form prefix:method:args:kwargs."""

    def __init__(self, prefix):
        self.prefix = prefix

    def convert(self, arg):
        """Return *arg* as a text key fragment.

        On Python 3 this is simply str(arg); on Python 2, unicode values
        are encoded to UTF-8 first.
        """
        if sys.version_info < (3,) and isinstance(arg, unicode):
            # Bugfix: this previously read django.settings.DEFAULT_CHARSET,
            # but "django" is never imported in this module, so the branch
            # raised NameError.  utf-8 is Django's DEFAULT_CHARSET default.
            return arg.encode("utf-8")
        return str(arg)

    def args_to_key(self, args):
        """Join the converted positional arguments with ':'."""
        return ":".join(map(self.convert, args))

    def kwargs_to_key(self, kwargs):
        """Join keyword arguments as converted key:value pairs with ':'."""
        return ":".join([
            "%s:%s" % (self.convert(k), self.convert(v))
            for k, v in kwargs.items()
        ])

    def make(self, method_name, args, kwargs):
        """Compose the full cache key for one method call."""
        key = ":".join((
            self.prefix,
            method_name,
            self.args_to_key(args),
            self.kwargs_to_key(kwargs),
        ))
        return key
| class KeyMaker(object):
def __init__(self, prefix):
self.prefix = prefix
def convert(self, arg):
return str(arg)
def args_to_key(self, args):
return ":".join(map(self.convert, args))
def kwargs_to_key(self, kwargs):
return ":".join([
"%s:%s" % (self.convert(k), self.convert(v))
for k, v in kwargs.items()
])
def make(self, method_name, args, kwargs):
key = ":".join((
self.prefix,
method_name,
self.args_to_key(args),
self.kwargs_to_key(kwargs),
))
return key
| Python | 0.999994 |
0225173efe5fcb0de78239f26a5eca9c4d7d7a6e | add url to match language session view | django_test/article/urls.py | django_test/article/urls.py | from django.conf.urls import patterns, include, url
# Article URL routes; view callables are resolved from the dotted paths.
urlpatterns = patterns('',
    url(r'^all/$', 'article.views.articles'),
    url(r'^get/(?P<article_id>\d+)/$', 'article.views.article'),
    # for session language: stores the chosen language in the session
    url(r'^language/(?P<language>[a-z\-]+)/$', 'article.views.language'),
) | from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^all/$', 'article.views.articles'),
url(r'^get/(?P<article_id>\d+)/$', 'article.views.article'),
) | Python | 0 |
0b6b236f2be92f408cce9a91bf0c8100c3ecbac0 | Switch to jpg | rodent.py | rodent.py | """
Rodent
Usage:
rodent.py capture [--until=<time>] [--folder=<folder>] [--interval=<interval>]
rodent.py make_video [--folder=<folder>]
rodent.py automate [--until=<time>] [--folder=<folder>] [--interval=<interval>]
Options:
-h --help Show this screen
--until=<time> Until when to record, needs to be a HH:MM format (ie 12:45)
--folder=<folder> The folder in which the pictures are stored [default: photos]
--interval=<interval> The interval between 2 photos [default: 1]
"""
import datetime
import os
import time
import sys
import cv2
from docopt import docopt
from PIL import Image
def clear_directory(folder):
    """Delete every file currently inside *folder* (the folder remains)."""
    for entry in os.listdir(folder):
        os.remove("{0}/{1}".format(folder, entry))
def start_camera(folder, interval, until=None):
    """
    Start taking pictures every interval.
    If until is specified, it will take pictures
    until that time is reached (24h format).
    Needs to be of the following format: HH:MM

    Clears *folder* first; each frame is written as <folder>/<ISO time>.jpg
    and recompressed with Pillow.  Python 2 code (print statement).
    """
    clear_directory(folder)
    camera = cv2.VideoCapture(0)
    # Filename template: folder / ISO timestamp (datetime's str form).
    filename = '%s/%s.jpg'
    number = 0
    if until:
        until_hour, until_minutes = until.split(':')
        until_hour = int(until_hour)
        until_minutes = int(until_minutes)
    while True:
        number += 1
        _, image = camera.read()
        now = datetime.datetime.now()
        print 'Taking picture number %d at %s' % (number, now.isoformat())
        # Tried [cv2.cv.CV_IMWRITE_PNG_COMPRESSION, 3] but still atrocious compression
        filepath = filename % (folder, now)
        cv2.imwrite(filepath, image)
        # Resave it with pillow to do a better compression
        img = Image.open(filepath)
        img.save(filepath, optimize=True, quality=80)
        if until:
            # Stop once the wall clock reaches (or passes) until.
            if now.hour > until_hour or (now.hour == until_hour and now.minute >= until_minutes):
                break
        time.sleep(interval)
    del(camera)
def make_video(folder):
    """Assemble every picture in *folder* into output.avi (XVID, 10 fps).

    Filenames are ISO timestamps, so a lexicographic sort is chronological.
    """
    # Bugfix: the listing previously hard-coded 'photos', ignoring the
    # folder argument used everywhere else in this function.
    filenames = sorted(os.listdir(folder))
    # All frames share the size of the first picture;
    # first_pic.shape is a (height, width, layers) tuple.
    first_pic = cv2.imread('%s/%s' % (folder, filenames[0]))
    height, width, _ = first_pic.shape
    # magic below, might need to change the codec for your own webcam
    fourcc = cv2.cv.CV_FOURCC(*'XVID')
    video = cv2.VideoWriter('output.avi', fourcc, 10, (width, height))
    for filename in filenames:
        video.write(cv2.imread('%s/%s' % (folder, filename)))
    video.release()
if __name__ == "__main__":
arguments = docopt(__doc__)
folder = arguments['--folder']
interval = int(arguments['--interval'])
until = arguments['--until']
if arguments['capture']:
start_camera(folder, interval, until)
elif arguments['make_video']:
make_video(folder)
elif arguments['automate']:
start_camera(folder, interval, until)
make_video(folder)
| """
Rodent
Usage:
rodent.py capture [--until=<time>] [--folder=<folder>] [--interval=<interval>]
rodent.py make_video [--folder=<folder>]
rodent.py automate [--until=<time>] [--folder=<folder>] [--interval=<interval>]
Options:
-h --help Show this screen
--until=<time> Until when to record, needs to be a HH:MM format (ie 12:45)
--folder=<folder> The folder in which the pictures are stored [default: photos]
--interval=<interval> The interval between 2 photos [default: 20]
"""
import datetime
import os
import time
import sys
import cv2
from docopt import docopt
def clear_directory(folder):
"""
Delete all the pics in the photos directory
"""
for filename in os.listdir(folder):
os.remove('%s/%s' % (folder, filename))
def start_camera(folder, interval, until=None):
"""
Start taking pictures every interval.
If until is specified, it will take pictures
until that time is reached (24h format).
Needs to be of the following format: HH:MM
"""
clear_directory(folder)
camera = cv2.VideoCapture(0)
filename = '%s/%s.png'
number = 0
if until:
until_hour, until_minutes = until.split(':')
until_hour = int(until_hour)
until_minutes = int(until_minutes)
while True:
number += 1
_, image = camera.read()
now = datetime.datetime.now()
print 'Taking picture number %d at %s' % (number, now.isoformat())
cv2.imwrite(filename % (folder, now), image)
if until:
if now.hour > until_hour or (now.hour == until_hour and now.minute >= until_minutes):
break
time.sleep(interval)
del(camera)
def make_video(folder):
# Sorting on dates, ISO ftw
filenames = sorted(os.listdir('photos'))
# Find out size of the pictures we're taking
#filename = '%s/%s.png'
first_pic = cv2.imread('%s/%s' % (folder, filenames[0]))
# first_pic.shape gives a tuple (height, width, layer)
height, width, _ = first_pic.shape
# magic below, might need to change the codec for your own webcam
fourcc = cv2.cv.CV_FOURCC(*'XVID')
video = cv2.VideoWriter('output.avi', fourcc, 10, (width, height))
for filename in filenames:
video.write(cv2.imread('%s/%s' % (folder, filename)))
video.release()
if __name__ == "__main__":
arguments = docopt(__doc__)
folder = arguments['--folder']
interval = int(arguments['--interval'])
until = arguments['--until']
if arguments['capture']:
start_camera(folder, interval, until)
elif arguments['make_video']:
make_video(folder)
elif arguments['automate']:
start_camera(folder, interval, until)
make_video(folder)
| Python | 0.000002 |
f8eb93f1845a7776c61a59bafc6fdeb689712aff | Add dialog title to example | examples/comp/ask_user_dialog.py | examples/comp/ask_user_dialog.py | """Example showing the Ask User dialog controls and overall usage."""
import fusionless as fu
dialog = fu.AskUserDialog("Example Ask User Dialog")
dialog.add_text("text", default="Default text value")
dialog.add_position("position", default=(0.2, 0.8))
dialog.add_slider("slider", default=0.5, min=-10, max=10)
dialog.add_screw("screw")
dialog.add_file_browse("file", default="C:/path/to/foo")
dialog.add_path_browse("path")
dialog.add_clip_browse("clip")
dialog.add_checkbox("checkbox", name="Do not check this!")
dialog.add_dropdown("dropdown", options=["A", "B", "C"])
dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"])
result = dialog.show()
if result is None:
# Dialog was cancelled
pass
else:
checked = result['checkbox']
if checked:
print("You sure are living on the edge!")
import pprint
pprint.pprint(result)
| """Example showing the Ask User dialog controls and overall usage."""
import fusionless as fu
dialog = fu.AskUserDialog()
dialog.add_text("text", default="Default text value")
dialog.add_position("position", default=(0.2, 0.8))
dialog.add_slider("slider", default=0.5, min=-10, max=10)
dialog.add_screw("screw")
dialog.add_file_browse("file", default="C:/path/to/foo")
dialog.add_path_browse("path")
dialog.add_clip_browse("clip")
dialog.add_checkbox("checkbox", name="Do not check this!")
dialog.add_dropdown("dropdown", options=["A", "B", "C"])
dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"])
result = dialog.show()
if result is None:
# Dialog was cancelled
pass
else:
checked = result['checkbox']
if checked:
print("You sure are living on the edge!")
import pprint
pprint.pprint(result)
| Python | 0 |
b971cd102e30f721feb50c934012eb9c26105186 | query input working. have empty input handled | runsql.py | runsql.py | #!/usr/bin/python
import urwid
import mainview
"""
NOTES
-----
This module builds the widget to allow the user to enter in their own SQL query
This module will also run the sql query and show a success message if it works
"""
class Qinfo:
    """Mutable holder for the SQL text the user typed and its run status."""
    def __init__(self):
        # Nothing entered and nothing executed yet.
        self.query_status = None
        self.query_text = None
def show_runsql(frame, body, user_info):
    """Build and return the 'run SQL' view widget.

    frame/body are the enclosing urwid widgets (the frame footer is used
    for success messages); user_info carries the open DB connection.
    """
    # Used to easily insert a blank line widget.
    blank = urwid.Divider()
    query_info = Qinfo()
    # Signal handler for text input: stores what the user typed.
    def edit_change_event(self, text):
        query_info.query_text = text
    # Signal handler for the Run button: executes the query and reports
    # success in the footer or the error text in the body.
    def run_btn_press(button):
        if query_info.query_text is not None:
            query_info.query_status = user_info.db_obj.runquery(user_info.db_conn, query_info.query_text)
            if query_info.query_status == 1:
                # Show success message.
                frame.footer = urwid.AttrWrap(urwid.Text(u" Query executed successfully"), 'header')
                # Reload main view; this updates the tables list if a table was created.
                mainview.show_main_view(frame, body, user_info)
            else:
                # runquery returned an error string -- display it.
                text_error.original_widget = urwid.AttrWrap( urwid.Text(query_info.query_status), 'error')
        else:
            # Bugfix: message previously read " You have enter in a query."
            text_error.original_widget = urwid.AttrWrap( urwid.Text(u" You have to enter a query."), 'error')
    # Static text shown above the edit box.
    text_1 = urwid.Text(u"Enter a SQL query to run below:")
    text_2 = urwid.Text(u"(The edit box supports multiple lines when you press enter)")
    text_error = urwid.AttrMap( urwid.Text(u""), 'body')
    # Multiline edit box wired to the change handler above.
    sql_edit = urwid.Edit(caption="", edit_text="", multiline=True)
    urwid.connect_signal(sql_edit, 'change', edit_change_event)
    sql_edit = urwid.AttrWrap(sql_edit, 'btnf', 'btn')
    # Run button.
    runsql_btn = urwid.AttrWrap( urwid.Button(u"Run", run_btn_press), 'btnf', 'btn')
    # Pile widget holding all of the main body widgets.
    runsql = urwid.WidgetPlaceholder(
        urwid.Pile([
            urwid.Padding(text_error, left=5, width = 50),
            blank,
            urwid.Padding(text_1, left=2),
            urwid.Padding(text_2, left=2),
            urwid.Padding( sql_edit, left=2, width=60),
            blank,
            urwid.Padding(runsql_btn, left=10, width=11)
        ]))
    return runsql
| #!/usr/bin/python
import urwid
import mainview
"""
NOTES
-----
This module builds the widget to allow the user to enter in their own SQL query
This module will also run the sql query and show a success message if it works
"""
class Qinfo:
def __init__(self):
query_text = ""
query_status = ""
def show_runsql(frame, body, user_info):
#used to easily insert a blank line widget
blank = urwid.Divider()
query_info = Qinfo()
#signal handler for text input, stores input information from user
def edit_change_event(self, text):
query_info.query_text = text
#signal handler for the run button
def run_btn_press(button):
if query_info.query_text != "":
query_info.query_status = user_info.db_obj.runquery(user_info.db_conn, query_info.query_text)
if query_info.query_status == 1:
#show success message
frame.footer = urwid.AttrWrap(urwid.Text(u" Query executed successfully"), 'header')
#reload main view. this updates tables list if table was created
mainview.show_main_view(frame, body, user_info)
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text(query_info.query_status), 'error')
else:
text_error.original_widget = urwid.AttrWrap( urwid.Text(u"You have enter in a query."), 'error')
#variables to hold text to show user for login view
text_1 = urwid.Text(u"Enter a SQL query to run below:")
text_2 = urwid.Text(u"(The edit box supports multiple lines when you press enter)")
text_error = urwid.AttrMap( urwid.Text(u""), 'body')
#setting up the edit input widgets for database name and password
sql_edit = urwid.Edit(caption="", edit_text="", multiline=True)
urwid.connect_signal(sql_edit, 'change', edit_change_event)
sql_edit = urwid.AttrWrap(sql_edit, 'btnf', 'btn')
#run button
runsql_btn = urwid.AttrWrap( urwid.Button(u"Run", run_btn_press), 'btnf', 'btn')
#This is the pile widget that holds all of the main body widgets
runsql = urwid.WidgetPlaceholder(
urwid.Pile([
urwid.Padding(text_error, left=5, width = 50),
blank,
urwid.Padding(text_1, left=2),
urwid.Padding(text_2, left=2),
urwid.Padding( sql_edit, left=2, width=60),
blank,
urwid.Padding(runsql_btn, left=10, width=11)
]))
return runsql
| Python | 0.999999 |
c73d8fe3f83fb245095cf8f45c15aa8ec1982143 | Update views.py | app/grandchallenge/groups/views.py | app/grandchallenge/groups/views.py | from dal import autocomplete
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import UserPassesTestMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.db.models import CharField, Q, Value
from django.db.models.functions import Concat
from django.utils.html import format_html
from django.views.generic import FormView
from guardian.mixins import (
LoginRequiredMixin,
PermissionRequiredMixin as ObjectPermissionRequiredMixin,
)
from guardian.shortcuts import get_objects_for_user
from grandchallenge.verifications.models import Verification
class UserGroupUpdateMixin(
    LoginRequiredMixin,
    ObjectPermissionRequiredMixin,
    SuccessMessageMixin,
    FormView,
):
    """Shared behaviour for views that add/remove a user from an object's group.

    Subclasses must provide ``obj`` (the object whose group is edited); the
    form is expected to expose ``add_or_remove_user`` and a ``role`` attribute.
    """
    # Raise PermissionDenied instead of redirecting to the login page.
    raise_exception = True
    def get_permission_object(self):
        # Object-level permissions are checked against the edited object.
        return self.obj
    @property
    def obj(self):
        # Subclasses must return the object whose user group is being updated.
        raise NotImplementedError
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Expose the edited object and the form's role to the template.
        context.update({"object": self.obj, "role": self.get_form().role})
        return context
    def get_success_url(self):
        return self.obj.get_absolute_url()
    def form_valid(self, form):
        # The form itself performs the group membership change.
        form.add_or_remove_user(obj=self.obj)
        return super().form_valid(form)
class UserAutocomplete(
    LoginRequiredMixin, UserPassesTestMixin, autocomplete.Select2QuerySetView
):
    """Select2 autocomplete endpoint for users, restricted to object admins.

    Only users who can change at least one algorithm, organization, archive,
    reader study, workstation or algorithm job may query this endpoint.
    """
    def test_func(self):
        allowed_perms = [
            "algorithms.change_algorithm",
            "organizations.change_organization",
            "archives.change_archive",
            "reader_studies.change_readerstudy",
            "workstations.change_workstation",
            "algorithms.change_job",
        ]
        # TODO reduce number of queries
        return any(
            get_objects_for_user(user=self.request.user, perms=perm,).exists()
            for perm in allowed_perms
        )
    def get_queryset(self):
        # Match the query against username, email, annotated full name,
        # or the verified email address.
        qs = (
            get_user_model()
            .objects.order_by("username")
            .exclude(username=settings.ANONYMOUS_USER_NAME)
            .annotate(
                full_name=Concat(
                    "first_name",
                    Value(" "),
                    "last_name",
                    output_field=CharField(),
                )
            )
            .select_related("verification", "user_profile")
        )
        if self.q:
            qs = qs.filter(
                Q(username__icontains=self.q)
                | Q(email__icontains=self.q)
                | Q(full_name__icontains=self.q)
                | Q(verification__email__icontains=self.q)
            )
        return qs
    def get_result_label(self, result):
        """Render one dropdown entry: mugshot, username and full name;
        verified users also get a check icon and their verified domain."""
        try:
            is_verified = result.verification.is_verified
        except Verification.DoesNotExist:
            is_verified = False
        if is_verified:
            return format_html(
                '<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
                " <b>{}</b> {} "
                '<i class="fas fa-user-check text-success"></i>'
                " Verified email address at {}",
                result.user_profile.get_mugshot_url(),
                result.get_username(),
                result.get_full_name().title(),
                result.verification.email.split("@")[1],
            )
        else:
            return format_html(
                '<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
                " <b>{}</b> {}",
                result.user_profile.get_mugshot_url(),
                result.get_username(),
                result.get_full_name().title(),
            )
| from dal import autocomplete
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import UserPassesTestMixin
from django.contrib.messages.views import SuccessMessageMixin
from django.db.models import CharField, Q, Value
from django.db.models.functions import Concat
from django.utils.html import format_html
from django.views.generic import FormView
from guardian.mixins import (
LoginRequiredMixin,
PermissionRequiredMixin as ObjectPermissionRequiredMixin,
)
from guardian.shortcuts import get_objects_for_user
from grandchallenge.verifications.models import Verification
class UserGroupUpdateMixin(
    LoginRequiredMixin,
    ObjectPermissionRequiredMixin,
    SuccessMessageMixin,
    FormView,
):
    """Shared behaviour for views that add/remove a user from an object's group.

    Subclasses must provide ``obj`` (the object whose group is edited); the
    form is expected to expose ``add_or_remove_user`` and a ``role`` attribute.
    """
    # Raise PermissionDenied instead of redirecting to the login page.
    raise_exception = True
    def get_permission_object(self):
        # Object-level permissions are checked against the edited object.
        return self.obj
    @property
    def obj(self):
        # Subclasses must return the object whose user group is being updated.
        raise NotImplementedError
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Expose the edited object and the form's role to the template.
        context.update({"object": self.obj, "role": self.get_form().role})
        return context
    def get_success_url(self):
        return self.obj.get_absolute_url()
    def form_valid(self, form):
        # The form itself performs the group membership change.
        form.add_or_remove_user(obj=self.obj)
        return super().form_valid(form)
class UserAutocomplete(
    LoginRequiredMixin, UserPassesTestMixin, autocomplete.Select2QuerySetView
):
    """Select2 autocomplete endpoint for users, restricted to object admins.

    Only users who can change at least one algorithm, organization, archive,
    reader study, workstation or algorithm job may query this endpoint.
    """
    def test_func(self):
        allowed_perms = [
            "algorithms.change_algorithm",
            "organizations.change_organization",
            "archives.change_archive",
            "reader_studies.change_readerstudy",
            "workstations.change_workstation",
            "algorithms.change_job",
        ]
        # TODO reduce number of queries
        return any(
            get_objects_for_user(user=self.request.user, perms=perm,).exists()
            for perm in allowed_perms
        )
    def get_queryset(self):
        # Match the query against username, email, annotated full name,
        # or the verified email address.
        qs = (
            get_user_model()
            .objects.order_by("username")
            .exclude(username=settings.ANONYMOUS_USER_NAME)
            .annotate(
                full_name=Concat(
                    "first_name",
                    Value(" "),
                    "last_name",
                    output_field=CharField(),
                )
            )
            .select_related("verification", "user_profile")
        )
        if self.q:
            qs = qs.filter(
                Q(username__icontains=self.q)
                | Q(email__icontains=self.q)
                | Q(full_name__icontains=self.q)
                | Q(verification__email__icontains=self.q)
            )
        return qs
    def get_result_label(self, result):
        """Render one dropdown entry: mugshot, username and full name;
        verified users also get a check icon and their verified domain."""
        try:
            is_verified = result.verification.is_verified
        except Verification.DoesNotExist:
            is_verified = False
        if is_verified:
            return format_html(
                '<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
                " <b>{}</b> {} "
                # BUG FIX: the <i> tag was previously left unclosed, which
                # corrupted the markup of everything rendered after it.
                '<i class="fas fa-user-check text-success"></i>'
                " Verified email address at {}",
                result.user_profile.get_mugshot_url(),
                result.get_username(),
                result.get_full_name().title(),
                result.verification.email.split("@")[1],
            )
        else:
            return format_html(
                '<img src="{}" width ="20" height ="20" style="vertical-align:top"> '
                " <b>{}</b> {}",
                result.user_profile.get_mugshot_url(),
                result.get_username(),
                result.get_full_name().title(),
            )
| Python | 0 |
077ea35c78b750d4e091f62d38fe7f42e0d685bb | add token filters | api/rest/viewsets/xtas.py | api/rest/viewsets/xtas.py |
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from api.rest.viewsets.articleset import ArticleSetViewSetMixin
from api.rest.viewsets.project import ProjectViewSetMixin
from api.rest.viewsets.article import ArticleViewSetMixin
from api.rest.mixins import DatatablesMixin
from amcat.tools.amcatxtas import ANALYSES, get_result
import json
class XTasViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, ArticleViewSetMixin, ViewSet):
    """Exposes raw xTas analysis results for a single article."""
    model_key = "xta"# HACK to get xtas in url. Sorry!
    def retrieve(self, request, *args, **kargs):
        # Return the stored xTas result for this article/plugin pair.
        aid = int(kargs['article'])
        plugin = kargs['pk']
        result = get_result(aid, plugin)
        return Response({"results" : result})
    def list(self, request, *args, **kargs):
        # List the available analysis modules.
        plugins = ANALYSES.__dict__
        return Response(plugins)
from rest_framework.serializers import Serializer
from amcat.models import Article, ArticleSet
from rest_framework.viewsets import ModelViewSet
import itertools
class ArticleXTasSerializer(Serializer):
    """Base serializer returning xTas analysis results for an article.

    The xTas module to run is taken from the ``module`` GET parameter.
    """
    @property
    def module(self):
        """Return the requested xTas module name, validating it exists."""
        module = self.context['request'].GET.get('module')
        if not module:
            raise Exception("Please specify the NLP/xTas module to use "
                            "with a module= GET parameter")
        elif not module in dir(ANALYSES):
            raise Exception("Unknown module: {module}".format(**locals()))
        return module
    def field_to_native(self, obj, field_name):
        result = super(ArticleXTasSerializer, self).field_to_native(obj, field_name)
        if field_name == "results":
            # flatting lists of tokens
            result = itertools.chain(*result)
        return result
    def to_native(self, article):
        # Fetch the cached xTas result and let the subclass turn it into rows.
        saf = get_result(article.pk, self.module)
        return list(self.get_xtas_results(article.pk, saf))
class ArticleLemmataSerializer(ArticleXTasSerializer):
    """Serializes xTas tokens (or source-attributed tokens) for articles.

    Any extra GET parameter whose name matches a token attribute acts as a
    filter: the token is dropped unless its value is among the given values.
    """
    @property
    def filter_pos(self):
        return self.context['request'].GET.get('pos1')
    def output_token(self, token):
        """Return True unless a GET parameter excludes this token."""
        for key, vals in self.context['request'].GET.iterlists():
            if key in token and token[key] not in vals:
                return False
        return True
    def get_xtas_results(self, aid, saf):
        # The 'sources' GET flag switches between token and source output.
        if self.context['request'].GET.get('sources'):
            return self.get_sources(aid, saf)
        else:
            return self.get_tokens(aid, saf)
    def get_tokens(self, aid, saf):
        """Yield the article's tokens, tagged with the article id."""
        for token in saf.get('tokens', []):
            token["aid"] = aid
            if self.output_token(token):
                yield token
    def get_sources(self, aid, saf):
        """Yield tokens attributed to quoted sources, tagged with the
        article id, source index and the token's place in the source."""
        # BUG FIX: the guard used to read
        # ``if not 'tokens' in saf and 'sources' in saf`` which, due to the
        # ``and``, only covered tokens-missing-with-sources-present and let
        # KeyError escape when 'sources' (or both keys) were absent.
        if 'tokens' not in saf or 'sources' not in saf:
            return
        tokendict = {t['id'] : t for t in saf['tokens']}
        for sid, source in enumerate(saf['sources']):
            for place, tokens in source.iteritems():
                for tid in tokens:
                    token = tokendict[tid]
                    if self.output_token(token):
                        token["aid"] = aid
                        token["source_id"] = sid
                        token["source_place"] = place
                        yield token
class XTasLemmataViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, DatatablesMixin, ModelViewSet):
    """Datatables-compatible endpoint listing xTas tokens per article."""
    model_key = "token"
    model = Article
    model_serializer_class = ArticleLemmataSerializer
    def filter_queryset(self, queryset):
        queryset = super(XTasLemmataViewSet, self).filter_queryset(queryset)
        # only(.) would be better on serializer, but meh
        queryset = queryset.filter(articlesets_set=self.articleset).only("pk")
        return queryset
|
from rest_framework.response import Response
from rest_framework.viewsets import ViewSet
from api.rest.viewsets.articleset import ArticleSetViewSetMixin
from api.rest.viewsets.project import ProjectViewSetMixin
from api.rest.viewsets.article import ArticleViewSetMixin
from api.rest.mixins import DatatablesMixin
from amcat.tools.amcatxtas import ANALYSES, get_result
import json
class XTasViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, ArticleViewSetMixin, ViewSet):
    """Exposes raw xTas analysis results for a single article."""
    model_key = "xta"# HACK to get xtas in url. Sorry!
    def retrieve(self, request, *args, **kargs):
        # Return the stored xTas result for this article/plugin pair.
        aid = int(kargs['article'])
        plugin = kargs['pk']
        result = get_result(aid, plugin)
        return Response({"results" : result})
    def list(self, request, *args, **kargs):
        # List the available analysis modules.
        plugins = ANALYSES.__dict__
        return Response(plugins)
from rest_framework.serializers import Serializer
from amcat.models import Article, ArticleSet
from rest_framework.viewsets import ModelViewSet
import itertools
class ArticleXTasSerializer(Serializer):
    """Base serializer returning xTas analysis results for an article.

    The xTas module to run is taken from the ``module`` GET parameter.
    """
    @property
    def module(self):
        """Return the requested xTas module name, validating it exists."""
        module = self.context['request'].GET.get('module')
        if not module:
            raise Exception("Please specify the NLP/xTas module to use "
                            "with a module= GET parameter")
        elif not module in dir(ANALYSES):
            raise Exception("Unknown module: {module}".format(**locals()))
        return module
    def field_to_native(self, obj, field_name):
        result = super(ArticleXTasSerializer, self).field_to_native(obj, field_name)
        if field_name == "results":
            # flatting lists of tokens
            result = itertools.chain(*result)
        return result
    def to_native(self, article):
        # Fetch the cached xTas result and let the subclass turn it into rows.
        saf = get_result(article.pk, self.module)
        return list(self.get_xtas_results(article.pk, saf))
class ArticleLemmataSerializer(ArticleXTasSerializer):
    """Serializes xTas tokens (or source-attributed tokens) for articles.

    Any extra GET parameter whose name matches a token attribute acts as a
    filter on the token output.
    """
    @property
    def filter_pos(self):
        return self.context['request'].GET.get('pos1')
    def output_token(self, token):
        """Return True unless a GET parameter excludes this token."""
        for key, vals in self.context['request'].GET.iterlists():
            if key in token and token[key] not in vals:
                return False
        return True
    def get_xtas_results(self, aid, saf):
        # The 'sources' GET flag switches between token and source output.
        if self.context['request'].GET.get('sources'):
            return self.get_sources(aid, saf)
        else:
            return self.get_tokens(aid, saf)
    def get_tokens(self, aid, saf):
        """Yield the article's tokens, tagged with the article id."""
        for token in saf.get('tokens', []):
            token["aid"] = aid
            if self.output_token(token):
                yield token
    def get_sources(self, aid, saf):
        """Yield tokens attributed to quoted sources, tagged with the
        article id, source index and the token's place in the source."""
        # BUG FIX: the guard used to read
        # ``if not 'tokens' in saf and 'sources' in saf`` which, due to the
        # ``and``, only covered tokens-missing-with-sources-present and let
        # KeyError escape when 'sources' (or both keys) were absent.
        if 'tokens' not in saf or 'sources' not in saf:
            return
        tokendict = {t['id'] : t for t in saf['tokens']}
        for sid, source in enumerate(saf['sources']):
            for place, tokens in source.iteritems():
                for tid in tokens:
                    token = tokendict[tid]
                    token["aid"] = aid
                    token["source_id"] = sid
                    token["source_place"] = place
                    yield token
class XTasLemmataViewSet(ProjectViewSetMixin, ArticleSetViewSetMixin, DatatablesMixin, ModelViewSet):
    """Datatables-compatible endpoint listing xTas tokens per article."""
    model_key = "token"
    model = Article
    model_serializer_class = ArticleLemmataSerializer
    def filter_queryset(self, queryset):
        queryset = super(XTasLemmataViewSet, self).filter_queryset(queryset)
        # only(.) would be better on serializer, but meh
        queryset = queryset.filter(articlesets_set=self.articleset).only("pk")
        return queryset
| Python | 0.000001 |
2a5e84e1c4d9c8e4c4236e1eccfa580406a29b6b | Add failing test | tests/functional/test_new_resolver_errors.py | tests/functional/test_new_resolver_errors.py | import sys
from tests.lib import create_basic_wheel_for_package, create_test_package_with_setup
def test_new_resolver_conflict_requirements_file(tmpdir, script):
    """Two requirements with mutually exclusive pins on a shared
    dependency must make the resolver report a dependency conflict."""
    create_basic_wheel_for_package(script, "base", "1.0")
    create_basic_wheel_for_package(script, "base", "2.0")
    create_basic_wheel_for_package(
        script, "pkga", "1.0", depends=["base==1.0"],
    )
    create_basic_wheel_for_package(
        script, "pkgb", "1.0", depends=["base==2.0"],
    )
    req_file = tmpdir.joinpath("requirements.txt")
    req_file.write_text("pkga\npkgb")
    result = script.pip(
        "install",
        "--no-cache-dir", "--no-index",
        "--find-links", script.scratch_path,
        "-r", req_file,
        expect_error=True,
    )
    message = "package versions have conflicting dependencies"
    assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
    """A constraints file excluding the requested pin must make the
    install impossible and name the constraint as the cause."""
    create_basic_wheel_for_package(script, "pkg", "1.0")

    constraints_path = tmpdir.joinpath("constraints.txt")
    constraints_path.write_text("pkg!=1.0")

    result = script.pip(
        "install",
        "--no-cache-dir",
        "--no-index",
        "--find-links",
        script.scratch_path,
        "-c",
        constraints_path,
        "pkg==1.0",
        expect_error=True,
    )

    assert "ResolutionImpossible" in result.stderr, str(result)
    expected = "The user requested (constraint) pkg!=1.0"
    assert expected in result.stdout, str(result)
def test_new_resolver_requires_python_error(script):
    """The resolver error must cite the Requires-Python constraint that
    caused the conflict, and not mention the satisfiable one."""
    # Build one package compatible and one incompatible with the
    # interpreter running the tests.
    compatible_python = ">={0.major}.{0.minor}".format(sys.version_info)
    incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
    pkga = create_test_package_with_setup(
        script,
        name="pkga",
        version="1.0",
        python_requires=compatible_python,
    )
    pkgb = create_test_package_with_setup(
        script,
        name="pkgb",
        version="1.0",
        python_requires=incompatible_python,
    )
    # This always fails because pkgb can never be satisfied.
    result = script.pip("install", "--no-index", pkga, pkgb, expect_error=True)
    # The error message should mention the Requires-Python: value causing the
    # conflict, not the compatible one.
    assert incompatible_python in result.stderr, str(result)
    assert compatible_python not in result.stderr, str(result)
| from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
    """Two requirements with mutually exclusive pins on a shared
    dependency must make the resolver report a dependency conflict."""
    create_basic_wheel_for_package(script, "base", "1.0")
    create_basic_wheel_for_package(script, "base", "2.0")
    create_basic_wheel_for_package(
        script, "pkga", "1.0", depends=["base==1.0"],
    )
    create_basic_wheel_for_package(
        script, "pkgb", "1.0", depends=["base==2.0"],
    )
    req_file = tmpdir.joinpath("requirements.txt")
    req_file.write_text("pkga\npkgb")
    result = script.pip(
        "install",
        "--no-cache-dir", "--no-index",
        "--find-links", script.scratch_path,
        "-r", req_file,
        expect_error=True,
    )
    message = "package versions have conflicting dependencies"
    assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
    """A constraints file excluding the requested pin must make the
    install impossible and name the constraint as the cause."""
    create_basic_wheel_for_package(script, "pkg", "1.0")

    constraints_path = tmpdir.joinpath("constraints.txt")
    constraints_path.write_text("pkg!=1.0")

    result = script.pip(
        "install",
        "--no-cache-dir",
        "--no-index",
        "--find-links",
        script.scratch_path,
        "-c",
        constraints_path,
        "pkg==1.0",
        expect_error=True,
    )

    assert "ResolutionImpossible" in result.stderr, str(result)
    expected = "The user requested (constraint) pkg!=1.0"
    assert expected in result.stdout, str(result)
| Python | 0.000138 |
eaa92ab6a207b5b7c10b15948eb37d16f3005ee8 | fix pandas compat | statsmodels/compat/pandas.py | statsmodels/compat/pandas.py | from __future__ import absolute_import
from distutils.version import LooseVersion
import pandas
# Parsed pandas version; all feature flags below derive from it.
version = LooseVersion(pandas.__version__)
pandas_lte_0_19_2 = version <= LooseVersion('0.19.2')
pandas_gt_0_19_2 = version > LooseVersion('0.19.2')
pandas_ge_20_0 = version >= LooseVersion('0.20.0')
pandas_ge_25_0 = version >= LooseVersion('0.25.0')
# is_numeric_dtype moved to pandas.api.types in newer pandas.
try:
    from pandas.api.types import is_numeric_dtype  # noqa:F401
except ImportError:
    from pandas.core.common import is_numeric_dtype  # noqa:F401
# Locate the frequencies module and define data_klasses; Panel/WidePanel
# are only included on pandas versions that still provide them.
if pandas_ge_25_0:
    from pandas.tseries import frequencies  # noqa:F401
    data_klasses = (pandas.Series, pandas.DataFrame)
elif pandas_ge_20_0:
    try:
        from pandas.tseries import offsets as frequencies
    except ImportError:
        from pandas.tseries import frequencies
    data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel)
else:
    try:
        import pandas.tseries.frequencies as frequencies
    except ImportError:
        from pandas.core import datetools as frequencies  # noqa
    data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel,
                    pandas.WidePanel)
# pandas.util.testing was promoted to pandas.testing in later versions.
try:
    import pandas.testing as testing
except ImportError:
    import pandas.util.testing as testing
assert_frame_equal = testing.assert_frame_equal
assert_index_equal = testing.assert_index_equal
assert_series_equal = testing.assert_series_equal
| from __future__ import absolute_import
from distutils.version import LooseVersion
import pandas
# Parsed pandas version; the flags below derive from it.
version = LooseVersion(pandas.__version__)
pandas_lte_0_19_2 = version <= LooseVersion('0.19.2')
pandas_gt_0_19_2 = version > LooseVersion('0.19.2')
# is_numeric_dtype moved to pandas.api.types in newer pandas.
try:
    from pandas.api.types import is_numeric_dtype  # noqa:F401
except ImportError:
    from pandas.core.common import is_numeric_dtype  # noqa:F401
# NOTE(review): compares a LooseVersion against the plain string '0.20',
# relying on LooseVersion's mixed-type comparison -- confirm intended.
if version >= '0.20':
    try:
        from pandas.tseries import offsets as frequencies
    except ImportError:
        from pandas.tseries import frequencies
    data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel)
else:
    try:
        import pandas.tseries.frequencies as frequencies
    except ImportError:
        from pandas.core import datetools as frequencies  # noqa
    data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel,
                    pandas.WidePanel)
# pandas.util.testing was promoted to pandas.testing in later versions.
try:
    import pandas.testing as testing
except ImportError:
    import pandas.util.testing as testing
assert_frame_equal = testing.assert_frame_equal
assert_index_equal = testing.assert_index_equal
assert_series_equal = testing.assert_series_equal
| Python | 0.000001 |
1e3f3e387230ac500289fe4064b24999d9727abd | use MongoClient instead of Connection if pymongo >= 2.4 | mtop.py | mtop.py | #!/usr/bin/python
#
# Copyright 2011 Allan Beaufour
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser
import sys
import pymongo
from pymongo.errors import AutoReconnect
from lib.runner import Runner
def main():
parser = OptionParser(usage='mtop.py [options]\nSee also: https://github.com/beaufour/mtop')
parser.add_option('-s', '--server',
dest='server', default='localhost',
help='connect to mongo on SERVER', metavar='SERVER')
parser.add_option('-d', '--delay',
dest='delay', type=int, default=1000,
help='update every MS', metavar='MS')
(options, _) = parser.parse_args()
try:
if hasattr(pymongo, 'version_tuple') and pymongo.version_tuple[0] >= 2 and pymongo.version_tuple[1] >= 4:
from pymongo import MongoClient
from pymongo.read_preferences import ReadPreference
connection = MongoClient(host=options.server,
read_preference=ReadPreference.SECONDARY)
else:
from pymongo.connection import Connection
connection = Connection(options.server, slave_okay=True)
except AutoReconnect, ex:
print 'Connection to %s failed: %s' % (options.server, str(ex))
return -1
runner = Runner(connection, options.delay)
rc = runner.run()
if rc == -3:
print 'Screen size too small'
return rc
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/python
#
# Copyright 2011 Allan Beaufour
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser
import sys
from pymongo.connection import Connection
from pymongo.errors import AutoReconnect
from lib.runner import Runner
def main():
    """Parse CLI options, connect to MongoDB, and run the curses UI.

    Returns 0 on normal exit, -1 if the connection fails, or the
    runner's error code (-3 means the terminal is too small).
    """
    parser = OptionParser(usage='mtop.py [options]\nSee also: https://github.com/beaufour/mtop')
    parser.add_option('-s', '--server',
                      dest='server', default='localhost',
                      help='connect to mongo on SERVER', metavar='SERVER')
    parser.add_option('-d', '--delay',
                      dest='delay', type=int, default=1000,
                      help='update every MS', metavar='MS')
    (options, _) = parser.parse_args()
    try:
        # slave_okay allows reading from a secondary member.
        connection = Connection(options.server, slave_okay=True)
    except AutoReconnect, ex:
        print 'Connection to %s failed: %s' % (options.server, str(ex))
        return -1
    runner = Runner(connection, options.delay)
    rc = runner.run()
    if rc == -3:
        print 'Screen size too small'
    return rc
if __name__ == '__main__':
sys.exit(main())
| Python | 0.000001 |
6c409362c6bf00f03700fadfc14e87dd93033ff9 | use 'get_variables' | atest/testdata/core/resources_and_variables/vars_from_cli2.py | atest/testdata/core/resources_and_variables/vars_from_cli2.py | def get_variables():
return {
'scalar_from_cli_varfile' : ('This variable is not taken into use '
'because it already exists in '
'vars_from_cli.py'),
'scalar_from_cli_varfile_2': ('Variable from second variable file '
'from cli')
}
# Variable file for Robot Framework tests.  The first value is shadowed by
# vars_from_cli.py, which precedes this file on the command line.
scalar_from_cli_varfile = 'This value is not taken into use because this ' \
                          + 'variable already exists in vars_from_cli.py'
scalar_from_cli_varfile_2 = 'Variable from second variable file from cli'
| Python | 0.000001 |
1374807c05d9ebacb7a8cc6a75811697198bae32 | add template fixture to document tests | fiduswriter/document/tests/editor_helper.py | fiduswriter/document/tests/editor_helper.py | import time
from random import randrange
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from testing.selenium_helper import SeleniumHelper
from document.models import Document
class EditorHelper(SeleniumHelper):
    """
    Common functions used in threaded tests
    """
    def create_new_document(self):
        """Create and return a Document owned by ``self.user``.

        Uses template pk=1, which is provided by the test fixtures.
        """
        doc = Document.objects.create(
            owner=self.user,
            template_id=1  # from fixture
        )
        doc.save()
        return doc
    def load_document_editor(self, driver, doc):
        """Open ``doc`` in the editor and block until the toolbar renders."""
        driver.get("%s%s" % (
            self.live_server_url,
            doc.get_absolute_url()
        ))
        WebDriverWait(driver, self.wait_time).until(
            EC.presence_of_element_located((By.CLASS_NAME, 'editor-toolbar'))
        )
        self.inject_helpers(driver)
    def inject_helpers(self, driver):
        """Inject the test caret-manipulation helpers into the page."""
        # BUG FIX: open() previously leaked the file handle; use a context
        # manager so it is closed deterministically.
        with open('static-transpile/js/test_caret.js', 'r') as script_file:
            test_caret_script = script_file.read()
        driver.execute_script(
            test_caret_script
        )
    def input_text(self, document_input, text):
        """Type ``text`` one character at a time with a human-like delay."""
        for char in text:
            document_input.send_keys(char)
            time.sleep(randrange(10, 40) / 200.0)
    def add_title(self, driver):
        """Place the caret at the title position and type a title."""
        title = "My title"
        driver.execute_script(
            'window.testCaret.setSelection(2,2)')
        # NOTE(review): uses self.driver rather than the ``driver``
        # argument -- confirm this is intentional.
        document_input = self.driver.find_element_by_class_name(
            'ProseMirror'
        )
        self.input_text(document_input, title)
    def wait_for_doc_size(self, driver, size, seconds=False):
        """Poll until the editor document reaches ``size`` or time runs out."""
        if seconds is False:
            seconds = self.wait_time
        doc_size = driver.execute_script(
            'return window.theApp.page.view.state.doc.content.size')
        if doc_size < size and seconds > 0:
            time.sleep(0.1)
            self.wait_for_doc_size(driver, size, seconds - 0.1)
    def wait_for_doc_sync(self, driver, driver2, seconds=False):
        """Poll until both editors hold identical documents or time runs out."""
        if seconds is False:
            seconds = self.wait_time
        doc_str = driver.execute_script(
            'return window.theApp.page.view.state.doc.toString()')
        doc2_str = driver2.execute_script(
            'return window.theApp.page.view.state.doc.toString()')
        # BUG FIX: without the ``seconds > 0`` guard (cf. wait_for_doc_size)
        # two documents that never converge recursed until RecursionError.
        if doc_str != doc2_str and seconds > 0:
            time.sleep(0.1)
            self.wait_for_doc_sync(driver, driver2, seconds - 0.1)
| import time
from random import randrange
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from testing.selenium_helper import SeleniumHelper
from document.models import Document
class EditorHelper(SeleniumHelper):
    """
    Common functions used in threaded tests
    """
    def create_new_document(self):
        """Create and return a Document owned by ``self.user``."""
        doc = Document.objects.create(
            owner=self.user,
        )
        doc.save()
        return doc
    def load_document_editor(self, driver, doc):
        """Open ``doc`` in the editor and block until the toolbar renders."""
        driver.get("%s%s" % (
            self.live_server_url,
            doc.get_absolute_url()
        ))
        WebDriverWait(driver, self.wait_time).until(
            EC.presence_of_element_located((By.CLASS_NAME, 'editor-toolbar'))
        )
        self.inject_helpers(driver)
    def inject_helpers(self, driver):
        """Inject the test caret-manipulation helpers into the page."""
        # NOTE(review): the file handle from open() is never closed --
        # TODO switch to a ``with`` block.
        test_caret_script = open(
            'static-transpile/js/test_caret.js',
            'r'
        ).read()
        driver.execute_script(
            test_caret_script
        )
    def input_text(self, document_input, text):
        """Type ``text`` one character at a time with a human-like delay."""
        for char in text:
            document_input.send_keys(char)
            time.sleep(randrange(10, 40) / 200.0)
    def add_title(self, driver):
        """Place the caret at the title position and type a title."""
        title = "My title"
        driver.execute_script(
            'window.testCaret.setSelection(2,2)')
        # NOTE(review): uses self.driver rather than the ``driver``
        # argument -- confirm this is intentional.
        document_input = self.driver.find_element_by_class_name(
            'ProseMirror'
        )
        self.input_text(document_input, title)
    def wait_for_doc_size(self, driver, size, seconds=False):
        """Poll until the editor document reaches ``size`` or time runs out."""
        if seconds is False:
            seconds = self.wait_time
        doc_size = driver.execute_script(
            'return window.theApp.page.view.state.doc.content.size')
        if doc_size < size and seconds > 0:
            time.sleep(0.1)
            self.wait_for_doc_size(driver, size, seconds - 0.1)
    def wait_for_doc_sync(self, driver, driver2, seconds=False):
        """Poll until both editors hold identical documents."""
        if seconds is False:
            seconds = self.wait_time
        doc_str = driver.execute_script(
            'return window.theApp.page.view.state.doc.toString()')
        doc2_str = driver2.execute_script(
            'return window.theApp.page.view.state.doc.toString()')
        if (doc_str != doc2_str):
            # NOTE(review): unlike wait_for_doc_size there is no
            # ``seconds > 0`` guard, so this recurses without bound when
            # the documents never converge -- TODO confirm and add one.
            # The strings don't match.
            time.sleep(0.1)
            self.wait_for_doc_sync(driver, driver2, seconds - 0.1)
| Python | 0 |
03cf1abcb9262b4b0b9dd3b57ac07f7d507ddd8f | Drop fts.backends.base.BaseManager.__call__ convenience method. It breaks using manager (and, more importantly, RelatedManager which inherits that method) in views. See http://stackoverflow.com/questions/1142411/reverse-foreign-key-in-django-template for details. | fts/backends/base.py | fts/backends/base.py | "Base Fts class."
from django.db import transaction
from django.db import models
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
class InvalidFtsBackendError(ImproperlyConfigured):
    """Raised when the configured full-text-search backend cannot be loaded."""
    pass
class BaseClass(object):
    """Abstract base for backend-specific FTS classes."""
    class Meta:
        abstract = True
class BaseManager(models.Manager):
    """Abstract manager that maintains and queries a full-text index.

    Keyword arguments:
        fields: iterable of field names to index (defaults to every
            CharField/TextField on the model), or a name -> weight mapping.
        default_weight: one of 'A'-'D'; weight assigned to plain field names.
        language_code: language used for indexing; defaults to the active
            Django translation language.
    """
    class Meta:
        abstract = True
    def __init__(self, **kwargs):
        super(BaseManager, self).__init__()
        self.fields = kwargs.get('fields')
        self.default_weight = kwargs.get('default_weight')
        if self.default_weight not in ['A', 'B', 'C', 'D']:
            self.default_weight = 'A'
        self.language_code = kwargs.get('language_code')
        if not self.language_code:
            # Fall back to the active translation language, e.g. 'en-us' -> 'en'.
            from django.utils import translation
            self.language_code = translation.get_language().split('-',1)[0].lower()
    def contribute_to_class(self, cls, name):
        # Instances need to get to us to update their indexes.
        setattr(cls, '_search_manager', self)
        super(BaseManager, self).contribute_to_class(cls, name)
        if not self.fields:
            self.fields = self._find_text_fields()
        if isinstance(self.fields, (list, tuple)):
            # Expand a plain sequence of names into a name -> weight mapping.
            self._fields = {}
            for field in self.fields:
                self._fields[field] = self.default_weight
        else:
            # BUG FIX: previously read ``self._fields = fields``, which
            # raised NameError -- ``fields`` is not defined in this scope.
            self._fields = self.fields
    def update_index(self, pk=None):
        """
        Updates the full-text index for one, many, or all instances of this manager's model.
        """
        raise NotImplementedError
    def search(self, query, **kwargs):
        raise NotImplementedError
    def _find_text_fields(self):
        """
        Return the names of all CharField and TextField fields defined for this manager's model.
        """
        fields = [f for f in self.model._meta.fields if isinstance(f, (models.CharField, models.TextField))]
        return [f.name for f in fields]
class BaseModel(models.Model):
    """
    A convenience Model wrapper that provides an update_index method for object instances,
    as well as automatic index updating. The index is stored as a tsvector column on the
    model's table. A model may specify a boolean class variable, _auto_reindex, to control
    whether the index is automatically updated when save is called.
    """
    class Meta:
        abstract = True
    def update_index(self):
        """
        Update the index.
        """
        # Delegates to the manager attached in BaseManager.contribute_to_class.
        if hasattr(self, '_search_manager'):
            self._search_manager.update_index(pk=self.pk)
    @transaction.commit_on_success
    def save(self, *args, **kwargs):
        # Save first so the row exists, then reindex unless the model
        # explicitly set _auto_reindex to a false value.
        super(BaseModel, self).save(*args, **kwargs)
        if hasattr(self, '_auto_reindex'):
            if self._auto_reindex:
                self.update_index()
        else:
            self.update_index()
| "Base Fts class."
from django.db import transaction
from django.db import models
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
class InvalidFtsBackendError(ImproperlyConfigured):
    """Raised when the configured full-text-search backend cannot be loaded."""
    pass
class BaseClass(object):
    """Abstract base for backend-specific FTS classes."""
    class Meta:
        abstract = True
class BaseManager(models.Manager):
    """Abstract manager that maintains and queries a full-text index.

    Calling the manager directly (``manager(query)``) is a shorthand for
    ``manager.search(query)``.
    """
    class Meta:
        abstract = True
    def __init__(self, **kwargs):
        super(BaseManager, self).__init__()
        self.fields = kwargs.get('fields')
        self.default_weight = kwargs.get('default_weight')
        if self.default_weight not in ['A', 'B', 'C', 'D']:
            self.default_weight = 'A'
        self.language_code = kwargs.get('language_code')
        if not self.language_code:
            # Fall back to the active translation language, e.g. 'en-us' -> 'en'.
            from django.utils import translation
            self.language_code = translation.get_language().split('-',1)[0].lower()
    def __call__(self, query, **kwargs):
        # Convenience: manager(query) delegates to manager.search(query).
        return self.search(query, **kwargs)
    def contribute_to_class(self, cls, name):
        # Instances need to get to us to update their indexes.
        setattr(cls, '_search_manager', self)
        super(BaseManager, self).contribute_to_class(cls, name)
        if not self.fields:
            self.fields = self._find_text_fields()
        if isinstance(self.fields, (list, tuple)):
            # Expand a plain sequence of names into a name -> weight mapping.
            self._fields = {}
            for field in self.fields:
                self._fields[field] = self.default_weight
        else:
            # BUG FIX: previously read ``self._fields = fields``, which
            # raised NameError -- ``fields`` is not defined in this scope.
            self._fields = self.fields
    def update_index(self, pk=None):
        """
        Updates the full-text index for one, many, or all instances of this manager's model.
        """
        raise NotImplementedError
    def search(self, query, **kwargs):
        raise NotImplementedError
    def _find_text_fields(self):
        """
        Return the names of all CharField and TextField fields defined for this manager's model.
        """
        fields = [f for f in self.model._meta.fields if isinstance(f, (models.CharField, models.TextField))]
        return [f.name for f in fields]
class BaseModel(models.Model):
    """
    A convenience Model wrapper that provides an update_index method for object instances,
    as well as automatic index updating. The index is stored as a tsvector column on the
    model's table. A model may specify a boolean class variable, _auto_reindex, to control
    whether the index is automatically updated when save is called.
    """
    class Meta:
        abstract = True
    def update_index(self):
        """
        Update the index.
        """
        # Delegates to the manager attached in BaseManager.contribute_to_class.
        if hasattr(self, '_search_manager'):
            self._search_manager.update_index(pk=self.pk)
    @transaction.commit_on_success
    def save(self, *args, **kwargs):
        # Save first so the row exists, then reindex unless the model
        # explicitly set _auto_reindex to a false value.
        super(BaseModel, self).save(*args, **kwargs)
        if hasattr(self, '_auto_reindex'):
            if self._auto_reindex:
                self.update_index()
        else:
            self.update_index()
| Python | 0 |
3828c02e73fa2a190f47ee7b3ad4b3670944367c | Swap fields. Closes https://github.com/p2pu/p2pu-website/issues/480 | custom_registration/forms.py | custom_registration/forms.py | # coding=utf-8
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import PasswordResetForm
from django.utils.translation import ugettext as _
from django.forms import ValidationError
from django.contrib.auth import password_validation
from django.contrib.auth.forms import UserCreationForm
from django.utils.safestring import mark_safe
from studygroups.models import Profile
newsletter_label = _('P2PU can contact me')
newsletter_help_text = _('Join our mailing list to learn about upcoming events, new courses, and news from the community. (Approximately six emails/year)')
class SignupForm(UserCreationForm):
    """Registration form requiring name, email and data-storage consent.

    ``communication_opt_in`` is an optional newsletter subscription;
    ``consent_opt_in`` (terms of service / data storage) is mandatory.
    """
    communication_opt_in = forms.BooleanField(required=False, initial=False, label=newsletter_label, help_text=newsletter_help_text)
    consent_opt_in = forms.BooleanField(required=True, initial=False, label=mark_safe(_('I consent to P2PU storing my data and accept the <a href="https://www.p2pu.org/en/terms/">terms of service</a>')), help_text=_('P2PU values your privacy and will never sell your data.'))
    def __init__(self, *args, **kwargs):
        super(SignupForm, self).__init__(*args, **kwargs)
        # These fields are optional on the stock form; require them here.
        self.fields['email'].required = True
        self.fields['first_name'].required = True
        self.fields['last_name'].required = True
    def clean(self):
        cleaned_data = super(SignupForm, self).clean()
        username = cleaned_data.get('email')
        # The email doubles as the username, so enforce case-insensitive
        # uniqueness against existing usernames.
        if User.objects.filter(username__iexact=username).exists():
            self.add_error('email', _('A user with that email address already exists.'))
        return cleaned_data
    class Meta:
        model = User
        fields = ['first_name', 'last_name', 'email', 'password1', 'password2', 'consent_opt_in', 'communication_opt_in']
class CustomPasswordResetForm(PasswordResetForm):
""" Use case insensitive email address when searching for users """
def clean_email(self):
email = self.cleaned_data['email']
if not User.objects.filter(email__iexact=email, is_active=True).exists():
raise ValidationError(_("There is no user registered with the specified email address!"))
return email
class UserForm(forms.ModelForm):
email = forms.EmailField(disabled=True, help_text=mark_safe(_('If youβd like to change the address affiliated with your account, please contact <a href="mailto:thepeople@p2pu.org">thepeople@p2pu.org</a>')))
class Meta:
model = User
fields = ['email', 'first_name', 'last_name']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['avatar', 'bio', 'contact_url', 'city', 'country', 'place_id', 'region', 'latitude', 'longitude', 'communication_opt_in']
labels = {
'avatar': _('Profile photo'),
'bio': _('Short bio (max 500 characters)'),
'contact_url': _('Contact URL'),
'city': _('City'),
'communication_opt_in': newsletter_label,
}
placeholders = {
'contact_url': _("Twitter, LinkedIn, website, etc.")
}
help_texts = {
'contact_url': _('Where can potential team members find your contact information? i.e. Staff page, Twitter, personal website, etc.'),
'communication_opt_in': newsletter_help_text,
}
widgets = {
'bio': forms.Textarea(attrs={'rows':5, 'cols':10}),
'latitude': forms.HiddenInput,
'longitude': forms.HiddenInput,
'place_id': forms.HiddenInput,
'country': forms.HiddenInput,
'region': forms.HiddenInput,
}
| # coding=utf-8
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import PasswordResetForm
from django.utils.translation import ugettext as _
from django.forms import ValidationError
from django.contrib.auth import password_validation
from django.contrib.auth.forms import UserCreationForm
from django.utils.safestring import mark_safe
from studygroups.models import Profile
newsletter_label = _('P2PU can contact me')
newsletter_help_text = _('Join our mailing list to learn about upcoming events, new courses, and news from the community. (Approximately six emails/year)')
class SignupForm(UserCreationForm):
communication_opt_in = forms.BooleanField(required=False, initial=False, label=newsletter_label, help_text=newsletter_help_text)
consent_opt_in = forms.BooleanField(required=True, initial=False, label=mark_safe(_('I consent to P2PU storing my data and accept the <a href="https://www.p2pu.org/en/terms/">terms of service</a>')), help_text=_('P2PU values your privacy and will never sell your data.'))
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
self.fields['email'].required = True
self.fields['first_name'].required = True
self.fields['last_name'].required = True
def clean(self):
cleaned_data = super(SignupForm, self).clean()
username = cleaned_data.get('email')
if User.objects.filter(username__iexact=username).exists():
self.add_error('email', _('A user with that email address already exists.'))
return cleaned_data
class Meta:
model = User
fields = ['first_name', 'last_name', 'email', 'password1', 'password2', 'communication_opt_in', 'consent_opt_in']
class CustomPasswordResetForm(PasswordResetForm):
""" Use case insensitive email address when searching for users """
def clean_email(self):
email = self.cleaned_data['email']
if not User.objects.filter(email__iexact=email, is_active=True).exists():
raise ValidationError(_("There is no user registered with the specified email address!"))
return email
class UserForm(forms.ModelForm):
email = forms.EmailField(disabled=True, help_text=mark_safe(_('If youβd like to change the address affiliated with your account, please contact <a href="mailto:thepeople@p2pu.org">thepeople@p2pu.org</a>')))
class Meta:
model = User
fields = ['email', 'first_name', 'last_name']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['avatar', 'bio', 'contact_url', 'city', 'country', 'place_id', 'region', 'latitude', 'longitude', 'communication_opt_in']
labels = {
'avatar': _('Profile photo'),
'bio': _('Short bio (max 500 characters)'),
'contact_url': _('Contact URL'),
'city': _('City'),
'communication_opt_in': newsletter_label,
}
placeholders = {
'contact_url': _("Twitter, LinkedIn, website, etc.")
}
help_texts = {
'contact_url': _('Where can potential team members find your contact information? i.e. Staff page, Twitter, personal website, etc.'),
'communication_opt_in': newsletter_help_text,
}
widgets = {
'bio': forms.Textarea(attrs={'rows':5, 'cols':10}),
'latitude': forms.HiddenInput,
'longitude': forms.HiddenInput,
'place_id': forms.HiddenInput,
'country': forms.HiddenInput,
'region': forms.HiddenInput,
}
| Python | 0.000001 |
2cdfff730e66dccf749ca855e3c255568e248d01 | Use Unknown message with right path | vertica_python/vertica/messages/message.py | vertica_python/vertica/messages/message.py |
import types
from struct import pack
from vertica_python.vertica.messages import *
class Message(object):
@classmethod
def _message_id(cls, message_id):
instance_message_id = message_id
def message_id(self):
return instance_message_id
setattr(cls, 'message_id', types.MethodType(message_id, cls))
def message_string(self, msg):
if isinstance(msg, list):
msg = ''.join(msg)
if hasattr(msg, 'bytesize'):
bytesize = msg.bytesize + 4
else:
bytesize = len(msg) + 4
message_size = pack('!I', bytesize)
if self.message_id() is not None:
msg_with_size = self.message_id() + message_size + msg
else:
msg_with_size = message_size + msg
return msg_with_size
class BackendMessage(Message):
MessageIdMap = {}
@classmethod
def factory(cls, type_, data):
klass = cls.MessageIdMap[type_]
if klass is not None:
return klass(data)
else:
return Unknown(type_, data)
@classmethod
def _message_id(cls, message_id):
super(BackendMessage, cls)
cls.MessageIdMap[message_id] = cls
class FrontendMessage(Message):
def to_bytes(self):
return self.message_string(b'')
|
import types
from struct import pack
from vertica_python.vertica.messages import *
class Message(object):
@classmethod
def _message_id(cls, message_id):
instance_message_id = message_id
def message_id(self):
return instance_message_id
setattr(cls, 'message_id', types.MethodType(message_id, cls))
def message_string(self, msg):
if isinstance(msg, list):
msg = ''.join(msg)
if hasattr(msg, 'bytesize'):
bytesize = msg.bytesize + 4
else:
bytesize = len(msg) + 4
message_size = pack('!I', bytesize)
if self.message_id() is not None:
msg_with_size = self.message_id() + message_size + msg
else:
msg_with_size = message_size + msg
return msg_with_size
class BackendMessage(Message):
MessageIdMap = {}
@classmethod
def factory(cls, type_, data):
klass = cls.MessageIdMap[type_]
if klass is not None:
return klass(data)
else:
return messages.Unknown(type_, data)
@classmethod
def _message_id(cls, message_id):
super(BackendMessage, cls)
cls.MessageIdMap[message_id] = cls
class FrontendMessage(Message):
def to_bytes(self):
return self.message_string(b'')
| Python | 0 |
acf7d9c9748531d4bc800353a71f0b152fda6d53 | Update map-sum-pairs.py | Python/map-sum-pairs.py | Python/map-sum-pairs.py | # Time: O(n), n is the length of key
# Space: O(t), t is the number of nodes in trie
class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
| # Time: O(n), n is the length of key
# Space: O(t), t is the total size of trie
class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
| Python | 0.000001 |
98896c222c2686dbab96b58819c08131d31dc1b7 | Update self-crossing.py | Python/self-crossing.py | Python/self-crossing.py | # Time: O(n)
# Space: O(1)
# You are given an array x of n positive numbers.
# You start at point (0,0) and moves x[0] metres to
# the north, then x[1] metres to the west, x[2] metres
# to the south, x[3] metres to the east and so on.
# In other words, after each move your direction changes counter-clockwise.
#
# Write a one-pass algorithm with O(1) extra space to determine,
# if your path crosses itself, or not.
#
# Example 1:
# Given x = [2, 1, 1, 2]
# Return true (self crossing)
# Example 2:
# Given x = [1, 2, 3, 4]
# Return false (not self crossing)
# Example 3:
# Given x = [1, 1, 1, 1]
# Return true (self crossing)
class Solution(object):
def isSelfCrossing(self, x):
"""
:type x: List[int]
:rtype: bool
"""
if len(x) >= 5 and x[3] == x[1] and x[4] + x[0] >= x[2]:
# Crossing in a loop:
# 2
# 3 ββββββ
# ββββ>β1
# 4 0 (overlapped)
return True
for i in xrange(3, len(x)):
if x[i] >= x[i - 2] and x[i - 3] >= x[i - 1]:
# Case 1:
# i-2
# i-1βββ
# βββΌβ>i
# i-3
return True
elif i >= 5 and x[i - 4] <= x[i - 2] and x[i] + x[i - 4] >= x[i - 2] and \
x[i - 1] <= x[i - 3] and x[i - 5] + x[i - 1] >= x[i - 3]:
# Case 2:
# i-4
# ββββ
# βi<βΌββ
# i-3β i-5βi-1
# ββββββ
# i-2
return True
return False
| # Time: O(n)
# Space: O(1)
# You are given an array x of n positive numbers.
# You start at point (0,0) and moves x[0] metres to
# the north, then x[1] metres to the west, x[2] metres
# to the south, x[3] metres to the east and so on.
# In other words, after each move your direction changes counter-clockwise.
#
# Write a one-pass algorithm with O(1) extra space to determine,
# if your path crosses itself, or not.
#
# Example 1:
# Given x = [2, 1, 1, 2]
# Return true (self crossing)
# Example 2:
# Given x = [1, 2, 3, 4]
# Return false (not self crossing)
# Example 3:
# Given x = [1, 1, 1, 1]
# Return true (self crossing)
class Solution(object):
def isSelfCrossing(self, x):
"""
:type x: List[int]
:rtype: bool
"""
if len(x) >= 5 and x[3] == x[1] and x[4] + x[0] >= x[2]:
# Crossing in a loop:
# 2
# 3 ββββββ
# ββββ>β1
# 4 0 (overlapped)
return True
for i in xrange(3, len(x)):
if x[i] >= x[i - 2] and x[i - 3] >= x[i - 1]:
# Case 1:
# i-2
# i-1βββ
# βββΌβ>i
# i-3
return True
elif i >= 5 and x[i - 4] <= x[i - 2] and x[i] + x[i - 4] >= x[i - 2] and \
x[i - 1] <= x[i - 3] and x[i - 5] + x[i - 1] >= x[i - 3]:
# Case 2:
# i-4
# ββββ
# βi<βΌββ
# i-3β i-5βi-1
# ββββββ
# i-2
return True
return False
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.