commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
bbf6df137f4b9b37e733750937e58131f36c6aa3 | Clean old todos | guicore/guievents/alertevents.py | guicore/guievents/alertevents.py | from guicore.displayscreen import EventDispatch
import guicore.guiutils as guiutils
from controlevents import CEvent
import debug
import logsupport
from logsupport import ConsoleDetail, ConsoleWarning
from utils import timers
import alertsystem.alerttasks as alerttasks
import guicore.switcher as switcher
import screens.__screens as screens
TimerName = 0
def AlertEvents(event):
global TimerName
guiutils.HBEvents.Entry('Var or Alert' + repr(event))
evtype = 'variable' if event.type == CEvent.ISYVar else 'node'
debug.debugPrint('Dispatch', 'ISY ', evtype, ' change', event)
alert = event.alert
if alert.state in ('Armed', 'Init'):
if alert.trigger.IsTrue(): # alert condition holds
if alert.trigger.delay != 0: # delay invocation
alert.state = 'Delayed'
debug.debugPrint('Dispatch', "Post with delay:", alert.name, alert.trigger.delay)
TimerName += 1
alert.timer = timers.OnceTimer(alert.trigger.delay, start=True,
name='MainLoop' + str(TimerName),
proc=alerttasks.HandleDeferredAlert, param=alert)
else: # invoke now
alert.state = 'FiredNoDelay'
debug.debugPrint('Dispatch', "Invoke: ", alert.name)
alert.Invoke() # either calls a proc or enters a screen and adjusts alert state appropriately
else:
if alert.state == 'Armed':
# condition cleared after alert rearmed - timing in the queue?
logsupport.Logs.Log('Anomolous Trigger clearing while armed: ', repr(alert),
severity=ConsoleDetail, hb=True)
else:
alert.state = 'Armed'
logsupport.Logs.Log('Initial var value for trigger is benign: ', repr(alert),
severity=ConsoleDetail)
elif alert.state == 'Active' and not alert.trigger.IsTrue(): # alert condition has cleared and screen is up
debug.debugPrint('Dispatch', 'Active alert cleared', alert.name)
alert.state = 'Armed' # just rearm the alert
switcher.SwitchScreen(screens.HomeScreen, 'Dim', 'Cleared alert', newstate='Home')
elif alert.state == 'Active' and alert.trigger.IsTrue(): # alert condition changed but is still true
pass
elif ((alert.state == 'Delayed') or (alert.state == 'Deferred')) and not alert.trigger.IsTrue():
# condition changed under a pending action (screen or proc) so just cancel and rearm
if alert.timer is not None:
alert.timer.cancel()
alert.timer = None
else:
logsupport.DevPrint('Clear with no timer?? {}'.format(repr(alert)))
debug.debugPrint('Dispatch', 'Delayed event cleared before invoke', alert.name)
alert.state = 'Armed'
else: # todo
# this is ok until maybe I figure out how to avoid. The change causes a firing and the the arming causes a second posting
# logsupport.Logs.Log("Anomolous change situation State: ", alert.state, " Alert: ", repr(alert),
# " Trigger IsTrue: ",
# alert.trigger.IsTrue(), severity=ConsoleWarning, hb=True)
debug.debugPrint('Dispatch', 'ISYVar/ISYAlert passing: ', alert.state, alert.trigger.IsTrue(),
event,
alert)
# Armed and false: irrelevant report
# Active and true: extaneous report - can happen if value changes but still is in range of true
# Delayed or deferred and true: redundant report
EventDispatch[CEvent.ISYVar] = AlertEvents
EventDispatch[CEvent.ISYAlert] = AlertEvents
| from guicore.displayscreen import EventDispatch
import guicore.guiutils as guiutils
from controlevents import CEvent
import debug
import logsupport
from logsupport import ConsoleDetail, ConsoleWarning
from utils import timers
import alertsystem.alerttasks as alerttasks
import guicore.switcher as switcher
import screens.__screens as screens
TimerName = 0
def AlertEvents(event):
global TimerName
guiutils.HBEvents.Entry('Var or Alert' + repr(event))
evtype = 'variable' if event.type == CEvent.ISYVar else 'node'
debug.debugPrint('Dispatch', 'ISY ', evtype, ' change', event)
alert = event.alert
if alert.state in ('Armed', 'Init'):
if alert.trigger.IsTrue(): # alert condition holds
if alert.trigger.delay != 0: # delay invocation
alert.state = 'Delayed'
debug.debugPrint('Dispatch', "Post with delay:", alert.name, alert.trigger.delay)
TimerName += 1
alert.timer = timers.OnceTimer(alert.trigger.delay, start=True,
name='MainLoop' + str(TimerName),
proc=alerttasks.HandleDeferredAlert, param=alert)
else: # invoke now
alert.state = 'FiredNoDelay'
debug.debugPrint('Dispatch', "Invoke: ", alert.name)
alert.Invoke() # either calls a proc or enters a screen and adjusts alert state appropriately
else:
if alert.state == 'Armed':
# condition cleared after alert rearmed - timing in the queue?
logsupport.Logs.Log('Anomolous Trigger clearing while armed: ', repr(alert),
severity=ConsoleDetail, hb=True)
else:
alert.state = 'Armed'
logsupport.Logs.Log('Initial var value for trigger is benign: ', repr(alert),
severity=ConsoleDetail)
elif alert.state == 'Active' and not alert.trigger.IsTrue(): # alert condition has cleared and screen is up
debug.debugPrint('Dispatch', 'Active alert cleared', alert.name)
alert.state = 'Armed' # just rearm the alert
switcher.SwitchScreen(screens.HomeScreen, 'Dim', 'Cleared alert', newstate='Home')
elif alert.state == 'Active' and alert.trigger.IsTrue(): # alert condition changed but is still true
pass
elif ((alert.state == 'Delayed') or (alert.state == 'Deferred')) and not alert.trigger.IsTrue():
# condition changed under a pending action (screen or proc) so just cancel and rearm
if alert.timer is not None:
alert.timer.cancel()
alert.timer = None
else:
logsupport.DevPrint('Clear with no timer?? {}'.format(repr(alert)))
debug.debugPrint('Dispatch', 'Delayed event cleared before invoke', alert.name)
alert.state = 'Armed'
else:
logsupport.Logs.Log("Anomolous change situation State: ", alert.state, " Alert: ", repr(alert),
" Trigger IsTue: ",
alert.trigger.IsTrue(), severity=ConsoleWarning, hb=True)
debug.debugPrint('Dispatch', 'ISYVar/ISYAlert passing: ', alert.state, alert.trigger.IsTrue(),
event,
alert)
# Armed and false: irrelevant report
# Active and true: extaneous report - can happen if value changes but still is in range of true
# Delayed or deferred and true: redundant report
EventDispatch[CEvent.ISYVar] = AlertEvents
EventDispatch[CEvent.ISYAlert] = AlertEvents
| Python | 0.000002 |
f149baa8ca7a401f8d2d390d84fc85960edd743d | Work in progress | dius.py | dius.py | #!python3
# Copyright (c) 2016 Petr Veprek
"""Disk Usage"""
import math
import operator
import os
import string
import sys
import time
TITLE = "Disk Usage"
VERSION = "0.0.0"
VERBOSE = False
WIDTH = 80
COUNT = 20
def now(on="on", at="at"):
return "{}{} {}{}".format(on + " " if on != "" else "", time.strftime("%Y-%m-%d"), at + " " if at != "" else "", time.strftime("%H:%M:%S"))
def neat(str, max):
str = "".join([char if char in string.printable else "_" for char in str])
if len(str) > max: str = str[:max-3] + "..."
return str
def digits(max):
return math.ceil(math.log10(max))
def main():
print("{} {}".format(TITLE, VERSION))
if VERBOSE:
print("\a", end="")
print("Python {}".format(sys.version))
print("Command '{}'".format(sys.argv[0]))
print("Arguments {}".format(sys.argv[1:]))
print("Executed {}".format(now()))
start = time.time()
top = os.getcwd()
print("Analyzing {}".format(top))
usage = {}
for path, dirs, files in os.walk(top):
print("\rScanning {: <{}}".format(neat(path, WIDTH), WIDTH), end="")
usage[path] = sum(map(os.path.getsize, filter(os.path.isfile, map(lambda file: os.path.join(path, file), files))))
print("\r {: <{}}\r".format("", WIDTH), end="")
usage = sorted(usage.items(), key=operator.itemgetter(1), reverse=True)
for i, (path, size) in enumerate(usage[:COUNT]):
print("{:{}}/{} {:{}} {}".format(i+1, digits(COUNT), len(usage), size, digits(usage[0][1]), path))
if VERBOSE:
elapsed = time.time() - start
seconds = round(elapsed)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
weeks, days = divmod(days, 7)
print("Completed {}".format(now()))
print("Elapsed {:d}w {:d}d {:d}h {:d}m {:d}s ({:,.3f}s)".format(weeks, days, hours, minutes, seconds, elapsed))
print("\a", end="")
if '__main__' == __name__:
main()
| #!python3
# Copyright (c) 2016 Petr Veprek
"""Disk Usage"""
import math
import operator
import os
import string
import sys
import time
TITLE = "Disk Usage"
VERSION = "0.0.0"
VERBOSE = False
def now(on="on", at="at"):
return "{}{} {}{}".format(on + " " if on != "" else "", time.strftime("%Y-%m-%d"), at + " " if at != "" else "", time.strftime("%H:%M:%S"))
def neat(str, max):
str = "".join([char if char in string.printable else "_" for char in str])
if len(str) > max: str = str[:max-3] + "..."
return str
def digits(max):
return math.ceil(math.log10(max))
def main():
print("{} {}".format(TITLE, VERSION))
if VERBOSE:
print("\a", end="")
print("Python {}".format(sys.version))
print("Command '{}'".format(sys.argv[0]))
print("Arguments {}".format(sys.argv[1:]))
print("Executed {}".format(now()))
start = time.time()
top = os.getcwd()
top="./Petr/Docs/_Documents" #####################################################
print("Analyzing {}".format(top))
usage = {}
for path, dirs, files in os.walk(top):
print("\rScanning {: <80}".format(neat(path, 80)), end="")
usage[path] = sum(map(os.path.getsize, filter(os.path.isfile, map(lambda file: os.path.join(path, file), files))))
print("\r {: <80}\r".format(""), end="")
usage = sorted(usage.items(), key=operator.itemgetter(1), reverse=True)
for i, (path, size) in enumerate(usage[:20]):
print("{:{}}/{} {} {}".format(i+1, digits(20), len(usage), size, path))
if VERBOSE:
elapsed = time.time() - start
seconds = round(elapsed)
minutes, seconds = divmod(seconds, 60)
hours, minutes = divmod(minutes, 60)
days, hours = divmod(hours, 24)
weeks, days = divmod(days, 7)
print("Completed {}".format(now()))
print("Elapsed {:d}w {:d}d {:d}h {:d}m {:d}s ({:,.3f}s)".format(weeks, days, hours, minutes, seconds, elapsed))
print("\a", end="")
if '__main__' == __name__:
main()
| Python | 0.000003 |
9ec49083879831d7b2cfd863ea139e0e86d42c36 | Bump release version | debreach/__init__.py | debreach/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.4.0'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from distutils import version
__version__ = '1.3.1'
version_info = version.StrictVersion(__version__).version
default_app_config = 'debreach.apps.DebreachConfig'
| Python | 0 |
206e8c2da4677532add03deadac03e88a7cd0da8 | update __init__ | cleverhans/__init__.py | cleverhans/__init__.py | """The CleverHans adversarial example library"""
from cleverhans.devtools.version import append_dev_version
# If possible attach a hex digest to the version string to keep track of
# changes in the development branch
__version__ = append_dev_version('3.0.0')
| """The CleverHans adversarial example library"""
from cleverhans.devtools.version import append_dev_version
# If possible attach a hex digest to the version string to keep track of
# changes in the development branch
__version__ = append_dev_version('2.0.0')
| Python | 0.000004 |
878db5485946935f8784c6c9f15decbe15c0dfbc | Remove catchall redirect | democracylab/urls.py | democracylab/urls.py | """democracylab URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic.base import RedirectView
from . import views
urlpatterns = [
url(r'^signup/$', views.signup, name='signup'),
url(r'^login/$', views.login_view, name='login_view'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
url(
r'^password_reset/$',
views.password_reset,
name="password_reset",
),
url(
r'^change_password/$',
views.change_password,
name="change_password",
),
url(
r'^verify_user/(?P<user_id>[0-9]+)/(?P<token>[0-9a-z\-]+)$',
views.verify_user,
name="verify_user"
),
url(
r'^verify_user/$',
views.send_verification_email,
name="send_verification_email"
),
url(r'^', include('civictechprojects.urls')),
url(r'^$', RedirectView.as_view(url='/index/', permanent=False)),
url(r'^admin/', admin.site.urls),
url(r'^platform$', RedirectView.as_view(url='http://connect.democracylab.org/platform/', permanent=False)),
# url(r'^.*$', RedirectView.as_view(url='/index/', permanent=False)),
# url(
# r'check_email/(?P<user_email>.*)$',
# views.check_email,
# name="check_email"
# )
] | """democracylab URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.views.generic.base import RedirectView
from . import views
urlpatterns = [
url(r'^signup/$', views.signup, name='signup'),
url(r'^login/$', views.login_view, name='login_view'),
url(r'^logout/$', auth_views.logout, {'next_page': '/'}, name='logout'),
url(
r'^password_reset/$',
views.password_reset,
name="password_reset",
),
url(
r'^change_password/$',
views.change_password,
name="change_password",
),
url(
r'^verify_user/(?P<user_id>[0-9]+)/(?P<token>[0-9a-z\-]+)$',
views.verify_user,
name="verify_user"
),
url(
r'^verify_user/$',
views.send_verification_email,
name="send_verification_email"
),
url(r'^', include('civictechprojects.urls')),
url(r'^$', RedirectView.as_view(url='/index/', permanent=False)),
url(r'^admin/', admin.site.urls),
url(r'^platform$', RedirectView.as_view(url='http://connect.democracylab.org/platform/', permanent=False)),
url(r'^.*$', RedirectView.as_view(url='/index/', permanent=False)),
# url(
# r'check_email/(?P<user_email>.*)$',
# views.check_email,
# name="check_email"
# )
] | Python | 0 |
491a99aa56d27d5c37af4e0538e808d2ec47d8a6 | update problem | hackerrank/016_lisas_workbook.py | hackerrank/016_lisas_workbook.py | #!/bin/python3
"""
https://www.hackerrank.com/challenges/bear-and-workbook?h_r=next-challenge&h_v=zen
Lisa just got a new math workbook. A workbook contains exercise problems, grouped into chapters.
* There are n chapters in Lisa's workbook, numbered from 1 to n.
* The i-th chapter has ti problems, numbered from 1 to ti.
* Each page can hold up to k problems. There are no empty pages or unnecessary spaces, so only the last page of a chapter may contain fewer than k problems.
* Each new chapter starts on a new page, so a page will never contain problems from more than one chapter.
* The page number indexing starts at 1.
Lisa believes a problem to be special if its index (within a chapter) is the same as the page number where it's located. Given the details for Lisa's workbook, can you count its number of special problems?
Note: See the diagram in the Explanation section for more details.
Input Format
The first line contains two integers n and k — the number of chapters and the maximum number of problems per page respectively.
The second line contains n integers t1, t2,...,tn, where ti denotes the number of problems in the i-th chapter.
Contraints
1 <= n,k,ti <= 100
Output Format
Print the number of special problems in Lisa's workbook.
Sample Input
5 3
4 2 6 1 10
Sample Output
4
Explanation
The diagram below depicts Lisa's workbook with n = 5 chapters and a maximum of k = 3 problems per page. Special problems are outlined in red, and page numbers are in yellow squares.
+-Chap1+--------+ +-Chap1+---+ +-Chap2+---+ +-Chap3+---+ +-Chap3+--------+
| | | | | | | | | |
| Problem1(RED) | | Problem4 | | Problem1 | | Problem1 | | Problem4 |
| Problem2 | | | | Problem2 | | Problem2 | | Problem5(RED) |
| Problem3 | | | | | | Problem3 | | Problem6 |
| | | | | | | | | |
+---------------+ +----------+ +----------+ +----------+ +---------------+
P1 P2 P3 P4 P5
+-Chap4----+ +-Chap5----+ +-Chap5----+ +-Chap5---------+ +-Chap5----------+
| | | | | | | | | |
| Problem1 | | Problem1 | | Problem4 | | Problem7 | | Problem10(RED) |
| | | Problem2 | | Problem5 | | Problem8 | | |
| | | Problem3 | | Problem6 | | Problem9(RED) | | |
| | | | | | | | | |
| | | | | | | | | |
+----------+ +----------+ +----------+ +---------------+ +----------------+
P6 P7 P8 P9 P10
There are 4 special problems and thus we print the number 4 on a new line.
"""
import sys
arr = [int(arr_temp) for arr_temp in input().strip().split(' ')]
n, k = arr[0], arr[1]
t_arr = [int(arr_temp) for arr_temp in input().strip().split(' ')]
page_number = 1
result = 0
for problems_count in t_arr:
pages_count = (problems_count - 1) // k + 1
| #!/bin/python3
"""
https://www.hackerrank.com/challenges/bear-and-workbook?h_r=next-challenge&h_v=zen
Lisa just got a new math workbook. A workbook contains exercise problems, grouped into chapters.
* There are n chapters in Lisa's workbook, numbered from 1 to n.
* The i-th chapter has ti problems, numbered from 1 to ti.
* Each page can hold up to k problems. There are no empty pages or unnecessary spaces, so only the last page of a chapter may contain fewer than k problems.
* Each new chapter starts on a new page, so a page will never contain problems from more than one chapter.
* The page number indexing starts at 1.
Lisa believes a problem to be special if its index (within a chapter) is the same as the page number where it's located. Given the details for Lisa's workbook, can you count its number of special problems?
Note: See the diagram in the Explanation section for more details.
Input Format
The first line contains two integers n and k — the number of chapters and the maximum number of problems per page respectively.
The second line contains n integers t1, t2,...,tn, where ti denotes the number of problems in the i-th chapter.
Contraints
1 <= n,k,ti <= 100
Output Format
Print the number of special problems in Lisa's workbook.
Sample Input
5 3
4 2 6 1 10
Sample Output
4
Explanation
The diagram below depicts Lisa's workbook with n = 5 chapters and a maximum of k = 3 problems per page. Special problems are outlined in red, and page numbers are in yellow squares.
+-Chap1+--------+ +-Chap1+---+ +-Chap2+---+ +-Chap3+---+ +-Chap3+--------+
| | | | | | | | | |
| Problem1(Red) | | Problem4 | | Problem1 | | Problem1 | | Problem4 |
| Problem2 | | | | Problem2 | | Problem2 | | Problem5(RED) |
| Problem3 | | | | | | Problem3 | | Problem6 |
| | | | | | | | | |
+---------------+ +----------+ +----------+ +----------+ +---------------+
P1 P2 P3 P4 P5
+-Chap4----+ +-Chap5----+ +-Chap5----+ +-Chap5---------+ +-Chap5----------+
| | | | | | | | | |
| Problem1 | | Problem1 | | Problem4 | | Problem7 | | Problem10(RED) |
| | | Problem2 | | Problem5 | | Problem8 | | |
| | | Problem3 | | Problem6 | | Problem9(RED) | | |
| | | | | | | | | |
| | | | | | | | | |
+----------+ +----------+ +----------+ +---------------+ +----------------+
P6 P7 P8 P9 P10
There are 4 special problems and thus we print the number 4 on a new line.
"""
import sys
arr = [int(arr_temp) for arr_temp in input().strip().split(' ')]
n, k = arr[0], arr[1]
t_arr = [int(arr_temp) for arr_temp in input().strip().split(' ')]
page_number = 1
result = 0
for problems_count in t_arr:
pages_count = (problems_count - 1) // k + 1
| Python | 0.000001 |
7be087d4bd62557acd64a9e703443a8df404e3e3 | Make storage size an int | director/sessions_/providers/ec2.py | director/sessions_/providers/ec2.py | import time
from django.conf import settings
import boto.ec2
class EC2:
'''
Amazon EC2 provider for session Workers
'''
def connection(self):
'''
Get a EC2 connection to use in start() and stop()
'''
return boto.ec2.connect_to_region(
'us-west-2',
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
)
def ami(self):
'''
Get the id of the AMI (Amazon Machine Image)
for the worker. This changes everytime the worker
image is updated so needs to be fetched by name.
It is tempting to store the id but that means it could go stale when updates
are done. Since workers won't be started that often better just to suffer the
slight wait to get the image id.
'''
connection = self.connection()
image = connection.get_all_images(
filters={'name': 'stencila-worker-image'}
)[0]
return image.id
def launch(self, worker):
'''
Translates the worker's attributes into attributes of an EC2 instance and launches it
'''
connection = self.connection()
# Determine the instance type
# Currently a very simplistic choice of instance type
# until various optimisations are done.
# See https://aws.amazon.com/ec2/pricing/
instance_type = 't2.micro'
# Note these if statements act like a series of instance
# type upgrades, not a branching if/else.
# Also, because discrete combinations of CPU and memory
# there is no guarantee that your exact combination will
# be met
if worker.cpus >= 1 and worker.memory >= 2:
instance_type = 't2.small'
if worker.cpus >= 2 and worker.memory >= 4:
instance_type = 't2.medium'
if worker.cpus >= 2 and worker.memory >= 8:
instance_type = 't2.large'
# Specify root storage device
dev_sda1 = boto.ec2.blockdevicemapping.EBSBlockDeviceType()
dev_sda1.size = int(worker.storage)
dev_sda1.volume_type = 'gp2' # General Purpose (SSD) instead of the defaul 'standard' (magnetic)
block_device_map = boto.ec2.blockdevicemapping.BlockDeviceMapping()
block_device_map['/dev/sda1'] = dev_sda1
reservation = connection.run_instances(
image_id=self.ami(),
min_count=1,
max_count=1,
key_name='stencila-aws-us-west-2-key-pair-1',
instance_type=instance_type,
# stencila-private-subnet-1
subnet_id='subnet-a0599cf9',
# When launching into a subnet apparently `security_group_ids` must
# be used instead of `security_groups` (names)
security_group_ids=[
# stencila-worker-sg
'sg-930401f6'
],
block_device_map=block_device_map
)
instance = reservation.instances[0]
# Number of seconds to fail timeout waiting for server to launch
timeout = 120
start = time.time()
while True:
status = instance.update()
if status != 'pending':
break
if time.time()-start > timeout:
raise Exception('Timed out trying to start worker: %s' % worker)
time.sleep(1)
if status == 'running':
worker.provider_id = instance.id
worker.ip = instance.private_ip_address
instance.add_tag("Name", "stencila-worker")
else:
raise Exception('Failed to start worker: %s : %s' % (worker, status))
def terminate(self, worker):
connection = self.connection()
connection.terminate_instances(
instance_ids=[worker.provider_id]
)
| import time
from django.conf import settings
import boto.ec2
class EC2:
'''
Amazon EC2 provider for session Workers
'''
def connection(self):
'''
Get a EC2 connection to use in start() and stop()
'''
return boto.ec2.connect_to_region(
'us-west-2',
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY
)
def ami(self):
'''
Get the id of the AMI (Amazon Machine Image)
for the worker. This changes everytime the worker
image is updated so needs to be fetched by name.
It is tempting to store the id but that means it could go stale when updates
are done. Since workers won't be started that often better just to suffer the
slight wait to get the image id.
'''
connection = self.connection()
image = connection.get_all_images(
filters={'name': 'stencila-worker-image'}
)[0]
return image.id
def launch(self, worker):
'''
Translates the worker's attributes into attributes of an EC2 instance and launches it
'''
connection = self.connection()
# Determine the instance type
# Currently a very simplistic choice of instance type
# until various optimisations are done.
# See https://aws.amazon.com/ec2/pricing/
instance_type = 't2.micro'
# Note these if statements act like a series of instance
# type upgrades, not a branching if/else.
# Also, because discrete combinations of CPU and memory
# there is no guarantee that your exact combination will
# be met
if worker.cpus >= 1 and worker.memory >= 2:
instance_type = 't2.small'
if worker.cpus >= 2 and worker.memory >= 4:
instance_type = 't2.medium'
if worker.cpus >= 2 and worker.memory >= 8:
instance_type = 't2.large'
# Specify root storage device
dev_sda1 = boto.ec2.blockdevicemapping.EBSBlockDeviceType()
dev_sda1.size = worker.storage
dev_sda1.volume_type = 'gp2' # General Purpose (SSD) instead of the defaul 'standard' (magnetic)
block_device_map = boto.ec2.blockdevicemapping.BlockDeviceMapping()
block_device_map['/dev/sda1'] = dev_sda1
reservation = connection.run_instances(
image_id=self.ami(),
min_count=1,
max_count=1,
key_name='stencila-aws-us-west-2-key-pair-1',
instance_type=instance_type,
# stencila-private-subnet-1
subnet_id='subnet-a0599cf9',
# When launching into a subnet apparently `security_group_ids` must
# be used instead of `security_groups` (names)
security_group_ids=[
# stencila-worker-sg
'sg-930401f6'
],
block_device_map=block_device_map
)
instance = reservation.instances[0]
# Number of seconds to fail timeout waiting for server to launch
timeout = 120
start = time.time()
while True:
status = instance.update()
if status != 'pending':
break
if time.time()-start > timeout:
raise Exception('Timed out trying to start worker: %s' % worker)
time.sleep(1)
if status == 'running':
worker.provider_id = instance.id
worker.ip = instance.private_ip_address
instance.add_tag("Name", "stencila-worker")
else:
raise Exception('Failed to start worker: %s : %s' % (worker, status))
def terminate(self, worker):
connection = self.connection()
connection.terminate_instances(
instance_ids=[worker.provider_id]
)
| Python | 0.000123 |
a369a9648e1c0bc2cb2a4964fd2f5814d0950902 | echo 4 | echo.py | echo.py | import IPython
import warnings
# These are actually used for the application
import collections
import time
from golix import Ghid
from hypergolix.service import HypergolixLink
hgxlink = HypergolixLink(threaded=True)
desktop = Ghid(algo=1, address=b'\xc0TZ\x15+\x9a\x8e\x01\xbbvw\x83\xc8%\xd5RG\x9c8<\xf7\x1f\xa4e\x08\xc4\x9a\xa0o\x15\x83f\xf2>P/\xc1\xfbj3\xd6\xa9M\x03z\x98\x1b\xa7U\xb9b\xf3 \xfd\x81T+\xb3\x14\xaa\xcf$s\xac')
razpi = Ghid(algo=1, address=b'D\xe90\x1bpr\xd3\xed\xdd\xac-,\xa9{i\xca{[\xa8\x9fy\xe4\xf2C\x0fv\x18\xa4}\xd9\xa9)=+\xe0F\xd8j~6\x07H\xadD\xb9\xa9x/\x9a\xab\x9e\x8e\xe6\x03\xe9\xaf\xd7\xbaH\x08"w\xa1>')
# Declare api
request_api = bytes(64) + b'\x01'
response_api = bytes(64) + b'\x02'
# Etc
timer = collections.deque([0,0], maxlen=2)
recipients = { razpi, desktop } - { hgxlink.whoami }
def make_request(msg):
obj = hgxlink.new_object(
state = msg,
dynamic = True,
api_id = request_api
)
for recipient in recipients:
obj.share(recipient)
return obj
def timed_update(obj, msg):
timer.appendleft(time.monotonic())
obj.update(msg)
def timed_update_callback(obj):
timer.appendleft(time.monotonic())
elapsed = timer[0] - timer[1]
print('Update mirrored in', elapsed, 'seconds.')
# Store objects
incoming_requests = collections.deque(maxlen=10)
incoming_responses = collections.deque(maxlen=10)
outgoing_responses = collections.deque(maxlen=10)
def request_handler(obj):
incoming_requests.appendleft(obj)
reply = hgxlink.new_object(
state = obj.state,
dynamic = True,
api_id = response_api
)
reply.share(recipient=obj.author)
outgoing_responses.appendleft(reply)
def state_mirror(source_obj):
reply.update(source_obj.state)
obj.add_callback(state_mirror)
def response_handler(obj):
obj.add_callback(timed_update_callback)
incoming_responses.appendleft(obj)
# register api
hgxlink.register_api(request_api, object_handler=request_handler)
hgxlink.register_api(response_api, object_handler=response_handler)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
IPython.embed()
# Not strictly necessary but suppresses warnings
hgxlink.halt() | import IPython
import warnings
# These are actually used for the application
import collections
from golix import Ghid
from hypergolix.service import HypergolixLink
hgxlink = HypergolixLink(threaded=True)
desktop = Ghid(algo=1, address=b'\xc0TZ\x15+\x9a\x8e\x01\xbbvw\x83\xc8%\xd5RG\x9c8<\xf7\x1f\xa4e\x08\xc4\x9a\xa0o\x15\x83f\xf2>P/\xc1\xfbj3\xd6\xa9M\x03z\x98\x1b\xa7U\xb9b\xf3 \xfd\x81T+\xb3\x14\xaa\xcf$s\xac')
razpi = Ghid(algo=1, address=b'D\xe90\x1bpr\xd3\xed\xdd\xac-,\xa9{i\xca{[\xa8\x9fy\xe4\xf2C\x0fv\x18\xa4}\xd9\xa9)=+\xe0F\xd8j~6\x07H\xadD\xb9\xa9x/\x9a\xab\x9e\x8e\xe6\x03\xe9\xaf\xd7\xbaH\x08"w\xa1>')
# Declare api
request_api = bytes(64) + b'\x01'
response_api = bytes(64) + b'\x02'
# Store objects
incoming_requests = collections.deque(maxlen=10)
incoming_responses = collections.deque(maxlen=10)
outgoing_responses = collections.deque(maxlen=10)
def request_handler(obj):
incoming_requests.appendleft(obj)
reply = hgxlink.new_object(
state = obj.state,
dynamic = True,
api_id = response_api
)
reply.share(recipient=obj.author)
outgoing_responses.appendleft(reply)
def response_handler(obj):
incoming_responses.appendleft(obj)
# register api
hgxlink.register_api(request_api, object_handler=request_handler)
hgxlink.register_api(response_api, object_handler=response_handler)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
IPython.embed()
# Not strictly necessary but suppresses warnings
hgxlink.halt() | Python | 0.999988 |
4d86d5ffa27f5ee365bc832113cd6605a23d10d3 | Update src/acquisition/covidcast_nowcast/load_sensors.py | src/acquisition/covidcast_nowcast/load_sensors.py | src/acquisition/covidcast_nowcast/load_sensors.py | from shutil import move
import time
import delphi.operations.secrets as secrets
import pandas as pd
import sqlalchemy
from delphi.epidata.acquisition.covidcast.csv_importer import CsvImporter
SENSOR_CSV_PATH = "/common/covidcast-nowcast/receiving/"
TABLE_NAME = "covidcast_nowcast"
DB_NAME = "epidata"
CSV_DTYPES = {"geo_value": str, "value": float}
def main(csv_path: str = SENSOR_CSV_PATH) -> None:
"""
Parse all files in a given directory and insert them into the sensor table in the database.
For all the files found recursively in csv_path that match the naming scheme specified by
CsvImporter.find_csv_files(), attempt to load and insert them into the database. Files which do
not match the naming scheme will be moved to an archive/failed folder and skipped, and files
which raise an error during loading/uploading will be moved to the archive/failed folder and
have the error raised.
Parameters
----------
csv_path
Path to folder containing files to load.
Returns
-------
None.
"""
user, pw = secrets.db.epi
engine = sqlalchemy.create_engine(f"mysql+pymysql://{user}:{pw}@{secrets.db.host}/{DB_NAME}")
for filepath, attributes in CsvImporter.find_csv_files(csv_path):
if attributes is None:
move(filepath, filepath.replace("receiving", "archive/failed"))
continue
try:
data = load_and_prepare_file(filepath, attributes)
data.to_sql(TABLE_NAME, engine, if_exists="append", index=False)
except Exception:
move(filepath, filepath.replace("receiving", "archive/failed"))
raise
move(filepath, filepath.replace("receiving", "archive/successful"))
def load_and_prepare_file(filepath: str, attributes: tuple) -> pd.DataFrame:
"""
Read CSV file into a DataFrame and add relevant attributes as new columns to match DB table.
Parameters
----------
filepath
Path to CSV file.
attributes
(source, signal, time_type, geo_type, time_value, issue, lag) tuple
returned by CsvImport.find_csv_files
Returns
-------
DataFrame with additional attributes added as columns based on filename and current date.
"""
source, signal, time_type, geo_type, time_value, issue_value, lag_value = attributes
data = pd.read_csv(filepath, dtype=CSV_DTYPES)
data["source"] = source
data["signal"] = signal
data["time_type"] = time_type
data["geo_type"] = geo_type
data["time_value"] = time_value
data["issue"] = issue_value
data["lag"] = lag_value
data["value_updated_timestamp"] = int(time.time())
return data
if __name__ == "__main__":
main()
| from shutil import move
import time
import delphi.operations.secrets as secrets
import pandas as pd
import sqlalchemy
from delphi.epidata.acquisition.covidcast.csv_importer import CsvImporter
SENSOR_CSV_PATH = "/common/covidcast-nowcast/receiving/"
TABLE_NAME = "covidcast_nowcast"
DB_NAME = "epidata"
CSV_DTYPES = {"geo_value": str, "value": float}
def main(csv_path: str = SENSOR_CSV_PATH) -> None:
"""
Parse all files in a given directory and insert them into the sensor table in the database.
For all the files found recursively in csv_path that match the naming scheme specified by
CsvImporter.find_csv_files(), attempt to load the insert them into the database. Files which do
not match the naming scheme will be moved to an archive/failed folder and skipped, and files
which raise an error during loading/uploading will be moved to the archive/failed folder and
have the error raised.
Parameters
----------
csv_path
Path to folder containing files to load.
Returns
-------
None.
"""
user, pw = secrets.db.epi
engine = sqlalchemy.create_engine(f"mysql+pymysql://{user}:{pw}@{secrets.db.host}/{DB_NAME}")
for filepath, attributes in CsvImporter.find_csv_files(csv_path):
if attributes is None:
move(filepath, filepath.replace("receiving", "archive/failed"))
continue
try:
data = load_and_prepare_file(filepath, attributes)
data.to_sql(TABLE_NAME, engine, if_exists="append", index=False)
except Exception:
move(filepath, filepath.replace("receiving", "archive/failed"))
raise
move(filepath, filepath.replace("receiving", "archive/successful"))
def load_and_prepare_file(filepath: str, attributes: tuple) -> pd.DataFrame:
"""
Read CSV file into a DataFrame and add relevant attributes as new columns to match DB table.
Parameters
----------
filepath
Path to CSV file.
attributes
(source, signal, time_type, geo_type, time_value, issue, lag) tuple
returned by CsvImport.find_csv_files
Returns
-------
DataFrame with additional attributes added as columns based on filename and current date.
"""
source, signal, time_type, geo_type, time_value, issue_value, lag_value = attributes
data = pd.read_csv(filepath, dtype=CSV_DTYPES)
data["source"] = source
data["signal"] = signal
data["time_type"] = time_type
data["geo_type"] = geo_type
data["time_value"] = time_value
data["issue"] = issue_value
data["lag"] = lag_value
data["value_updated_timestamp"] = int(time.time())
return data
if __name__ == "__main__":
main()
| Python | 0 |
a82b3b5ba8d6fba12df1a3c1993325955da893b6 | Fix a typo in comment. Thanks for tmm1 for watching after me. | lib/carbon/util.py | lib/carbon/util.py | import os
import pwd
from os.path import abspath, basename, dirname, join
from twisted.python.util import initgroups
from twisted.scripts.twistd import runApp
from twisted.scripts._twistd_unix import daemonize
daemonize = daemonize # Backwards compatibility
def dropprivs(user):
uid, gid = pwd.getpwnam(user)[2:4]
initgroups(uid, gid)
os.setregid(gid, gid)
os.setreuid(uid, uid)
return (uid, gid)
def run_twistd_plugin(filename):
from carbon.conf import get_parser
from twisted.scripts.twistd import ServerOptions
bin_dir = dirname(abspath(filename))
root_dir = dirname(bin_dir)
storage_dir = join(root_dir, 'storage')
os.environ.setdefault('GRAPHITE_ROOT', root_dir)
os.environ.setdefault('GRAPHITE_STORAGE_DIR', storage_dir)
program = basename(filename).split('.')[0]
# First, parse command line options as the legacy carbon scripts used to
# do.
parser = get_parser(program)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
return
# This isn't as evil as you might think
__builtins__["instance"] = options.instance
__builtins__["program"] = program
# Then forward applicable options to either twistd or to the plugin itself.
twistd_options = ["--no_save"]
# If no reactor was selected yet, try to use the epoll reactor if
# available.
try:
from twisted.internet import epollreactor
twistd_options.append("--reactor=epoll")
except:
pass
if options.debug:
twistd_options.extend(["-n", "--logfile", "-"])
if options.profile:
twistd_options.append("--profile")
if options.pidfile:
twistd_options.extend(["--pidfile", options.pidfile])
# Now for the plugin-specific options.
twistd_options.append(program)
if options.debug:
twistd_options.append("--debug")
for option_name, option_value in vars(options).items():
if (option_value is not None and
option_name not in ("debug", "profile", "pidfile")):
twistd_options.extend(["--%s" % option_name.replace("_", "-"),
option_value])
# Finally, append extra args so that twistd has a chance to process them.
twistd_options.extend(args)
config = ServerOptions()
config.parseOptions(twistd_options)
runApp(config)
| import os
import pwd
from os.path import abspath, basename, dirname, join
from twisted.python.util import initgroups
from twisted.scripts.twistd import runApp
from twisted.scripts._twistd_unix import daemonize
daemonize = daemonize # Backwards compatibility
def dropprivs(user):
uid, gid = pwd.getpwnam(user)[2:4]
initgroups(uid, gid)
os.setregid(gid, gid)
os.setreuid(uid, uid)
return (uid, gid)
def run_twistd_plugin(filename):
from carbon.conf import get_parser
from twisted.scripts.twistd import ServerOptions
bin_dir = dirname(abspath(filename))
root_dir = dirname(bin_dir)
storage_dir = join(root_dir, 'storage')
os.environ.setdefault('GRAPHITE_ROOT', root_dir)
os.environ.setdefault('GRAPHITE_STORAGE_DIR', storage_dir)
program = basename(filename).split('.')[0]
# First, parse command line options as the legacy carbon scripts used to
# do.
parser = get_parser(program)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
return
# This isn't as evil as you might think
__builtins__["instance"] = options.instance
__builtins__["program"] = program
# Then forward applicable options to either twistd or to the plugin itself.
twistd_options = ["--no_save"]
# If no reactor was selected yet, try to use the epool reactor if
# available.
try:
from twisted.internet import epollreactor
twistd_options.append("--reactor=epoll")
except:
pass
if options.debug:
twistd_options.extend(["-n", "--logfile", "-"])
if options.profile:
twistd_options.append("--profile")
if options.pidfile:
twistd_options.extend(["--pidfile", options.pidfile])
# Now for the plugin-specific options.
twistd_options.append(program)
if options.debug:
twistd_options.append("--debug")
for option_name, option_value in vars(options).items():
if (option_value is not None and
option_name not in ("debug", "profile", "pidfile")):
twistd_options.extend(["--%s" % option_name.replace("_", "-"),
option_value])
# Finally, append extra args so that twistd has a chance to process them.
twistd_options.extend(args)
config = ServerOptions()
config.parseOptions(twistd_options)
runApp(config)
| Python | 0.000001 |
e611e9518945fa38165e8adf7103561f438b70b1 | Add subcommand to process directory | interdiagram/bin/interdiagram.py | interdiagram/bin/interdiagram.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pathlib import Path
from typing import Iterable, List, TypeVar
from typing.io import IO
import click
import yaml
from ..models import Diagram
click.disable_unicode_literals_warning = True
FileType = TypeVar('FileType', IO, Path)
def _is_file_obj(
f: FileType
) -> bool:
read_attr = getattr(f, 'read', None)
has_read_method = callable(read_attr)
return has_read_method
def _draw_files(
files: Iterable[FileType],
output_file: str
) -> None:
diagram = Diagram()
for f in files:
# TODO: Validate against schema
if not _is_file_obj(f):
f = f.open() # type: ignore
diagram.process_spec(yaml.load(f))
diagram.draw(output_file)
# TODO: Correct documentation schema once it's frozen
@click.group()
def cli():
"""Generate interaction/sitemap diagram."""
@cli.command('dir')
@click.argument(
'directory',
type=click.Path(exists=True, file_okay=False, resolve_path=True)
)
@click.argument('output-file', type=click.Path(resolve_path=True))
def directory(
directory: str,
output_file: str
) -> None:
"""Specify a directory where YAML files reside."""
files = Path(directory).glob('**/*.y*ml')
_draw_files(files, output_file)
@cli.command()
@click.argument('yaml-file', nargs=-1, type=click.File())
@click.argument('output-file', type=click.Path(resolve_path=True))
def files(
yaml_file: List[IO],
output_file: str
) -> None:
"""Specify individual YAML files.
Example: interdiagram data1.yaml data2.yaml output.pdf
The YAML spec is in the following format:
\b
sections: # App sections (pages)
Home: # Unique key for section
actions: # List of call to actions
- Sign up: # Action name
- Sign Up # Reference to another section or component
- Login:
- Login
- Search for registry: # Could be empty
components: # List of components in this section
- Experience cards:
- Experience Card
components: # Reusable components
Experience Card:
actions:
- Go to detail:
- Add to registry:
"""
_draw_files(yaml_file, output_file)
if __name__ == '__main__':
cli()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from typing import List
import click
import yaml
from ..models import Diagram
click.disable_unicode_literals_warning = True
# TODO: Correct documentation schema once it's frozen
@click.command()
@click.argument('yaml-file', nargs=-1, type=click.File())
@click.argument('output-file', type=click.Path(resolve_path=True))
def cli(
yaml_file: List,
output_file: str
) -> None:
"""Generate interaction/sitemap diagram.
Example: interdiagram data1.yaml data2.yaml output.pdf
The YAML spec is in the following format:
\b
sections: # App sections (pages)
Home: # Unique key for section
actions: # List of call to actions
- Sign up: # Action name
- Sign Up # Reference to another section or component
- Login:
- Login
- Search for registry: # Could be empty
components: # List of components in this section
- Experience cards:
- Experience Card
components: # Reusable components
Experience Card:
actions:
- Go to detail:
- Add to registry:
"""
diagram = Diagram()
for f in yaml_file:
# TODO: Validate against schema
diagram.process_spec(yaml.load(f))
diagram.draw(output_file)
if __name__ == '__main__':
cli()
| Python | 0 |
2163478d2d927c4e50fcef65a88ca9c81b9d245b | Remove print from tests | menpodetect/tests/opencv_test.py | menpodetect/tests/opencv_test.py | from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
| from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
print takeo_copy.landmarks
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['object_0'][None].n_points == 4
| Python | 0.000002 |
475552061da6a58c6953c387a639d5b4e941600b | Fix shell completion | metakernel/magics/shell_magic.py | metakernel/magics/shell_magic.py | # Copyright (c) Metakernel Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
from metakernel import Magic, pexpect
from metakernel.replwrap import cmd, bash
import os
import sys
class ShellMagic(Magic):
def __init__(self, kernel):
super(ShellMagic, self).__init__(kernel)
self.repl = None
self.cmd = None
self.start_process()
def line_shell(self, *args):
"""
%shell COMMAND - run the line as a shell command
This line command will run the COMMAND in the bash shell.
Examples:
%shell ls -al
%shell cd
Note: this is a persistent connection to a shell.
The working directory is synchronized to that of the notebook
before and after each call.
You can also use "!" instead of "%shell".
"""
# get in sync with the cwd
self.eval('cd %s' % os.getcwd())
command = " ".join(args)
resp = self.eval(command)
if self.cmd == 'cmd':
cwd = self.eval('cd')
else:
cwd = self.eval('pwd')
if os.path.exists(cwd):
os.chdir(cwd)
if resp:
self.kernel.Print(resp)
def eval(self, cmd):
return self.repl.run_command(cmd, timeout=None,
stream_handler=self.kernel.Print)
def start_process(self):
if self.repl is not None:
self.repl.child.terminate()
if not self.cmd:
if pexpect.which('bash'):
self.cmd = 'bash'
self.repl = bash()
elif pexpect.which('sh'):
self.cmd = 'sh'
self.repl = bash(command='sh')
elif os.name == 'nt':
self.cmd = 'cmd'
self.repl = cmd()
else:
msg = "The command was not found or was not executable: sh"
raise Exception(msg)
def cell_shell(self):
"""
%%shell - run the contents of the cell as shell commands
This shell command will run the cell contents in the bash shell.
Example:
%%shell
cd ..
ls -al
Note: this is a persistent connection to a shell.
The working directory is synchronized to that of the notebook
before and after each call.
You can also use "!!" instead of "%%shell".
"""
self.line_shell(self.code)
self.evaluate = False
def get_completions(self, info):
if self.cmd == 'cmd':
return []
command = 'compgen -cdfa "%s"' % info['code']
completion_text = self.repl.run_command(command, timeout=None)
return completion_text.split()
def get_help_on(self, info, level=0):
expr = info['code'].rstrip()
if self.cmd == 'cmd':
resp = self.eval('help %s' % expr)
elif level == 0:
resp = self.eval('%s --help' % expr)
else:
resp = self.eval('man %s' % expr)
if resp and not ': command not found' in resp:
return resp
else:
return "Sorry, no help is available on '%s'." % expr
def register_magics(kernel):
kernel.register_magics(ShellMagic)
| # Copyright (c) Metakernel Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
from metakernel import Magic, pexpect
from metakernel.replwrap import cmd, bash
import os
import sys
class ShellMagic(Magic):
def __init__(self, kernel):
super(ShellMagic, self).__init__(kernel)
self.repl = None
self.cmd = None
self.start_process()
def line_shell(self, *args):
"""
%shell COMMAND - run the line as a shell command
This line command will run the COMMAND in the bash shell.
Examples:
%shell ls -al
%shell cd
Note: this is a persistent connection to a shell.
The working directory is synchronized to that of the notebook
before and after each call.
You can also use "!" instead of "%shell".
"""
# get in sync with the cwd
self.eval('cd %s' % os.getcwd())
command = " ".join(args)
resp = self.eval(command)
if self.cmd == 'cmd':
cwd = self.eval('cd')
else:
cwd = self.eval('pwd')
if os.path.exists(cwd):
os.chdir(cwd)
if resp:
self.kernel.Print(resp)
def eval(self, cmd):
return self.repl.run_command(cmd, timeout=None,
stream_handler=self.kernel.Print)
def start_process(self):
if self.repl is not None:
self.repl.child.terminate()
if not self.cmd:
if pexpect.which('bash'):
self.cmd = 'bash'
self.repl = bash()
elif pexpect.which('sh'):
self.cmd = 'sh'
self.repl = bash(command='sh')
elif os.name == 'nt':
self.cmd = 'cmd'
self.repl = cmd()
else:
msg = "The command was not found or was not executable: sh"
raise Exception(msg)
def cell_shell(self):
"""
%%shell - run the contents of the cell as shell commands
This shell command will run the cell contents in the bash shell.
Example:
%%shell
cd ..
ls -al
Note: this is a persistent connection to a shell.
The working directory is synchronized to that of the notebook
before and after each call.
You can also use "!!" instead of "%%shell".
"""
self.line_shell(self.code)
self.evaluate = False
def get_completions(self, info):
if self.cmd == 'cmd':
return []
command = 'compgen -cdfa "%s"' % info['code']
completion_text = self.eval(command)
return completion_text.split()
def get_help_on(self, info, level=0):
expr = info['code'].rstrip()
if self.cmd == 'cmd':
resp = self.eval('help %s' % expr)
elif level == 0:
resp = self.eval('%s --help' % expr)
else:
resp = self.eval('man %s' % expr)
if resp and not ': command not found' in resp:
return resp
else:
return "Sorry, no help is available on '%s'." % expr
def register_magics(kernel):
kernel.register_magics(ShellMagic)
| Python | 0.000002 |
e8ad2ca0fc2ddec71645bef31686d9de2001dd88 | add translate type | modularodm/fields/StringField.py | modularodm/fields/StringField.py | from . import Field
from ..validators import StringValidator
class StringField(Field):
# default = ''
translate_type = str
validate = StringValidator()
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs) | from . import Field
from ..validators import StringValidator
class StringField(Field):
# default = ''
validate = StringValidator()
def __init__(self, *args, **kwargs):
super(StringField, self).__init__(*args, **kwargs) | Python | 0.000017 |
df5ac0a7f2246e5fbbb5f7d87903a5232e94fe87 | Test deprecation. | morepath/tests/test_autosetup.py | morepath/tests/test_autosetup.py | from collections import namedtuple
from morepath.autosetup import (
caller_module, caller_package, autoscan,
morepath_packages, import_package)
from base.m import App
import morepath
import pytest
def setup_module(module):
with pytest.deprecated_call():
morepath.disable_implicit()
def test_import():
import base
import sub
import entrypoint
from ns import real
from ns import real2
import under_score
# Pacakges to be ignored
import no_mp
from ns import nomp
import no_mp_sub
found = set(morepath_packages())
assert {base, entrypoint, real, real2, sub, under_score} <= found
assert {no_mp, nomp, no_mp_sub}.isdisjoint(found)
def test_load_distribution():
Distribution = namedtuple('Distribution', ['project_name'])
assert import_package(Distribution('base')).m.App is App
with pytest.raises(morepath.error.AutoImportError):
import_package(Distribution('inexistant-package'))
def invoke(callable):
"Add one frame to stack, no other purpose."
return callable()
def test_caller_module():
import sys
assert caller_module(1) == sys.modules[__name__]
assert invoke(caller_module) == sys.modules[__name__]
def test_caller_package():
import sys
assert caller_package(1) == sys.modules[__package__]
assert invoke(caller_package) == sys.modules[__package__]
def test_autoscan(monkeypatch):
import sys
for k in 'base.m', 'entrypoint.app', 'under_score.m':
monkeypatch.delitem(sys.modules, k, raising=False)
autoscan()
assert 'base.m' in sys.modules
assert 'entrypoint.app' in sys.modules
assert 'under_score.m' in sys.modules
| from collections import namedtuple
from morepath.autosetup import (
caller_module, caller_package, autoscan,
morepath_packages, import_package)
from base.m import App
import morepath
import pytest
def setup_module(module):
morepath.disable_implicit()
def test_import():
import base
import sub
import entrypoint
from ns import real
from ns import real2
import under_score
# Pacakges to be ignored
import no_mp
from ns import nomp
import no_mp_sub
found = set(morepath_packages())
assert {base, entrypoint, real, real2, sub, under_score} <= found
assert {no_mp, nomp, no_mp_sub}.isdisjoint(found)
def test_load_distribution():
Distribution = namedtuple('Distribution', ['project_name'])
assert import_package(Distribution('base')).m.App is App
with pytest.raises(morepath.error.AutoImportError):
import_package(Distribution('inexistant-package'))
def invoke(callable):
"Add one frame to stack, no other purpose."
return callable()
def test_caller_module():
import sys
assert caller_module(1) == sys.modules[__name__]
assert invoke(caller_module) == sys.modules[__name__]
def test_caller_package():
import sys
assert caller_package(1) == sys.modules[__package__]
assert invoke(caller_package) == sys.modules[__package__]
def test_autoscan(monkeypatch):
import sys
for k in 'base.m', 'entrypoint.app', 'under_score.m':
monkeypatch.delitem(sys.modules, k, raising=False)
autoscan()
assert 'base.m' in sys.modules
assert 'entrypoint.app' in sys.modules
assert 'under_score.m' in sys.modules
| Python | 0.000001 |
e1184f70abd477ae2d0c304321231c908c67882b | add comment to authorize() that uname and pw are saved in plain text | msl/package_manager/authorize.py | msl/package_manager/authorize.py | """
Create the GitHub authorization file.
"""
import getpass
from .utils import log, get_username, _get_input, _GITHUB_AUTH_PATH
WARNING_MESSAGE = """
Your username and password are saved in plain text in the file that
is created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
"""
def authorize(username=None, password=None):
"""
Create the GitHub authorization file.
When requesting information about the MSL repositories_ that are
available on GitHub there is a limit to how often you can send
requests to the GitHub API. If you have a GitHub account and
include your username and password with each request then this
limit is increased.
.. important::
Calling this function will create a file that contains your GitHub
username and password so that GitHub requests are authorized. Your
username and password are saved in plain text in the file that is
created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
.. versionadded:: 2.3.0
.. _repositories: https://github.com/MSLNZ
Parameters
----------
username : :class:`str`, optional
The GitHub username. If :data:`None` then you will be
asked for the `username`.
password : :class:`str`, optional
The GitHub password. If :data:`None` then you will be
asked for the `password`.
"""
if username is None:
default = get_username()
try:
username = _get_input('Enter your GitHub username [default: {}]: '.format(default))
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
else:
if not username:
username = default
if password is None:
try:
password = getpass.getpass('Enter your GitHub password: ')
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
if not username:
log.warning('You must enter a username. Did not create GitHub authorization file.')
return
if not password:
log.warning('You must enter a password. Did not create GitHub authorization file.')
return
with open(_GITHUB_AUTH_PATH, 'w') as fp:
fp.write(username + ':' + password)
log.warning(WARNING_MESSAGE)
log.info('GitHub credentials saved to ' + _GITHUB_AUTH_PATH)
| """
Create the GitHub authorization file.
"""
import getpass
from .utils import log, get_username, _get_input, _GITHUB_AUTH_PATH
WARNING_MESSAGE = """
Your username and password are saved in plain text in the file that
is created. You should set the file permissions provided by your
operating system to ensure that your GitHub credentials are safe.
"""
def authorize(username=None, password=None):
"""
Create the GitHub authorization file.
When requesting information about the MSL repositories_ that are
available on GitHub there is a limit to how often you can send
requests to the GitHub API. If you have a GitHub account and
include your username and password with each request then this
limit is increased.
Calling this function will create a file that contains your GitHub
username and password so that GitHub requests are authorized.
.. versionadded:: 2.3.0
.. _repositories: https://github.com/MSLNZ
Parameters
----------
username : :class:`str`, optional
The GitHub username. If :data:`None` then you will be
asked for the `username`.
password : :class:`str`, optional
The GitHub password. If :data:`None` then you will be
asked for the `password`.
"""
if username is None:
default = get_username()
try:
username = _get_input('Enter your GitHub username [default: {}]: '.format(default))
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
else:
if not username:
username = default
if password is None:
try:
password = getpass.getpass('Enter your GitHub password: ')
except KeyboardInterrupt:
log.warning('\nDid not create GitHub authorization file.')
return
if not username:
log.warning('You must enter a username. Did not create GitHub authorization file.')
return
if not password:
log.warning('You must enter a password. Did not create GitHub authorization file.')
return
with open(_GITHUB_AUTH_PATH, 'w') as fp:
fp.write(username + ':' + password)
log.warning(WARNING_MESSAGE)
log.info('GitHub credentials saved to ' + _GITHUB_AUTH_PATH)
| Python | 0 |
47b97cf311c36b993b59235dedc06993a6d58b6f | make TestVecSim subclass object | new_pmlib/TestVectorSimulator.py | new_pmlib/TestVectorSimulator.py | #=========================================================================
# TestVectorSimulator
#=========================================================================
# This class simplifies creating unit tests which simply set certain
# inputs and then check certain outputs every cycle. A user simply needs
# to instantiate and elaborate the model, create a list of test vectors,
# and create two helper functions (one to set the model inputs from the
# test vector and one to verify the model outputs against the test
# vector).
#
# Each test vector should be a list of values, so a collection of test
# vectors is just a list of lists. Each test vector specifies the
# inputs/outputs corresponding to a specific cycle in sequence.
#
from new_pymtl import *
class TestVectorSimulator( object ):
#-----------------------------------------------------------------------
# Constructor
#-----------------------------------------------------------------------
def __init__( self, model, test_vectors,
set_inputs_func, verify_outputs_func, wait_cycles = 0 ):
self.model = model
self.set_inputs_func = set_inputs_func
self.verify_outputs_func = verify_outputs_func
self.test_vectors = test_vectors
self.vcd_file_name = None
self.wait_cycles = wait_cycles
#-----------------------------------------------------------------------
# Dump VCD
#-----------------------------------------------------------------------
def dump_vcd( self, vcd_file_name ):
self.vcd_file_name = vcd_file_name
#-----------------------------------------------------------------------
# Run test
#-----------------------------------------------------------------------
def run_test( self, ):
# Create a simulator using the simulation tool
sim = SimulationTool( self.model )
# Dump vcd
if self.vcd_file_name != None:
sim.dump_vcd( self.vcd_file_name )
# Iterate setting the inputs and verifying the outputs each cycle
print ""
sim.reset()
for test_vector in self.test_vectors:
# Set inputs
self.set_inputs_func( self.model, test_vector )
# Evaluate combinational concurrent blocks in simulator
if self.wait_cycles == 0: sim.eval_combinational()
else:
for i in xrange(self.wait_cycles):
sim.cycle()
# Print the line trace
sim.print_line_trace()
# Verify outputs
self.verify_outputs_func( self.model, test_vector )
# Tick the simulator one cycle
sim.cycle()
# Add a couple extra ticks so that the VCD dump is nicer
sim.cycle()
sim.cycle()
sim.cycle()
| #=========================================================================
# TestVectorSimulator
#=========================================================================
# This class simplifies creating unit tests which simply set certain
# inputs and then check certain outputs every cycle. A user simply needs
# to instantiate and elaborate the model, create a list of test vectors,
# and create two helper functions (one to set the model inputs from the
# test vector and one to verify the model outputs against the test
# vector).
#
# Each test vector should be a list of values, so a collection of test
# vectors is just a list of lists. Each test vector specifies the
# inputs/outputs corresponding to a specific cycle in sequence.
#
from new_pymtl import *
class TestVectorSimulator:
#-----------------------------------------------------------------------
# Constructor
#-----------------------------------------------------------------------
def __init__( self, model, test_vectors,
set_inputs_func, verify_outputs_func, wait_cycles = 0 ):
self.model = model
self.set_inputs_func = set_inputs_func
self.verify_outputs_func = verify_outputs_func
self.test_vectors = test_vectors
self.vcd_file_name = None
self.wait_cycles = wait_cycles
#-----------------------------------------------------------------------
# Dump VCD
#-----------------------------------------------------------------------
def dump_vcd( self, vcd_file_name ):
self.vcd_file_name = vcd_file_name
#-----------------------------------------------------------------------
# Run test
#-----------------------------------------------------------------------
def run_test( self, ):
# Create a simulator using the simulation tool
sim = SimulationTool( self.model )
# Dump vcd
if self.vcd_file_name != None:
sim.dump_vcd( self.vcd_file_name )
# Iterate setting the inputs and verifying the outputs each cycle
print ""
sim.reset()
for test_vector in self.test_vectors:
# Set inputs
self.set_inputs_func( self.model, test_vector )
# Evaluate combinational concurrent blocks in simulator
if self.wait_cycles == 0: sim.eval_combinational()
else:
for i in xrange(self.wait_cycles):
sim.cycle()
# Print the line trace
sim.print_line_trace()
# Verify outputs
self.verify_outputs_func( self.model, test_vector )
# Tick the simulator one cycle
sim.cycle()
# Add a couple extra ticks so that the VCD dump is nicer
sim.cycle()
sim.cycle()
sim.cycle()
| Python | 0 |
0cd4862062bbe19aec5bb2a23563e03eb8ca8cb7 | Fix stable release script | make_stable_release.py | make_stable_release.py | from robot.libdoc import libdoc
from src.WhiteLibrary.version import VERSION
import git
import sys
VERSION_FILE = './src/WhiteLibrary/version.py'
def change_stable(from_stable, to_stable):
with open(VERSION_FILE, 'r') as file:
filedata = file.read()
filedata = filedata.replace('STABLE = {0}'.format(from_stable), 'STABLE = {0}'.format(to_stable))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
def change_version_number(ver):
with open(VERSION_FILE, 'r') as file:
filedata = file.read()
filedata = filedata.replace('VERSION = "{0}"'.format(VERSION), 'VERSION = "{0}"'.format(ver))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
repo = git.Repo('.')
change_stable("False", "True")
new_version = sys.argv[1]
change_version_number(new_version)
libdoc("./src/WhiteLibrary", "./docs/keywords.html", version=new_version)
ver = "v{}".format(new_version)
repo.git.add(VERSION_FILE)
repo.git.add('./docs/keywords.html')
repo.git.commit(m='Making stable release: {0}'.format(ver))
tag = repo.create_tag(ver, message='New stable version: "{0}"'.format(ver))
repo.remotes.origin.push(tag)
repo.git.push()
change_stable("True", "False")
repo.git.add(VERSION_FILE)
repo.git.commit(m='Back to unstable release')
repo.git.push()
| from robot.libdoc import libdoc
from src.WhiteLibrary.version import VERSION
import git
import sys
VERSION_FILE = './src/WhiteLibrary/version.py'
def change_stable(from_stable, to_stable):
with open(VERSION_FILE, 'r') as file :
filedata = file.read()
filedata = filedata.replace('STABLE = {0}'.format(from_stable), 'STABLE = {0}'.format(to_stable)
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
def change_version_number(ver):
with open(VERSION_FILE, 'r') as file :
filedata = file.read()
filedata = filedata.replace('VERSION = "{0}"'.format(VERSION), 'VERSION = "{0}"'.format(ver))
with open(VERSION_FILE, 'w') as file:
file.write(filedata)
repo = git.Repo( '.' )
change_stable("False", "True")
print(str(sys.argv))
change_version_number(sys.argv[1])
libdoc("./src/WhiteLibrary", "./docs/keywords.html", version=VERSION)
ver = "v".format(VERSION)
repo.git.add(VERSION_FILE)
repo.git.add('./docs/keywords.html')
repo.git.commit( m='Making stable release: {0}'.format(ver) )
tag = repo.git.create_tag(ver, message='New stable version: "{0}"'.format(ver))
repo.git.push(tag)
change_stable("True", "False")
repo.git.add(VERSION_FILE)
repo.git.commit( m='Back to unstable release' )
repo.git.push()
| Python | 0.000001 |
6d910ea91b550864f445fe33d0a29ef11a82f762 | Replace iteritmes() with items() | malcolm/core/method.py | malcolm/core/method.py | #!/bin/env dls-python
from collections import OrderedDict
from malcolm.core.loggable import Loggable
class Method(Loggable):
"""Exposes a function with metadata for arguments and return values"""
def __init__(self, name):
super(Method, self).__init__(logger_name=name)
self.name = name
self.func = None
self.takes = None
self.returns = None
self.defaults = None
def set_function(self, func):
"""Set the function to expose.
Function must return accept a dictionary of keyword arguments
and return either a single value or dictionary of results.
"""
self.func = func
def set_function_takes(self, arg_meta, defaults=None):
"""Set the arguments and default values for the method
Args:
arg_meta (MapMeta): Arguments to the function
default (dict): Default values for arguments (default None)
"""
self.takes = arg_meta
if defaults is not None:
self.defaults = OrderedDict(defaults)
else:
self.defaults = OrderedDict()
def set_function_returns(self, return_meta):
"""Set the return parameters for the method to validate against"""
self.returns = return_meta
def __call__(self, *args, **kwargs):
"""Call the exposed function using regular keyword argument parameters.
Will validate the output against provided return parameters.
"""
# Assumes positional arguments represent arguments *before* any kw-args
# in the ordered dictionary.
for arg, arg_val in zip(self.takes.elements.keys(), args):
kwargs[arg] = arg_val
for arg in self.takes.elements:
if arg not in kwargs.keys():
if arg in self.defaults.keys():
kwargs[arg] = self.defaults[arg]
elif arg in self.takes.required:
raise ValueError(
"Argument %s is required but was not provided" % arg)
return_val = self.func(kwargs)
if self.returns is not None:
if return_val.keys() != self.returns.elements.keys():
raise ValueError(
"Return result did not match specified return structure")
for r_name, r_val in return_val.items():
self.returns.elements[r_name].validate(r_val)
return return_val
def handle_request(self, request):
"""Call exposed function using request parameters and respond with the
result"""
result = self(**request.parameters)
request.respond_with_return(result)
def to_dict(self):
"""Return ordered dictionary representing Method object."""
serialized = OrderedDict()
serialized["takes"] = self.takes.to_dict()
serialized["defaults"] = self.defaults.copy()
serialized["returns"] = self.returns.to_dict()
return serialized
| #!/bin/env dls-python
from collections import OrderedDict
from malcolm.core.loggable import Loggable
class Method(Loggable):
"""Exposes a function with metadata for arguments and return values"""
def __init__(self, name):
super(Method, self).__init__(logger_name=name)
self.name = name
self.func = None
self.takes = None
self.returns = None
self.defaults = None
def set_function(self, func):
"""Set the function to expose.
Function must return accept a dictionary of keyword arguments
and return either a single value or dictionary of results.
"""
self.func = func
def set_function_takes(self, arg_meta, defaults=None):
"""Set the arguments and default values for the method
Args:
arg_meta (MapMeta): Arguments to the function
default (dict): Default values for arguments (default None)
"""
self.takes = arg_meta
if defaults is not None:
self.defaults = OrderedDict(defaults)
else:
self.defaults = OrderedDict()
def set_function_returns(self, return_meta):
"""Set the return parameters for the method to validate against"""
self.returns = return_meta
def __call__(self, *args, **kwargs):
"""Call the exposed function using regular keyword argument parameters.
Will validate the output against provided return parameters.
"""
# Assumes positional arguments represent arguments *before* any kw-args
# in the ordered dictionary.
for arg, arg_val in zip(self.takes.elements.keys(), args):
kwargs[arg] = arg_val
for arg in self.takes.elements:
if arg not in kwargs.keys():
if arg in self.defaults.keys():
kwargs[arg] = self.defaults[arg]
elif arg in self.takes.required:
raise ValueError(
"Argument %s is required but was not provided" % arg)
return_val = self.func(kwargs)
if self.returns is not None:
if return_val.keys() != self.returns.elements.keys():
raise ValueError(
"Return result did not match specified return structure")
for r_name, r_val in return_val.iteritems():
self.returns.elements[r_name].validate(r_val)
return return_val
def handle_request(self, request):
"""Call exposed function using request parameters and respond with the
result"""
result = self(**request.parameters)
request.respond_with_return(result)
def to_dict(self):
"""Return ordered dictionary representing Method object."""
serialized = OrderedDict()
serialized["takes"] = self.takes.to_dict()
serialized["defaults"] = self.defaults.copy()
serialized["returns"] = self.returns.to_dict()
return serialized
| Python | 0.99999 |
ef5ed1bedd80ae03b9174088e579b5d85507266c | Update forms.py | app/auth/forms.py | app/auth/forms.py | from flask.ext.wtf import Form
from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError
from wtforms.validators import Required, Email, Length, Regexp, EqualTo
from ..models import User
from app.exceptions import ValidationError
class LoginForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
password = PasswordField('Password', validators=[Required()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Login')
class RegistrationForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
username = StringField('Username', validators=[Required(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0, 'Usernames must have only letters, numbers, dots or underscore.')])
password = PasswordField('Password', validators=[Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already in use.')
class ChangePasswordForm(Form):
old_password = PasswordField('Old password', validators=[Required()])
password = PasswordField('New password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm new password', validators=[Required()])
submit = SubmitField('Update Password')
class PasswordResetRequestForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
submit = SubmitField('Reset Password')
class PasswordResetForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('New Password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Reset Password')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is None:
raise ValidationError('Unknown email address.')
class ChangeEmailForm(Form):
email = StringField('New Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[Required()])
submit = SubmitField('Update Email Address')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
| from flask.ext.wtf import Form
from wtforms import StringField, PasswordField, BooleanField, SubmitField, ValidationError
from wtforms.validators import Required, Email, Length, Regexp, EqualTo
from ..models import User
class LoginForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
password = PasswordField('Password', validators=[Required()])
remember_me = BooleanField('Keep me logged in')
submit = SubmitField('Login')
class RegistrationForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64), Email()])
username = StringField('Username', validators=[Required(), Length(1, 64), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0, 'Usernames must have only letters, numbers, dots or underscore.')])
password = PasswordField('Password', validators=[Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Register')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
def validate_username(self, field):
if User.query.filter_by(username=field.data).first():
raise ValidationError('Username already in use.')
class ChangePasswordForm(Form):
old_password = PasswordField('Old password', validators=[Required()])
password = PasswordField('New password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm new password', validators=[Required()])
submit = SubmitField('Update Password')
class PasswordResetRequestForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
submit = SubmitField('Reset Password')
class PasswordResetForm(Form):
email = StringField('Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('New Password', validators=[
Required(), EqualTo('password2', message='Passwords must match')])
password2 = PasswordField('Confirm password', validators=[Required()])
submit = SubmitField('Reset Password')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first() is None:
raise ValidationError('Unknown email address.')
class ChangeEmailForm(Form):
email = StringField('New Email', validators=[Required(), Length(1, 64),
Email()])
password = PasswordField('Password', validators=[Required()])
submit = SubmitField('Update Email Address')
def validate_email(self, field):
if User.query.filter_by(email=field.data).first():
raise ValidationError('Email already registered.')
| Python | 0.000001 |
58bab9291c85edc3f13d3dc0659eff3c17201eb1 | Improve pixelcnn namings and comments | eva/models/pixelcnn.py | eva/models/pixelcnn.py | from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense, Reshape, Lambda
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from keras.optimizers import Nadam
import keras.backend.tensorflow_backend as K
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, build=True):
width, height, channels = input_shape
# TODO: Make it scalable to any amount of channels.
input_map = Input(shape=input_shape, name='input_map')
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same', name='masked2d_A')(input_map)
model = ResidualBlockList(filters, blocks)(model)
model = PReLU()(model)
model = MaskedConvolution2D(filters, 1, 1)(model)
model = PReLU()(model)
model = MaskedConvolution2D(3*256, 1, 1, name='channels_mult_palette')(model)
model = Reshape((input_shape[0], input_shape[1], 256, input_shape[2]), name='palette_channels')(model)
# TODO: Make it scalable to any amount of channels.
red = Lambda(lambda x: x[:, :, :, :, 0], name='red_extract')(model)
red = Reshape((input_shape[0] * input_shape[1], 256), name='hw_red-palette')(red)
red = Activation('softmax', name='red')(red)
green = Lambda(lambda x: x[:, :, :, :, 1], name='green_extract')(model)
green = Reshape((input_shape[0] * input_shape[1], 256), name='hw_green-palette')(green)
green = Activation('softmax', name='green')(green)
blue = Lambda(lambda x: x[:, :, :, :, 2], name='blue_extract')(model)
blue = Reshape((input_shape[0] * input_shape[1], 256), name='hw_blue-palette')(blue)
blue = Activation('softmax', name='blue')(blue)
# TODO: Make is scalable to any amount of channels.
if build:
model = Model(input=input_map, output=[red, green, blue])
model.compile(optimizer=Nadam(),
loss={ 'red': 'sparse_categorical_crossentropy',
'green': 'sparse_categorical_crossentropy',
'blue': 'sparse_categorical_crossentropy'})
return model
| from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense, Reshape, Lambda
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from keras.optimizers import Nadam
import keras.backend.tensorflow_backend as K
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, build=True):
width, height, channels = input_shape
input_map = Input(shape=input_shape, name='input_map')
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same')(input_map)
model = ResidualBlockList(model, filters, blocks)
model = PReLU()(model)
model = MaskedConvolution2D(filters, 1, 1)(model)
model = PReLU()(model)
model = MaskedConvolution2D(3*256, 1, 1)(model)
# TODO: Make it scalable to any amount of channels.
model = Reshape((input_shape[0], input_shape[1], 256, input_shape[2]))(model)
# TODO: Make it scalable to any amount of channels.
red = Lambda(lambda x: x[:, :, :, :, 0])(model)
red = Reshape((input_shape[0] * input_shape[1], 256))(red)
red = Activation('softmax', name='red')(red)
green = Lambda(lambda x: x[:, :, :, :, 1])(model)
green = Reshape((input_shape[0] * input_shape[1], 256))(green)
green = Activation('softmax', name='green')(green)
blue = Lambda(lambda x: x[:, :, :, :, 2])(model)
blue = Reshape((input_shape[0] * input_shape[1], 256))(blue)
blue = Activation('softmax', name='blue')(blue)
# TODO: Make is scalable to any amount of channels.
if build:
model = Model(input=input_map, output=[red, green, blue])
model.compile(optimizer=Nadam(),
loss={ 'red': 'sparse_categorical_crossentropy',
'green': 'sparse_categorical_crossentropy',
'blue': 'sparse_categorical_crossentropy'})
return model
| Python | 0 |
79b258dedda23c9130dda6d4d674f077ac52683e | Add select field for food type | app/main/forms.py | app/main/forms.py | import datetime
from dateutil.parser import parse
from flask.ext.wtf import Form
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
DateTimeField, SubmitField
from wtforms.validators import Required, Length, Email
from wtforms import ValidationError
from ..models import User, Event, Location
from autocomplete.forms import AutocompleteField
def get_loc_by_id(id):
loc = Location.query.filter_by(id=id).first()
return loc
class EditProfileForm(Form):
text_updates = BooleanField('Send notifications through text')
phone = StringField('Phone Number (To recieve event notifications)')
location = AutocompleteField('School',
url='auth.autocomplete',
get_label='name',
getter=get_loc_by_id,
validators=[Required()]
)
submit = SubmitField('Submit')
def validate_phone(self, field):
if field.data != '' and User.query.filter_by(phone=num).first():
raise ValidationError('That number is already in use.')
class MakeEventForm(Form):
name = StringField('What is the event?', validators=[Required()])
food_types = [("Fruit","Fruit"), ("Lemonade","Lemonade"), ("Breakfast","Breakfast"), ("Meat","Meat"), ("Sausage","Sausage"), ("Hot dogs","Hot dogs"),
("Burgers","Burgers"), ("Candy","Candy"), ("Ice cream","Ice cream"), ("Drinks","Drinks"), ("Soup","Soup"), ("Alcohol","Alcohol"), ("Pizza","Pizza"),
("Chicken","Chicken"), ("Fish","Fish"), ("Cake","Cake"), ("BBQ","BBQ"), ("Formal dinner","Formal dinner"), ("Smoothie","Smoothie"), ("Coffee","Coffee"),
("Tea","Tea")]
serving = SelectField('What is being offered?', choices=food_types)
place = StringField('Where is this happening (Building/room)?', validators=[Required()])
now = datetime.datetime.now()#.strftime('%m-%d %H:%M')
time = DateTimeField('When is this happening?', default=now, format='%m/%d %I:%M%p')
body = StringField('Anything else we should know?')
submit = SubmitField('Submit')
def validate_time(self, field):
pass
#if field.data < datetime.datetime.now():
# raise ValidationError('Time must be in the future')
class SchoolSearchForm(Form):
location = AutocompleteField(
url='main.autocomplete',
placeholder='Your school...',
get_label='name',
getter=get_loc_by_id,
validators=[Required()]
)
| import datetime
from dateutil.parser import parse
from flask.ext.wtf import Form
from wtforms import StringField, TextAreaField, BooleanField, SelectField,\
DateTimeField, SubmitField
from wtforms.validators import Required, Length, Email
from wtforms import ValidationError
from ..models import User, Event, Location
from autocomplete.forms import AutocompleteField
def get_loc_by_id(id):
loc = Location.query.filter_by(id=id).first()
return loc
class EditProfileForm(Form):
text_updates = BooleanField('Send notifications through text')
phone = StringField('Phone Number (To recieve event notifications)')
location = AutocompleteField('School',
url='auth.autocomplete',
get_label='name',
getter=get_loc_by_id,
validators=[Required()]
)
submit = SubmitField('Submit')
def validate_phone(self, field):
if field.data != '' and User.query.filter_by(phone=num).first():
raise ValidationError('That number is already in use.')
class MakeEventForm(Form):
name = StringField('What is the event?', validators=[Required()])
serving = StringField('What is being offered?', default='Pizza!')
place = StringField('Where is this happening (Building/room)?', validators=[Required()])
now = datetime.datetime.now()#.strftime('%m-%d %H:%M')
time = DateTimeField('When is this happening?', default=now, format='%m/%d %I:%M%p')
body = StringField('Anything else we should know?')
submit = SubmitField('Submit')
def validate_time(self, field):
pass
#if field.data < datetime.datetime.now():
# raise ValidationError('Time must be in the future')
class SchoolSearchForm(Form):
location = AutocompleteField(
url='main.autocomplete',
placeholder='Your school...',
get_label='name',
getter=get_loc_by_id,
validators=[Required()]
)
| Python | 0.000001 |
41b3bfdbaa7540e85d74aa6d248e6214f17fe432 | Converted to Python 3 code using 2to3 command | django-os2webscanner/os2webscanner/migrations/0003_organization_do_notify_all_scans.py | django-os2webscanner/os2webscanner/migrations/0003_organization_do_notify_all_scans.py | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('os2webscanner', '0002_auto_20160401_0817'),
]
operations = [
migrations.AddField(
model_name='organization',
name='do_notify_all_scans',
field=models.BooleanField(default=True),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('os2webscanner', '0002_auto_20160401_0817'),
]
operations = [
migrations.AddField(
model_name='organization',
name='do_notify_all_scans',
field=models.BooleanField(default=True),
),
]
| Python | 0.999973 |
060b124888953d302d8611c459f978ef9201ae96 | update g95 version pattern | numpy/distutils/fcompiler/g95.py | numpy/distutils/fcompiler/g95.py | # http://g95.sourceforge.net/
import os
import sys
from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler
class G95FCompiler(FCompiler):
compiler_type = 'g95'
# version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*'
# $ g95 --version
# G95 (GCC 4.0.3 (g95!) May 22 2006)
version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95 (?P<version>.*)!\) (?P<date>.*)\).*'
# $ g95 --version
# G95 (GCC 4.0.3 (g95 0.90!) Aug 22 2006)
executables = {
'version_cmd' : ["g95", "--version"],
'compiler_f77' : ["g95", "-ffixed-form"],
'compiler_fix' : ["g95", "-ffixed-form"],
'compiler_f90' : ["g95"],
'linker_so' : ["g95","-shared"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"]
}
pic_flags = ['-fpic']
module_dir_switch = '-fmod='
module_include_switch = '-I'
def get_flags(self):
return ['-fno-second-underscore']
def get_flags_opt(self):
return ['-O']
def get_flags_debug(self):
return ['-g']
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
from numpy.distutils.fcompiler import new_fcompiler
#compiler = new_fcompiler(compiler='g95')
compiler = G95FCompiler()
compiler.customize()
print compiler.get_version()
| # http://g95.sourceforge.net/
import os
import sys
from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler
class G95FCompiler(FCompiler):
compiler_type = 'g95'
version_pattern = r'G95 \((GCC (?P<gccversion>[\d.]+)|.*?) \(g95!\) (?P<version>.*)\).*'
# $ g95 --version
# G95 (GCC 4.0.3 (g95!) May 22 2006)
executables = {
'version_cmd' : ["g95", "--version"],
'compiler_f77' : ["g95", "-ffixed-form"],
'compiler_fix' : ["g95", "-ffixed-form"],
'compiler_f90' : ["g95"],
'linker_so' : ["g95","-shared"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"]
}
pic_flags = ['-fpic']
module_dir_switch = '-fmod='
module_include_switch = '-I'
def get_flags(self):
return ['-fno-second-underscore']
def get_flags_opt(self):
return ['-O']
def get_flags_debug(self):
return ['-g']
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
from numpy.distutils.fcompiler import new_fcompiler
#compiler = new_fcompiler(compiler='g95')
compiler = G95FCompiler()
compiler.customize()
print compiler.get_version()
| Python | 0 |
594da79c98f360331613f786918f08957c39d13c | apply train_hook.gradients() | hypergan/trainers/simultaneous_trainer.py | hypergan/trainers/simultaneous_trainer.py | import tensorflow as tf
import numpy as np
import hyperchamber as hc
import inspect
from hypergan.trainers.base_trainer import BaseTrainer
TINY = 1e-12
class SimultaneousTrainer(BaseTrainer):
""" Steps G and D simultaneously """
def _create(self):
gan = self.gan
config = self.config
if hasattr(self, 'loss'):
loss = self.loss
else:
loss = self.gan.loss
d_loss, g_loss = loss.sample
self.d_log = -tf.log(tf.abs(d_loss+TINY))
self.d_loss = d_loss
self.g_loss = g_loss
self.step_ops = None
config.optimizer["loss"] = loss.sample
self.optimizer = self.gan.create_optimizer(config.optimizer)
d_vars = self.d_vars or self.gan.d_vars()
g_vars = self.g_vars or self.gan.g_vars()
if self.gan.distribution_strategy is not None:
return
d_grads = tf.gradients(d_loss, d_vars)
g_grads = tf.gradients(g_loss, g_vars)
apply_vec = list(zip((d_grads + g_grads), (d_vars + g_vars))).copy()
for grad, v in apply_vec:
if grad is None:
print("Gradient is None:", v)
for t in self.train_hooks:
d_grads, g_grads = t.gradients(d_grads, g_grads)
apply_vec = list(zip((d_grads + g_grads), (d_vars + g_vars))).copy()
self.g_loss = g_loss
self.d_loss = d_loss
self.gan.trainer = self
self.optimize_t = self.optimizer.apply_gradients(apply_vec)
def required(self):
return "".split()
def _step(self, feed_dict):
gan = self.gan
sess = gan.session
config = self.config
loss = gan.loss
metrics = gan.metrics()
d_loss, g_loss = loss.sample
self.before_step(self.current_step, feed_dict)
if self.step_ops is None:
ops = [self.optimize_t]
update_train_hooks = [t.update_op() for t in self.train_hooks]
update_train_hooks = [op for op in update_train_hooks if op is not None]
self.step_ops = ops + update_train_hooks
sess.run(self.step_ops, feed_dict)
self.after_step(self.current_step, feed_dict)
if self.current_step % 10 == 0:
metric_values = self.gan.session.run(self.output_variables(metrics))
self.print_metrics(self.current_step)
def print_metrics(self, step):
metrics = self.gan.metrics()
metric_values = self.gan.session.run(self.output_variables(metrics))
print(str(self.output_string(metrics) % tuple([step] + metric_values)))
| import tensorflow as tf
import numpy as np
import hyperchamber as hc
import inspect
from hypergan.trainers.base_trainer import BaseTrainer
TINY = 1e-12
class SimultaneousTrainer(BaseTrainer):
""" Steps G and D simultaneously """
def _create(self):
gan = self.gan
config = self.config
if hasattr(self, 'loss'):
loss = self.loss
else:
loss = self.gan.loss
d_loss, g_loss = loss.sample
self.d_log = -tf.log(tf.abs(d_loss+TINY))
self.d_loss = d_loss
self.g_loss = g_loss
self.step_ops = None
config.optimizer["loss"] = loss.sample
self.optimizer = self.gan.create_optimizer(config.optimizer)
d_vars = self.d_vars or self.gan.d_vars()
g_vars = self.g_vars or self.gan.g_vars()
if self.gan.distribution_strategy is not None:
return
d_grads = tf.gradients(d_loss, d_vars)
g_grads = tf.gradients(g_loss, g_vars)
apply_vec = list(zip((d_grads + g_grads), (d_vars + g_vars))).copy()
for grad, v in apply_vec:
if grad is None:
print("Gradient is None:", v)
self.gan.gradient_mean = sum([tf.reduce_mean(tf.abs(grad)) for grad in d_grads+g_grads])/len(d_grads+g_grads)
self.g_loss = g_loss
self.d_loss = d_loss
self.gan.trainer = self
self.optimize_t = self.optimizer.apply_gradients(apply_vec)
def required(self):
return "".split()
def _step(self, feed_dict):
gan = self.gan
sess = gan.session
config = self.config
loss = gan.loss
metrics = gan.metrics()
d_loss, g_loss = loss.sample
self.before_step(self.current_step, feed_dict)
if self.step_ops is None:
ops = [self.optimize_t]
update_train_hooks = [t.update_op() for t in self.train_hooks]
update_train_hooks = [op for op in update_train_hooks if op is not None]
self.step_ops = ops + update_train_hooks
sess.run(self.step_ops, feed_dict)
self.after_step(self.current_step, feed_dict)
if self.current_step % 10 == 0:
metric_values = self.gan.session.run(self.output_variables(metrics))
self.print_metrics(self.current_step)
def print_metrics(self, step):
metrics = self.gan.metrics()
metric_values = self.gan.session.run(self.output_variables(metrics))
print(str(self.output_string(metrics) % tuple([step] + metric_values)))
| Python | 0 |
adfe91d2f6066d8f28aeca9574465be452fcd20e | Correct excel file extension | octoprint_printhistory/export.py | octoprint_printhistory/export.py | # coding=utf-8
__author__ = "Jarek Szczepanski <imrahil@imrahil.com>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 Jarek Szczepanski - Released under terms of the AGPLv3 License"
def exportHistoryData(self, exportType):
import flask
import csv
import StringIO
history_dict = self._getHistoryDict()
if history_dict is not None:
si = StringIO.StringIO()
headers = ['File name', 'Timestamp', 'Success', 'Print time', 'Filament length', 'Filament volume']
if exportType == 'csv':
writer = csv.writer(si, quoting=csv.QUOTE_ALL)
writer.writerow(headers)
for historyDetails in history_dict:
output = list()
output.append(historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-")
output.append(historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-")
output.append(historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-")
output.append(historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-")
output.append(historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-")
output.append(historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-")
writer.writerow(output);
response = flask.make_response(si.getvalue())
response.headers["Content-type"] = "text/csv"
response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.csv"
elif exportType == 'excel':
import xlsxwriter
workbook = xlsxwriter.Workbook(si)
worksheet = workbook.add_worksheet()
col = 0
for header in headers:
worksheet.write(0, col, header)
col += 1
row = 1
for historyDetails in history_dict:
worksheet.write(row, 0, (historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-"))
worksheet.write(row, 1, (historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-"))
worksheet.write(row, 2, (historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-"))
worksheet.write(row, 3, (historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-"))
worksheet.write(row, 4, (historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-"))
worksheet.write(row, 5, (historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-"))
row += 1
workbook.close()
response = flask.make_response(si.getvalue())
response.headers["Content-type"] = "application/vnd.ms-excel"
response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.xlsx"
return response
else:
return flask.make_response("No history file", 400)
| # coding=utf-8
__author__ = "Jarek Szczepanski <imrahil@imrahil.com>"
__license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
__copyright__ = "Copyright (C) 2014 Jarek Szczepanski - Released under terms of the AGPLv3 License"
def exportHistoryData(self, exportType):
import flask
import csv
import StringIO
history_dict = self._getHistoryDict()
if history_dict is not None:
si = StringIO.StringIO()
headers = ['File name', 'Timestamp', 'Success', 'Print time', 'Filament length', 'Filament volume']
if exportType == 'csv':
writer = csv.writer(si, quoting=csv.QUOTE_ALL)
writer.writerow(headers)
for historyDetails in history_dict:
output = list()
output.append(historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-")
output.append(historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-")
output.append(historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-")
output.append(historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-")
output.append(historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-")
output.append(historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-")
writer.writerow(output);
response = flask.make_response(si.getvalue())
response.headers["Content-type"] = "text/csv"
response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.csv"
elif exportType == 'excel':
import xlsxwriter
workbook = xlsxwriter.Workbook(si)
worksheet = workbook.add_worksheet()
col = 0
for header in headers:
worksheet.write(0, col, header)
col += 1
row = 1
for historyDetails in history_dict:
worksheet.write(row, 0, (historyDetails["fileName"] if "fileName" in historyDetails and historyDetails["fileName"] is not None else "-"))
worksheet.write(row, 1, (historyDetails["timestamp"] if "timestamp" in historyDetails and historyDetails["timestamp"] is not None else "-"))
worksheet.write(row, 2, (historyDetails["success"] if "success" in historyDetails and historyDetails["success"] is not None else "-"))
worksheet.write(row, 3, (historyDetails["printTime"] if "printTime" in historyDetails and historyDetails["printTime"] is not None else "-"))
worksheet.write(row, 4, (historyDetails["filamentLength"] if "filamentLength" in historyDetails and historyDetails["filamentLength"] is not None else "-"))
worksheet.write(row, 5, (historyDetails["filamentVolume"] if "filamentVolume" in historyDetails and historyDetails["filamentVolume"] is not None else "-"))
row += 1
workbook.close()
response = flask.make_response(si.getvalue())
response.headers["Content-type"] = "application/vnd.ms-excel"
response.headers["Content-Disposition"] = "attachment; filename=octoprint_print_history_export.xls"
return response
else:
return flask.make_response("No history file", 400)
| Python | 0 |
ee330d0b0092e1a8fe7c25cbe170973647d59baa | enable staff with clearance to create own office hours | web/impact/impact/v1/serializers/office_hours_serializer.py | web/impact/impact/v1/serializers/office_hours_serializer.py | from datetime import timedelta
from rest_framework.serializers import (
ModelSerializer,
ValidationError,
)
from accelerator_abstract.models.base_clearance import (
CLEARANCE_LEVEL_STAFF
)
from accelerator_abstract.models.base_user_utils import is_employee
from accelerator.models import (
MentorProgramOfficeHour,
UserRole
)
from .location_serializer import LocationSerializer
from .user_serializer import UserSerializer
INVALID_END_DATE = 'office hour end time must be later than the start time'
INVALID_USER = ('must have clearance or be of type Mentor or Alumni in '
'residence in an active program')
INVALID_SESSION_DURATION = 'Please specify a duration of 30 minutes or more.'
THIRTY_MINUTES = timedelta(minutes=30)
NO_START_DATE_TIME = "start_date_time must be specified"
NO_END_DATE_TIME = "end_date_time must be specified"
class OfficeHourSerializer(ModelSerializer):
class Meta:
model = MentorProgramOfficeHour
fields = [
'id', 'mentor', 'start_date_time', 'end_date_time',
'topics', 'description', 'location',
]
def validate(self, attrs):
start_date_time = None
end_date_time = None
if self.instance is not None:
start_date_time = self.instance.start_date_time
end_date_time = self.instance.end_date_time
start_date_time = attrs.get('start_date_time') or start_date_time
end_date_time = attrs.get('end_date_time') or end_date_time
if not start_date_time:
raise ValidationError({
'start_date_time': NO_START_DATE_TIME})
if not end_date_time:
raise ValidationError({
'end_date_time': NO_END_DATE_TIME})
if start_date_time > end_date_time:
raise ValidationError({
'end_date_time': INVALID_END_DATE})
if end_date_time - start_date_time < THIRTY_MINUTES:
raise ValidationError({
'end_date_time': INVALID_SESSION_DURATION})
return attrs
def is_allowed_mentor(self, mentor):
staff_user = self.context['request'].user
roles = [UserRole.MENTOR, UserRole.AIR]
if staff_user == mentor:
return staff_user.clearances.filter(
level=CLEARANCE_LEVEL_STAFF,
program_family__programs__program_status='active'
).exists()
return mentor.programrolegrant_set.filter(
program_role__user_role__name__in=roles,
program_role__program__program_status='active',
).exists()
def validate_mentor(self, mentor):
user = self.context['request'].user
if not is_employee(user):
return user
if not self.is_allowed_mentor(mentor):
raise ValidationError(INVALID_USER)
return mentor
def to_representation(self, instance):
data = super().to_representation(instance)
data['mentor'] = UserSerializer(instance.mentor).data
data['location'] = LocationSerializer(instance.location).data
return data
| from datetime import timedelta
from rest_framework.serializers import (
ModelSerializer,
ValidationError,
)
from accelerator_abstract.models.base_user_utils import is_employee
from accelerator.models import (
MentorProgramOfficeHour,
UserRole
)
from .location_serializer import LocationSerializer
from .user_serializer import UserSerializer
INVALID_END_DATE = 'office hour end time must be later than the start time'
INVALID_USER = ('must be of type Mentor or Alumni in residence '
'in an active program')
INVALID_SESSION_DURATION = 'Please specify a duration of 30 minutes or more.'
THIRTY_MINUTES = timedelta(minutes=30)
NO_START_DATE_TIME = "start_date_time must be specified"
NO_END_DATE_TIME = "end_date_time must be specified"
class OfficeHourSerializer(ModelSerializer):
class Meta:
model = MentorProgramOfficeHour
fields = [
'id', 'mentor', 'start_date_time', 'end_date_time',
'topics', 'description', 'location',
]
def validate(self, attrs):
start_date_time = None
end_date_time = None
if self.instance is not None:
start_date_time = self.instance.start_date_time
end_date_time = self.instance.end_date_time
start_date_time = attrs.get('start_date_time') or start_date_time
end_date_time = attrs.get('end_date_time') or end_date_time
if not start_date_time:
raise ValidationError({
'start_date_time': NO_START_DATE_TIME})
if not end_date_time:
raise ValidationError({
'end_date_time': NO_END_DATE_TIME})
if start_date_time > end_date_time:
raise ValidationError({
'end_date_time': INVALID_END_DATE})
if end_date_time - start_date_time < THIRTY_MINUTES:
raise ValidationError({
'end_date_time': INVALID_SESSION_DURATION})
return attrs
def validate_mentor(self, mentor):
user = self.context['request'].user
if not is_employee(user):
return user
roles = [UserRole.MENTOR, UserRole.AIR]
is_allowed_mentor = mentor.programrolegrant_set.filter(
program_role__user_role__name__in=roles,
program_role__program__program_status='active',
).exists()
if not is_allowed_mentor:
raise ValidationError(INVALID_USER)
return mentor
def to_representation(self, instance):
data = super().to_representation(instance)
data['mentor'] = UserSerializer(instance.mentor).data
data['location'] = LocationSerializer(instance.location).data
return data
| Python | 0 |
7e8e5ceb765189974bcaee86e15c26b94ac05f3a | Update modulation.py | examples/modulation.py | examples/modulation.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import initExample
import os
from lase.core import KClient
# Driver to use
from lase.drivers import Oscillo
# Modules to import
import numpy as np
import matplotlib.pyplot as plt
import time
# Load the oscillo instrument
host = os.getenv('HOST','192.168.1.100')
password = os.getenv('PASSWORD','changeme')
ssh = ZynqSSH(host, password)
ssh.unzip_app()
ssh.install_instrument('oscillo')
# Connect to the instrument
client = KClient(host)
driver = Oscillo(client)
# Enable laser
driver.start_laser()
# Set laser current
current = 30 # mA
driver.set_laser_current(current)
# Modulation on DAC
amp_mod = 0.2
freq_mod = 1e6
driver.dac[1, :] = amp_mod*np.sin(2 * np.pi * freq_mod * driver.sampling.t)
driver.set_dac()
# Signal on ADC
driver.get_adc()
signal = driver.adc[0, :]
# Plot
plt.plot(driver.sampling.t, signal)
plt.show()
# Plot
psd_signal = np.abs(np.fft.fft(signal)) ** 2
plt.semilogy(1e-6 * np.fft.fftshift(driver.sampling.f_fft), np.fft.fftshift(psd_signal))
plt.xlabel('Frequency (MHz)')
plt.show()
# Disable laser
driver.stop_laser()
driver.close()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import initExample
import os
from lase.core import KClient
# Driver to use
from lase.drivers import Oscillo
# Modules to import
import numpy as np
import matplotlib.pyplot as plt
import time
# Connect to Lase
host = os.getenv('HOST','192.168.1.100')
client = KClient(host)
driver = Oscillo(client) # Replace with appropriate driver
# Enable laser
driver.start_laser()
# Set laser current
current = 30 # mA
driver.set_laser_current(current)
# Modulation on DAC
amp_mod = 0.2
freq_mod = 1e6
driver.dac[1, :] = amp_mod*np.sin(2 * np.pi * freq_mod * driver.sampling.t)
driver.set_dac()
# Signal on ADC
driver.get_adc()
signal = driver.adc[0, :]
# Plot
plt.plot(driver.sampling.t, signal)
plt.show()
# Plot
psd_signal = np.abs(np.fft.fft(signal)) ** 2
plt.semilogy(1e-6 * np.fft.fftshift(driver.sampling.f_fft), np.fft.fftshift(psd_signal))
plt.xlabel('Frequency (MHz)')
plt.show()
# Disable laser
driver.stop_laser()
driver.close()
| Python | 0.000001 |
d876ce9e7b50b6e5f1161a937097f00b6fafe09f | update test | myideas/core/tests/test_views.py | myideas/core/tests/test_views.py | from django.test import TestCase
from django.shortcuts import resolve_url as r
from django.contrib.auth import get_user_model
from myideas.core.forms import IdeasForm
from myideas.core.models import Ideas
class HomeTest(TestCase):
def setUp(self):
self.response = self.client.get(r('home'))
def test_get(self):
"""GET 'Home' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'Home' must use template index.html and base.html"""
self.assertTemplateUsed(self.response, 'index.html')
self.assertTemplateUsed(self.response, 'base.html')
def test_login_link(self):
"""base.html navbar must contains login page link"""
expected = 'href="{}"'.format(r('auth_login'))
self.assertContains(self.response, expected)
def test_register_link(self):
"""base.html navbar must contains register page link"""
expected = 'href="{}"'.format(r('registration_register'))
self.assertContains(self.response, expected)
def test_ideas_form_link(self):
"""base.html navbar contains ideas_form link"""
expected = 'href="{}"'.format(r('ideas_form'))
self.assertContains(self.response, expected)
class DetailsTest(TestCase):
def setUp(self):
user = get_user_model().objects.create(username='adminapp')
self.idea = Ideas.objects.create(user=user, title='test app')
self.response = self.client.get(r(self.idea.get_absolute_url()))
def test_get(self):
"""GET 'Ideas Details' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'Ideas Details' must use template ideas_details.html and base.html"""
self.assertTemplateUsed(self.response, 'ideas_details.html')
self.assertTemplateUsed(self.response, 'base.html')
class ProfileTest(TestCase):
def setUp(self):
user = get_user_model().objects.create(username='adminapp')
self.idea = Ideas.objects.create(user=user)
self.response = self.client.get(r('profile', self.idea.user))
def test_get(self):
"""GET 'User Profile' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'User Profile' must use template profile.html and base.html"""
self.assertTemplateUsed(self.response, 'profile.html')
self.assertTemplateUsed(self.response, 'base.html')
class IdeaFormTest(TestCase):
def setUp(self):
self.response = self.client.get(r('ideas_form'))
def test_get(self):
"""GET 'Ideas Form' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_has_form_on_context(self):
self.assertIsInstance(self.response.context['form'], IdeasForm)
def test_template(self):
"""'Ideas Form' must use template index.html and base.html"""
self.assertTemplateUsed(self.response, 'idea_form.html')
self.assertTemplateUsed(self.response, 'base.html')
class IdeasDetailNotFound(TestCase):
def setUp(self):
self.response = self.client.get(r('ideas_details.html', slug='not-found'))
def test_not_found(self):
self.assertEqual(404, self.response.status_code)
def test_template(self):
"""'page not found' must use template 404.html and base.html"""
self.assertTemplateUsed(self.response, '404.html')
self.assertTemplateUsed(self.response, 'base.html')
| from django.test import TestCase
from django.shortcuts import resolve_url as r
from django.contrib.auth import get_user_model
from myideas.core.forms import IdeasForm
from myideas.core.models import Ideas
class HomeTest(TestCase):
def setUp(self):
self.response = self.client.get(r('home'))
def test_get(self):
"""GET 'Home' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'Home' must use template index.html and base.html"""
self.assertTemplateUsed(self.response, 'index.html')
self.assertTemplateUsed(self.response, 'base.html')
def test_login_link(self):
"""base.html navbar must contains login page link"""
expected = 'href="{}"'.format(r('auth_login'))
self.assertContains(self.response, expected)
def test_register_link(self):
"""base.html navbar must contains register page link"""
expected = 'href="{}"'.format(r('registration_register'))
self.assertContains(self.response, expected)
def test_ideas_form_link(self):
"""base.html navbar contains ideas_form link"""
expected = 'href="{}"'.format(r('ideas_form'))
self.assertContains(self.response, expected)
class DetailsTest(TestCase):
def setUp(self):
user = get_user_model().objects.create(username='adminapp')
self.idea = Ideas.objects.create(user=user, title='test app')
self.response = self.client.get(r(self.idea.get_absolute_url()))
def test_get(self):
"""GET 'Ideas Details' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'Ideas Details' must use template ideas_details.html and base.html"""
self.assertTemplateUsed(self.response, 'ideas_details.html')
self.assertTemplateUsed(self.response, 'base.html')
class ProfileTest(TestCase):
def setUp(self):
user = get_user_model().objects.create(username='adminapp')
self.idea = Ideas.objects.create(user=user)
self.response = self.client.get(r('profile', self.idea.user))
def test_get(self):
"""GET 'User Profile' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_template(self):
"""'User Profile' must use template profile.html and base.html"""
self.assertTemplateUsed(self.response, 'profile.html')
self.assertTemplateUsed(self.response, 'base.html')
class IdeaFormTest(TestCase):
def setUp(self):
self.response = self.client.get(r('ideas_form'))
def test_get(self):
"""GET 'Ideas Form' must return status code 200"""
self.assertEqual(200, self.response.status_code)
def test_has_form_on_context(self):
self.assertIsInstance(self.response.context['form'], IdeasForm)
def test_template(self):
"""'Ideas Form' must use template index.html and base.html"""
self.assertTemplateUsed(self.response, 'idea_form.html')
self.assertTemplateUsed(self.response, 'base.html')
class IdeasDetailNotFound(TestCase):
def setUp(self):
self.response = self.client.get(r('ideas_details.html', slug='not-found'))
def test_not_found(self):
self.assertEqual(404, self.response.status_code)
def test_template(self):
"""'page not found' must use template 404.html and base.html"""
self.assertTemplateUsed(self.response, '404.html')
self.assertTemplateUsed(self.response, 'base.html') | Python | 0.000001 |
2bc474b83f0e3ad340127f626cfe6597cbd2ba4e | Allow for interactive inputs | cal_pipe/manual_flagging.py | cal_pipe/manual_flagging.py |
import sys
'''
Plot visibility data for each spw to allow for easy manual flags
'''
try:
vis = sys.argv[1]
field = sys.argv[2]
corrstring = sys.argv[3]
except IndexError:
vis = raw_input("MS Name? : ")
field = raw_input("Field Name/Number? : ")
corrstring = raw_input("Corrstring? : ")
tb.open(vis + '/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(0, len(freqs))
for spw in spws:
nchan = nchans[spw]
print "On " + str(spw+1) + " of " + str(len(freqs))
default('plotms')
vis = vis
xaxis = 'channel'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'channel'
yaxis = 'phase'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'time'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'time'
yaxis = 'phase'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'uvwave'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
|
import sys
'''
Plot visibility data for each spw to allow for easy manual flags
'''
vis = sys.argv[1]
field = sys.argv[2]
corrstring = sys.argv[3]
tb.open(vis + '/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(0, len(freqs))
for spw in spws:
nchan = nchans[spw]
print "On " + str(spw+1) + " of " + str(len(freqs))
default('plotms')
vis = vis
xaxis = 'channel'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'channel'
yaxis = 'phase'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'time'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'time'
yaxis = 'phase'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
default('plotms')
vis = vis
xaxis = 'uvwave'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = field
spw = str(spw)
correlation = corrstring
averagedata = True
avgchannel = str(nchan)
avgtime = '1e8s'
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
raw_input("Continue?")
| Python | 0.000002 |
6dbcc892f8b659a22a33fce3836cb082b64dd817 | load order | campos_event/__openerp__.py | campos_event/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of CampOS Event,
# an Odoo module.
#
# Copyright (c) 2015 Stein & Gabelgaard ApS
# http://www.steingabelgaard.dk
# Hans Henrik Gaelgaard
#
# CampOS Event is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# CampOS Event is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CampOS Event.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "CampOS Event",
'summary': """
Scout Camp Management Solution""",
# 'description': put the module description in README.rst
'author': "Hans Henrik Gabelgaard",
'website': "http://www.steingabelgaard.dk",
# Categories can be used to filter modules in modules listing
# Check http://goo.gl/0TfwzD for the full list
'category': 'Uncategorized',
'version': '0.1',
'license': 'AGPL-3',
# any module necessary for this one to work correctly
'depends': [
'base',
'mail',
'event',
'website',
'portal',
],
# always loaded
'data': [
'security/campos_event_security.xml',
'security/ir.model.access.csv',
'security/ir.rule.csv',
'data/campos.municipality.csv',
'data/campos.scout.org.csv',
'views/templates.xml',
'views/participant_view.xml',
'views/committee_view.xml',
'views/municipality_view.xml',
"views/scout_org_view.xml",
"views/res_partner_view.xml",
"views/job_view.xml",
"views/job_template.xml",
"views/mail_templates.xml",
"views/portal_menu.xml",
"views/res_users_view.xml",
'views/campos_menu.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of CampOS Event,
# an Odoo module.
#
# Copyright (c) 2015 Stein & Gabelgaard ApS
# http://www.steingabelgaard.dk
# Hans Henrik Gaelgaard
#
# CampOS Event is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# CampOS Event is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CampOS Event.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "CampOS Event",
'summary': """
Scout Camp Management Solution""",
# 'description': put the module description in README.rst
'author': "Hans Henrik Gabelgaard",
'website': "http://www.steingabelgaard.dk",
# Categories can be used to filter modules in modules listing
# Check http://goo.gl/0TfwzD for the full list
'category': 'Uncategorized',
'version': '0.1',
'license': 'AGPL-3',
# any module necessary for this one to work correctly
'depends': [
'base',
'mail',
'event',
'website',
'portal',
],
# always loaded
'data': [
'security/campos_event_security.xml',
'security/ir.model.access.csv',
'security/ir.rule.csv',
'data/campos.municipality.csv',
'data/campos.scout.org.csv',
'views/campos_menu.xml',
'views/templates.xml',
'views/participant_view.xml',
'views/committee_view.xml',
'views/municipality_view.xml',
"views/scout_org_view.xml",
"views/res_partner_view.xml",
"views/job_view.xml",
"views/job_template.xml",
"views/mail_templates.xml",
"views/portal_menu.xml",
"views/res_users_view.xml",
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| Python | 0.000001 |
1908abeb6aa0082fb49428185c340a7231cdd467 | fix typo in binaryoperator | emu/processes/wps_binaryoperator.py | emu/processes/wps_binaryoperator.py | from pywps import Process, LiteralInput, LiteralOutput
from pywps.app.Common import Metadata
import logging
logger = logging.getLogger("PYWPS")
class BinaryOperator(Process):
def __init__(self):
inputs = [
LiteralInput('inputa', 'Input 1', data_type='float',
abstract='Enter Input 1',
default="2.0"),
LiteralInput('inputb', 'Input 2', data_type='float',
abstract='Enter Input 2',
default="3.0"),
LiteralInput('operator', 'Operator', data_type='string',
abstract='Choose a binary Operator',
default='add',
allowed_values=['add', 'substract', 'divide', 'multiply'])]
outputs = [
LiteralOutput('output', 'Binary operator result',
data_type='float')]
super(BinaryOperator, self).__init__(
self._handler,
identifier='binaryoperatorfornumbers',
title='Binary Operator for Numbers',
abstract='Performs operation on two numbers and returns the answer.\
This example process is taken from Climate4Impact.',
metadata=[
Metadata('Birdhouse', 'http://bird-house.github.io/'),
Metadata('User Guide', 'http://emu.readthedocs.io/en/latest/'),
Metadata('Climate4Impact', 'https://dev.climate4impact.eu')],
version='1.0',
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
logger.info("run binary_operator")
operator = request.inputs['operator'][0].data
input_a = request.inputs['inputa'][0].data
input_b = request.inputs['inputb'][0].data
if operator == 'substract':
response.outputs['output'].data = input_a - input_b
elif operator == 'multiply':
response.outputs['output'].data = input_a * input_b
elif operator == 'divide':
response.outputs['output'].data = input_a / input_b
else:
response.outputs['output'].data = input_a + input_b
return response
| from pywps import Process, LiteralInput, LiteralOutput
from pywps.app.Common import Metadata
import logging
logger = logging.getLogger("PYWPS")
class BinaryOperator(Process):
def __init__(self):
inputs = [
LiteralInput('inputa', 'Input 1', data_type='float',
abstract='Enter Input 1',
default="2.0"),
LiteralInput('inputb', 'Input 2', data_type='float',
abstract='Enter Input 2',
default="3.0"),
LiteralInput('operator', 'Operator', data_type='string',
abstract='Choose a binary Operator',
default='add',
allowed_values=['add', 'substract', 'divide', 'multipy'])]
outputs = [
LiteralOutput('output', 'Binary operator result',
data_type='float')]
super(BinaryOperator, self).__init__(
self._handler,
identifier='binaryoperatorfornumbers',
title='Binary Operator for Numbers',
abstract='Performs operation on two numbers and returns the answer.\
This example process is taken from Climate4Impact.',
metadata=[
Metadata('Birdhouse', 'http://bird-house.github.io/'),
Metadata('User Guide', 'http://emu.readthedocs.io/en/latest/'),
Metadata('Climate4Impact', 'https://dev.climate4impact.eu')],
version='1.0',
inputs=inputs,
outputs=outputs,
store_supported=True,
status_supported=True
)
@staticmethod
def _handler(request, response):
logger.info("run binary_operator")
operator = request.inputs['operator'][0].data
input_a = request.inputs['inputa'][0].data
input_b = request.inputs['inputb'][0].data
if operator == 'substract':
response.outputs['output'].data = input_a - input_b
elif operator == 'multiply':
response.outputs['output'].data = input_a * input_b
elif operator == 'divide':
response.outputs['output'].data = input_a / input_b
else:
response.outputs['output'].data = input_a + input_b
return response
| Python | 0.0171 |
9911604243a1fb3612317fda91a61653ae396e20 | Fix script permissions | Source/Documentation/Runme.py | Source/Documentation/Runme.py | #!/usr/bin/python
import os
import sys
import shutil
import subprocess
def create_page(orig_path, page_name, page_header):
orig = open(orig_path)
dest = open("Temp/" + os.path.split(orig_path)[1] + ".txt", "w")
dest.write("/** @page " + page_name + " " + page_header + "\n")
dest.write(orig.read())
dest.write("\n*/")
orig.close()
dest.close()
beforeDir = os.getcwd()
scriptDir = os.path.dirname(sys.argv[0])
os.chdir(scriptDir)
# create Legal page
if os.path.isdir("Temp"):
shutil.rmtree("Temp")
os.mkdir("Temp")
create_page("../../NOTICE", "legal", "Legal Stuff & Acknowledgments")
create_page("../../ReleaseNotes.txt", "release_notes", "Release Notes")
errfile = "Temp/doxy_error"
subprocess.check_call(["doxygen", "Doxyfile"], stdout=open(os.devnull,"w"), stderr=open(errfile,"w"))
os.chdir(beforeDir) | #!/usr/bin/python
import os
import sys
import shutil
import subprocess
def create_page(orig_path, page_name, page_header):
orig = open(orig_path)
dest = open("Temp/" + os.path.split(orig_path)[1] + ".txt", "w")
dest.write("/** @page " + page_name + " " + page_header + "\n")
dest.write(orig.read())
dest.write("\n*/")
orig.close()
dest.close()
beforeDir = os.getcwd()
scriptDir = os.path.dirname(sys.argv[0])
os.chdir(scriptDir)
# create Legal page
if os.path.isdir("Temp"):
shutil.rmtree("Temp")
os.mkdir("Temp")
create_page("../../NOTICE", "legal", "Legal Stuff & Acknowledgments")
create_page("../../ReleaseNotes.txt", "release_notes", "Release Notes")
errfile = "Temp/doxy_error"
subprocess.check_call(["doxygen", "Doxyfile"], stdout=open(os.devnull,"w"), stderr=open(errfile,"w"))
os.chdir(beforeDir) | Python | 0.000001 |
00c07a76185aff4873abe3eb814b847b6d02f58f | fix date | 02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py | 02_BasicDataTypes/04_find-second-maximum-number-in-a-list.py | #!/usr/bin/python3.6
"""Jerod Gawne, 2017-09-28
Find the Second Largest Number
https://www.hackerrank.com/challenges/find-second-maximum-number-in-a-list/
Editorial:
- There are many ways to solve this problem.
This can be solved by maintaining two variables max and second_max.
Iterate through the list and find the maximum and store it.
Iterate again and find the next maximum value by having an if
condition that checks if it's not equal to first maximum.
Create a counter from the given array. Extract the keys, sort them
and print the second last element.
Transform the list to a set and then list again, removing all the
duplicates. Then sort the list and print the second last element.
"""
def main():
"""
Main/Tests
"""
input()
print(sorted(set(map(int, input().split())))[-2])
if __name__ == '__main__':
try:
main()
except Exception:
import sys
import traceback
print(traceback.print_exception(*sys.exc_info()))
| #!/usr/bin/python3.6
"""Jerod Gawne, 2017-09-12
Find the Second Largest Number
https://www.hackerrank.com/challenges/find-second-maximum-number-in-a-list/
Editorial:
- There are many ways to solve this problem.
This can be solved by maintaining two variables max and second_max.
Iterate through the list and find the maximum and store it.
Iterate again and find the next maximum value by having an if
condition that checks if it's not equal to first maximum.
Create a counter from the given array. Extract the keys, sort them
and print the second last element.
Transform the list to a set and then list again, removing all the
duplicates. Then sort the list and print the second last element.
"""
def main():
"""
Main/Tests
"""
input()
print(sorted(set(map(int, input().split())))[-2])
if __name__ == '__main__':
try:
main()
except Exception:
import sys
import traceback
print(traceback.print_exception(*sys.exc_info()))
| Python | 0.00188 |
0c305e8bd8624af2d2ce1ca292bb26095d0e1dcc | Fix __eq__ method on ExecuteContainer to compare only proper classes | cekit/descriptor/execute.py | cekit/descriptor/execute.py | import yaml
import cekit
from cekit.descriptor import Descriptor
execute_schemas = [yaml.safe_load("""
map:
name: {type: str}
script: {type: str}
user: {type: text}""")]
container_schemas = [yaml.safe_load("""
seq:
- {type: any}""")]
class Execute(Descriptor):
def __init__(self, descriptor, module_name):
self.schemas = execute_schemas
super(Execute, self).__init__(descriptor)
descriptor['directory'] = module_name
if 'user' not in descriptor:
descriptor['user'] = cekit.DEFAULT_USER
descriptor['module_name'] = module_name
if 'name' not in descriptor:
descriptor['name'] = "%s/%s" % (module_name,
descriptor['script'])
class ExecuteContainer(Descriptor):
"""Container holding Execute classes. I't responsible for correct
Execute Class merging and ordering"""
def __init__(self, descriptor, module_name):
self.schemas = container_schemas
super(ExecuteContainer, self).__init__(descriptor)
self.name = module_name
if not descriptor:
descriptor = [{'name': 'noop'}]
self._descriptor = [Execute(x, module_name) for x in descriptor]
def _get_real_executes(self):
return [x for x in self._descriptor if x['name'] != 'noop']
def __len__(self):
return len(self._get_real_executes())
def __iter__(self):
return iter(self._get_real_executes())
def merge(self, descriptor):
"""To merge modules in correct order we need to insert
new executes before the last module. This the raeson why noop
execut exists"""
prev_module = self._descriptor[-1]['module_name']
pos = 0
for executes in self._descriptor:
if executes['module_name'] == prev_module:
continue
pos += 1
for executes in reversed(list(descriptor)):
if executes not in self._descriptor:
self._descriptor.insert(pos, executes)
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
for i, execute in enumerate(self._descriptor):
if execute != other[i]:
return False
return True
| import yaml
import cekit
from cekit.descriptor import Descriptor
execute_schemas = [yaml.safe_load("""
map:
name: {type: str}
script: {type: str}
user: {type: text}""")]
container_schemas = [yaml.safe_load("""
seq:
- {type: any}""")]
class Execute(Descriptor):
def __init__(self, descriptor, module_name):
self.schemas = execute_schemas
super(Execute, self).__init__(descriptor)
descriptor['directory'] = module_name
if 'user' not in descriptor:
descriptor['user'] = cekit.DEFAULT_USER
descriptor['module_name'] = module_name
if 'name' not in descriptor:
descriptor['name'] = "%s/%s" % (module_name,
descriptor['script'])
class ExecuteContainer(Descriptor):
"""Container holding Execute classes. I't responsible for correct
Execute Class merging and ordering"""
def __init__(self, descriptor, module_name):
self.schemas = container_schemas
super(ExecuteContainer, self).__init__(descriptor)
self.name = module_name
if not descriptor:
descriptor = [{'name': 'noop'}]
self._descriptor = [Execute(x, module_name) for x in descriptor]
def _get_real_executes(self):
return [x for x in self._descriptor if x['name'] != 'noop']
def __len__(self):
return len(self._get_real_executes())
def __iter__(self):
return iter(self._get_real_executes())
def merge(self, descriptor):
"""To merge modules in correct order we need to insert
new executes before the last module. This the raeson why noop
execut exists"""
prev_module = self._descriptor[-1]['module_name']
pos = 0
for executes in self._descriptor:
if executes['module_name'] == prev_module:
continue
pos += 1
for executes in reversed(list(descriptor)):
if executes not in self._descriptor:
self._descriptor.insert(pos, executes)
def __eq__(self, other):
for i, execute in enumerate(self._descriptor):
if execute != other[i]:
return False
return True
| Python | 0 |
b0d88fccf51f240800fda462bddb9d934931c078 | Update tests.py | intermediate_words_search_python/tests.py | intermediate_words_search_python/tests.py | # imports from solution
from main import solution, LOCAL_DICTIONARY
from utils import show_path
from time import time
TEST_CASES = (
# start word, target word, minimal path length
( 'cat', 'dog', 4 ),
( 'cat', 'mistrial', 9 ),
( 'strong', 'weak', 7 ),
( 'hot', 'cold', 4 ),
( 'up', 'down', 5 ),
( 'left', 'right', 7 ),
( 'light', 'heavy', 10 ),
( 'computer', 'virus', 12 ),
( 'strike', 'freeze', 6 ),
( 'fan', 'for', 3 ),
( 'duck', 'dusty', 4 ),
( 'rue', 'be', 3 ),
( 'rue', 'defuse', 5 ),
( 'rue', 'bend', 5 ),
( 'zoologist', 'zoology', None ) # no path; these two words are disjoint
)
def tests2():
t0 = time()
opts = { 'search_method': 'A*', 'dictionary_filename': LOCAL_DICTIONARY }
for start_word,target_word,path_len in TEST_CASES:
path = solution(start_word, target_word, opts)
assert (len(path) if path else None) == path_len
return 'tests pass in {} seconds!'.format(time() - t0)
def tests():
for search_method in ('BFS', 'A*'):
opts = { 'search_method': search_method }
assert solution('cat', 'dog', opts) == ('cat', 'cot', 'dot', 'dog')
assert solution('cat', 'dot', opts) == ('cat', 'cot', 'dot')
assert solution('cat', 'cot', opts) == ('cat', 'cot')
assert solution('cat', 'cat', opts) == ('cat', )
assert solution('fan', 'for', opts) == ('fan', 'fin', 'fir', 'for')
assert solution('place', 'places', opts) == ('place', 'places')
assert solution('duck', 'dusty', opts) == ('duck', 'dusk', 'dust', 'dusty')
assert solution('duck', 'ducked', opts) is None
assert solution('rue', 'be', opts) == ('rue', 'run', 'runt', 'bunt', 'bent', 'beet', 'bee', 'be')
assert solution('rue', 'defuse', opts) == ('rue', 'ruse', 'reuse', 'refuse', 'defuse')
not_a_word_1 = 'NotAWord'
assert solution('rue', not_a_word_1, opts) is None
not_a_word_2 = 'plar'
assert solution(not_a_word_2, 'play', opts) == (not_a_word_2, 'play')
not_a_word_3 = 'blah'
assert solution(not_a_word_3, 'defuse', opts) is None
return 'tests pass!'
if __name__ == '__main__':
# print tests()
print tests2()
| # imports from solution
from main import solution
from utils import show_path
def tests():
for search_method in ('BFS', 'A*'):
opts = { 'search_method': search_method }
assert solution('cat', 'dog', opts) == ('cat', 'cot', 'dot', 'dog')
assert solution('cat', 'dot', opts) == ('cat', 'cot', 'dot')
assert solution('cat', 'cot', opts) == ('cat', 'cot')
assert solution('cat', 'cat', opts) == ('cat', )
assert solution('fan', 'for', opts) == ('fan', 'fin', 'fir', 'for')
assert solution('place', 'places', opts) == ('place', 'places')
assert solution('duck', 'dusty', opts) == ('duck', 'dusk', 'dust', 'dusty')
assert solution('duck', 'ducked', opts) is None
assert solution('rue', 'be', opts) == ('rue', 'run', 'runt', 'bunt', 'bent', 'beet', 'bee', 'be')
assert solution('rue', 'defuse', opts) == ('rue', 'ruse', 'reuse', 'refuse', 'defuse')
not_a_word_1 = 'NotAWord'
assert solution('rue', not_a_word_1, opts) is None
not_a_word_2 = 'plar'
assert solution(not_a_word_2, 'play', opts) == (not_a_word_2, 'play')
not_a_word_3 = 'blah'
assert solution(not_a_word_3, 'defuse', opts) is None
return 'tests pass!'
if __name__ == '__main__':
print tests()
| Python | 0.000001 |
446d578ec9765a22478abecc7df526f666f5e57c | remove 'devel' in filename and add 'runtime' | cerbero/packages/android.py | cerbero/packages/android.py | # cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import tarfile
import zipfile
from cerbero.packages import PackageType
from cerbero.packages.disttarball import DistTarball
from cerbero.errors import UsageError
class AndroidPackager(DistTarball):
''' Creates a distribution tarball for Android '''
def __init__(self, config, package, store):
DistTarball.__init__(self, config, package, store)
def _create_tarball(self, output_dir, package_type, files, force,
package_prefix):
filenames = []
# Create the bz2 file first
filename = os.path.join(output_dir, self._get_name(package_type))
if os.path.exists(filename):
if force:
os.remove(filename)
else:
raise UsageError("File %s already exists" % filename)
tar = tarfile.open(filename, "w:bz2")
for f in files:
filepath = os.path.join(self.prefix, f)
tar.add(filepath, os.path.join(package_prefix, f))
tar.close()
filenames.append(filename)
# Create the zip file for windows
filename = os.path.join(output_dir, self._get_name(package_type,
ext='zip'))
if os.path.exists(filename):
if force:
os.remove(filename)
else:
raise UsageError("File %s already exists" % filename)
zipf = zipfile.ZipFile(filename, 'w')
for f in files:
filepath = os.path.join(self.prefix, f)
zipf.write(filepath, os.path.join(package_prefix, f),
compress_type=zipfile.ZIP_DEFLATED)
zipf.close()
filenames.append(filename)
return ' '.join(filenames)
def _get_name(self, package_type, ext='tar.bz2'):
if package_type == PackageType.DEVEL:
package_type = ''
elif package_type == PackageType.RUNTIME:
package_type = '-runtime'
return "%s%s-%s-%s-%s%s.%s" % (self.package_prefix, self.package.name,
self.config.target_platform, self.config.target_arch,
self.package.version, package_type, ext)
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
register_packager(Distro.ANDROID, AndroidPackager)
| # cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import tarfile
import zipfile
from cerbero.packages.disttarball import DistTarball
from cerbero.errors import UsageError
class AndroidPackager(DistTarball):
''' Creates a distribution tarball for Android '''
def __init__(self, config, package, store):
DistTarball.__init__(self, config, package, store)
def _create_tarball(self, output_dir, package_type, files, force,
package_prefix):
filenames = []
# Create the bz2 file first
filename = os.path.join(output_dir, self._get_name(package_type))
if os.path.exists(filename):
if force:
os.remove(filename)
else:
raise UsageError("File %s already exists" % filename)
tar = tarfile.open(filename, "w:bz2")
for f in files:
filepath = os.path.join(self.prefix, f)
tar.add(filepath, os.path.join(package_prefix, f))
tar.close()
filenames.append(filename)
# Create the zip file for windows
filename = os.path.join(output_dir, self._get_name(package_type,
ext='zip'))
if os.path.exists(filename):
if force:
os.remove(filename)
else:
raise UsageError("File %s already exists" % filename)
zipf = zipfile.ZipFile(filename, 'w')
for f in files:
filepath = os.path.join(self.prefix, f)
zipf.write(filepath, os.path.join(package_prefix, f),
compress_type=zipfile.ZIP_DEFLATED)
zipf.close()
filenames.append(filename)
return ' '.join(filenames)
def register():
from cerbero.packages.packager import register_packager
from cerbero.config import Distro
register_packager(Distro.ANDROID, AndroidPackager)
| Python | 0 |
22f7c82d63fb7ccc9226d734bdacd4a47e96da89 | Fix the `clear_app` command for Django 1.8 | ixdjango/management/commands/clear_app.py | ixdjango/management/commands/clear_app.py | """
Management command to clear specified app's models of data.
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import connection, transaction
# pylint:disable=no-name-in-module
from django.db.models import get_app, get_model, get_models
# pylint:disable=protected-access
real_print = print # pylint:disable=invalid-name
def print(*args, **kwargs): # pylint:disable=redefined-builtin
"""
Only print if required
"""
if kwargs.pop('verbosity') >= 1:
real_print(*args, **kwargs)
class Command(BaseCommand):
"""
A command to clear app data.
"""
help = ('Cleans the specified applications\' tables to a pristine state.')
args = '<app_label> <app_label> ... '
def handle(self, *targets, **options):
verbosity = int(options['verbosity'])
models = []
for target in targets:
target = target.split('.')
try:
app, = target
model = None
except ValueError:
app, model = target
if model:
models.append(get_model(app, model))
else:
app_models = [
model
for model
in get_models(get_app(app), include_auto_created=True)
if model._meta.managed
]
models += app_models
print("Found %d model(s) for %s" % (len(app_models), app),
verbosity=verbosity)
with transaction.atomic():
for model in models:
print("Clearing %s table %s" % (
model, model._meta.db_table),
verbosity=verbosity)
cursor = connection.cursor()
cursor.execute('TRUNCATE TABLE {} CASCADE'.format(
model._meta.db_table))
sql = connection.ops.sequence_reset_sql(no_style(), [model])
for cmd in sql:
connection.cursor().execute(cmd)
print("Cleared %d models" % len(models), verbosity=verbosity)
| """
Management command to clear specified app's models of data.
.. moduleauthor:: Infoxchange Development Team <development@infoxchange.net.au>
"""
from __future__ import print_function
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import connection, transaction
# pylint:disable=no-name-in-module
from django.db.models import get_app, get_model, get_models
# pylint:disable=protected-access
real_print = print # pylint:disable=invalid-name
def print(*args, **kwargs): # pylint:disable=redefined-builtin
"""
Only print if required
"""
if kwargs.pop('verbosity') >= 1:
real_print(*args, **kwargs)
class Command(BaseCommand):
"""
A command to clear app data.
"""
help = ('Cleans the specified applications\' tables to a pristine state.')
def handle(self, *targets, **options):
verbosity = int(options['verbosity'])
models = []
for target in targets:
target = target.split('.')
try:
app, = target
model = None
except ValueError:
app, model = target
if model:
models.append(get_model(app, model))
else:
app_models = [
model
for model
in get_models(get_app(app), include_auto_created=True)
if model._meta.managed
]
models += app_models
print("Found %d model(s) for %s" % (len(app_models), app),
verbosity=verbosity)
with transaction.atomic():
for model in models:
print("Clearing %s table %s" % (
model, model._meta.db_table),
verbosity=verbosity)
cursor = connection.cursor()
cursor.execute('TRUNCATE TABLE {} CASCADE'.format(
model._meta.db_table))
sql = connection.ops.sequence_reset_sql(no_style(), [model])
for cmd in sql:
connection.cursor().execute(cmd)
print("Cleared %d models" % len(models), verbosity=verbosity)
| Python | 0.00069 |
6c4219e92611ed0f6721e86ca3d24a28c30825c2 | add 15 min total | cgi-bin/precip/snetRates.py | cgi-bin/precip/snetRates.py | #!/mesonet/python/bin/python
import mx.DateTime, cgi, sys
from pyIEM import iemdb
i = iemdb.iemdb()
mydb = i['snet']
def diff(nowVal, pastVal, mulli):
if (nowVal < 0 or pastVal < 0): return "%5s," % ("M")
differ = nowVal - pastVal
if differ < 0: return "%5s," % ("M")
return "%5.2f," % (differ * mulli)
def Main():
form = cgi.FormContent()
year = form["year"][0]
month = form["month"][0]
day = form["day"][0]
station = form["station"][0][:5]
s = mx.DateTime.DateTime(int(year), int(month), int(day))
e = s + mx.DateTime.RelativeDateTime(days=+1)
interval = mx.DateTime.RelativeDateTime(minutes=+1)
print 'Content-type: text/plain\n\n'
print "SID , DATE ,TIME ,PCOUNT,P15MIN,60min ,30min ,20min ,15min ,10min , 5min , 1min ,"
rs = mydb.query("SELECT station, valid, pday from t%s WHERE \
station = '%s' and date(valid) = '%s' ORDER by valid ASC" \
% (s.strftime("%Y_%m"), station, s.strftime("%Y-%m-%d") ) ).dictresult()
pcpn = [-1]*(24*60)
if (len(rs) == 0):
print 'NO RESULTS FOUND FOR THIS DATE!'
sys.exit(0)
lminutes = 0
lval = 0
for i in range(len(rs)):
ts = mx.DateTime.strptime(rs[i]['valid'][:16], "%Y-%m-%d %H:%M")
minutes = int((ts - s).minutes)
val = float(rs[i]['pday'])
pcpn[minutes] = val
if ((val - lval) < 0.02):
for b in range(lminutes, minutes):
pcpn[b] = val
lminutes = minutes
lval = val
for i in range(len(pcpn)):
ts = s + (interval * i)
print "%s,%s," % (rs[0]['station'], ts.strftime("%Y-%m-%d,%H:%M") ),
if (pcpn[i] < 0):
print "%5s," % ("M"),
else:
print "%5.2f," % (pcpn[i],),
if (i >= 15):
print diff(pcpn[i], pcpn[i-15], 1),
else:
print "%5s," % (" "),
if (i >= 60):
print diff(pcpn[i], pcpn[i-60], 1),
else:
print "%5s," % (" "),
if (i >= 30):
print diff(pcpn[i], pcpn[i-30], 2),
else:
print "%5s," % (" "),
if (i >= 20):
print diff(pcpn[i], pcpn[i-20], 3),
else:
print "%5s," % (" "),
if (i >= 15):
print diff(pcpn[i], pcpn[i-15], 4),
else:
print "%5s," % (" "),
if (i >= 10):
print diff(pcpn[i], pcpn[i-10], 6),
else:
print "%5s," % (" "),
if (i >= 5):
print diff(pcpn[i], pcpn[i-5], 12),
else:
print "%5s," % (" "),
if (i >= 1):
print diff(pcpn[i], pcpn[i-1], 60),
else:
print "%5s," % (" "),
print
Main()
| #!/mesonet/python/bin/python
import mx.DateTime, cgi, sys
from pyIEM import iemdb
i = iemdb.iemdb()
mydb = i['snet']
def diff(nowVal, pastVal, mulli):
if (nowVal < 0 or pastVal < 0): return "%5s," % ("M")
differ = nowVal - pastVal
if differ < 0: return "%5s," % ("M")
return "%5.2f," % (differ * mulli)
def Main():
form = cgi.FormContent()
year = form["year"][0]
month = form["month"][0]
day = form["day"][0]
station = form["station"][0][:5]
s = mx.DateTime.DateTime(int(year), int(month), int(day))
e = s + mx.DateTime.RelativeDateTime(days=+1)
interval = mx.DateTime.RelativeDateTime(minutes=+1)
print 'Content-type: text/plain\n\n'
print "SID , DATE ,TIME ,PCOUNT,60min ,30min ,20min ,15min ,10min , 5min , 1min ,"
rs = mydb.query("SELECT station, valid, pday from t%s WHERE \
station = '%s' and date(valid) = '%s' ORDER by valid ASC" \
% (s.strftime("%Y_%m"), station, s.strftime("%Y-%m-%d") ) ).dictresult()
pcpn = [-1]*(24*60)
if (len(rs) == 0):
print 'NO RESULTS FOUND FOR THIS DATE!'
sys.exit(0)
lminutes = 0
lval = 0
for i in range(len(rs)):
ts = mx.DateTime.strptime(rs[i]['valid'][:16], "%Y-%m-%d %H:%M")
minutes = int((ts - s).minutes)
val = float(rs[i]['pday'])
pcpn[minutes] = val
if ((val - lval) < 0.02):
for b in range(lminutes, minutes):
pcpn[b] = val
lminutes = minutes
lval = val
for i in range(len(pcpn)):
ts = s + (interval * i)
print "%s,%s," % (rs[0]['station'], ts.strftime("%Y-%m-%d,%H:%M") ),
if (pcpn[i] < 0):
print "%5s," % ("M"),
else:
print "%5.2f," % (pcpn[i],),
if (i >= 60):
print diff(pcpn[i], pcpn[i-60], 1),
else:
print "%5s," % (" "),
if (i >= 30):
print diff(pcpn[i], pcpn[i-30], 2),
else:
print "%5s," % (" "),
if (i >= 20):
print diff(pcpn[i], pcpn[i-20], 3),
else:
print "%5s," % (" "),
if (i >= 15):
print diff(pcpn[i], pcpn[i-15], 4),
else:
print "%5s," % (" "),
if (i >= 10):
print diff(pcpn[i], pcpn[i-10], 6),
else:
print "%5s," % (" "),
if (i >= 5):
print diff(pcpn[i], pcpn[i-5], 12),
else:
print "%5s," % (" "),
if (i >= 1):
print diff(pcpn[i], pcpn[i-1], 60),
else:
print "%5s," % (" "),
print
Main()
| Python | 0.000229 |
32e83559e00b7d5a363585d599cd087af854c445 | Support custom initializer in links.CRF1d | chainer/links/loss/crf1d.py | chainer/links/loss/crf1d.py | from chainer.functions.loss import crf1d
from chainer import link
from chainer import variable
class CRF1d(link.Link):
"""Linear-chain conditional random field loss layer.
This link wraps the :func:`~chainer.functions.crf1d` function.
It holds a transition cost matrix as a parameter.
Args:
n_label (int): Number of labels.
.. seealso:: :func:`~chainer.functions.crf1d` for more detail.
Attributes:
cost (~chainer.Variable): Transition cost parameter.
"""
def __init__(self, n_label, initialW=0):
super(CRF1d, self).__init__()
with self.init_scope():
self.cost = variable.Parameter(initializer=initialW,
shape=(n_label, n_label))
def forward(self, xs, ys, reduce='mean'):
return crf1d.crf1d(self.cost, xs, ys, reduce)
def argmax(self, xs):
"""Computes a state that maximizes a joint probability.
Args:
xs (list of Variable): Input vector for each label.
Returns:
tuple: A tuple of :class:`~chainer.Variable` representing each
log-likelihood and a list representing the argmax path.
.. seealso:: See :func:`~chainer.frunctions.crf1d_argmax` for more
detail.
"""
return crf1d.argmax_crf1d(self.cost, xs)
| from chainer.functions.loss import crf1d
from chainer import link
from chainer import variable
class CRF1d(link.Link):
"""Linear-chain conditional random field loss layer.
This link wraps the :func:`~chainer.functions.crf1d` function.
It holds a transition cost matrix as a parameter.
Args:
n_label (int): Number of labels.
.. seealso:: :func:`~chainer.functions.crf1d` for more detail.
Attributes:
cost (~chainer.Variable): Transition cost parameter.
"""
def __init__(self, n_label):
super(CRF1d, self).__init__()
with self.init_scope():
self.cost = variable.Parameter(0, (n_label, n_label))
def forward(self, xs, ys, reduce='mean'):
return crf1d.crf1d(self.cost, xs, ys, reduce)
def argmax(self, xs):
"""Computes a state that maximizes a joint probability.
Args:
xs (list of Variable): Input vector for each label.
Returns:
tuple: A tuple of :class:`~chainer.Variable` representing each
log-likelihood and a list representing the argmax path.
.. seealso:: See :func:`~chainer.frunctions.crf1d_argmax` for more
detail.
"""
return crf1d.argmax_crf1d(self.cost, xs)
| Python | 0 |
8214d516b3feba92ab3ad3b1f2fa1cf253e83012 | Remove use of deprecated `scan_plugins` method | pyexcel/internal/__init__.py | pyexcel/internal/__init__.py | """
pyexcel.internal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Pyexcel internals that subjected to change
:copyright: (c) 2015-2017 by Onni Software Ltd.
:license: New BSD License
"""
from lml.loader import scan_plugins
from pyexcel.internal.plugins import PARSER, RENDERER # noqa
from pyexcel.internal.source_plugin import SOURCE # noqa
from pyexcel.internal.generators import SheetStream, BookStream # noqa
BLACK_LIST = [
"pyexcel_io",
"pyexcel_webio",
"pyexcel_xlsx",
"pyexcel_xls",
"pyexcel_ods3",
"pyexcel_ods",
"pyexcel_odsr",
"pyexcel_xlsxw",
]
WHITE_LIST = [
"pyexcel.plugins.parsers",
"pyexcel.plugins.renderers",
"pyexcel.plugins.sources",
]
scan_plugins_regex("^pyexcel_.+$", "pyexcel", BLACK_LIST, WHITE_LIST)
| """
pyexcel.internal
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Pyexcel internals that subjected to change
:copyright: (c) 2015-2017 by Onni Software Ltd.
:license: New BSD License
"""
from lml.loader import scan_plugins
from pyexcel.internal.plugins import PARSER, RENDERER # noqa
from pyexcel.internal.source_plugin import SOURCE # noqa
from pyexcel.internal.generators import SheetStream, BookStream # noqa
BLACK_LIST = [
"pyexcel_io",
"pyexcel_webio",
"pyexcel_xlsx",
"pyexcel_xls",
"pyexcel_ods3",
"pyexcel_ods",
"pyexcel_odsr",
"pyexcel_xlsxw",
]
WHITE_LIST = [
"pyexcel.plugins.parsers",
"pyexcel.plugins.renderers",
"pyexcel.plugins.sources",
]
scan_plugins("pyexcel_", "pyexcel", BLACK_LIST, WHITE_LIST)
| Python | 0.000004 |
708e105713d7fd480b4b45d3ef31a46e35e63a8e | annotate calculate_tx_fee. | pypeerassets/transactions.py | pypeerassets/transactions.py |
'''transaction assembly/dissasembly'''
from time import time
from math import ceil
from btcpy.structs.address import Address
from btcpy.structs.transaction import TxOut, TxIn, Sequence, Locktime, MutableTransaction
from btcpy.structs.script import StackData, ScriptSig, NulldataScript, ScriptSig, ScriptPubKey
from btcpy.structs.script import P2pkhScript, MultisigScript, P2shScript
from .networks import query
def calculate_tx_fee(tx_size: int) -> float:
'''return tx fee from tx size in bytes'''
min_fee = 0.01 # minimum
return ceil(tx_size / 1000) * min_fee
def nulldata_script(data: bytes):
'''create nulldata (OP_return) script'''
stack = StackData.from_bytes(data)
return NulldataScript(stack)
def p2pkh_script(address: str):
'''create pay-to-key-hash (P2PKH) script'''
addr = Address.from_string(address)
return P2pkhScript(addr)
def tx_output(value: float, seq: int, script: ScriptSig):
'''create TxOut object'''
return TxOut(int(value * 1000000), seq, script)
def make_raw_transaction(inputs: list, outputs: list, locktime=Locktime(0),
timestamp: int=int(time()), version=1):
'''create raw transaction'''
return MutableTransaction(version, timestamp, inputs, outputs, locktime)
def find_parent_outputs(provider, utxo: TxIn):
'''due to design of the btcpy library, TxIn object must be converted to TxOut object before signing'''
index = utxo.txout # utxo index
return TxOut.from_json(provider.getrawtransaction(utxo.txid)['vout'][index])
|
'''transaction assembly/dissasembly'''
from time import time
from math import ceil
from btcpy.structs.address import Address
from btcpy.structs.transaction import TxOut, TxIn, Sequence, Locktime, MutableTransaction
from btcpy.structs.script import StackData, ScriptSig, NulldataScript, ScriptSig, ScriptPubKey
from btcpy.structs.script import P2pkhScript, MultisigScript, P2shScript
from .networks import query
def calculate_tx_fee(tx_size: int):
'''return tx fee from tx size in bytes'''
min_fee = 0.01 # minimum
return ceil(tx_size / 1000) * min_fee
def nulldata_script(data: bytes):
'''create nulldata (OP_return) script'''
stack = StackData.from_bytes(data)
return NulldataScript(stack)
def p2pkh_script(address: str):
'''create pay-to-key-hash (P2PKH) script'''
addr = Address.from_string(address)
return P2pkhScript(addr)
def tx_output(value: float, seq: int, script: ScriptSig):
'''create TxOut object'''
return TxOut(int(value * 1000000), seq, script)
def make_raw_transaction(inputs: list, outputs: list, locktime=Locktime(0),
timestamp: int=int(time()), version=1):
'''create raw transaction'''
return MutableTransaction(version, timestamp, inputs, outputs, locktime)
def find_parent_outputs(provider, utxo: TxIn):
'''due to design of the btcpy library, TxIn object must be converted to TxOut object before signing'''
index = utxo.txout # utxo index
return TxOut.from_json(provider.getrawtransaction(utxo.txid)['vout'][index])
| Python | 0.000001 |
56e4c14ea6e2266bb8fa6f25ef1c0a3b2123f5ad | fix py3k | pystacia/image/_impl/blur.py | pystacia/image/_impl/blur.py | # coding: utf-8
# pystacia/image/_impl/blur.py
# Copyright (C) 2011-2012 by Paweł Piotr Przeradowski
# This module is part of Pystacia and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from future_builtins import zip
def _make_radius_strength_bias(c_name, names, order=None):
def function(image, *args):
kwargs = dict(zip(names, args))
if kwargs['strength'] is None:
kwargs['strength'] = kwargs['radius']
if 'bias' in kwargs and kwargs['bias'] is None:
kwargs['bias'] = 0
order_ = order or names
values = [kwargs[k] for k in order_]
c_call(image, c_name, *values)
return function
blur = _make_radius_strength_bias('blur', ['radius', 'strength'])
gaussian_blur = _make_radius_strength_bias(
'gaussian_blur', ['radius', 'strength', 'bias'])
motion_blur = _make_radius_strength_bias(
'motion_blur', ['radius', 'angle', 'strength', 'bias'],
['radius', 'strength', 'angle', 'bias'])
adaptive_blur = _make_radius_strength_bias(
'adaptive_blur', ['radius', 'strength', 'bias'])
sharpen = _make_radius_strength_bias(
'sharpen', ['radius', 'strength', 'bias'])
adaptive_sharpen = _make_radius_strength_bias(
'adaptive_sharpen', ['radius', 'strength', 'bias'])
detect_edges = _make_radius_strength_bias('edge', ['radius', 'strength'])
#TODO: moving center here
def radial_blur(image, angle):
"""Performs radial blur.
:param angle: Blur angle in degrees
:type angle: ``float``
Radial blurs image within given angle.
This method can be chained.
"""
c_call(image, 'radial_blur', angle)
def denoise(image):
"""Attempt to remove noise preserving edges.
Applies a digital filter that improves the quality of a
noisy image.
This method can be chained.
"""
c_call(image, 'enhance')
def despeckle(image):
"""Attempt to remove speckle preserving edges.
Resulting image almost solid color areas are smoothed preserving
edges.
This method can be chained.
"""
c_call(image, 'despeckle')
emboss = _make_radius_strength_bias('emboss', ['radius', 'strength'])
from pystacia.api.func import c_call
| # coding: utf-8
# pystacia/image/_impl/blur.py
# Copyright (C) 2011-2012 by Paweł Piotr Przeradowski
# This module is part of Pystacia and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
def _make_radius_strength_bias(c_name, names, order=None):
def function(image, *args):
kwargs = dict(zip(names, args))
if kwargs['strength'] is None:
kwargs['strength'] = kwargs['radius']
if 'bias' in kwargs and kwargs['bias'] is None:
kwargs['bias'] = 0
order_ = order or names
values = [kwargs[k] for k in order_]
c_call(image, c_name, *values)
return function
blur = _make_radius_strength_bias('blur', ['radius', 'strength'])
gaussian_blur = _make_radius_strength_bias(
'gaussian_blur', ['radius', 'strength', 'bias'])
motion_blur = _make_radius_strength_bias(
'motion_blur', ['radius', 'angle', 'strength', 'bias'],
['radius', 'strength', 'angle', 'bias'])
adaptive_blur = _make_radius_strength_bias(
'adaptive_blur', ['radius', 'strength', 'bias'])
sharpen = _make_radius_strength_bias(
'sharpen', ['radius', 'strength', 'bias'])
adaptive_sharpen = _make_radius_strength_bias(
'adaptive_sharpen', ['radius', 'strength', 'bias'])
detect_edges = _make_radius_strength_bias('edge', ['radius', 'strength'])
#TODO: moving center here
def radial_blur(image, angle):
"""Performs radial blur.
:param angle: Blur angle in degrees
:type angle: ``float``
Radial blurs image within given angle.
This method can be chained.
"""
c_call(image, 'radial_blur', angle)
def denoise(image):
"""Attempt to remove noise preserving edges.
Applies a digital filter that improves the quality of a
noisy image.
This method can be chained.
"""
c_call(image, 'enhance')
def despeckle(image):
"""Attempt to remove speckle preserving edges.
Resulting image almost solid color areas are smoothed preserving
edges.
This method can be chained.
"""
c_call(image, 'despeckle')
emboss = _make_radius_strength_bias('emboss', ['radius', 'strength'])
from pystacia.api.func import c_call
| Python | 0.000002 |
27273335422781dcee950ee081ed13e53816e6d6 | Bump version | pytablewriter/__version__.py | pytablewriter/__version__.py | __author__ = "Tsuyoshi Hombashi"
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
__version__ = "0.63.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| __author__ = "Tsuyoshi Hombashi"
__copyright__ = f"Copyright 2016, {__author__}"
__license__ = "MIT License"
__version__ = "0.62.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| Python | 0 |
349f975e257192458b1944753d4f609869b9e8d6 | use yield_fixture to support pytest on py2.7 | python-cim/tests/fixtures.py | python-cim/tests/fixtures.py | import os
import pytest
import cim
import cim.objects
@pytest.fixture
def repopath():
"""
Returns:
str: path to the repos/win7/deleted-instance repository
"""
cd = os.path.dirname(os.path.abspath(__file__))
return os.path.join(cd, 'repos', 'win7', 'deleted-instance')
@pytest.fixture
def repo():
"""
Returns:
cim.CIM: repos/win7/deleted-instance repository
"""
return cim.CIM(cim.CIM_TYPE_WIN7, repopath())
@pytest.yield_fixture
def root():
r = repo()
with cim.objects.Namespace(r, cim.objects.ROOT_NAMESPACE_NAME) as ns:
yield ns
| import os
import pytest
import cim
import cim.objects
@pytest.fixture
def repopath():
"""
Returns:
str: path to the repos/win7/deleted-instance repository
"""
cd = os.path.dirname(os.path.abspath(__file__))
return os.path.join(cd, 'repos', 'win7', 'deleted-instance')
@pytest.fixture
def repo():
"""
Returns:
cim.CIM: repos/win7/deleted-instance repository
"""
return cim.CIM(cim.CIM_TYPE_WIN7, repopath())
@pytest.fixture
def root():
r = repo()
with cim.objects.Namespace(r, cim.objects.ROOT_NAMESPACE_NAME) as ns:
yield ns
| Python | 0 |
fe4c66b2e50035ab2701923d6a2cd0cb82e63780 | Fix call mkl gemm in mkldnn.py (#7007) | python/tvm/contrib/mkldnn.py | python/tvm/contrib/mkldnn.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""External function interface to BLAS libraries."""
import tvm
from tvm import te
def matmul(lhs, rhs, transa=False, transb=False, **kwargs):
"""Create an extern op that compute matrix mult of A and rhs with CrhsLAS
This function serves as an example on how to call external libraries.
Parameters
----------
lhs: Tensor
The left matrix operand
rhs: Tensor
The right matrix operand
transa: bool
Whether transpose lhs
transb: bool
Whether transpose rhs
Returns
-------
C: Tensor
The result tensor.
"""
n = lhs.shape[1] if transa else lhs.shape[0]
m = rhs.shape[0] if transb else rhs.shape[1]
return te.extern(
(n, m),
[lhs, rhs],
lambda ins, outs: tvm.tir.call_packed(
"tvm.contrib.mkldnn.matmul", ins[0], ins[1], outs[0], transa, transb
),
name="C",
**kwargs,
)
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""External function interface to BLAS libraries."""
import tvm
from tvm import te
def matmul(lhs, rhs, transa=False, transb=False, **kwargs):
"""Create an extern op that compute matrix mult of A and rhs with CrhsLAS
This function serves as an example on how to call external libraries.
Parameters
----------
lhs: Tensor
The left matrix operand
rhs: Tensor
The right matrix operand
transa: bool
Whether transpose lhs
transb: bool
Whether transpose rhs
Returns
-------
C: Tensor
The result tensor.
"""
n = lhs.shape[1] if transa else lhs.shape[0]
m = rhs.shape[0] if transb else rhs.shape[1]
return te.extern(
(n, m),
[lhs, rhs],
lambda ins, outs: tvm.tir.call_packed(
"tvm.contrib.mkl.matmul", ins[0], ins[1], outs[0], transa, transb
),
name="C",
**kwargs,
)
| Python | 0 |
a2097bf7c3103ec6e0482bb34f3b33c753ea7889 | Enable QtNetwork test for PySide | qtpy/tests/test_qtnetwork.py | qtpy/tests/test_qtnetwork.py | from __future__ import absolute_import
import pytest
from qtpy import PYSIDE, PYSIDE2, QtNetwork
def test_qtnetwork():
"""Test the qtpy.QtNetwork namespace"""
assert QtNetwork.QAbstractNetworkCache is not None
assert QtNetwork.QNetworkCacheMetaData is not None
if not PYSIDE and not PYSIDE2:
assert QtNetwork.QHttpMultiPart is not None
assert QtNetwork.QHttpPart is not None
assert QtNetwork.QNetworkAccessManager is not None
assert QtNetwork.QNetworkCookie is not None
assert QtNetwork.QNetworkCookieJar is not None
assert QtNetwork.QNetworkDiskCache is not None
assert QtNetwork.QNetworkReply is not None
assert QtNetwork.QNetworkRequest is not None
assert QtNetwork.QNetworkConfigurationManager is not None
assert QtNetwork.QNetworkConfiguration is not None
assert QtNetwork.QNetworkSession is not None
assert QtNetwork.QAuthenticator is not None
assert QtNetwork.QHostAddress is not None
assert QtNetwork.QHostInfo is not None
assert QtNetwork.QNetworkAddressEntry is not None
assert QtNetwork.QNetworkInterface is not None
assert QtNetwork.QNetworkProxy is not None
assert QtNetwork.QNetworkProxyFactory is not None
assert QtNetwork.QNetworkProxyQuery is not None
assert QtNetwork.QAbstractSocket is not None
assert QtNetwork.QLocalServer is not None
assert QtNetwork.QLocalSocket is not None
assert QtNetwork.QTcpServer is not None
assert QtNetwork.QTcpSocket is not None
assert QtNetwork.QUdpSocket is not None
if not PYSIDE:
assert QtNetwork.QSslCertificate is not None
assert QtNetwork.QSslCipher is not None
assert QtNetwork.QSslConfiguration is not None
assert QtNetwork.QSslError is not None
assert QtNetwork.QSslKey is not None
assert QtNetwork.QSslSocket is not None
| from __future__ import absolute_import
import pytest
from qtpy import PYSIDE, PYSIDE2, QtNetwork
@pytest.mark.skipif(PYSIDE2 or PYSIDE, reason="It fails on PySide/PySide2")
def test_qtnetwork():
"""Test the qtpy.QtNetwork namespace"""
assert QtNetwork.QAbstractNetworkCache is not None
assert QtNetwork.QNetworkCacheMetaData is not None
assert QtNetwork.QHttpMultiPart is not None
assert QtNetwork.QHttpPart is not None
assert QtNetwork.QNetworkAccessManager is not None
assert QtNetwork.QNetworkCookie is not None
assert QtNetwork.QNetworkCookieJar is not None
assert QtNetwork.QNetworkDiskCache is not None
assert QtNetwork.QNetworkReply is not None
assert QtNetwork.QNetworkRequest is not None
assert QtNetwork.QNetworkConfigurationManager is not None
assert QtNetwork.QNetworkConfiguration is not None
assert QtNetwork.QNetworkSession is not None
assert QtNetwork.QAuthenticator is not None
assert QtNetwork.QHostAddress is not None
assert QtNetwork.QHostInfo is not None
assert QtNetwork.QNetworkAddressEntry is not None
assert QtNetwork.QNetworkInterface is not None
assert QtNetwork.QNetworkProxy is not None
assert QtNetwork.QNetworkProxyFactory is not None
assert QtNetwork.QNetworkProxyQuery is not None
assert QtNetwork.QAbstractSocket is not None
assert QtNetwork.QLocalServer is not None
assert QtNetwork.QLocalSocket is not None
assert QtNetwork.QTcpServer is not None
assert QtNetwork.QTcpSocket is not None
assert QtNetwork.QUdpSocket is not None
assert QtNetwork.QSslCertificate is not None
assert QtNetwork.QSslCipher is not None
assert QtNetwork.QSslConfiguration is not None
assert QtNetwork.QSslError is not None
assert QtNetwork.QSslKey is not None
assert QtNetwork.QSslSocket is not None
| Python | 0 |
cd323386b61cd280fcf3e599ae6a02b889f81a40 | Remove support for old style api urls | raven/contrib/django/urls.py | raven/contrib/django/urls.py | """
raven.contrib.django.urls
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
try:
from django.conf.urls import url
except ImportError:
# for Django version less than 1.4
from django.conf.urls.defaults import url # NOQA
import raven.contrib.django.views
urlpatterns = (
url(r'^api/(?P<project_id>[\w_-]+)/store/$', raven.contrib.django.views.report, name='raven-report'),
url(r'^report/', raven.contrib.django.views.report),
)
| """
raven.contrib.django.urls
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
try:
from django.conf.urls import url
except ImportError:
# for Django version less than 1.4
from django.conf.urls.defaults import url # NOQA
import raven.contrib.django.views
urlpatterns = (
url(r'^api/(?:(?P<project_id>[\w_-]+)/)?store/$', raven.contrib.django.views.report, name='raven-report'),
url(r'^report/', raven.contrib.django.views.report),
)
| Python | 0 |
d3e9e7f6873e54f9657d1fdc9c3339b49c4936ae | Update tests to use required request argument | rcamp/tests/test_lib_auth.py | rcamp/tests/test_lib_auth.py | from django.conf import settings
import mock
import pam
from lib.pam_backend import PamBackend
from tests.utilities.ldap import (
LdapTestCase,
get_ldap_user_defaults
)
from accounts.models import (
RcLdapUser,
User
)
# This test case covers functionality in the custom PAM Auth Backend
class PamBackendTestCase(LdapTestCase):
def setUp(self):
self.pb = PamBackend()
super(PamBackendTestCase,self).setUp()
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_authenticate(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
rc_user = RcLdapUser.objects.get(username='testuser')
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(None, username='testuser',password='passwd')
self.assertIsNotNone(user)
self.assertEqual(user.username,rc_user.username)
self.assertEqual(user.first_name,rc_user.first_name)
self.assertEqual(user.last_name,rc_user.last_name)
self.assertEqual(user.email,rc_user.email)
reauthed_user = self.pb.authenticate(None, username='testuser',password='passwd')
self.assertEqual(reauthed_user,user)
self.assertFalse(reauthed_user.is_staff)
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=False))
def test_authenticate_failed(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(None, username='testuser',password='badpasswd')
self.assertIsNone(user)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_authenticate_update_user(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
rc_user = RcLdapUser.objects.get(username='testuser')
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(None, username='testuser',password='passwd')
self.assertIsNotNone(user)
rc_user.first_name = 'pamtested'
rc_user.save(organization='ucb',)
user = self.pb.authenticate(None, username='testuser',password='passwd')
self.assertEqual(user.first_name,'pamtested')
self.assertFalse(user.is_staff)
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_get_user(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(None, username='testuser',password='passwd')
self.assertIsNotNone(user)
user = self.pb.get_user(user.id)
self.assertEqual(user.username, 'testuser')
# class LDAPBackendTestCase(LdapTestCase):
| from django.conf import settings
import mock
import pam
from lib.pam_backend import PamBackend
from tests.utilities.ldap import (
LdapTestCase,
get_ldap_user_defaults
)
from accounts.models import (
RcLdapUser,
User
)
# This test case covers functionality in the custom PAM Auth Backend
class PamBackendTestCase(LdapTestCase):
def setUp(self):
self.pb = PamBackend()
super(PamBackendTestCase,self).setUp()
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_authenticate(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
rc_user = RcLdapUser.objects.get(username='testuser')
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(username='testuser',password='passwd')
self.assertIsNotNone(user)
self.assertEqual(user.username,rc_user.username)
self.assertEqual(user.first_name,rc_user.first_name)
self.assertEqual(user.last_name,rc_user.last_name)
self.assertEqual(user.email,rc_user.email)
reauthed_user = self.pb.authenticate(username='testuser',password='passwd')
self.assertEqual(reauthed_user,user)
self.assertFalse(reauthed_user.is_staff)
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=False))
def test_authenticate_failed(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(username='testuser',password='badpasswd')
self.assertIsNone(user)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_authenticate_update_user(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
rc_user = RcLdapUser.objects.get(username='testuser')
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(username='testuser',password='passwd')
self.assertIsNotNone(user)
rc_user.first_name = 'pamtested'
rc_user.save(organization='ucb',)
user = self.pb.authenticate(username='testuser',password='passwd')
self.assertEqual(user.first_name,'pamtested')
self.assertFalse(user.is_staff)
@mock.patch('pam.pam.authenticate',mock.MagicMock(return_value=True))
def test_get_user(self):
rc_user_defaults = get_ldap_user_defaults()
RcLdapUser.objects.create(organization='ucb',**rc_user_defaults)
self.assertRaises(User.DoesNotExist, User.objects.get,
username='testuser')
user = self.pb.authenticate(username='testuser',password='passwd')
self.assertIsNotNone(user)
user = self.pb.get_user(user.id)
self.assertEqual(user.username, 'testuser')
| Python | 0 |
15865668659ebee4cb49a28d09f9ff1d67d2a96d | Set issue to in progress when sending issue | rdmo/projects/views/issue.py | rdmo/projects/views/issue.py | import logging
from django.conf import settings
from django.core.mail import EmailMessage
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, UpdateView
from rdmo.core.views import ObjectPermissionMixin, RedirectViewMixin
from ..forms import IssueMailForm
from ..models import Issue
logger = logging.getLogger(__name__)
class IssueUpdateView(ObjectPermissionMixin, RedirectViewMixin, UpdateView):
model = Issue
queryset = Issue.objects.all()
fields = ('status', )
permission_required = 'projects.change_issue_object'
def get_permission_object(self):
return self.get_object().project
class IssueSendView(ObjectPermissionMixin, RedirectViewMixin, DetailView):
queryset = Issue.objects.all()
permission_required = 'projects.change_issue_object'
template_name = 'projects/issue_send.html'
def get_permission_object(self):
return self.get_object().project
def get_context_data(self, **kwargs):
if 'form' not in kwargs:
kwargs['form'] = IssueMailForm(initial={
'subject': self.object.task.title,
'message': self.object.task.text,
'cc_myself': True
})
context = super().get_context_data(**kwargs)
context['integrations'] = self.get_object().project.integrations.all()
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
self.object.status = Issue.ISSUE_STATUS_IN_PROGRESS
self.object.save()
integration_id = request.POST.get('integration')
if integration_id:
# send via integration
integration = self.get_object().project.integrations.get(pk=integration_id)
return integration.provider.send_issue(request, integration.options_dict, self.object)
else:
# send via mail
form = IssueMailForm(request.POST)
if form.is_valid():
from_email = settings.DEFAULT_FROM_EMAIL
to_emails = form.cleaned_data.get('recipients') + form.cleaned_data.get('recipients_input', [])
cc_emails = [request.user.email] if form.cleaned_data.get('cc_myself') else []
reply_to = [request.user.email]
subject = form.cleaned_data.get('subject')
message = form.cleaned_data.get('message')
EmailMessage(subject, message, from_email, to_emails, cc=cc_emails, reply_to=reply_to).send()
return HttpResponseRedirect(self.get_object().project.get_absolute_url())
else:
return self.render_to_response(self.get_context_data(form=form))
| import logging
from django.conf import settings
from django.core.mail import EmailMessage
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, UpdateView
from rdmo.core.views import ObjectPermissionMixin, RedirectViewMixin
from ..forms import IssueMailForm
from ..models import Issue
logger = logging.getLogger(__name__)
class IssueUpdateView(ObjectPermissionMixin, RedirectViewMixin, UpdateView):
model = Issue
queryset = Issue.objects.all()
fields = ('status', )
permission_required = 'projects.change_issue_object'
def get_permission_object(self):
return self.get_object().project
class IssueSendView(ObjectPermissionMixin, RedirectViewMixin, DetailView):
queryset = Issue.objects.all()
permission_required = 'projects.change_issue_object'
template_name = 'projects/issue_send.html'
def get_permission_object(self):
return self.get_object().project
def get_context_data(self, **kwargs):
if 'form' not in kwargs:
kwargs['form'] = IssueMailForm(initial={
'subject': self.object.task.title,
'message': self.object.task.text,
'cc_myself': True
})
context = super().get_context_data(**kwargs)
context['integrations'] = self.get_object().project.integrations.all()
return context
def post(self, request, *args, **kwargs):
self.object = self.get_object()
integration_id = request.POST.get('integration')
if integration_id:
# send via integration
integration = self.get_object().project.integrations.get(pk=integration_id)
return integration.provider.send_issue(request, integration.options_dict, self.object)
else:
# send via mail
form = IssueMailForm(request.POST)
if form.is_valid():
from_email = settings.DEFAULT_FROM_EMAIL
to_emails = form.cleaned_data.get('recipients') + form.cleaned_data.get('recipients_input', [])
cc_emails = [request.user.email] if form.cleaned_data.get('cc_myself') else []
reply_to = [request.user.email]
subject = form.cleaned_data.get('subject')
message = form.cleaned_data.get('message')
EmailMessage(subject, message, from_email, to_emails, cc=cc_emails, reply_to=reply_to).send()
return HttpResponseRedirect(self.get_object().project.get_absolute_url())
else:
return self.render_to_response(self.get_context_data(form=form))
| Python | 0 |
80c0f29d7b81a68d14e261ce1c062d37a0e6d4f7 | Increase ARM process startup time baseline | tests/integration_tests/performance/test_process_startup_time.py | tests/integration_tests/performance/test_process_startup_time.py | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test that the process startup time up to socket bind is within spec."""
import json
import os
import platform
import time
import host_tools.logging as log_tools
MAX_STARTUP_TIME_CPU_US = {'x86_64': 5500, 'aarch64': 2800}
""" The maximum acceptable startup time in CPU us. """
# TODO: Keep a `current` startup time in S3 and validate we don't regress
def test_startup_time(test_microvm_with_api):
"""Check the startup time for jailer and Firecracker up to socket bind."""
microvm = test_microvm_with_api
microvm.spawn()
microvm.basic_config(vcpu_count=2, mem_size_mib=1024)
# Configure metrics.
metrics_fifo_path = os.path.join(microvm.path, 'metrics_fifo')
metrics_fifo = log_tools.Fifo(metrics_fifo_path)
response = microvm.metrics.put(
metrics_path=microvm.create_jailed_resource(metrics_fifo.path)
)
assert microvm.api_session.is_status_no_content(response.status_code)
microvm.start()
time.sleep(0.4)
# The metrics fifo should be at index 1.
# Since metrics are flushed at InstanceStart, the first line will suffice.
lines = metrics_fifo.sequential_reader(1)
metrics = json.loads(lines[0])
startup_time_us = metrics['api_server']['process_startup_time_us']
cpu_startup_time_us = metrics['api_server']['process_startup_time_cpu_us']
print('Process startup time is: {} us ({} CPU us)'
.format(startup_time_us, cpu_startup_time_us))
assert cpu_startup_time_us > 0
assert cpu_startup_time_us <= MAX_STARTUP_TIME_CPU_US[platform.machine()]
| # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Test that the process startup time up to socket bind is within spec."""
import json
import os
import platform
import time
import host_tools.logging as log_tools
MAX_STARTUP_TIME_CPU_US = {'x86_64': 5500, 'aarch64': 2600}
""" The maximum acceptable startup time in CPU us. """
# TODO: Keep a `current` startup time in S3 and validate we don't regress
def test_startup_time(test_microvm_with_api):
"""Check the startup time for jailer and Firecracker up to socket bind."""
microvm = test_microvm_with_api
microvm.spawn()
microvm.basic_config(vcpu_count=2, mem_size_mib=1024)
# Configure metrics.
metrics_fifo_path = os.path.join(microvm.path, 'metrics_fifo')
metrics_fifo = log_tools.Fifo(metrics_fifo_path)
response = microvm.metrics.put(
metrics_path=microvm.create_jailed_resource(metrics_fifo.path)
)
assert microvm.api_session.is_status_no_content(response.status_code)
microvm.start()
time.sleep(0.4)
# The metrics fifo should be at index 1.
# Since metrics are flushed at InstanceStart, the first line will suffice.
lines = metrics_fifo.sequential_reader(1)
metrics = json.loads(lines[0])
startup_time_us = metrics['api_server']['process_startup_time_us']
cpu_startup_time_us = metrics['api_server']['process_startup_time_cpu_us']
print('Process startup time is: {} us ({} CPU us)'
.format(startup_time_us, cpu_startup_time_us))
assert cpu_startup_time_us > 0
assert cpu_startup_time_us <= MAX_STARTUP_TIME_CPU_US[platform.machine()]
| Python | 0 |
0a80fa2e610637a6c657f99c8eac5d99e33b5844 | Use /usr/bin/env in #! | lines2jsonarray.py | lines2jsonarray.py | #!/usr/bin/env python
from __future__ import print_function
import sys
print('[')
for i, line in enumerate(sys.stdin):
if i != 0:
print(',')
print(line)
print(']')
| #!/usr/bin/python
from __future__ import print_function
import sys
print('[')
for i, line in enumerate(sys.stdin):
if i != 0:
print(',')
print(line)
print(']')
| Python | 0 |
6cae1c77094f91443781f33b04abe96541739599 | hello2 | robo/test/visual_test.py | robo/test/visual_test.py | import GPy
import matplotlib; matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
import numpy as np
import os
from robo.models.GPyModel import GPyModel
from robo.acquisition.EI import EI
from robo.maximizers.maximize import stochastic_local_search
from robo.recommendation.incumbent import compute_incumbent
from robo.visualization import plotting as plotting
# The optimization function that we want to optimize. It gets a numpy array with shape (N,D) where N >= 1 are the number of datapoints and D are the number of features
def objective_function(x):
return np.sin(3 * x) * 4 * (x - 1) * (x + 2)
def run():
# Defining the bounds and dimensions of the input space
X_lower = np.array([0])
X_upper = np.array([6])
dims = 1
# Set the method that we will use to optimize the acquisition function
maximizer = stochastic_local_search
# Defining the method to model the objective function
kernel = GPy.kern.Matern52(input_dim=dims)
model = GPyModel(kernel, optimize=True, noise_variance=1e-4, num_restarts=10)
# The acquisition function that we optimize in order to pick a new x
acquisition_func = EI(model, X_upper=X_upper, X_lower=X_lower, compute_incumbent=compute_incumbent, par=0.1) # par is the minimum improvement that a point has to obtain
# Draw one random point and evaluate it to initialize BO
X = np.array([np.random.uniform(X_lower, X_upper, dims)])
Y = objective_function(X)
# Fit the model on the data we observed so far
model.train(X, Y)
# Update the acquisition function model with the retrained model
acquisition_func.update(model)
# Optimize the acquisition function to obtain a new point
new_x = maximizer(acquisition_func, X_lower, X_upper)
# Evaluate the point and add the new observation to our set of previous seen points
new_y = objective_function(np.array(new_x))
X = np.append(X, new_x, axis=0)
Y = np.append(Y, new_y, axis=0)
# Visualize the objective function, model and the acquisition function
fig = plt.figure()
#Sub plot for the model and the objective function
ax1 = fig.add_subplot(2,1,1)
#Sub plot for the acquisition function
ax2 = fig.add_subplot(2,1,2)
resolution = 0.1
# Call plot_model function
ax1=plotting.plot_model(model,X_lower,X_upper,ax1,resolution,'b','blue',"Prosterior Mean",3,True)
#Call plot_objective_function
ax1=plotting.plot_objective_function(objective_function,X_lower,X_upper,X,Y,ax1,resolution,'black','ObjectiveFunction',True)
ax1.set_title("Model + Objective Function")
#Call plot_acquisition_function
ax2=plotting.plot_acquisition_function(acquisition_func,X_lower,X_upper,X,ax2,resolution,"AcquisitionFunction",True)
plt.savefig('test2.png')
os.system('eog test2.png&')
| import GPy
import matplotlib; matplotlib.use('GTKAgg')
import matplotlib.pyplot as plt
import numpy as np
import os
from robo.models.GPyModel import GPyModel
from robo.acquisition.EI import EI
from robo.maximizers.maximize import stochastic_local_search
from robo.recommendation.incumbent import compute_incumbent
from robo.visualization import plotting as plotting
# The optimization function that we want to optimize. It gets a numpy array with shape (N,D) where N >= 1 are the number of datapoints and D are the number of features
def objective_function(x):
return np.sin(3 * x) * 4 * (x - 1) * (x + 2)
def run():
# Defining the bounds and dimensions of the input space
X_lower = np.array([0])
X_upper = np.array([6])
dims = 1
# Set the method that we will use to optimize the acquisition function
maximizer = stochastic_local_search
# Defining the method to model the objective function
kernel = GPy.kern.Matern52(input_dim=dims)
model = GPyModel(kernel, optimize=True, noise_variance=1e-4, num_restarts=10)
# The acquisition function that we optimize in order to pick a new x
acquisition_func = EI(model, X_upper=X_upper, X_lower=X_lower, compute_incumbent=compute_incumbent, par=0.1) # par is the minimum improvement that a point has to obtain
# Draw one random point and evaluate it to initialize BO
X = np.array([np.random.uniform(X_lower, X_upper, dims)])
Y = objective_function(X)
# Fit the model on the data we observed so far
model.train(X, Y)
# Update the acquisition function model with the retrained model
acquisition_func.update(model)
# Optimize the acquisition function to obtain a new point
new_x = maximizer(acquisition_func, X_lower, X_upper)
# Evaluate the point and add the new observation to our set of previous seen points
new_y = objective_function(np.array(new_x))
X = np.append(X, new_x, axis=0)
Y = np.append(Y, new_y, axis=0)
# Visualize the objective function, model and the acquisition function
fig = plt.figure()
#Sub plot for the model and the objective function
ax1 = fig.add_subplot(2,1,1)
#Sub plot for the acquisition function
ax2 = fig.add_subplot(2,1,2)
resolution = 0.1
# Call plot_model function
ax1=plotting.plot_model(model,X_lower,X_upper,ax1,resolution,'b','blue',"Prosterior Mean",3,True)
#Call plot_objective_function
ax1=plotting.plot_objective_function(objective_function,X_lower,X_upper,X,Y,ax1,resolution,'black','ObjectiveFunction',True)
ax1.set_title("Model + Objective Function")
#Call plot_acquisition_function
ax2=plotting.plot_acquisition_function(acquisition_func,X_lower,X_upper,X,ax2,resolution,"AcquisitionFunction",True)
plt.savefig('test2.png')
os.system('eog test2.png&')
| Python | 0.999986 |
14884a18b42dab22e5893c619164049a475888cc | fix traceback on bank accounts creation | addons/account_bank_statement_import/wizard/setup_wizards.py | addons/account_bank_statement_import/wizard/setup_wizards.py | # -*- coding: utf-8 -*-
from odoo import models, fields, api
class SetupBarBankConfigWizard(models.TransientModel):
_inherit = 'account.setup.bank.manual.config'
def validate(self):
""" Default the bank statement source of new bank journals as 'file_import'
"""
super(SetupBarBankConfigWizard, self).validate()
if (self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined') \
and self.env['account.journal']._get_bank_statements_available_import_formats():
self.linked_journal_id.bank_statements_source = 'file_import'
| # -*- coding: utf-8 -*-
from odoo import models, fields, api
class SetupBarBankConfigWizard(models.TransientModel):
_inherit = 'account.setup.bank.manual.config'
def validate(self):
""" Default the bank statement source of new bank journals as 'file_import'
"""
super(SetupBarBankConfigWizard, self).validate()
if self.create_or_link_option == 'new' or self.linked_journal_id.bank_statements_source == 'undefined' \
and self.env['account.journal']._get_bank_statements_available_import_formats():
self.linked_journal_id.bank_statements_source = 'file_import'
| Python | 0.000001 |
0b707c137aef4c6ad6ddd27b00585388b152666c | fix skitaid shutdown | skitai/skitaid.py | skitai/skitaid.py | #!/usr/bin/python3
# 2014. 12. 9 by Hans Roh hansroh@gmail.com
__version__ = "0.8.8.1"
version_info = tuple (map (lambda x: not x.isdigit () and x or int (x), __version__.split (".")))
import sys
import subprocess
import os
import signal
import time
from aquests.lib import confparse, logger, flock, pathtool
from skitai.server.wastuff import process, daemon
import time
class Service (daemon.Daemon):
BACKOFF_MAX_INTERVAL = 600
CLEAN_SHUTDOWNED = {}
RESTART_QUEUE = {}
DAEMONS = ("smtpda", "cron")
def __init__ (self, cmd, logpath, varpath, verbose):
self.cmd = cmd
self.logpath = logpath
self.varpath = varpath
self.consol = verbose
self.make_logger (False)
self.backoff_start_time = None
self.backoff_interval = 5
self.child = None
def set_backoff (self, reset = False):
if reset:
if self.backoff_start_time is None:
return
else:
self.backoff_start_time = None
self.backoff_interval = 5
return
if self.backoff_start_time is None:
self.backoff_start_time = time.time ()
def shutdown (self):
self.logger ("[info] try to kill %s..." % self.child.name)
self.child.kill ()
for i in range (30):
time.sleep (1)
if self.child.poll () is None:
self.logger ("[info] %s is still alive" % self.child.name)
else:
break
if self.child.poll () is None:
self.logger ("[info] force to kill %s" % self.child.name)
self.child.send_signal ('kill')
def hTERM (self, signum, frame):
self.shutdown ()
def run (self):
if os.name == "nt":
signal.signal(signal.SIGBREAK, self.hTERM)
else:
signal.signal(signal.SIGTERM, self.hTERM)
try:
self.start ()
except KeyboardInterrupt:
pass
except:
self.logger.trace ()
def create (self):
self.child = process.Process (
self.cmd,
'instance',
not self.varpath and (os.name == "posix" and '/var/skitai' or r'c:\var\skitai')
)
def start (self):
self.create ()
try:
while 1:
exitcode = self.child.poll ()
if exitcode is None:
self.set_backoff (True)
continue
if exitcode == 0:
self.logger ("[info] instance has been shutdowned cleanly")
break
elif exitcode == 3:
self.logger ("[info] try re-starting up instance")
self.create ()
else:
self.set_backoff ()
if time.time() - self.backoff_start_time >= self.backoff_interval:
self.logger ("[fail] instance encountered unexpected error and terminated, try re-starting up (current backoff interval is %d)" % self.backoff_interval)
self.backoff_interval = self.backoff_interval * 2
if self.backoff_interval > self.BACKOFF_MAX_INTERVAL:
self.backoff_interval = self.BACKOFF_MAX_INTERVAL
self.create ()
time.sleep (3)
except KeyboardInterrupt:
pass
if __name__ == "__main__":
service = Service ()
service.run ()
| #!/usr/bin/python3
# 2014. 12. 9 by Hans Roh hansroh@gmail.com
__version__ = "0.8.8.1"
version_info = tuple (map (lambda x: not x.isdigit () and x or int (x), __version__.split (".")))
import sys
import subprocess
import os
import signal
import time
from aquests.lib import confparse, logger, flock, pathtool
from skitai.server.wastuff import process, daemon
import time
class Service (daemon.Daemon):
BACKOFF_MAX_INTERVAL = 600
CLEAN_SHUTDOWNED = {}
RESTART_QUEUE = {}
DAEMONS = ("smtpda", "cron")
def __init__ (self, cmd, logpath, varpath, verbose):
self.cmd = cmd
self.logpath = logpath
self.varpath = varpath
self.consol = verbose
self.make_logger (False)
self.backoff_start_time = None
self.backoff_interval = 5
self.child = None
def set_backoff (self, reset = False):
if reset:
if self.backoff_start_time is None:
return
else:
self.backoff_start_time = None
self.backoff_interval = 5
return
if self.backoff_start_time is None:
self.backoff_start_time = time.time ()
def shutdown (self):
self.logger ("[info] try to kill %s..." % self.child.name)
self.child.kill ()
for i in range (30):
time.sleep (1)
if self.child.poll () is None:
self.logger ("[info] %s is still alive" % self.child.name)
else:
break
if self.child.poll () is None:
self.logger ("[info] force to kill %s" % self.child.name)
self.child.send_signal ('kill')
def run (self):
if os.name == "nt":
signal.signal(signal.SIGBREAK, self.shutdown)
else:
signal.signal(signal.SIGTERM, self.shutdown)
try:
try:
self.start ()
except:
self.logger.trace ()
finally:
self.shutdown ()
def create (self):
self.child = process.Process (
self.cmd,
'instance',
not self.varpath and (os.name == "posix" and '/var/skitai' or r'c:\var\skitai')
)
def start (self):
self.create ()
try:
while 1:
exitcode = self.child.poll ()
if exitcode is None:
self.set_backoff (True)
continue
if exitcode == 0:
self.logger ("[info] instance has been shutdowned cleanly")
break
elif exitcode == 3:
self.logger ("[info] try re-starting up instance")
self.create ()
else:
self.set_backoff ()
if time.time() - self.backoff_start_time >= self.backoff_interval:
self.logger ("[fail] instance encountered unexpected error and terminated, try re-starting up (current backoff interval is %d)" % self.backoff_interval)
self.backoff_interval = self.backoff_interval * 2
if self.backoff_interval > self.BACKOFF_MAX_INTERVAL:
self.backoff_interval = self.BACKOFF_MAX_INTERVAL
self.create ()
time.sleep (3)
except KeyboardInterrupt:
pass
if __name__ == "__main__":
service = Service ()
service.run ()
| Python | 0.000015 |
8fbdced7a4c8ea61116e8c978e420c30b8a1f1dc | update for urls.py | slothauth/urls.py | slothauth/urls.py | from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter
from .views import change_email, login, logout, password_reset, profile, signup, passwordless_signup,\
passwordless_login, AccountViewSet, AuthViewSet
from . import settings
router = DefaultRouter()
router.register(r'accounts', AccountViewSet)
router.register(r'accounts/auth', AuthViewSet)
urlpatterns = [
url(r'^api/' + settings.API_VERSION + '/', include(router.urls)), # TODO makes sense to have a settings.API_BASE_URL rather than a settings.API_VERSION?
url(r'^signup/?', signup, name='signup'),
url(r'^login/?', login, name='login'),
url(r'^password_reset/?', password_reset, name='password_reset'),
url(r'^change_email/?', change_email, name='change_email'),
url(r'^profile/?', profile, name='profile'),
url(r'^logout/?', logout, name='logout'),
url(r'^passwordless_signup/?', passwordless_signup, name='passwordless_signup'),
url(r'^passwordless_login/?', passwordless_login, name='passwordless_login'),
#(r'^password-reset-done/$', 'django.contrib.auth.views.password_reset_complete'),
#(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm',
# {'post_reset_redirect' : '/password-reset-done/'}),
]
# TODO create setting for turning on and off debug urls
| from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter
from .views import change_email, login, logout, password_reset, profile, signup, passwordless_signup,\
passwordless_login, AccountViewSet, AuthViewSet
from . import settings
router = DefaultRouter()
router.register(r'accounts', AccountViewSet)
router.register(r'accounts/auth', AuthViewSet)
urlpatterns = [
url(r'^api/' + settings.API_VERSION + '/', include(router.urls)),
url(r'^signup/?', signup, name='signup'),
url(r'^login/?', login, name='login'),
url(r'^password_reset/?', password_reset, name='password_reset'),
url(r'^change_email/?', change_email, name='change_email'),
url(r'^profile/?', profile, name='profile'),
url(r'^logout/?', logout, name='logout'),
url(r'^passwordless_signup/?', passwordless_signup, name='passwordless_signup'),
url(r'^passwordless_login/?', passwordless_login, name='passwordless_login'),
#(r'^password-reset-done/$', 'django.contrib.auth.views.password_reset_complete'),
#(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm',
# {'post_reset_redirect' : '/password-reset-done/'}),
]
| Python | 0 |
ac765968a9a83685c28244200958164d2a0fc81e | fix typo | smartmin/email.py | smartmin/email.py | from django.conf import settings
from django.template import Context
from django.utils.module_loading import import_string
def link_components(request, user=None):
protocol = 'https' if request.is_secure() else 'http'
hostname = getattr(settings, 'HOSTNAME', request.get_host())
return {"protocol": protocol, "hostname": hostname}
def build_email_context(request=None, user=None):
context = Context({'user': user})
processors = []
collect = []
collect.extend(getattr(settings, "EMAIL_CONTEXT_PROCESSORS",
('smartmin.email.link_components',)))
for path in collect:
func = import_string(path)
processors.append(func)
for processor in processors:
context.update(processor(request, user))
return context
| from django.conf import settings
from django.template import Context
from django.utils.module_loading import import_string
def link_components(request, user=None):
protocol = 'https' if request.is_secure() else 'http'
hostname = getattr(settings, 'HOSTNAME', request.get_host())
return {"protocol": protocol, "hostname": hostname}
def build_email_context(request=None, user=None):
context = Context({'user': user})
processors = []
collect = []
collect.extend(getattr(settings, "EMAIL_CONTEXT_PROCESSORS",
('smartmin.emaile.link_components',)))
for path in collect:
func = import_string(path)
processors.append(func)
for processor in processors:
context.update(processor(request, user))
return context
| Python | 0.999991 |
091735fce650d6326e73ca6fb224a77ae68bb601 | Add data written to message | salaryzenaggr/manager.py | salaryzenaggr/manager.py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from stevedore import extension
from salaryzenaggr.formatters import json_formatter
_fetchers = extension.ExtensionManager(namespace='salaryzenaggr.fetchers', invoke_on_load=True)
def _get_fetchers(banks, currencies):
for ext in _fetchers.extensions:
fetcher = ext.obj
if (any([bank in fetcher.get_supported_banks() for bank in banks]) and
any([curr in fetcher.get_supported_currencies() for curr in currencies])):
yield fetcher
def aggregate_rates(banks, currencies, from_date, result_file, debug):
res = {}
for fetcher in _get_fetchers(banks, currencies):
fetcher.fetch_data(res, currencies, from_date)
formatter = json_formatter.JsonPrettyFormatter if debug else json_formatter.JsonFormatter
output = formatter().format_data(res)
if debug:
print output
print "New data aggregated at %s UTC" % datetime.utcnow()
if result_file:
result_file.write(output)
result_file.close()
print "Data successfully written to %s" % result_file
| # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from stevedore import extension
from salaryzenaggr.formatters import json_formatter
_fetchers = extension.ExtensionManager(namespace='salaryzenaggr.fetchers', invoke_on_load=True)
def _get_fetchers(banks, currencies):
for ext in _fetchers.extensions:
fetcher = ext.obj
if (any([bank in fetcher.get_supported_banks() for bank in banks]) and
any([curr in fetcher.get_supported_currencies() for curr in currencies])):
yield fetcher
def aggregate_rates(banks, currencies, from_date, result_file, debug):
res = {}
for fetcher in _get_fetchers(banks, currencies):
fetcher.fetch_data(res, currencies, from_date)
formatter = json_formatter.JsonPrettyFormatter if debug else json_formatter.JsonFormatter
output = formatter().format_data(res)
if debug:
print output
print "New data aggregated at %s UTC" % datetime.utcnow()
if result_file:
result_file.write(output)
result_file.close()
| Python | 0.000007 |
1dd8ce20632d8a6b857a06136b89ac11b27c8f07 | Update documentation | letters/models.py | letters/models.py | from django.db import models
from common.models import Citation
class Letter(models.Model):
"""A letter from one of Pliny's books of personal correspondence
Attributes:
book (PositiveSmallIntegerField): book number.
manuscript_correspondent_name (CharField): override if manuscript
correspondent as written differs from database name
letter (PositiveSmallIntegerField): letter number
date (PositiveSmallIntegerField): Year of letter if known.
citations (ManyToManyField): Citations related to the letter
"""
book = models.PositiveSmallIntegerField()
manuscript_correspondent_name = models.CharField(blank=True, max_length=255)
letter = models.PositiveSmallIntegerField()
topics = models.ManyToManyField('Topic', blank=True)
date = models.PositiveSmallIntegerField(blank=True, null=True)
citations = models.ManyToManyField(Citation, blank=True)
class Meta:
unique_together = ('book', 'letter')
ordering = ['book', 'letter']
def __str__(self):
return "%s.%s" % (self.book, self.letter)
class Topic(models.Model):
"""A topic for one of Pliny's letters"""
name = models.CharField(max_length=255)
def __str__(self):
return self.name
| from django.db import models
from common.models import Citation
class Letter(models.Model):
"""A letter from one of Pliny's books of personal correspondence"""
book = models.PositiveSmallIntegerField()
manuscript_correspondent_name = models.CharField(blank=True, max_length=255)
letter = models.PositiveSmallIntegerField()
topics = models.ManyToManyField('Topic', blank=True)
date = models.PositiveSmallIntegerField(blank=True, null=True)
citations = models.ManyToManyField(Citation, blank=True)
class Meta:
unique_together = ('book', 'letter')
ordering = ['book', 'letter']
def __str__(self):
return "%s.%s" % (self.book, self.letter)
class Topic(models.Model):
"""A topic for one of Pliny's letters"""
name = models.CharField(max_length=255)
def __str__(self):
return self.name
| Python | 0 |
e7f5efafbdbd674adfb60bbadac6665860dd23a0 | Update __main__.py | snake/__main__.py | snake/__main__.py | import argparse
import sys
from .assembler import Assembler
from .vm import System
def assembler():
parser = argparse.ArgumentParser(description='A 2 pass assembler.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Take action depending on whether or not this is being pipelined
if sys.stdin.isatty():
parser.add_argument("file", help="file to be assembled.")
parser.add_argument('-o','--outfile', help='output file',
default=None, required=False)
args = parser.parse_args()
try:
with open(args.file, 'r') as f:
a = Assembler(f)
a.assemble()
output_records = a.generated_records
except IOError:
print("[IO Error]: The source file could not be opened.")
else:
try:
if args.outfile is None:
for record in output_records:
print(record)
else:
with open(args.outfile, 'w') as w:
for record in output_records:
w.write(record)
w.write('\n')
except IOError:
print("[IO Error]: The output file could not be opened.")
else:
a = Assembler(sys.stdin)
try:
a.assemble()
output_records = a.generated_records
except StopIteration:
print("[IO Error]: The source program could not be read from stdin")
else:
for record in output_records:
print(record)
def vm():
parser = argparse.ArgumentParser(description='A simple vm.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Take action depending on whether or not this is being pipelined
if sys.stdin.isatty():
parser.add_argument("file", help="file to be loaded.")
parser.add_argument('-o','--outfile', help='output file',
default=None, required=False)
parser.add_argument('--step', dest='step',
help='step through each instruction cycle.',
action='store_true')
parser.set_defaults(step=False)
args = parser.parse_args()
try:
system = System()
with open(args.file, 'r') as f:
system.load_file(f)
system.step = args.step
system.run()
except IOError:
print("[IO Error]: The source file could not be opened.")
except:
print "IR: %s\nPC: %s\nOutput: %s\n" % \
(system.ir, system.pc, system.format_output())
raise
else:
try:
system = System()
system.load_file(sys.stdin)
system.run()
except StopIteration:
print("[IO Error]: The source program could not be read from stdin")
except:
print "IR: %s\nPC: %s\nOutput: %s\n" % \
(system.ir, system.pc, system.format_output())
raise
if __name__ == '__main__':
assembler()
| import argparse
import sys
from .assembler import Assembler
from .vm import System
def assembler():
parser = argparse.ArgumentParser(description='A 2 pass assembler.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Take action depending on whether or not this is being pipelined
if sys.stdin.isatty():
parser.add_argument("file", help="file to be assembled.")
parser.add_argument('-o','--outfile', help='output file',
default=None, required=False)
args = parser.parse_args()
try:
with open(args.file, 'r') as f:
a = Assembler(f)
a.assemble()
output_records = a.generated_records
except IOError:
print("[IO Error]: The source file could not be opened.")
else:
try:
if args.outfile is None:
for record in output_records:
print(record)
else:
with open(args.outfile, 'w') as w:
for record in output_records:
w.write(record)
w.write('\n')
except IOError:
print("[IO Error]: The output file could not be opened.")
else:
a = Assembler(sys.stdin)
try:
a.assemble()
output_records = a.generated_records
except StopIteration:
print("[IO Error]: The source program could not be read from stdin")
else:
for record in output_records:
print(record)
def vm():
parser = argparse.ArgumentParser(description='A simple vm.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# Take action depending on whether or not this is being pipelined
if sys.stdin.isatty():
parser.add_argument("file", help="file to be assembled.")
parser.add_argument('-o','--outfile', help='output file',
default=None, required=False)
parser.add_argument('--step', dest='step',
help='step through each instruction cycle.',
action='store_true')
parser.set_defaults(step=False)
args = parser.parse_args()
try:
system = System()
with open(args.file, 'r') as f:
system.load_file(f)
system.step = args.step
system.run()
except IOError:
print("[IO Error]: The source file could not be opened.")
except:
print "IR: %s\nPC: %s\nOutput: %s\n" % \
(system.ir, system.pc, system.format_output())
raise
else:
try:
system = System()
system.load_file(sys.stdin)
system.run()
except StopIteration:
print("[IO Error]: The source program could not be read from stdin")
except:
print "IR: %s\nPC: %s\nOutput: %s\n" % \
(system.ir, system.pc, system.format_output())
raise
if __name__ == '__main__':
assembler()
| Python | 0.000063 |
c1be270c96ef07faa7ceecf4117890ce06af65a8 | Update setup.py | wrappers/python/setup.py | wrappers/python/setup.py | from setuptools import setup, find_packages
from setuptools.dist import Distribution
# _version.py should be generated by running find_librs_version.py and copied to pyrealsense2 folder
from pyrealsense2._version import __version__
import os
import io
package_name = "pyrealsense2"
package_data = {}
print("version = ", __version__)
def load_readme():
with io.open('README.rst', encoding="utf-8") as f:
return f.read()
if os.name == 'posix':
package_data[package_name] = ['*.so']
else:
package_data[package_name] = ['*.pyd', '*.dll']
# This creates a list which is empty but returns a length of 1.
# Should make the wheel a binary distribution and platlib compliant.
class EmptyListWithLength(list):
def __len__(self):
return 1
setup(
name=package_name,
version=__version__,
author='Intel(R) RealSense(TM)',
author_email='realsense@intel.com',
url='https://github.com/IntelRealSense/librealsense',
scripts=['examples/align-depth2color.py',
'examples/export_ply_example.py',
'examples/opencv_viewer_example.py',
'examples/python-rs400-advanced-mode-example.py',
'examples/python-tutorial-1-depth.py'
],
license='Apache License, Version 2.0',
description='Python Wrapper for Intel Realsense SDK 2.0.',
long_description=load_readme(),
install_requires=[],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Multimedia :: Video',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Human Machine Interfaces',
'Topic :: Scientific/Engineering :: Image Recognition',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks'
],
packages=find_packages(exclude=['third_party', 'docs', 'examples']),
include_package_data=True,
ext_modules=EmptyListWithLength(),
package_data=package_data
)
| from setuptools import setup, find_packages
from setuptools.dist import Distribution
# _version.py should be generated by running find_librs_version.py and copied to pyrealsense2 folder
from pyrealsense2._version import __version__
import os
import io
package_name = "pyrealsense2"
package_data = {}
print("version = ", __version__)
def load_readme():
with io.open('README.rst', encoding="utf-8") as f:
return f.read()
if os.name == 'posix':
package_data[package_name] = ['*.so']
else:
package_data[package_name] = ['*.pyd', '*.dll']
# This creates a list which is empty but returns a length of 1.
# Should make the wheel a binary distribution and platlib compliant.
class EmptyListWithLength(list):
def __len__(self):
return 1
setup(
name=package_name,
version=__version__,
author='Intel(R) RealSense(TM)',
author_email='realsense@intel.com',
url='https://github.com/IntelRealSense/librealsense',
scripts=['examples/align-depth2color.py',
'examples/export_ply_example.py',
'examples/opencv_viewer_example.py',
'examples/python-rs400-advanced-mode-example.py',
'examples/python-tutorial-1-depth.py'
],
license='Apache License, Version 2.0',
description='Python Wrapper for Intel Realsense SDK 2.0.',
long_description=load_readme(),
install_requires=[],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Multimedia :: Video',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Human Machine Interfaces',
'Topic :: Scientific/Engineering :: Image Recognition',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks'
],
packages=find_packages(exclude=['third_party', 'docs', 'examples']),
include_package_data=True,
ext_modules=EmptyListWithLength(),
package_data=package_data
)
| Python | 0.000001 |
26af472f187de01f6e0d8d609f2d857e6557c984 | allow case insensitive menu in top url only | mapannotations/urls.py | mapannotations/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>,
#
# Version: 1.0
from mapannotations import views
from django.conf.urls import url, patterns
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
from django.views.generic import RedirectView
from django.views.decorators.cache import never_cache
from map_settings import map_settings
reverse_lazy = lazy(reverse, str)
# concatenate aliases to use in url regex
MENU_MAPPER_REGEX = "(%s)" % ("|".join(map_settings.MENU_MAPPER))
DEFAULT_MENU = map_settings.MENU_MAPPER.iterkeys().next()
urlpatterns = patterns('',)
# alias
for m in map_settings.MENU_MAPPER:
urlpatterns += (
url(r'^(?i)%s/$' % m, views.index,
{'menu': m},
name="mapindex_%s" % m),
)
urlpatterns += (
# core
url(r'^$', never_cache(
RedirectView.as_view(
url=reverse_lazy('mapindex_%s' % DEFAULT_MENU),
permanent=True,
query_string=True)),
name="mapindex"),
url(r'^api/experimenters/(?P<menu>%s)/'
r'(?P<experimenter_id>([-1]|[0-9])+)/$' % MENU_MAPPER_REGEX,
views.api_experimenter_detail,
name='mapannotations_api_experimenter'),
url(r'^api/mapannotations/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_mapannotation_list,
name='mapannotations_api_mapannotations'),
url(r'^api/plates/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_plate_list,
name='mapannotations_api_plates'),
url(r'^api/images/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_image_list,
name='mapannotations_api_images'),
url(r'^api/paths_to_object/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_paths_to_object,
name='mapannotations_api_paths_to_object'),
# TODO: c_id takes namedValue.name as an attribute, make sure regex match
url(r'^metadata_details/(?P<c_type>%s)/'
r'(?P<c_id>(.*))/$' % MENU_MAPPER_REGEX,
views.load_metadata_details,
name="load_metadata_details"),
# autocomplete
url(r'^autocomplete/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.mapannotations_autocomplete,
name='mapannotations_autocomplete'),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>,
#
# Version: 1.0
from mapannotations import views
from django.conf.urls import url, patterns
from django.core.urlresolvers import reverse
from django.utils.functional import lazy
from django.views.generic import RedirectView
from django.views.decorators.cache import never_cache
from map_settings import map_settings
reverse_lazy = lazy(reverse, str)
# concatenate aliases to use in url regex
MENU_MAPPER_REGEX = "(%s)" % ("|".join(map_settings.MENU_MAPPER))
DEFAULT_MENU = map_settings.MENU_MAPPER.iterkeys().next()
urlpatterns = patterns('',)
# alias
for m in map_settings.MENU_MAPPER:
urlpatterns += (
url(r'^%s/$' % m, views.index,
{'menu': m},
name="mapindex_%s" % m),
)
urlpatterns += (
# core
url(r'^$', never_cache(
RedirectView.as_view(
url=reverse_lazy('mapindex_%s' % DEFAULT_MENU),
permanent=True,
query_string=True)),
name="mapindex"),
url(r'^api/experimenters/(?P<menu>%s)/'
r'(?P<experimenter_id>([-1]|[0-9])+)/$' % MENU_MAPPER_REGEX,
views.api_experimenter_detail,
name='mapannotations_api_experimenter'),
url(r'^api/mapannotations/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_mapannotation_list,
name='mapannotations_api_mapannotations'),
url(r'^api/plates/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_plate_list,
name='mapannotations_api_plates'),
url(r'^api/images/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_image_list,
name='mapannotations_api_images'),
url(r'^api/paths_to_object/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.api_paths_to_object,
name='mapannotations_api_paths_to_object'),
# TODO: c_id takes namedValue.name as an attribute, make sure regex match
url(r'^metadata_details/(?P<c_type>%s)/'
r'(?P<c_id>(.*))/$' % MENU_MAPPER_REGEX,
views.load_metadata_details,
name="load_metadata_details"),
# autocomplete
url(r'^autocomplete/(?P<menu>%s)/$' % MENU_MAPPER_REGEX,
views.mapannotations_autocomplete,
name='mapannotations_autocomplete'),
)
| Python | 0 |
c56466243a582acde695ebb2a75d709a4b7672b3 | Reduce lines | Solver/Beginner/WhiteFaceSolver.py | Solver/Beginner/WhiteFaceSolver.py | from .. import Solver
from Move import Move
class WhiteFaceSolver(Solver):
def solution(self):
solution = []
# There are 4 down-corners
for i in range(4):
front_color = self.cube.cubies['F'].facings['F']
right_color = self.cube.cubies['R'].facings['R']
goal_cubie = self.cube.search_by_colors('W', front_color, right_color)
step_solution = []
goal_cubie_obj = self.cube.cubies[goal_cubie]
if goal_cubie == 'DFR':
if goal_cubie_obj.color_facing('W') == 'F':
step_solution.extend(["R", "U'", "R'"])
elif goal_cubie_obj.color_facing('W') == 'R':
step_solution.extend(["R", "U", "R'", "U'"])
elif goal_cubie == 'DFL':
if goal_cubie_obj.color_facing('W') == 'F':
step_solution.extend(["L'", "U", "L", "U'"])
elif goal_cubie_obj.color_facing('W') in ['L', 'D']:
step_solution.extend(["L'", "U'", "L"])
elif goal_cubie == 'BDL':
if goal_cubie_obj.color_facing('W') in ['B', 'D']:
step_solution.extend(["B'", "U2", "B"])
elif goal_cubie_obj.color_facing('W') == 'L':
step_solution.extend(["B'", "U", "B", "U2"])
elif goal_cubie == 'BDR':
if goal_cubie_obj.color_facing('W') in ['B', 'D']:
step_solution.extend(["B", "U", "B'"])
elif goal_cubie_obj.color_facing('W') == 'R':
step_solution.extend(["B", "U'", "B'", "U"])
else:
# Cubie is in upper face, place it on FRU
if goal_cubie == 'BRU':
step_solution.append("U")
elif goal_cubie == 'BLU':
step_solution.append("U2")
elif goal_cubie == 'FLU':
step_solution.append("U'")
# else is already at FRU
for m in step_solution:
self.cube.move(Move(m))
# Cubie is at FRU, place it at DRU with correct orientation
solution.extend(step_solution)
step_solution = []
if self.cube.cubies['FRU'].color_facing('W') == 'F':
step_solution.extend(["F'", "U'", "F"])
elif self.cube.cubies['FRU'].color_facing('W') == 'R':
step_solution.extend(["R", "U", "R'"])
elif self.cube.cubies['FRU'].color_facing('W') == 'U':
step_solution.extend(["R", "U2", "R'", "U'", "R", "U", "R"])
for m in step_solution:
self.cube.move(Move(m))
solution.extend(step_solution)
# Cubie is placed, move to next
solution.append('Y')
self.cube.move(Move('Y'))
return solution | from .. import Solver
from Move import Move
class WhiteFaceSolver(Solver):
def solution(self):
solution = []
# There are 4 down-corners
for i in range(4):
front_color = self.cube.cubies['F'].facings['F']
right_color = self.cube.cubies['R'].facings['R']
goal_cubie = self.cube.search_by_colors('W', front_color, right_color)
step_solution = []
goal_cubie_obj = self.cube.cubies[goal_cubie]
if goal_cubie == 'DFR':
if goal_cubie_obj.color_facing('W') == 'F':
step_solution.append("R")
step_solution.append("U'")
step_solution.append("R'")
elif goal_cubie_obj.color_facing('W') == 'R':
step_solution.append("R")
step_solution.append("U")
step_solution.append("R'")
step_solution.append("U'")
elif goal_cubie == 'DFL':
if goal_cubie_obj.color_facing('W') == 'F':
step_solution.append("L'")
step_solution.append("U")
step_solution.append("L")
step_solution.append("U'")
elif goal_cubie_obj.color_facing('W') in ['L', 'D']:
step_solution.append("L'")
step_solution.append("U'")
step_solution.append("L")
elif goal_cubie == 'BDL':
if goal_cubie_obj.color_facing('W') in ['B', 'D']:
step_solution.append("B'")
step_solution.append("U2")
step_solution.append("B")
elif goal_cubie_obj.color_facing('W') == 'L':
step_solution.append("B'")
step_solution.append("U")
step_solution.append("B")
step_solution.append("U2")
elif goal_cubie == 'BDR':
if goal_cubie_obj.color_facing('W') in ['B', 'D']:
step_solution.append("B")
step_solution.append("U")
step_solution.append("B'")
elif goal_cubie_obj.color_facing('W') == 'R':
step_solution.append("B")
step_solution.append("U'")
step_solution.append("B'")
step_solution.append("U")
else:
# Cubie is in upper face, place it on FRU
if goal_cubie == 'BRU':
step_solution.append("U")
elif goal_cubie == 'BLU':
step_solution.append("U2")
elif goal_cubie == 'FLU':
step_solution.append("U'")
# else is already at FRU
for m in step_solution:
self.cube.move(Move(m))
# Cubie is at FRU, place it at DRU with correct orientation
solution.extend(step_solution)
step_solution = []
if self.cube.cubies['FRU'].color_facing('W') == 'F':
step_solution.append("F'")
step_solution.append("U'")
step_solution.append("F")
elif self.cube.cubies['FRU'].color_facing('W') == 'R':
step_solution.append("R")
step_solution.append("U")
step_solution.append("R'")
elif self.cube.cubies['FRU'].color_facing('W') == 'U':
step_solution.append("R")
step_solution.append("U2")
step_solution.append("R'")
step_solution.append("U'")
step_solution.append("R")
step_solution.append("U")
step_solution.append("R'")
for m in step_solution:
self.cube.move(Move(m))
solution.extend(step_solution)
# Cubie is placed, move to next
solution.append('Y')
self.cube.move(Move('Y'))
return solution | Python | 0.999865 |
60ccc393fc428d609e772b838c5a4c4002e3ec49 | Add sendError method | snp/SNProtocol.py | snp/SNProtocol.py | from twisted.internet import defer
from twisted.protocols.basic import NetstringReceiver
import json
class SNError(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, args, kwargs)
self.code = args[1]
self.request = args[2]
class SNProtocol(NetstringReceiver):
id_counter = 0
def stringReceived(self, string):
packet = json.loads(string)
if "reqid" in packet:
if len(packet["reqid"]) > 2:
type = packet["reqid"][:2]
reqid = packet["reqid"][2:]
if type == "RQ":
self.factory.service.hadleRequest(packet, reqid, self)
elif type == "RE":
if reqid in self.requests:
self.factory.requests[reqid].callback(packet)
self.factory.requests.pop(reqid)
def sendRequest(self, request):
reqid = str(self.id_counter)
request["reqid"] = "RQ{0}".format(reqid)
self._sendPacket(request)
d = self.createDeferred(reqid)
self.id_counter += 1
return d
def sendResponse(self, request, reqid):
request["reqid"] = "RE{0}".format(str(reqid))
self._sendPacket(request)
def sendError(self, code, request):
r = {"Error": code, "Request": request}
self._sendPacket(r)
def _sendPacket(self, request):
json_str = json.dumps(request)
self.sendString(json_str)
def connectionMade(self):
self.factory.service.connectionMade(self)
def createDeferred(self, reqid):
d = defer.Deferred()
d.addCallback(self.errorChecker)
self.factory.service.requests[reqid] = d
return d
def errorChecker(self, packet):
if "Error" in packet:
raise SNError("", int(packet["Error"]), packet["Request"])
return packet
| from twisted.internet import defer
from twisted.protocols.basic import NetstringReceiver
import json
class SNError(Exception):
def __init__(self, *args, **kwargs):
Exception.__init__(self, args, kwargs)
self.code = args[1]
self.request = args[2]
class SNProtocol(NetstringReceiver):
id_counter = 0
def stringReceived(self, string):
packet = json.loads(string)
if "reqid" in packet:
if len(packet["reqid"]) > 2:
type = packet["reqid"][:2]
reqid = packet["reqid"][2:]
if type == "RQ":
self.factory.service.hadleRequest(packet, reqid, self)
elif type == "RE":
if reqid in self.requests:
self.factory.requests[reqid].callback(packet)
self.factory.requests.pop(reqid)
def sendRequest(self, request):
reqid = str(self.id_counter)
request["reqid"] = "RQ{0}".format(reqid)
self._sendPacket(request)
d = self.createDeferred(reqid)
self.id_counter += 1
return d
def sendResponse(self, request, reqid):
request["reqid"] = "RE{0}".format(str(reqid))
self._sendPacket(request)
def _sendPacket(self, request):
json_str = json.dumps(request)
self.sendString(json_str)
def connectionMade(self):
self.factory.service.connectionMade(self)
def createDeferred(self, reqid):
d = defer.Deferred()
d.addCallback(self.errorChecker)
self.factory.service.requests[reqid] = d
return d
def errorChecker(self, packet):
if "Error" in packet:
raise SNError("", int(packet["Error"]), packet["Request"])
return packet
| Python | 0.000001 |
b223865ded88b5467c1088abbf628048e39e564c | Test str_cat() schema; use fixtures for exception tests | blaze/expr/tests/test_strings.py | blaze/expr/tests/test_strings.py | import pytest
from datashape import dshape
from blaze import symbol
dshapes = ['var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string']
lhsrhs_ds = ['var * {name: string, comment: string[25]}',
'var * {name: string[10], comment: string}',
'var * {name: string, comment: string}',
'var * {name: ?string, comment: string}',
'var * {name: string, comment: ?string}']
@pytest.fixture(scope='module')
def strcat_sym():
'''
blaze symbol used to test exceptions raised by str_cat()
'''
ds = dshape('3 * {name: string, comment: string, num: int32}')
s = symbol('s', dshape=ds)
return s
@pytest.mark.parametrize('ds', dshapes)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
@pytest.mark.parametrize('ds', dshapes)
def test_str_upper_schema(ds):
t = symbol('t', ds)
expr_upper = getattr(t, 'name', t).str_upper()
expr_lower = getattr(t, 'name', t).str_upper()
assert (expr_upper.schema.measure ==
expr_lower.schema.measure ==
dshape('%sstring' % ('?' if '?' in ds else '')).measure)
@pytest.mark.parametrize('ds', lhsrhs_ds)
def test_str_schema(ds):
t = symbol('t', ds)
expr = t.name.str_cat(t.comment)
assert (expr.schema.measure ==
dshape('%sstring' % ('?' if '?' in ds else '')).measure)
def test_str_cat_exception_non_string_sep(strcat_sym):
with pytest.raises(TypeError):
strcat_sym.name.str_cat(strcat_sym.comment, sep=123)
def test_str_cat_exception_non_string_col_to_cat(strcat_sym):
with pytest.raises(TypeError):
strcat_sym.name.str_cat(strcat_sym.num)
| import pytest
from datashape import dshape
from blaze import symbol
dshapes = ['var * {name: string}',
'var * {name: ?string}',
'var * string',
'var * ?string',
'string']
@pytest.mark.parametrize('ds', dshapes)
def test_like(ds):
t = symbol('t', ds)
expr = getattr(t, 'name', t).like('Alice*')
assert expr.pattern == 'Alice*'
assert expr.schema.measure == dshape(
'%sbool' % ('?' if '?' in ds else '')
).measure
@pytest.mark.parametrize('ds', dshapes)
def test_str_upper_schema(ds):
t = symbol('t', ds)
expr_upper = getattr(t, 'name', t).str_upper()
expr_lower = getattr(t, 'name', t).str_upper()
assert (expr_upper.schema.measure ==
expr_lower.schema.measure ==
dshape('%sstring' % ('?' if '?' in ds else '')).measure)
class TestStrCatExceptions():
ds = dshape('3 * {name: string[10], comment: string[25], num: int32}')
s = symbol('s', dshape=ds)
def test_str_cat_exception_non_string_sep(self):
with pytest.raises(TypeError):
self.s.name.str_cat(self.s.comment, sep=123)
def test_str_cat_exception_non_string_col_to_cat(self):
with pytest.raises(TypeError):
self.s.name.str_cat(self.s.num)
| Python | 0 |
09fffb062b45e4715c092c0899a6d4f89cf0b4e1 | Fix toolbarbox test | bokeh/models/tests/test_tools.py | bokeh/models/tests/test_tools.py | from __future__ import absolute_import
from bokeh.models.layouts import Box
from bokeh.models.tools import Toolbar, ToolbarBox
# TODO (bev) validate entire list of props
def test_Toolbar():
tb = Toolbar()
assert tb.active_drag == 'auto'
assert tb.active_scroll == 'auto'
assert tb.active_tap == 'auto'
#
# ToolbarBox
#
def test_toolbar_box_is_instance_of_box():
tb_box = ToolbarBox()
assert isinstance(tb_box, Box)
def test_toolbar_box_properties():
tb_box = ToolbarBox()
assert tb_box.logo == "normal"
assert tb_box.toolbar_location == "right"
assert tb_box.tools == []
assert tb_box.merge_tools is True
| from __future__ import absolute_import
from bokeh.models.layouts import Box
from bokeh.models.tools import Toolbar, ToolbarBox
# TODO (bev) validate entire list of props
def test_Toolbar():
tb = Toolbar()
assert tb.active_drag == 'auto'
assert tb.active_scroll == 'auto'
assert tb.active_tap == 'auto'
#
# ToolbarBox
#
def test_toolbar_box_is_instance_of_box():
tb_box = ToolbarBox()
assert isinstance(tb_box, Box)
def test_toolbar_box_properties():
tb_box = ToolbarBox()
assert tb_box.logo == "normal"
assert tb_box.toolbar_location == "right"
assert tb_box.tools is None
assert tb_box.merge_tools is True
| Python | 0.000001 |
7daf7440b271c923e4a210a47ba4ba87d76181a3 | Add more assertions to rendering test | communication/tests.py | communication/tests.py | from communication.mail import SUBJECT, TEXT, HTML, send_email, render_blocks
from mock import Mock, patch, sentinel
from unittest2 import TestCase
from django.template.loader_tags import BlockNode
class SendEmailTestCase(TestCase):
def setUp(self):
patcher = patch('communication.mail.settings')
self.settings_mock = patcher.start()
patcher = patch('communication.mail.render_blocks')
self.render_mock = patcher.start()
patcher = patch('communication.mail.EmailMultiAlternatives')
self.email_mock = patcher.start()
self.settings_mock.DEFAULT_FROM_EMAIL = sentinel.from_email
def test_sending_email_without_html(self):
"""Html content is not attached when html block is missing"""
self.render_mock.return_value = {SUBJECT: sentinel.subject,
TEXT: sentinel.text}
send_email(address=sentinel.address,
template_name=sentinel.template_name,
context=sentinel.context)
self.assert_email_constructed()
self.email_mock().send.assert_called_once()
def test_sending_email_with_html(self):
"""Html content is attached when html block present"""
self.render_mock.return_value = {SUBJECT: sentinel.subject,
TEXT: sentinel.text,
HTML: sentinel.html}
send_email(address=sentinel.address,
template_name=sentinel.template_name,
context=sentinel.context)
self.assert_email_constructed()
self.email_mock().attach_alternative.assert_called_once_with(
sentinel.html, 'text/html')
self.email_mock().send.assert_called_once()
def assert_email_constructed(self):
self.email_mock.assert_called_once_with(
subject=sentinel.subject,
body=sentinel.text,
from_email=sentinel.from_email,
to=[sentinel.address])
def tearDown(self):
patch.stopall()
class RenderBlocksTestCase(TestCase):
@patch('communication.mail.get_template')
@patch('communication.mail.Context')
def test_block_rendering(self, context_mock, get_template_mock):
"""Template blocks are rendered with proper context"""
html_block = Mock(spec=BlockNode)
html_block.name = HTML
some_block = Mock(spec=BlockNode)
some_block.name = 'some_block'
non_block = Mock()
get_template_mock.return_value = [html_block, some_block, non_block]
blocks = render_blocks(template_name=sentinel.template_name,
context=sentinel.context)
context_mock.assert_called_once_with(sentinel.context)
html_block.render.assert_called_once_with(context_mock())
some_block.render.assert_not_called()
non_block.render.assert_not_called()
self.assertEquals(blocks, {HTML: html_block.render()})
| from communication.mail import SUBJECT, TEXT, HTML, send_email, render_blocks
from mock import Mock, patch, sentinel
from unittest2 import TestCase
from django.template.loader_tags import BlockNode
class SendEmailTestCase(TestCase):
def setUp(self):
patcher = patch('communication.mail.settings')
self.settings_mock = patcher.start()
patcher = patch('communication.mail.render_blocks')
self.render_mock = patcher.start()
patcher = patch('communication.mail.EmailMultiAlternatives')
self.email_mock = patcher.start()
self.settings_mock.DEFAULT_FROM_EMAIL = sentinel.from_email
def test_sending_email_without_html(self):
"""Html content is not attached when html block is missing"""
self.render_mock.return_value = {SUBJECT: sentinel.subject,
TEXT: sentinel.text}
send_email(address=sentinel.address,
template_name=sentinel.template_name,
context=sentinel.context)
self.assert_email_constructed()
self.email_mock().send.assert_called_once()
def test_sending_email_with_html(self):
"""Html content is attached when html block present"""
self.render_mock.return_value = {SUBJECT: sentinel.subject,
TEXT: sentinel.text,
HTML: sentinel.html}
send_email(address=sentinel.address,
template_name=sentinel.template_name,
context=sentinel.context)
self.assert_email_constructed()
self.email_mock().attach_alternative.assert_called_once_with(
sentinel.html, 'text/html')
self.email_mock().send.assert_called_once()
def assert_email_constructed(self):
self.email_mock.assert_called_once_with(
subject=sentinel.subject,
body=sentinel.text,
from_email=sentinel.from_email,
to=[sentinel.address])
def tearDown(self):
patch.stopall()
class RenderBlocksTestCase(TestCase):
@patch('communication.mail.get_template')
@patch('communication.mail.Context')
def test_block_rendering(self, context_mock, get_template_mock):
"""Template blocks are rendered with proper context"""
html_block = Mock(spec=BlockNode)
html_block.name = HTML
some_block = Mock(spec=BlockNode)
some_block.name = 'some_block'
non_block = Mock()
get_template_mock.return_value = [html_block, some_block, non_block]
blocks = render_blocks(template_name=sentinel.template_name,
context=sentinel.context)
self.assertEquals(blocks, {HTML: html_block.render()})
context_mock.assert_called_once_with(sentinel.context)
| Python | 0 |
32720b2cc4e2599a57ddf81ec1d9c334d71b29f1 | Add Schedule to the system job migration | awx/main/migrations/0010_v300_create_system_job_templates.py | awx/main/migrations/0010_v300_create_system_job_templates.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.utils.timezone import now
from awx.api.license import feature_enabled
def create_system_job_templates(apps, schema_editor):
'''
Create default system job templates if not present. Create default schedules
only if new system job templates were created (i.e. new database).
'''
SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
Schedule = apps.get_model('main', 'Schedule')
ContentType = apps.get_model('contenttypes', 'ContentType')
sjt_ct = ContentType.objects.get_for_model(SystemJobTemplate)
now_dt = now()
now_str = now_dt.strftime('%Y%m%dT%H%M%SZ')
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_jobs',
defaults=dict(
name='Cleanup Job Details',
description='Remove job history older than X days',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created:
sjt.schedules.create(
name='Cleanup Job Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'days': '120'},
created=now_dt,
modified=now_dt,
)
existing_cd_jobs = SystemJobTemplate.objects.filter(job_type='cleanup_deleted')
Schedule.objects.filter(unified_job_template__in=existing_cd_jobs).delete()
existing_cd_jobs.delete()
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_activitystream',
defaults=dict(
name='Cleanup Activity Stream',
description='Remove activity stream history older than X days',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created:
sjt.schedules.create(
name='Cleanup Activity Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=TU' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'days': '355'},
created=now_dt,
modified=now_dt,
)
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_facts',
defaults=dict(
name='Cleanup Fact Details',
description='Remove system tracking history',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created and feature_enabled('system_tracking', bypass_database=True):
sjt.schedules.create(
name='Cleanup Fact Schedule',
rrule='DTSTART:%s RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=1' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'older_than': '120d', 'granularity': '1w'},
created=now_dt,
modified=now_dt,
)
class Migration(migrations.Migration):
dependencies = [
('main', '0009_v300_rbac_migrations'),
]
operations = [
migrations.RunPython(create_system_job_templates, migrations.RunPython.noop),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.utils.timezone import now
from awx.api.license import feature_enabled
def create_system_job_templates(apps, schema_editor):
'''
Create default system job templates if not present. Create default schedules
only if new system job templates were created (i.e. new database).
'''
SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
ContentType = apps.get_model('contenttypes', 'ContentType')
sjt_ct = ContentType.objects.get_for_model(SystemJobTemplate)
now_dt = now()
now_str = now_dt.strftime('%Y%m%dT%H%M%SZ')
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_jobs',
defaults=dict(
name='Cleanup Job Details',
description='Remove job history older than X days',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created:
sjt.schedules.create(
name='Cleanup Job Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'days': '120'},
created=now_dt,
modified=now_dt,
)
existing_cd_jobs = SystemJobTemplate.objects.filter(job_type='cleanup_deleted')
Schedule.objects.filter(unified_job_template__in=existing_cd_jobs).delete()
existing_cd_jobs.delete()
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_activitystream',
defaults=dict(
name='Cleanup Activity Stream',
description='Remove activity stream history older than X days',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created:
sjt.schedules.create(
name='Cleanup Activity Schedule',
rrule='DTSTART:%s RRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=TU' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'days': '355'},
created=now_dt,
modified=now_dt,
)
sjt, created = SystemJobTemplate.objects.get_or_create(
job_type='cleanup_facts',
defaults=dict(
name='Cleanup Fact Details',
description='Remove system tracking history',
created=now_dt,
modified=now_dt,
polymorphic_ctype=sjt_ct,
),
)
if created and feature_enabled('system_tracking', bypass_database=True):
sjt.schedules.create(
name='Cleanup Fact Schedule',
rrule='DTSTART:%s RRULE:FREQ=MONTHLY;INTERVAL=1;BYMONTHDAY=1' % now_str,
description='Automatically Generated Schedule',
enabled=True,
extra_data={'older_than': '120d', 'granularity': '1w'},
created=now_dt,
modified=now_dt,
)
class Migration(migrations.Migration):
dependencies = [
('main', '0009_v300_rbac_migrations'),
]
operations = [
migrations.RunPython(create_system_job_templates, migrations.RunPython.noop),
]
| Python | 0 |
a98ebc7728947e77d92378cdf867c500212738ca | Update dbx_email_alerts.py | Sharing/dbx_email_alerts.py | Sharing/dbx_email_alerts.py | #install the dropbox SDK with 'pip install dropbox'
import dropbox
import datetime
import time
import smtplib
import requests
#requires Dropbox Business API token with 'Team Auditing' permission
token = "<enter token here>"
cursor = None
# instantiating dropbox team object
dbxt = dropbox.DropboxTeam(token)
# Full list of alerts available at:
# https://www.dropbox.com/developers/documentation/http/teams#team_log-get_events
alerts = {"sign_in_as_session_start",
"member_change_admin_role",
"shared_link_create",
# "login_fail",
# "shared_folder_create",
# "file_request_create",
# "account_capture_relinquish_account",
# "shared_content_copy"
}
# If using gmail, "enable less secure apps" needs to be turned on.
# https://myaccount.google.com/security -> "Enable less secure apps"
# For a more robust solution, use an email API tool e.g. Mailgun
sender_email = "<sender_email@gmail.com>"
sender_pw = "<sender_password"
receiver_email = "<receiver_email>"
def send_email(subject, body):
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login(sender_email, sender_pw)
message = "Subject: %s \n\n %s" % (subject, body)
s.sendmail(sender_email, receiver_email, message)
s.quit()
def check_alerts(token):
global cursor
#On the first cycle, the cursor will be none. The cursor will be
#updated on following cycles
if cursor is None:
# Start time has an offset of 1 minute from the current time. Can
# optionally increase or decrease the start time offset. For example,
# if you stop the script and plan to restart it 12 hours later, you may
# want to increase the offset to 12 hours so that events in the 12 hours
# prior to start are captured.
start_time = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
time_range = dropbox.team_common.TimeRange(start_time=start_time)
log = dbxt.team_log_get_events(time=time_range)
events = log.events
cursor = log.cursor
for event in events:
if event.event_type._tag in alerts:
email_subject = event.event_type._tag
email_body = "Event was found at: %s" % event.timestamp
send_email(email_subject, email_body)
else:
log = dbxt.team_log_get_events_continue(cursor)
events = log.events
cursor = log.cursor
has_more = log.has_more
for event in events:
if event.event_type._tag in alerts:
email_subject = event.event_type._tag
email_body = "Event was found at: %s" % event.timestamp
send_email(email_subject, email_body)
# run the check alerts sequence on a 1 minute loop.
while True:
try:
print(datetime.datetime.utcnow())
check_alerts(token)
time.sleep(60)
except requests.exceptions.ReadTimeout:
print ("Request Timeout")
except requests.exceptions.ConnectionError:
print ("Connection Error")
# Breaking on other errors and notifying of a required restart.
# It is recommended to handle potential Dropbox and other
# errors specifically
except Exception as e:
print(e)
subject = "Alert Service Error - Restart Required"
body = "Alert service ecountered an error and needs to be restarted: %s" % e
send_email(subject, body)
break
| #install the dropbox SDK with 'pip install dropbox'
import dropbox
import datetime
import time
import smtplib
import requests
#requires Dropbox Business API token with 'Team Auditing' permission
token = "<enter token here>"
cursor = None
# instantiating dropbox team object
dbxt = dropbox.DropboxTeam(token)
# Full list of alerts available at:
# https://www.dropbox.com/developers/documentation/http/teams#team_log-get_events
alerts = ["sign_in_as_session_start",
"member_change_admin_role",
"shared_link_create",
# "login_fail",
# "shared_folder_create",
# "file_request_create",
# "account_capture_relinquish_account",
# "shared_content_copy"
]
# If using gmail, "enable less secure apps" needs to be turned on.
# https://myaccount.google.com/security -> "Enable less secure apps"
# For a more robust solution, use an email API tool e.g. Mailgun
sender_email = "<sender_email@gmail.com>"
sender_pw = "<sender_password"
receiver_email = "<receiver_email>"
def send_email(subject, body):
s = smtplib.SMTP('smtp.gmail.com', 587)
s.starttls()
s.login(sender_email, sender_pw)
message = "Subject: %s \n\n %s" % (subject, body)
s.sendmail(sender_email, receiver_email, message)
s.quit()
def check_alerts(token):
global cursor
#On the first cycle, the cursor will be none. The cursor will be
#updated on following cycles
if cursor is None:
# Start time has an offset of 1 minute from the current time. Can
# optionally increase or decrease the start time offset. For example,
# if you stop the script and plan to restart it 12 hours later, you may
# want to increase the offset to 12 hours so that events in the 12 hours
# prior to start are captured.
start_time = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
time_range = dropbox.team_common.TimeRange(start_time=start_time)
log = dbxt.team_log_get_events(time=time_range)
events = log.events
cursor = log.cursor
for event in events:
if event.event_type._tag in alerts:
email_subject = event.event_type._tag
email_body = "Event was found at: %s" % event.timestamp
send_email(email_subject, email_body)
else:
log = dbxt.team_log_get_events_continue(cursor)
events = log.events
cursor = log.cursor
has_more = log.has_more
for event in events:
if event.event_type._tag in alerts:
email_subject = event.event_type._tag
email_body = "Event was found at: %s" % event.timestamp
send_email(email_subject, email_body)
# run the check alerts sequence on a 1 minute loop.
while True:
try:
print(datetime.datetime.utcnow())
check_alerts(token)
time.sleep(60)
except requests.exceptions.ReadTimeout:
print ("Request Timeout")
except requests.exceptions.ConnectionError:
print ("Connection Error")
# Breaking on other errors and notifying of a required restart.
# It is recommended to handle potential Dropbox and other
# errors specifically
except Exception as e:
print(e)
subject = "Alert Service Error - Restart Required"
body = "Alert service ecountered an error and needs to be restarted: %s" % e
send_email(subject, body)
break
| Python | 0.000006 |
9fbab30f5d32d96460c7c8188f21a94aa050e0cb | add baomihua.com | extractors/baomihua.py | extractors/baomihua.py | #!/usr/bin/env python3
import re
import sys
import json
sys.path.append('..')
from define import *
from utils import *
from extractor import BasicExtractor
import urllib.parse
class BaoMiHuaExtractor(BasicExtractor):
'''
ku6下载器
'''
def __init__(self,c):
super(BaoMiHuaExtractor,self).__init__(c, BAOMIHUA)
def download(self):
print('baomihua:start downloading ...')
retry = 3
while retry >=0 :
self.page = get_html(self.c.url)
if self.page: break
retry -= 1
if not self.page:
print('error: request video info error,check url. %s' % (self.c.url,))
sys.exit(0)
self.i.vid = self.getVid()
if not self.i.vid:
print('error: not find vid! exit...')
sys.exit(0)
url = r'http://play.baomihua.com/getvideourl.aspx?flvid=%s' % (self.i.vid,)
html = get_html(url)
info = '&%s&' % (urllib.parse.unquote_plus(html),)
self.i.title = self.getTitle(info = info)
self.i.desc = self.getDesc(info = info)
self.i.tags = self.getTags(info = info)
self.i.m3u8 = self.query_m3u8(info = info)
self.i.fsize = self.getFsize(info = info)
self.i.fname = self.getFname()
self.flvlist = self.query_real(info = info)
self.i.views = self.getViews()
self.i.uptime = self.getUptime(info = info)
self.i.category = self.getCategory(info = info)
self.i.duration = self.getDuration(info = info)
ret = checkCondition(self.i,self.c)
if ret == C_PASS:
if not realDownload(self.flvlist,self.tmppath):
sys.exit(0)
#下载成功,合并视频,并删除临时文件
if not mergeVideos(self.flvlist, self.tmppath, self.i.path, self.i.fname):
sys.exit(0)
self.jsonToFile()
else:
print('tips: video do not math conditions. code = %d' % (ret,))
sys.exit(0)
def query_m3u8(self,*args,**kwargs):
m3u8 = ''
info = kwargs['info']
r = re.search(r'&hlshost=(.*?)&',info)
if r:
m3u8 = r.groups()[0]
return m3u8
def query_real(self,*args,**kwargs):
urls = []
info = kwargs['info']
host = ''
stream_name = ''
stream_type = ''
r = re.search('&host=(.*?)&',info)
if r:
host = r.groups()[0]
r2 = re.search('&stream_name=(.*?)&',info)
if r2:
stream_name = r2.groups()[0]
r3 = re.search('&videofiletype=(.*?)&',info)
if r3:
stream_type = r3.groups()[0]
url = r'http://%s/pomoho_video/%s.%s' % (host,stream_name,stream_type)
return [url]
def getVid(self,*args,**kwargs):
vid = ''
r = re.search(r'var\s+flvid\s*=\s*(\d+)',self.page)
if r:
vid = r.groups()[0]
else:
r2 = re.search(r'flvid=(\d+)',self.page)
if r2:
vid = r2.groups()[0]
return vid
def getFsize(self,*args,**kwargs):
size = 1024*1024
info = kwargs['info']
r = re.search(r'&videofilesize=(\d+)&',info)
if r:
size = r.groups()[0]
return int(size)
def getTitle(self,*args,**kwargs):
title = ''
info = kwargs['info']
r = re.search(r'&title=(.*?)&',info)
if r:
title = r.groups()[0]
return title
def getDesc(self,*args,**kwargs):
desc = self.i.title
r = re.search(r'\<meta\s+content=\"(.*?)\"\s+name=\"description\"',self.page)
if r:
desc = r.groups()[0]
return desc
def getTags(self,*args,**kwargs):
tag = ''
r = re.search(r'\<meta\s+content=\"(.*?)\"\s+name=\"keywords\"',self.page)
if r:
tag = r.groups()[0]
return tag.split(',')
def getViews(self,*args,**kwargs):
views = 1
r = re.search(r'var\s+appId\s*=\s*(\d+)\s*;',self.page)
appid = '0'
if r:
appid = r.groups()[0]
url = r'http://action.interface.baomihua.com/AppInfoApi.asmx/GetAppInfo?appid=%s' %(appid,)
data = get_html(url)
r = re.search(r'appPlayCount:\s*[\'\"](\d+)[\'\"]',data)
if r:
views = r.groups()[0]
return int(views)
def getCategory(self,*args,**kwargs):
cat = '未知'
return cat
def getDuration(self,*args,**kwargs):
duration = 0
info = kwargs['info']
r = re.search(r'&totaltime=(\d+)&',info)
if r:
duration = r.groups()[0]
return int(duration)
def getUptime(self,*args,**kwargs):
return '20150813'
def download(c):
d = BaoMiHuaExtractor(c)
return d.download() | #!/usr/bin/env python3 | Python | 0.000001 |
edb1c61a7ded49b63e272bd409fcbf6468173948 | remove comment | bitbots_head_behavior/src/bitbots_head_behavior/head_node.py | bitbots_head_behavior/src/bitbots_head_behavior/head_node.py | #!/usr/bin/env python3
"""
This is the ROS-Node which contains the head behavior, starts the appropriate DSD, initializes the HeadBlackboard
and subscribes to head_behavior specific ROS-Topics.
"""
import os
import rospy
from bitbots_blackboard.blackboard import HeadBlackboard
from dynamic_stack_decider.dsd import DSD
from humanoid_league_msgs.msg import HeadMode as HeadModeMsg, PoseWithCertainty, PoseWithCertaintyArray
from bitbots_msgs.msg import JointCommand
from sensor_msgs.msg import JointState
from std_msgs.msg import Header
from geometry_msgs.msg import PoseWithCovarianceStamped
from moveit_ros_planning_interface._moveit_roscpp_initializer import roscpp_init, roscpp_shutdown
from bitbots_ros_patches.rate import Rate
def run(dsd):
"""
Main run-loop
:returns: Never
"""
rate = Rate(60)
while not rospy.is_shutdown():
dsd.update()
rate.sleep()
# Also stop cpp node
roscpp_shutdown()
def init():
"""
Initialize new components needed for head_behavior:
blackboard, dsd, rostopic subscriber
"""
rospy.init_node('head_behavior')
# This is a general purpose initialization function provided by moved
# It is used to correctly initialize roscpp which is used in the collision checker module
roscpp_init('collision_checker', [])
blackboard = HeadBlackboard()
rospy.Subscriber('head_mode', HeadModeMsg, blackboard.head_capsule.head_mode_callback, queue_size=1)
rospy.Subscriber("ball_position_relative_filtered", PoseWithCovarianceStamped, blackboard.world_model.ball_filtered_callback)
rospy.Subscriber('joint_states', JointState, blackboard.head_capsule.joint_state_callback)
blackboard.head_capsule.position_publisher = rospy.Publisher("head_motor_goals", JointCommand, queue_size=10)
blackboard.head_capsule.visual_compass_record_trigger = rospy.Publisher(blackboard.config['visual_compass_trigger_topic'], Header, queue_size=5)
dirname = os.path.dirname(os.path.realpath(__file__))
dsd = DSD(blackboard, 'debug/dsd/head_behavior')
dsd.register_actions(os.path.join(dirname, 'actions'))
dsd.register_decisions(os.path.join(dirname, 'decisions'))
dsd.load_behavior(os.path.join(dirname, 'head_behavior.dsd'))
rospy.logdebug("Head Behavior completely loaded")
return dsd
if __name__ == '__main__':
run(init())
| #!/usr/bin/env python3
"""
This is the ROS-Node which contains the head behavior, starts the appropriate DSD, initializes the HeadBlackboard
and subscribes to head_behavior specific ROS-Topics.
"""
import os
import rospy
from bitbots_blackboard.blackboard import HeadBlackboard
from dynamic_stack_decider.dsd import DSD
from humanoid_league_msgs.msg import HeadMode as HeadModeMsg, PoseWithCertainty, PoseWithCertaintyArray
from bitbots_msgs.msg import JointCommand
from sensor_msgs.msg import JointState
from std_msgs.msg import Header
from geometry_msgs.msg import PoseWithCovarianceStamped
from moveit_ros_planning_interface._moveit_roscpp_initializer import roscpp_init, roscpp_shutdown
from bitbots_ros_patches.rate import Rate
def run(dsd):
"""
Main run-loop
:returns: Never
"""
rate = Rate(60)
while not rospy.is_shutdown():
dsd.update()
rate.sleep()
# Also stop cpp node
roscpp_shutdown()
def init():
"""
Initialize new components needed for head_behavior:
blackboard, dsd, rostopic subscriber
"""
rospy.init_node('head_behavior')
# This is a general purpose initialization function provided by moved
# It is used to correctly initialize roscpp which is used in the collision checker module
roscpp_init('collision_checker', [])
blackboard = HeadBlackboard()
rospy.Subscriber('head_mode', HeadModeMsg, blackboard.head_capsule.head_mode_callback, queue_size=1)
# rospy.Subscriber("balls_relative", PoseWithCertaintyArray, blackboard.world_model.balls_callback)
rospy.Subscriber("ball_position_relative_filtered", PoseWithCovarianceStamped, blackboard.world_model.ball_filtered_callback)
rospy.Subscriber('joint_states', JointState, blackboard.head_capsule.joint_state_callback)
blackboard.head_capsule.position_publisher = rospy.Publisher("head_motor_goals", JointCommand, queue_size=10)
blackboard.head_capsule.visual_compass_record_trigger = rospy.Publisher(blackboard.config['visual_compass_trigger_topic'], Header, queue_size=5)
dirname = os.path.dirname(os.path.realpath(__file__))
dsd = DSD(blackboard, 'debug/dsd/head_behavior')
dsd.register_actions(os.path.join(dirname, 'actions'))
dsd.register_decisions(os.path.join(dirname, 'decisions'))
dsd.load_behavior(os.path.join(dirname, 'head_behavior.dsd'))
rospy.logdebug("Head Behavior completely loaded")
return dsd
if __name__ == '__main__':
run(init())
| Python | 0 |
a65c57b85ecd57fdb8d0521c1b6ce3ecda5d3916 | Add library to list of types to upgrade. | src/encoded/commands/upgrade.py | src/encoded/commands/upgrade.py | """\
Run this to upgrade the site.
Examples
To update on the production server:
%(prog)s production.ini
For the development.ini you must supply the paster app name:
%(prog)s development.ini --app-name app
"""
from contextlib import contextmanager
import logging
EPILOG = __doc__
logger = logging.getLogger(__name__)
DEFAULT_COLLECTIONS = [
'library',
]
def internal_app(configfile, app_name=None, username=None):
from webtest import TestApp
from pyramid import paster
app = paster.get_app(configfile, app_name)
if not username:
username = 'IMPORT'
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': username,
}
return TestApp(app, environ)
def run(testapp, collections):
from ..storage import DBSession
with AlternateScope(DBSession) as scope:
if not collections:
collections = DEFAULT_COLLECTIONS
root = testapp.app.root_factory(testapp.app)
for collection_name in collections:
collection = root[collection_name]
count = 0
errors = 0
logger.info('Upgrading %s', collection_name)
for uuid in collection:
count += 1
with scope.change():
try:
testapp.patch_json('/%s' % uuid, {})
except Exception:
logger.exception('Upgrade failed for: /%s/%s', collection_name, uuid)
errors += 1
if count % 1000 == 0:
logger.info('Upgrading %s: %d', collection_name, count)
logger.info('Upgraded %s: %d (errors: %d)', collection_name, count, errors)
class AlternateScope(object):
def __init__(self, DBSession):
self.scope = None
self._DBSession = DBSession
def __enter__(self):
import transaction
from zope.sqlalchemy.datamanager import join_transaction
from sqlalchemy.orm.scoping import ScopedRegistry
self._original_registry = self._DBSession.registry
self._DBSession.registry = ScopedRegistry(
self._DBSession.session_factory, self._get_scope)
self.scope = self
txn = transaction.begin()
session = self._DBSession()
join_transaction(session)
transaction.manager.free(txn)
return self
def __exit__(self, exc_type, exc_value, traceback):
self._DBSession.registry = self._original_registry
self.scope = None
def _get_scope(self):
return self.scope
@contextmanager
def change(self, scope=None):
previous = self.scope
self.scope = scope
yield scope
self.scope = previous
def main():
import argparse
parser = argparse.ArgumentParser(
description="Update links and keys", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
testapp = internal_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
run(testapp, args.item_type)
if __name__ == '__main__':
main()
| """\
Run this to upgrade the site.
Examples
To update on the production server:
%(prog)s production.ini
For the development.ini you must supply the paster app name:
%(prog)s development.ini --app-name app
"""
from contextlib import contextmanager
import logging
EPILOG = __doc__
logger = logging.getLogger(__name__)
DEFAULT_COLLECTIONS = [
]
def internal_app(configfile, app_name=None, username=None):
from webtest import TestApp
from pyramid import paster
app = paster.get_app(configfile, app_name)
if not username:
username = 'IMPORT'
environ = {
'HTTP_ACCEPT': 'application/json',
'REMOTE_USER': username,
}
return TestApp(app, environ)
def run(testapp, collections):
from ..storage import DBSession
with AlternateScope(DBSession) as scope:
if not collections:
collections = DEFAULT_COLLECTIONS
root = testapp.app.root_factory(testapp.app)
for collection_name in collections:
collection = root[collection_name]
count = 0
errors = 0
logger.info('Upgrading %s', collection_name)
for uuid in collection:
count += 1
with scope.change():
try:
testapp.patch_json('/%s' % uuid, {})
except Exception:
logger.exception('Upgrade failed for: /%s/%s', collection_name, uuid)
errors += 1
if count % 1000 == 0:
logger.info('Upgrading %s: %d', collection_name, count)
logger.info('Upgraded %s: %d (errors: %d)', collection_name, count, errors)
class AlternateScope(object):
def __init__(self, DBSession):
self.scope = None
self._DBSession = DBSession
def __enter__(self):
import transaction
from zope.sqlalchemy.datamanager import join_transaction
from sqlalchemy.orm.scoping import ScopedRegistry
self._original_registry = self._DBSession.registry
self._DBSession.registry = ScopedRegistry(
self._DBSession.session_factory, self._get_scope)
self.scope = self
txn = transaction.begin()
session = self._DBSession()
join_transaction(session)
transaction.manager.free(txn)
return self
def __exit__(self, exc_type, exc_value, traceback):
self._DBSession.registry = self._original_registry
self.scope = None
def _get_scope(self):
return self.scope
@contextmanager
def change(self, scope=None):
previous = self.scope
self.scope = scope
yield scope
self.scope = previous
def main():
import argparse
parser = argparse.ArgumentParser(
description="Update links and keys", epilog=EPILOG,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('--app-name', help="Pyramid app name in configfile")
parser.add_argument('--item-type', action='append', help="Item type")
parser.add_argument('config_uri', help="path to configfile")
args = parser.parse_args()
logging.basicConfig()
testapp = internal_app(args.config_uri, args.app_name)
# Loading app will have configured from config file. Reconfigure here:
logging.getLogger('encoded').setLevel(logging.DEBUG)
run(testapp, args.item_type)
if __name__ == '__main__':
main()
| Python | 0 |
f93ce7ca0c73946e1997572576b28436d53ef970 | Update to V2 API | salt/modules/opsgenie.py | salt/modules/opsgenie.py | # -*- coding: utf-8 -*-
'''
Module for sending data to OpsGenie
.. versionadded:: 2018.3.0
:configuration: This module can be used in Reactor System for
posting data to OpsGenie as a remote-execution function.
For example:
.. code-block:: yaml
opsgenie_event_poster:
local.opsgenie.post_data:
- tgt: 'salt-minion'
- kwarg:
name: event.reactor
api_key: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
reason: {{ data['data']['reason'] }}
action_type: Create
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
import requests
# Import Salt libs
import salt.exceptions
import salt.utils.json
API_ENDPOINT = "https://api.opsgenie.com/v2/alerts"
log = logging.getLogger(__name__)
def post_data(api_key=None, name='OpsGenie Execution Module', reason=None,
action_type=None):
'''
Post data to OpsGenie. It's designed for Salt's Event Reactor.
After configuring the sls reaction file as shown above, you can trigger the
module with your designated tag (og-tag in this case).
CLI Example:
.. code-block:: bash
salt-call event.send 'og-tag' '{"reason" : "Overheating CPU!"}'
Required parameters:
api_key
It's the API Key you've copied while adding integration in OpsGenie.
reason
It will be used as alert's default message in OpsGenie.
action_type
OpsGenie supports the default values Create/Close for action_type. You
can customize this field with OpsGenie's custom actions for other
purposes like adding notes or acknowledging alerts.
Optional parameters:
name
It will be used as alert's alias. If you want to use the close
functionality you must provide name field for both states like in
this case.
'''
if api_key is None or reason is None:
raise salt.exceptions.SaltInvocationError(
'API Key or Reason cannot be None.')
data = dict()
data['alias'] = name
data['message'] = reason
# data['actions'] = action_type
data['cpuModel'] = __grains__['cpu_model']
data['cpuArch'] = __grains__['cpuarch']
data['fqdn'] = __grains__['fqdn']
data['host'] = __grains__['host']
data['id'] = __grains__['id']
data['kernel'] = __grains__['kernel']
data['kernelRelease'] = __grains__['kernelrelease']
data['master'] = __grains__['master']
data['os'] = __grains__['os']
data['saltPath'] = __grains__['saltpath']
data['saltVersion'] = __grains__['saltversion']
data['username'] = __grains__['username']
data['uuid'] = __grains__['uuid']
log.debug('Below data will be posted:\n%s', data)
log.debug('API Key: %s \t API Endpoint: %s', api_key, API_ENDPOINT)
if action_type == "Create":
response = requests.post(
url=API_ENDPOINT,
data=salt.utils.json.dumps(data),
headers={'Content-Type': 'application/json',
'Authorization': 'GenieKey ' + api_key})
else:
response = requests.post(
url=API_ENDPOINT + "/" + name + "/close?identifierType=alias",
data=salt.utils.json.dumps(data),
headers={'Content-Type': 'application/json',
'Authorization': 'GenieKey ' + api_key})
return response.status_code, response.text
| # -*- coding: utf-8 -*-
'''
Module for sending data to OpsGenie
.. versionadded:: 2018.3.0
:configuration: This module can be used in Reactor System for
posting data to OpsGenie as a remote-execution function.
For example:
.. code-block:: yaml
opsgenie_event_poster:
local.opsgenie.post_data:
- tgt: 'salt-minion'
- kwarg:
name: event.reactor
api_key: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
reason: {{ data['data']['reason'] }}
action_type: Create
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
import requests
# Import Salt libs
import salt.exceptions
import salt.utils.json
API_ENDPOINT = "https://api.opsgenie.com/v1/json/saltstack?apiKey="
log = logging.getLogger(__name__)
def post_data(api_key=None, name='OpsGenie Execution Module', reason=None,
action_type=None):
'''
Post data to OpsGenie. It's designed for Salt's Event Reactor.
After configuring the sls reaction file as shown above, you can trigger the
module with your designated tag (og-tag in this case).
CLI Example:
.. code-block:: bash
salt-call event.send 'og-tag' '{"reason" : "Overheating CPU!"}'
Required parameters:
api_key
It's the API Key you've copied while adding integration in OpsGenie.
reason
It will be used as alert's default message in OpsGenie.
action_type
OpsGenie supports the default values Create/Close for action_type. You
can customize this field with OpsGenie's custom actions for other
purposes like adding notes or acknowledging alerts.
Optional parameters:
name
It will be used as alert's alias. If you want to use the close
functionality you must provide name field for both states like in
this case.
'''
if api_key is None or reason is None or action_type is None:
raise salt.exceptions.SaltInvocationError(
'API Key or Reason or Action Type cannot be None.')
data = dict()
data['name'] = name
data['reason'] = reason
data['actionType'] = action_type
data['cpuModel'] = __grains__['cpu_model']
data['cpuArch'] = __grains__['cpuarch']
data['fqdn'] = __grains__['fqdn']
data['host'] = __grains__['host']
data['id'] = __grains__['id']
data['kernel'] = __grains__['kernel']
data['kernelRelease'] = __grains__['kernelrelease']
data['master'] = __grains__['master']
data['os'] = __grains__['os']
data['saltPath'] = __grains__['saltpath']
data['saltVersion'] = __grains__['saltversion']
data['username'] = __grains__['username']
data['uuid'] = __grains__['uuid']
log.debug('Below data will be posted:\n%s', data)
log.debug('API Key: %s \t API Endpoint: %s', api_key, API_ENDPOINT)
response = requests.post(
url=API_ENDPOINT + api_key,
data=salt.utils.json.dumps(data),
headers={'Content-Type': 'application/json'})
return response.status_code, response.text
| Python | 0 |
0fceb297dc4855cd5617daaf9821fb3a332c19ed | Fix descriptions | mediacrush/slimdown.py | mediacrush/slimdown.py | from functools import partial
from markdown import Markdown, odict
from markdown.blockprocessors import build_block_parser
from markdown.preprocessors import build_preprocessors
from markdown.inlinepatterns import build_inlinepatterns
from markdown.treeprocessors import build_treeprocessors
slimdown = Markdown(safe_mode="escape")
# Remove some block parsers
block = build_block_parser(slimdown)
del block.blockprocessors["hashheader"]
del block.blockprocessors["setextheader"]
del block.blockprocessors["olist"]
del block.blockprocessors["ulist"]
slimdown.parser = block
# Delete most inline patterns
inline = build_inlinepatterns(slimdown)
del inline["backtick"]
del inline["reference"]
del inline["image_link"]
del inline["image_reference"]
del inline["short_reference"]
del inline["autolink"]
del inline["automail"]
del inline["entity"]
slimdown.inlinePatterns = inline
# Monkey-patch unicode fix
slimdown._convert = slimdown.convert
def slimdown_convert(text):
text = text.decode('utf-8')
return slimdown._convert(text)
slimdown.convert = slimdown_convert
| from functools import partial
from markdown import Markdown, odict
from markdown.blockprocessors import build_block_parser
from markdown.preprocessors import build_preprocessors
from markdown.inlinepatterns import build_inlinepatterns
from markdown.treeprocessors import build_treeprocessors
slimdown = Markdown(safe_mode="escape")
# Remove some block parsers
block = build_block_parser(slimdown)
del block.blockprocessors["hashheader"]
del block.blockprocessors["setextheader"]
del block.blockprocessors["olist"]
del block.blockprocessors["ulist"]
slimdown.parser = block
# Delete most inline patterns
inline = build_inlinepatterns(slimdown)
del inline["backtick"]
del inline["reference"]
del inline["image_link"]
del inline["image_reference"]
del inline["short_reference"]
del inline["autolink"]
del inline["automail"]
del inline["entity"]
slimdown.inlinePatterns = inline
| Python | 0.00079 |
2871d4d45c70cb3619bbf12ed77a8a94e038702e | Set default language to finnish should the mail be sent to admins | ckanext/ytp/request/mail.py | ckanext/ytp/request/mail.py | from ckan.lib.i18n import set_lang, get_lang
from ckan.lib.mailer import mail_user
from pylons import i18n
from ckan.common import _
import logging
log = logging.getLogger(__name__)
_SUBJECT_MEMBERSHIP_REQUEST = lambda: _(
"New membership request (%(organization)s)")
_MESSAGE_MEMBERSHIP_REQUEST = lambda: _("""\
User %(user)s (%(email)s) has requested membership to organization %(organization)s.
%(link)s
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
_SUBJECT_MEMBERSHIP_APPROVED = lambda: _(
"Organization membership approved (%(organization)s)")
_MESSAGE_MEMBERSHIP_APPROVED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been approved.
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
_SUBJECT_MEMBERSHIP_REJECTED = lambda: _(
"Organization membership rejected (%(organization)s)")
_MESSAGE_MEMBERSHIP_REJECTED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been rejected.
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
def mail_new_membership_request(locale, admin, group_name, url, user_name, user_email):
#Mail sent to admins should be sent with default locale, i.e. finnish not the locale defined by the user
current_locale = get_lang()
#Set the locale to default
_reset_lang()
subject = _SUBJECT_MEMBERSHIP_REQUEST() % {
'organization': group_name
}
message = _MESSAGE_MEMBERSHIP_REQUEST() % {
'user': user_name,
'email': user_email,
'organization': group_name,
'link': url
}
try:
mail_user(admin, subject, message)
except Exception:
log.exception("Mail could not be sent")
finally:
set_lang(current_locale)
def mail_process_status(locale, member_user, approve, group_name, capacity):
current_locale = get_lang()
if locale == 'en':
_reset_lang()
else:
set_lang(locale)
role_name = _(capacity)
subject_template = _SUBJECT_MEMBERSHIP_APPROVED(
) if approve else _SUBJECT_MEMBERSHIP_REJECTED()
message_template = _MESSAGE_MEMBERSHIP_APPROVED(
) if approve else _MESSAGE_MEMBERSHIP_REJECTED()
subject = subject_template % {
'organization': group_name
}
message = message_template % {
'role': role_name,
'organization': group_name
}
try:
mail_user(member_user, subject, message)
except Exception:
log.exception("Mail could not be sent")
# raise MailerException("Mail could not be sent")
finally:
set_lang(current_locale)
def _reset_lang():
try:
i18n.set_lang(None)
except TypeError:
pass
| from ckan.lib.i18n import set_lang, get_lang
from ckan.lib.mailer import mail_user
from pylons import i18n
from ckan.common import _
import logging
log = logging.getLogger(__name__)
_SUBJECT_MEMBERSHIP_REQUEST = lambda: _(
"New membership request (%(organization)s)")
_MESSAGE_MEMBERSHIP_REQUEST = lambda: _("""\
User %(user)s (%(email)s) has requested membership to organization %(organization)s.
%(link)s
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
_SUBJECT_MEMBERSHIP_APPROVED = lambda: _(
"Organization membership approved (%(organization)s)")
_MESSAGE_MEMBERSHIP_APPROVED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been approved.
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
_SUBJECT_MEMBERSHIP_REJECTED = lambda: _(
"Organization membership rejected (%(organization)s)")
_MESSAGE_MEMBERSHIP_REJECTED = lambda: _("""\
Your membership request to organization %(organization)s with %(role)s access has been rejected.
Best regards
Avoindata.fi support
valtori@avoindata.fi
""")
def mail_new_membership_request(locale, admin, group_name, url, user_name, user_email):
current_locale = get_lang()
if locale == 'en':
_reset_lang()
else:
set_lang(locale)
subject = _SUBJECT_MEMBERSHIP_REQUEST() % {
'organization': group_name
}
message = _MESSAGE_MEMBERSHIP_REQUEST() % {
'user': user_name,
'email': user_email,
'organization': group_name,
'link': url
}
try:
mail_user(admin, subject, message)
except Exception:
log.exception("Mail could not be sent")
finally:
set_lang(current_locale)
def mail_process_status(locale, member_user, approve, group_name, capacity):
current_locale = get_lang()
if locale == 'en':
_reset_lang()
else:
set_lang(locale)
role_name = _(capacity)
subject_template = _SUBJECT_MEMBERSHIP_APPROVED(
) if approve else _SUBJECT_MEMBERSHIP_REJECTED()
message_template = _MESSAGE_MEMBERSHIP_APPROVED(
) if approve else _MESSAGE_MEMBERSHIP_REJECTED()
subject = subject_template % {
'organization': group_name
}
message = message_template % {
'role': role_name,
'organization': group_name
}
try:
mail_user(member_user, subject, message)
except Exception:
log.exception("Mail could not be sent")
# raise MailerException("Mail could not be sent")
finally:
set_lang(current_locale)
def _reset_lang():
try:
i18n.set_lang(None)
except TypeError:
pass
| Python | 0 |
9f26c83b1ba25d8c64a8e9418310f5dd0e6cb9bd | refactor clean | estudios_socioeconomicos/views.py | estudios_socioeconomicos/views.py | from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required, user_passes_test
import django_excel as excel
from administracion.models import Escuela, Colegiatura
from becas.models import Beca
from captura.utils import get_study_info
from captura.models import Retroalimentacion
from perfiles_usuario.utils import is_capturista, is_member, ADMINISTRADOR_GROUP, CAPTURISTA_GROUP
from perfiles_usuario.utils import is_administrador
from familias.models import Integrante, Familia, Comentario, Alumno, Tutor
from familias.utils import total_egresos_familia, total_ingresos_familia, total_neto_familia
from indicadores.models import Transaccion, Ingreso, Oficio, Periodo
from .models import Estudio, Foto, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta
@login_required
@user_passes_test(is_administrador)
def download_studies(request):
""" View for an administrator to make a database dump into an excell
sheet. Each table will be emptied to a page inside the excell
document.
"""
return excel.make_response_from_tables(
[
Transaccion, Ingreso, Oficio, Periodo,
Integrante, Familia, Comentario, Alumno, Tutor,
Estudio, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta,
Retroalimentacion, Beca, Escuela, Colegiatura
],
'xls',
file_name="JP2_ESTUDIOS_SOCIOECONOMICOS")
@login_required
@user_passes_test(lambda u: is_member(u, [ADMINISTRADOR_GROUP, CAPTURISTA_GROUP]))
def focus_mode(request, id_estudio):
""" View to see the detail information about a family and their study.
"""
context = {}
estudio = get_object_or_404(Estudio.objects.filter(pk=id_estudio))
if is_capturista(request.user):
get_object_or_404(
Estudio.objects.filter(pk=id_estudio),
capturista=request.user.capturista)
integrantes = Integrante.objects.filter(familia=estudio.familia).select_related()
fotos = Foto.objects.filter(estudio=id_estudio)
context['estudio'] = estudio
context['integrantes'] = integrantes
context['fotos'] = fotos
context['total_egresos_familia'] = total_egresos_familia(estudio.familia.id)
context['total_ingresos_familia'] = total_ingresos_familia(estudio.familia.id)
context['total_neto_familia'] = total_neto_familia(estudio.familia.id)
transacciones = Transaccion.objects.filter(es_ingreso=True, familia=estudio.familia)
context['ingresos'] = Ingreso.objects.filter(transaccion__in=transacciones)
context['egresos'] = Transaccion.objects.filter(es_ingreso=False, familia=estudio.familia)
context['cuestionario'] = get_study_info(estudio)
context['status_options'] = Estudio.get_options_status()
return render(
request,
'estudios_socioeconomicos/focus_mode.html',
context)
| import csv
import json
from collections import OrderedDict
from django.shortcuts import render, get_object_or_404
from django.contrib.auth.decorators import login_required, user_passes_test
from django.http import HttpResponse
from rest_framework.response import Response
import django_excel as excel
from administracion.models import Escuela, Colegiatura
from becas.models import Beca
from captura.utils import get_study_info
from captura.models import Retroalimentacion
from perfiles_usuario.utils import is_capturista, is_member, ADMINISTRADOR_GROUP, CAPTURISTA_GROUP
from perfiles_usuario.utils import is_administrador
from familias.models import Integrante, Familia, Comentario, Integrante, Alumno, Tutor
from familias.utils import total_egresos_familia, total_ingresos_familia, total_neto_familia
from indicadores.models import Transaccion, Ingreso, Oficio, Periodo
from .models import Estudio, Foto, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta
from .serializers import EstudioSerializer
@login_required
@user_passes_test(is_administrador)
def download_studies(request):
""" View for an administrator to make a database dump into an excell
sheet. Each table will be emptied to a page inside the excell
document.
"""
return excel.make_response_from_tables(
[
Transaccion, Ingreso, Oficio, Periodo,
Integrante, Familia, Comentario, Integrante, Alumno, Tutor,
Estudio, Seccion, Subseccion, Pregunta, OpcionRespuesta, Respuesta,
Retroalimentacion, Beca, Escuela, Colegiatura
],
'xls',
file_name="JP2_ESTUDIOS_SOCIOECONOMICOS")
@login_required
@user_passes_test(lambda u: is_member(u, [ADMINISTRADOR_GROUP, CAPTURISTA_GROUP]))
def focus_mode(request, id_estudio):
""" View to see the detail information about a family and their study.
"""
context = {}
estudio = get_object_or_404(Estudio.objects.filter(pk=id_estudio))
if is_capturista(request.user):
get_object_or_404(
Estudio.objects.filter(pk=id_estudio),
capturista=request.user.capturista)
integrantes = Integrante.objects.filter(familia=estudio.familia).select_related()
fotos = Foto.objects.filter(estudio=id_estudio)
context['estudio'] = estudio
context['integrantes'] = integrantes
context['fotos'] = fotos
context['total_egresos_familia'] = total_egresos_familia(estudio.familia.id)
context['total_ingresos_familia'] = total_ingresos_familia(estudio.familia.id)
context['total_neto_familia'] = total_neto_familia(estudio.familia.id)
transacciones = Transaccion.objects.filter(es_ingreso=True, familia=estudio.familia)
context['ingresos'] = Ingreso.objects.filter(transaccion__in=transacciones)
context['egresos'] = Transaccion.objects.filter(es_ingreso=False, familia=estudio.familia)
context['cuestionario'] = get_study_info(estudio)
context['status_options'] = Estudio.get_options_status()
return render(
request,
'estudios_socioeconomicos/focus_mode.html',
context)
| Python | 0.000278 |
a23a1050501563889c2806a514fe2994a2ebe3a8 | Add python3 support in example | example/consume_many_csv_files.py | example/consume_many_csv_files.py | from __future__ import print_function
from itertools import chain
try:
from itertools import imap
except ImportError:
# if python 3
imap = map
import karld
from karld.path import i_walk_csv_paths
def main():
"""
Consume many csv files as if one.
"""
import pathlib
input_dir = pathlib.Path('test_data/things_kinds')
# # Use a generator expression
# iterables = (karld.io.i_get_csv_data(data_path)
# for data_path in i_walk_csv_paths(str(input_dir)))
# # or a generator map.
iterables = imap(karld.io.i_get_csv_data,
i_walk_csv_paths(str(input_dir)))
items = chain.from_iterable(iterables)
for item in items:
print(item[0], item[1])
if __name__ == "__main__":
main()
| from __future__ import print_function
from itertools import chain
from itertools import imap
import karld
from karld.path import i_walk_csv_paths
def main():
"""
Consume many csv files as if one.
"""
import pathlib
input_dir = pathlib.Path('test_data/things_kinds')
# # Use a generator expression
# iterables = (karld.io.i_get_csv_data(data_path)
# for data_path in i_walk_csv_paths(str(input_dir)))
# # or a generator map.
iterables = imap(karld.io.i_get_csv_data,
i_walk_csv_paths(str(input_dir)))
items = chain.from_iterable(iterables)
for item in items:
print(item[0], item[1])
if __name__ == "__main__":
main()
| Python | 0.000001 |
d27c34c65198280e324c37acda7f33ece07c2c92 | make text field usage consistnt | examples/plotting/server/markers.py | examples/plotting/server/markers.py | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr):
p.scatter(x, y, marker=typestr,
line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12)
def mtext(p, x, y, textstr):
p.text(x, y, text=[textstr],
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser
| # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from numpy.random import random
from bokeh.plotting import *
def mscatter(p, x, y, typestr):
p.scatter(x, y, marker=typestr,
line_color="#6666ee", fill_color="#ee6666", fill_alpha=0.5, size=12)
def mtext(p, x, y, textstr):
p.text(x, y, text=textstr,
text_color="#449944", text_align="center", text_font_size="10pt")
output_server("markers")
p = figure(title="markers.py example")
N = 10
mscatter(p, random(N)+2, random(N)+1, "circle")
mscatter(p, random(N)+4, random(N)+1, "square")
mscatter(p, random(N)+6, random(N)+1, "triangle")
mscatter(p, random(N)+8, random(N)+1, "asterisk")
mscatter(p, random(N)+2, random(N)+4, "circle_x")
mscatter(p, random(N)+4, random(N)+4, "square_x")
mscatter(p, random(N)+6, random(N)+4, "inverted_triangle")
mscatter(p, random(N)+8, random(N)+4, "x")
mscatter(p, random(N)+2, random(N)+7, "circle_cross")
mscatter(p, random(N)+4, random(N)+7, "square_cross")
mscatter(p, random(N)+6, random(N)+7, "diamond")
mscatter(p, random(N)+8, random(N)+7, "cross")
mtext(p, [2.5], [0.5], "circle / o")
mtext(p, [4.5], [0.5], "square")
mtext(p, [6.5], [0.5], "triangle")
mtext(p, [8.5], [0.5], "asterisk / *")
mtext(p, [2.5], [3.5], "circle_x / ox")
mtext(p, [4.5], [3.5], "square_x")
mtext(p, [6.5], [3.5], "inverted_triangle")
mtext(p, [8.5], [3.5], "x")
mtext(p, [2.5], [6.5], "circle_cross / o+")
mtext(p, [4.5], [6.5], "square_cross")
mtext(p, [6.5], [6.5], "diamond")
mtext(p, [8.5], [6.5], "cross / +")
show(p) # open a browser
| Python | 0.000004 |
0051b5a5e287057cab06452d4f178e4c04cbd0c5 | Put the win_osinfo classes in a helper function | salt/utils/win_osinfo.py | salt/utils/win_osinfo.py | # -*- coding: utf-8 -*-
'''
Get Version information from Windows
'''
# http://stackoverflow.com/questions/32300004/python-ctypes-getting-0-with-getversionex-function
from __future__ import absolute_import
# Import Third Party Libs
import ctypes
try:
from ctypes.wintypes import BYTE, WORD, DWORD, WCHAR
HAS_WIN32 = True
except (ImportError, ValueError):
HAS_WIN32 = False
if HAS_WIN32:
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
# Although utils are often directly imported, it is also possible to use the
# loader.
def __virtual__():
'''
Only load if Win32 Libraries are installed
'''
if not HAS_WIN32:
return False, 'This utility requires pywin32'
return 'win_osinfo'
def os_version_info_ex():
'''
Helper function to return the OSVersionInfo class
Returns:
class: The OsVersionInfo class
'''
class OSVersionInfo(ctypes.Structure):
_fields_ = (('dwOSVersionInfoSize', DWORD),
('dwMajorVersion', DWORD),
('dwMinorVersion', DWORD),
('dwBuildNumber', DWORD),
('dwPlatformId', DWORD),
('szCSDVersion', WCHAR * 128))
def __init__(self, *args, **kwds):
super(OSVersionInfo, self).__init__(*args, **kwds)
self.dwOSVersionInfoSize = ctypes.sizeof(self)
kernel32.GetVersionExW(ctypes.byref(self))
class OSVersionInfoEx(OSVersionInfo):
_fields_ = (('wServicePackMajor', WORD),
('wServicePackMinor', WORD),
('wSuiteMask', WORD),
('wProductType', BYTE),
('wReserved', BYTE))
return OSVersionInfoEx()
def get_os_version_info():
info = os_version_info_ex()
ret = {'MajorVersion': info.dwMajorVersion,
'MinorVersion': info.dwMinorVersion,
'BuildNumber': info.dwBuildNumber,
'PlatformID': info.dwPlatformId,
'ServicePackMajor': info.wServicePackMajor,
'ServicePackMinor': info.wServicePackMinor,
'SuiteMask': info.wSuiteMask,
'ProductType': info.wProductType}
return ret
| # -*- coding: utf-8 -*-
'''
Get Version information from Windows
'''
# http://stackoverflow.com/questions/32300004/python-ctypes-getting-0-with-getversionex-function
from __future__ import absolute_import
# Import Third Party Libs
import ctypes
try:
from ctypes.wintypes import BYTE, WORD, DWORD, WCHAR
HAS_WIN32 = True
except (ImportError, ValueError):
HAS_WIN32 = False
if HAS_WIN32:
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
# Although utils are often directly imported, it is also possible to use the
# loader.
def __virtual__():
'''
Only load if Win32 Libraries are installed
'''
if not HAS_WIN32:
return False, 'This utility requires pywin32'
return 'win_osinfo'
if HAS_WIN32:
class OSVERSIONINFO(ctypes.Structure):
_fields_ = (('dwOSVersionInfoSize', DWORD),
('dwMajorVersion', DWORD),
('dwMinorVersion', DWORD),
('dwBuildNumber', DWORD),
('dwPlatformId', DWORD),
('szCSDVersion', WCHAR * 128))
def __init__(self, *args, **kwds):
super(OSVERSIONINFO, self).__init__(*args, **kwds)
self.dwOSVersionInfoSize = ctypes.sizeof(self)
kernel32.GetVersionExW(ctypes.byref(self))
class OSVERSIONINFOEX(OSVERSIONINFO):
_fields_ = (('wServicePackMajor', WORD),
('wServicePackMinor', WORD),
('wSuiteMask', WORD),
('wProductType', BYTE),
('wReserved', BYTE))
def errcheck_bool(result, func, args):
if not result:
raise ctypes.WinError(ctypes.get_last_error())
return args
kernel32.GetVersionExW.errcheck = errcheck_bool
kernel32.GetVersionExW.argtypes = (ctypes.POINTER(OSVERSIONINFO),)
def get_os_version_info():
info = OSVERSIONINFOEX()
ret = {'MajorVersion': info.dwMajorVersion,
'MinorVersion': info.dwMinorVersion,
'BuildNumber': info.dwBuildNumber,
'PlatformID': info.dwPlatformId,
'ServicePackMajor': info.wServicePackMajor,
'ServicePackMinor': info.wServicePackMinor,
'SuiteMask': info.wSuiteMask,
'ProductType': info.wProductType}
return ret
| Python | 0.000002 |
be29826ded5f20f56a7996464a186ccc3f68c0d0 | Switch the default backend from amqp:// (deprecated) to rpc:// | openquake/engine/celeryconfig.py | openquake/engine/celeryconfig.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Config for all installed OpenQuake binaries and modules.
Should be installed by setup.py into /etc/openquake
eventually.
"""
import os
import sys
if '--with-doctest' in sys.argv: # horrible hack for nosetests
pass # don't set OQ_DISTRIBUTE
else:
os.environ["OQ_DISTRIBUTE"] = "celery"
# just in the case that are you using oq-engine from sources
# with the rest of oq libraries installed into the system (or a
# virtual environment) you must set this environment variable
if os.environ.get("OQ_ENGINE_USE_SRCDIR"):
sys.modules['openquake'].__dict__["__path__"].insert(
0, os.path.join(os.path.dirname(__file__), "openquake"))
from openquake.engine import config
config.abort_if_no_config_available()
amqp = config.get_section("amqp")
# RabbitMQ broker (default)
BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \
amqp
# Redis broker (works only on Trusty)
# BROKER_URL = 'redis://%(host)s:6379/0' % amqp
# BROKER_POOL_LIMIT enables a connections pool so Celery can reuse
# a single connection to RabbitMQ. Value 10 is the default from
# Celery 2.5 where this feature is enabled by default.
# Actually disabled because it's not stable in production.
# See https://bugs.launchpad.net/oq-engine/+bug/1250402
BROKER_POOL_LIMIT = None
# AMQP result backend (default)
CELERY_RESULT_BACKEND = 'rpc://'
CELERY_RESULT_PERSISTENT = False
# Redis result backend (works only on Trusty)
# CELERY_RESULT_BACKEND = 'redis://%(host)s:6379/0' % amqp
# CELERY_ACKS_LATE and CELERYD_PREFETCH_MULTIPLIER settings help evenly
# distribute tasks across the cluster. This configuration is intended
# make worker processes reserve only a single task at any given time.
# (The default settings for prefetching define that each worker process will
# reserve 4 tasks at once. For long running calculations with lots of long,
# heavy tasks, this greedy prefetching is not recommended and can result in
# performance issues with respect to cluster utilization.)
# CELERY_MAX_CACHED_RESULTS disable the cache on the results: this means
# that map_reduce will not leak memory by keeping the intermediate results
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_MAX_CACHED_RESULTS = 1
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_IMPORTS = ["openquake.commonlib.parallel"]
| # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2010-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Config for all installed OpenQuake binaries and modules.
Should be installed by setup.py into /etc/openquake
eventually.
"""
import os
import sys
if '--with-doctest' in sys.argv: # horrible hack for nosetests
pass # don't set OQ_DISTRIBUTE
else:
os.environ["OQ_DISTRIBUTE"] = "celery"
# just in the case that are you using oq-engine from sources
# with the rest of oq libraries installed into the system (or a
# virtual environment) you must set this environment variable
if os.environ.get("OQ_ENGINE_USE_SRCDIR"):
sys.modules['openquake'].__dict__["__path__"].insert(
0, os.path.join(os.path.dirname(__file__), "openquake"))
from openquake.engine import config
config.abort_if_no_config_available()
amqp = config.get_section("amqp")
# RabbitMQ broker (default)
BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \
amqp
# Redis broker (works only on Trusty)
# BROKER_URL = 'redis://%(host)s:6379/0' % amqp
# BROKER_POOL_LIMIT enables a connections pool so Celery can reuse
# a single connection to RabbitMQ. Value 10 is the default from
# Celery 2.5 where this feature is enabled by default.
# Actually disabled because it's not stable in production.
# See https://bugs.launchpad.net/oq-engine/+bug/1250402
BROKER_POOL_LIMIT = None
# RabbitMQ result backend (default)
CELERY_RESULT_BACKEND = 'amqp://'
# Redis result backend (works only on Trusty)
# CELERY_RESULT_BACKEND = 'redis://%(host)s:6379/0' % amqp
# CELERY_ACKS_LATE and CELERYD_PREFETCH_MULTIPLIER settings help evenly
# distribute tasks across the cluster. This configuration is intended
# make worker processes reserve only a single task at any given time.
# (The default settings for prefetching define that each worker process will
# reserve 4 tasks at once. For long running calculations with lots of long,
# heavy tasks, this greedy prefetching is not recommended and can result in
# performance issues with respect to cluster utilization.)
# CELERY_MAX_CACHED_RESULTS disable the cache on the results: this means
# that map_reduce will not leak memory by keeping the intermediate results
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
CELERY_MAX_CACHED_RESULTS = 1
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_IMPORTS = ["openquake.commonlib.parallel"]
| Python | 0 |
e79e0240165bf2aa77612be2f1227ca7bb3c5fc7 | add empty return docs | lib/ansible/modules/extras/system/make.py | lib/ansible/modules/extras/system/make.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Linus Unnebäck <linus@folkdatorn.se>
#
# This file is part of Ansible
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
# import module snippets
from ansible.module_utils.basic import *
DOCUMENTATION = '''
---
module: make
short_description: Run targets in a Makefile
requirements: []
version_added: "2.1"
author: Linus Unnebäck (@LinusU) <linus@folkdatorn.se>
description: Run targets in a Makefile.
options:
target:
description: The target to run
required: false
params:
description: Any extra parameters to pass to make
required: false
chdir:
description: cd into this directory before running make
required: true
'''
EXAMPLES = '''
# Build the default target
- make: chdir=/home/ubuntu/cool-project
# Run `install` target as root
- make: chdir=/home/ubuntu/cool-project target=install
become: yes
# Pass in extra arguments to build
- make:
chdir: /home/ubuntu/cool-project
target: all
params:
NUM_THREADS: 4
BACKEND: lapack
'''
# TODO: Disabled the RETURN as it was breaking docs building. Someone needs to
# fix this
RETURN = '''# '''
def format_params(params):
return [k + '=' + str(v) for k, v in params.iteritems()]
def push_arguments(cmd, args):
if args['target'] != None:
cmd.append(args['target'])
if args['params'] != None:
cmd.extend(format_params(args['params']))
return cmd
def check_changed(make_path, module, args):
cmd = push_arguments([make_path, '--question'], args)
rc, _, __ = module.run_command(cmd, check_rc=False, cwd=args['chdir'])
return (rc != 0)
def run_make(make_path, module, args):
cmd = push_arguments([make_path], args)
module.run_command(cmd, check_rc=True, cwd=args['chdir'])
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec=dict(
target=dict(required=False, default=None, type='str'),
params=dict(required=False, default=None, type='dict'),
chdir=dict(required=True, default=None, type='str'),
),
)
args = dict(
changed=False,
failed=False,
target=module.params['target'],
params=module.params['params'],
chdir=module.params['chdir'],
)
make_path = module.get_bin_path('make', True)
# Check if target is up to date
args['changed'] = check_changed(make_path, module, args)
# Check only; don't modify
if module.check_mode:
module.exit_json(changed=args['changed'])
# Target is already up to date
if args['changed'] == False:
module.exit_json(**args)
run_make(make_path, module, args)
module.exit_json(**args)
if __name__ == '__main__':
main()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Linus Unnebäck <linus@folkdatorn.se>
#
# This file is part of Ansible
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
# import module snippets
from ansible.module_utils.basic import *
DOCUMENTATION = '''
---
module: make
short_description: Run targets in a Makefile
requirements: []
version_added: "2.1"
author: Linus Unnebäck (@LinusU) <linus@folkdatorn.se>
description: Run targets in a Makefile.
options:
target:
description: The target to run
required: false
params:
description: Any extra parameters to pass to make
required: false
chdir:
description: cd into this directory before running make
required: true
'''
EXAMPLES = '''
# Build the default target
- make: chdir=/home/ubuntu/cool-project
# Run `install` target as root
- make: chdir=/home/ubuntu/cool-project target=install
become: yes
# Pass in extra arguments to build
- make:
chdir: /home/ubuntu/cool-project
target: all
params:
NUM_THREADS: 4
BACKEND: lapack
'''
def format_params(params):
return [k + '=' + str(v) for k, v in params.iteritems()]
def push_arguments(cmd, args):
if args['target'] != None:
cmd.append(args['target'])
if args['params'] != None:
cmd.extend(format_params(args['params']))
return cmd
def check_changed(make_path, module, args):
cmd = push_arguments([make_path, '--question'], args)
rc, _, __ = module.run_command(cmd, check_rc=False, cwd=args['chdir'])
return (rc != 0)
def run_make(make_path, module, args):
cmd = push_arguments([make_path], args)
module.run_command(cmd, check_rc=True, cwd=args['chdir'])
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec=dict(
target=dict(required=False, default=None, type='str'),
params=dict(required=False, default=None, type='dict'),
chdir=dict(required=True, default=None, type='str'),
),
)
args = dict(
changed=False,
failed=False,
target=module.params['target'],
params=module.params['params'],
chdir=module.params['chdir'],
)
make_path = module.get_bin_path('make', True)
# Check if target is up to date
args['changed'] = check_changed(make_path, module, args)
# Check only; don't modify
if module.check_mode:
module.exit_json(changed=args['changed'])
# Target is already up to date
if args['changed'] == False:
module.exit_json(**args)
run_make(make_path, module, args)
module.exit_json(**args)
if __name__ == '__main__':
main()
| Python | 0.000926 |
1cc1df8c00a7a956b0a1207f99928f731714541a | add main path reminder in TaskLog | TaskList/TaskLog/TaskLog.py | TaskList/TaskLog/TaskLog.py | #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage task running log'''
import xml.etree.ElementTree as xmlMod
from TaskList.TaskLog.GroupLog import *
from Preferences.PresetList.Preset.Preset import *
from Preferences.PresetList.Preset.Metapreset import *
class TaskLog:
'''class to manage task running log'''
def __init__(self, xml = None, pref = None, task = None):
'''initialize task log object'''
if xml is None:
self.defaultInit(pref, task)
else:
self.fromXml(xml)
def defaultInit(self, preferences, task):
'''initialize Task log object by generating from the task settings'''
self.presetName = task.preset
if self.presetName == '[default]':
self.presetName = preferences.presets.default
self.preset = preferences.presets.getPreset(self.presetName).copy()
fileName = task.path.split('/').pop()
fileName = fileName[0:fileName.rfind('.blend')]
self.path = preferences.output.getMainPath(fileName, task.scene, self.presetName)
if type(self.preset) is Preset:
self.groups = [GroupLog(groupName = '[main]',
preferences = preferences,
task = task)]
else:
self.groups = []
for g in self.preset.groups.keys():
group = preferences.presets.renderlayers.groups[g]
if group.isUsefull(task.info.scenes[task.scene]):
self.groups.append(GroupLog(groupName = g,
preferences = preferences,
task = task))
default = GroupLog(groupName = '[default]',
preferences = preferences,
task = task)
if len(default.renderlayers) > 0:
self.groups.append(default)
def fromXml(self, xml):
'''initialize Task log object with saved log'''
node = xml.find('preset')
if node is None:
node = xml.find('metapreset')
self.presetName = node.get('alias')
self.preset = Metapreset(xml = node)
else:
self.presetName = node.get('alias')
self.preset = Preset(xml = node)
self.groups = []
for node in xml.findall('group'):
self.groups.append(GroupLog(xml = node))
def toXml(self):
'''export task log into xml syntaxed string'''
xml = '<log>\n'
xml += self.preset.toXml(self.presetName)
for g in self.groups:
xml += g.toXml()
xml += '</log>'
return xml
def print(self):
'''A method to print task log'''
def getGroup(self, g):
'''a method to get a group by his name'''
for group in self.groups:
if g == group.name:
return group
| #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''module to manage task running log'''
import xml.etree.ElementTree as xmlMod
from TaskList.TaskLog.GroupLog import *
from Preferences.PresetList.Preset.Preset import *
from Preferences.PresetList.Preset.Metapreset import *
class TaskLog:
'''class to manage task running log'''
def __init__(self, xml = None, pref = None, task = None):
'''initialize task log object'''
if xml is None:
self.defaultInit(pref, task)
else:
self.fromXml(xml)
def defaultInit(self, preferences, task):
'''initialize Task log object by generating from the task settings'''
self.presetName = task.preset
if self.presetName == '[default]':
self.presetName = preferences.presets.default
self.preset = preferences.presets.getPreset(self.presetName).copy()
if type(self.preset) is Preset:
self.groups = [GroupLog(groupName = '[main]',
preferences = preferences,
task = task)]
else:
self.groups = []
for g in self.preset.groups.keys():
group = preferences.presets.renderlayers.groups[g]
if group.isUsefull(task.info.scenes[task.scene]):
self.groups.append(GroupLog(groupName = g,
preferences = preferences,
task = task))
default = GroupLog(groupName = '[default]',
preferences = preferences,
task = task)
if len(default.renderlayers) > 0:
self.groups.append(default)
def fromXml(self, xml):
'''initialize Task log object with saved log'''
node = xml.find('preset')
if node is None:
node = xml.find('metapreset')
self.presetName = node.get('alias')
self.preset = Metapreset(xml = node)
else:
self.presetName = node.get('alias')
self.preset = Preset(xml = node)
self.groups = []
for node in xml.findall('group'):
self.groups.append(GroupLog(xml = node))
def toXml(self):
'''export task log into xml syntaxed string'''
xml = '<log>\n'
xml += self.preset.toXml(self.presetName)
for g in self.groups:
xml += g.toXml()
xml += '</log>'
return xml
def print(self):
'''A method to print task log'''
def getGroup(self, g):
'''a method to get a group by his name'''
for group in self.groups:
if g == group.name:
return group
| Python | 0 |
de2aab06efd9cc5673ad517d453e5f660ef6fcf7 | Disable test_socket_ssl timeout test on Windows. | Lib/test/test_socket_ssl.py | Lib/test/test_socket_ssl.py | # Test just the SSL support in the socket module, in a moderately bogus way.
import sys
from test import test_support
import socket
# Optionally test SSL support. This requires the 'network' resource as given
# on the regrtest command line.
skip_expected = not (test_support.is_resource_enabled('network') and
hasattr(socket, "ssl"))
def test_basic():
test_support.requires('network')
import urllib
socket.RAND_status()
try:
socket.RAND_egd(1)
except TypeError:
pass
else:
print "didn't raise TypeError"
socket.RAND_add("this is a random string", 75.0)
f = urllib.urlopen('https://sf.net')
buf = f.read()
f.close()
if not sys.platform.startswith('win'):
def test_timeout():
test_support.requires('network')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(30.0)
# connect to service which issues an welcome banner (without need to write anything)
s.connect(("gmail.org", 995))
ss = socket.ssl(s)
# read part of return welcome banner twice,# read part of return welcome banner twice
ss.read(1)
ss.read(1)
s.close()
def test_rude_shutdown():
try:
import threading
except ImportError:
return
# Some random port to connect to.
PORT = 9934
listener_ready = threading.Event()
listener_gone = threading.Event()
# `listener` runs in a thread. It opens a socket listening on PORT, and
# sits in an accept() until the main thread connects. Then it rudely
# closes the socket, and sets Event `listener_gone` to let the main thread
# know the socket is gone.
def listener():
s = socket.socket()
s.bind(('', PORT))
s.listen(5)
listener_ready.set()
s.accept()
s = None # reclaim the socket object, which also closes it
listener_gone.set()
def connector():
listener_ready.wait()
s = socket.socket()
s.connect(('localhost', PORT))
listener_gone.wait()
try:
ssl_sock = socket.ssl(s)
except socket.sslerror:
pass
else:
raise test_support.TestFailed(
'connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
connector()
t.join()
def test_main():
if not hasattr(socket, "ssl"):
raise test_support.TestSkipped("socket module has no ssl support")
test_rude_shutdown()
test_basic()
test_timeout()
if __name__ == "__main__":
test_main()
| # Test just the SSL support in the socket module, in a moderately bogus way.
from test import test_support
import socket
# Optionally test SSL support. This requires the 'network' resource as given
# on the regrtest command line.
skip_expected = not (test_support.is_resource_enabled('network') and
hasattr(socket, "ssl"))
def test_basic():
test_support.requires('network')
import urllib
socket.RAND_status()
try:
socket.RAND_egd(1)
except TypeError:
pass
else:
print "didn't raise TypeError"
socket.RAND_add("this is a random string", 75.0)
f = urllib.urlopen('https://sf.net')
buf = f.read()
f.close()
def test_timeout():
test_support.requires('network')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(30.0)
# connect to service which issues an welcome banner (without need to write anything)
s.connect(("gmail.org", 995))
ss = socket.ssl(s)
# read part of return welcome banner twice,# read part of return welcome banner twice
ss.read(1)
ss.read(1)
s.close()
def test_rude_shutdown():
try:
import threading
except ImportError:
return
# Some random port to connect to.
PORT = 9934
listener_ready = threading.Event()
listener_gone = threading.Event()
# `listener` runs in a thread. It opens a socket listening on PORT, and
# sits in an accept() until the main thread connects. Then it rudely
# closes the socket, and sets Event `listener_gone` to let the main thread
# know the socket is gone.
def listener():
s = socket.socket()
s.bind(('', PORT))
s.listen(5)
listener_ready.set()
s.accept()
s = None # reclaim the socket object, which also closes it
listener_gone.set()
def connector():
listener_ready.wait()
s = socket.socket()
s.connect(('localhost', PORT))
listener_gone.wait()
try:
ssl_sock = socket.ssl(s)
except socket.sslerror:
pass
else:
raise test_support.TestFailed(
'connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
connector()
t.join()
def test_main():
if not hasattr(socket, "ssl"):
raise test_support.TestSkipped("socket module has no ssl support")
test_rude_shutdown()
test_basic()
test_timeout()
if __name__ == "__main__":
test_main()
| Python | 0 |
32ab2353d7a7f64300445688b0bd583fbe1a13fb | Improve keystone.conf [endpoint_policy] documentation | keystone/conf/endpoint_policy.py | keystone/conf/endpoint_policy.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from keystone.conf import utils
enabled = cfg.BoolOpt(
'enabled',
default=True,
deprecated_for_removal=True,
deprecated_reason=utils.fmt("""
The option to enable the OS-ENDPOINT-POLICY API extension has been deprecated
in the M release and will be removed in the O release. The OS-ENDPOINT-POLICY
API extension will be enabled by default.
"""),
help=utils.fmt("""
Enable endpoint-policy functionality, which allows policies to be associated
with either specific endpoints, or endpoints of a given service type.
"""))
driver = cfg.StrOpt(
'driver',
default='sql',
help=utils.fmt("""
Entry point for the endpoint policy driver in the `keystone.endpoint_policy`
namespace. Only a `sql` driver is provided by keystone, so there is no reason
to set this unless you are providing a custom entry point.
"""))
GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
enabled,
driver,
]
def register_opts(conf):
conf.register_opts(ALL_OPTS, group=GROUP_NAME)
def list_opts():
return {GROUP_NAME: ALL_OPTS}
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from keystone.conf import utils
enabled = cfg.BoolOpt(
'enabled',
default=True,
deprecated_for_removal=True,
deprecated_reason=utils.fmt("""
The option to enable the OS-ENDPOINT-POLICY extension has been deprecated in
the M release and will be removed in the O release. The OS-ENDPOINT-POLICY
extension will be enabled by default.
"""),
help=utils.fmt("""
Enable endpoint_policy functionality.
"""))
driver = cfg.StrOpt(
'driver',
default='sql',
help=utils.fmt("""
Entrypoint for the endpoint policy backend driver in the
keystone.endpoint_policy namespace.
"""))
GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
enabled,
driver,
]
def register_opts(conf):
conf.register_opts(ALL_OPTS, group=GROUP_NAME)
def list_opts():
return {GROUP_NAME: ALL_OPTS}
| Python | 0.000001 |
29384b927b620b7e943343409f62511451bb3059 | Fix problem with Hopfield energy function for Python 2.7 | neupy/algorithms/memory/utils.py | neupy/algorithms/memory/utils.py | from numpy import where, inner
from numpy.core.umath_tests import inner1d
__all__ = ('sign2bin', 'bin2sign', 'hopfield_energy')
def sign2bin(matrix):
return where(matrix == 1, 1, 0)
def bin2sign(matrix):
return where(matrix == 0, -1, 1)
def hopfield_energy(weight, input_data, output_data):
return -0.5 * inner1d(input_data.dot(weight), output_data)
| from numpy import where
__all__ = ('sign2bin', 'bin2sign', 'hopfield_energy')
def sign2bin(matrix):
return where(matrix == 1, 1, 0)
def bin2sign(matrix):
return where(matrix == 0, -1, 1)
def hopfield_energy(weight, input_data, output_data):
energy_output = -0.5 * input_data.dot(weight).dot(output_data.T)
return energy_output.item(0)
| Python | 0.001066 |
bf28376f252fd474d594e5039d0b2f2bb1afc26a | Add proper warnings on use of the backwards compatibility shim. | IPython/frontend.py | IPython/frontend.py | """
Shim to maintain backwards compatibility with old frontend imports.
We have moved all contents of the old `frontend` subpackage into top-level
subpackages (`html`, `qt` and `terminal`). This will let code that was making
`from IPython.frontend...` calls continue working, though a warning will be
printed.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
import sys
import types
#-----------------------------------------------------------------------------
# Class declarations
#-----------------------------------------------------------------------------
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
m = ("*** WARNING*** : The top-level `frontend` module has been deprecated.\n"
"Please import %s directly from the `IPython` level." % key)
# FIXME: I don't understand why, but if the print statement below is
# redirected to stderr, this shim module stops working. It seems the
# Python import machinery has problem with redirected prints happening
# during the import process. If we can't figure out a solution, we may
# need to leave it to print to default stdout.
print(m)
# FIXME: this seems to work fine, but we should replace it with an
# __import__ call instead of using exec/eval.
exec 'from IPython import %s' % key
return eval(key)
# Unconditionally insert the shim into sys.modules so that further import calls
# trigger the custom attribute access above
sys.modules['IPython.frontend'] = ShimModule('frontend')
| import sys
import types
class ShimModule(types.ModuleType):
def __getattribute__(self, key):
exec 'from IPython import %s' % key
return eval(key)
sys.modules['IPython.frontend'] = ShimModule('frontend')
| Python | 0 |
d8ceaa6cc9e475292633dae3927cf29f3783f9af | Update some comments | conda_tools/updater.py | conda_tools/updater.py | #!/usr/bin/env python
#
# updater.py
#
# Updates all packages in all installed conda environments.
# This script should be run under the root conda environment.
import os.path
import subprocess
import conda_api
from utils import get_root_prefix
def update_all(update_root=True, *blacklist_envs):
"""Updates all conda packages in all installed conda environments.
Required arguments:
update_root -- A Boolean flag that specifies whether the root conda
environment should be updated (default True).
Optional arguments:
*blacklist_envs -- Names of environments you don't want updated.
Example usage:
update_all(True, 'special_env1', 'special_env2')
This will update all conda environments (including root) but excluding
special_env1 and special_env2.
"""
# Before we do anything, set the ROOT_PREFIX
# variable so conda_api knows where to work from.
conda_api.set_root_prefix(get_root_prefix())
# Get all active environments, excluding the ones in the blacklist.
# The root environment will be the first element in this list,
# so exclude that also.
envs = [
os.path.basename(env) for env in conda_api.get_envs()
if os.path.basename(env) not in blacklist_envs
][1:]
print('ROOT_PREFIX is set to: {0}'.format(conda_api.ROOT_PREFIX))
if update_root:
root_update_result = conda_api.update(use_local=True, all=True, env='base')
print('Result from environment root:\n{0}'.format(root_update_result))
for env_name in envs:
# Update all packages in the environment.
env_update_result = conda_api.update(env=env_name, all=True)
print('Result from environment {0}:\n{1}'.format(env_name, env_update_result))
def pip_update(**pip_package_specs):
"""Updates pip packages in their respective conda environments.
Keyword arguments:
**pip_package_specs -- The key is the name of the environment, and the
value is an iterable of the pip package names
in that environment you want to update.
Example usage:
pip_package_specs = {'conda_env1':('autobahn','six','txaio',),
'conda_env2':('pika',)}
pip_update(**pip_package_specs)
This will update autobahn, six, and txaio in the conda environment
'conda_env1', and pika in the environment 'conda_env2'.
"""
if pip_package_specs:
conda_api.set_root_prefix(get_root_prefix())
for env, packages in pip_package_specs.items():
pip_args = ['install', '-U']
pip_args.extend(packages)
# Equivalent of running 'pip install -U package1 package2 ...',
# but runs it inside the appropriate conda environment.
p = conda_api.process(
name=env,
cmd='pip',
args=pip_args,
stdout=subprocess.PIPE
)
stdout, _ = p.communicate()
print('Pip update result from environment {0}:\n{1}'.format(env, stdout))
if __name__ == '__main__':
update_all()
| #!/usr/bin/env python
#
# updater.py
#
# Updates all packages in all installed conda environments.
# This script should be run under the root conda environment.
import os.path
import subprocess
import conda_api
from utils import get_root_prefix
def update_all(update_root=True, *blacklist_envs):
"""Updates all conda packages in all installed conda environments.
Required arguments:
update_root -- A Boolean flag that specifies whether the root conda
environment should be updated (default True).
Optional arguments:
*blacklist_envs -- Names of environments you don't want updated.
Example usage:
update_all(True, 'special_env1', 'special_env2')
This will update all conda environments (including root) but excluding
special_env1 and special_env2.
"""
# Before we do anything, set the ROOT_PREFIX
# variable so conda_api knows where to work from.
conda_api.set_root_prefix(get_root_prefix())
# Get all active environments, excluding the ones in the blacklist.
envs = [
os.path.basename(env) for env in conda_api.get_envs()
if os.path.basename(env) not in blacklist_envs
][1:]
print('ROOT_PREFIX is set to: {0}'.format(conda_api.ROOT_PREFIX))
if update_root:
root_update_result = conda_api.update(use_local=True, all=True, env='base')
print('Result from environment root:\n{0}'.format(root_update_result))
for env_name in envs:
# Update all packages in the environment.
env_update_result = conda_api.update(env=env_name, all=True)
print('Result from environment {0}:\n{1}'.format(env_name, env_update_result))
def pip_update(**pip_package_specs):
"""Updates pip packages in their respective conda environments.
Keyword arguments:
**pip_package_specs -- The key is the name of the environment, and the
value is an iterable of the pip package names
in that environment you want to update.
Example usage:
pip_package_specs = {'conda_env1':('autobahn','six','txaio',),
'conda_env2':('pika',)}
pip_update(**pip_package_specs)
This will update autobahn, six, and txaio in the conda environment
'conda_env1', and pika in the environment 'conda_env2'.
"""
if pip_package_specs:
conda_api.set_root_prefix(get_root_prefix())
for env, packages in pip_package_specs.items():
pip_args = ['install', '-U']
pip_args.extend(packages)
# Equivalent of running 'pip install -q -U package1 package2 ...',
# but runs it inside the appropriate conda environment.
p = conda_api.process(
name=env,
cmd='pip',
args=pip_args,
stdout=subprocess.PIPE
)
stdout, _ = p.communicate()
print('Pip update result from environment {0}:\n{1}'.format(env, stdout))
if __name__ == '__main__':
update_all()
| Python | 0 |
7f800ee03790d28a25f0f5989c8f6a15401af172 | remove print statement | django/crashreport/symbols/views.py | django/crashreport/symbols/views.py | # -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from django.shortcuts import render
from django import forms
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseServerError
from django.views.decorators.csrf import csrf_exempt
from .handler import SymbolsUploadHandler
import os
class UploadSymbolsForm(forms.Form):
symbols = forms.FileField()
comment = forms.CharField()
def handle_uploaded_file(f):
# TODO: moggi: get the symbols localtion from the configuration
file_path = os.path.join('/tmp/symbols_upload', f.name)
with open(file_path, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return file_path
# TODO: this needs to be limited to logged in users
@csrf_exempt
def upload_symbols(request):
if request.method != 'POST':
return HttpResponseNotAllowed('Only POST here')
form = UploadSymbolsForm(request.POST, request.FILES)
if not form.is_valid():
return HttpResponseNotAllowed('Invalid data')
path = handle_uploaded_file(request.FILES['symbols'])
upload = SymbolsUploadHandler()
upload.process(form.cleaned_data, path)
# TODO: moggi: maybe report the zipfile.BadZipfile exception
return HttpResponse("Success")
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
| # -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from django.shortcuts import render
from django import forms
from django.http import HttpResponse, HttpResponseNotAllowed, HttpResponseServerError
from django.views.decorators.csrf import csrf_exempt
from .handler import SymbolsUploadHandler
import os
class UploadSymbolsForm(forms.Form):
symbols = forms.FileField()
comment = forms.CharField()
def handle_uploaded_file(f):
# TODO: moggi: get the symbols localtion from the configuration
file_path = os.path.join('/tmp/symbols_upload', f.name)
print(file_path)
with open(file_path, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return file_path
# TODO: this needs to be limited to logged in users
@csrf_exempt
def upload_symbols(request):
if request.method != 'POST':
return HttpResponseNotAllowed('Only POST here')
form = UploadSymbolsForm(request.POST, request.FILES)
print(form.fields)
if not form.is_valid():
return HttpResponseNotAllowed('Invalid data')
path = handle_uploaded_file(request.FILES['symbols'])
upload = SymbolsUploadHandler()
upload.process(form.cleaned_data, path)
# TODO: moggi: maybe report the zipfile.BadZipfile exception
return HttpResponse("Success")
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
| Python | 0.999999 |
f782633c857932803283dd9c26621e69f6ccb44e | fix version | l10n_es_aeat_sii/__manifest__.py | l10n_es_aeat_sii/__manifest__.py | # -*- coding: utf-8 -*-
# Copyright 2017 Ignacio Ibeas <ignacio@acysos.com>
# (c) 2017 Diagram Software S.L.
# Copyright (c) 2017-TODAY MINORISA <ramon.guiu@minorisa.net>
# (c) 2017 Studio73 - Pablo Fuentes <pablo@studio73.es>
# (c) 2017 Studio73 - Jordi Tolsà <jordi@studio73.es>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Suministro Inmediato de Información en el IVA",
"version": "12.0.1.3.2",
"category": "Accounting & Finance",
"website": "https://www.acysos.com",
"author": "Acysos S.L.",
"license": "AGPL-3",
"application": False,
"installable": True,
"external_dependencies": {
"python": ["zeep",
"requests"],
},
"depends": [
"account",
"account_invoice_refund_link",
"l10n_es_aeat",
"queue_job",
"account_payment_partner",
],
"data": [
"data/ir_config_parameter.xml",
"data/aeat_sii_mapping_registration_keys_data.xml",
"data/aeat_sii_map_data.xml",
"data/aeat_sii_map_data_1_1.xml",
"data/aeat_sii_mapping_payment_keys_data.xml",
"data/account_fiscal_position_data.xml",
"views/res_company_view.xml",
"views/account_invoice_view.xml",
"views/aeat_sii_view.xml",
"views/aeat_sii_result_view.xml",
"views/aeat_check_sii_result_view.xml",
"wizard/aeat_sii_password_view.xml",
"views/aeat_sii_mapping_registration_keys_view.xml",
"views/aeat_sii_map_view.xml",
"security/ir.model.access.csv",
"security/aeat_sii.xml",
"views/product_view.xml",
"views/account_view.xml",
"views/account_payment_mode_view.xml",
],
'images': ['static/description/banner.jpg'],
"post_init_hook": "post_init_sii_hook",
}
| # -*- coding: utf-8 -*-
# Copyright 2017 Ignacio Ibeas <ignacio@acysos.com>
# (c) 2017 Diagram Software S.L.
# Copyright (c) 2017-TODAY MINORISA <ramon.guiu@minorisa.net>
# (c) 2017 Studio73 - Pablo Fuentes <pablo@studio73.es>
# (c) 2017 Studio73 - Jordi Tolsà <jordi@studio73.es>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Suministro Inmediato de Información en el IVA",
"version": "11.0.1.3.2",
"category": "Accounting & Finance",
"website": "https://www.acysos.com",
"author": "Acysos S.L.",
"license": "AGPL-3",
"application": False,
"installable": True,
"external_dependencies": {
"python": ["zeep",
"requests"],
},
"depends": [
"account",
"account_invoice_refund_link",
"l10n_es_aeat",
"queue_job",
"account_payment_partner",
],
"data": [
"data/ir_config_parameter.xml",
"data/aeat_sii_mapping_registration_keys_data.xml",
"data/aeat_sii_map_data.xml",
"data/aeat_sii_map_data_1_1.xml",
"data/aeat_sii_mapping_payment_keys_data.xml",
"data/account_fiscal_position_data.xml",
"views/res_company_view.xml",
"views/account_invoice_view.xml",
"views/aeat_sii_view.xml",
"views/aeat_sii_result_view.xml",
"views/aeat_check_sii_result_view.xml",
"wizard/aeat_sii_password_view.xml",
"views/aeat_sii_mapping_registration_keys_view.xml",
"views/aeat_sii_map_view.xml",
"security/ir.model.access.csv",
"security/aeat_sii.xml",
"views/product_view.xml",
"views/account_view.xml",
"views/account_payment_mode_view.xml",
],
'images': ['static/description/banner.jpg'],
"post_init_hook": "post_init_sii_hook",
}
| Python | 0.000001 |
7d7b12d176a0315057c54eff794fbe7117c7f6da | Include the Python version in the support tracking data. | reviewboard/admin/support.py | reviewboard/admin/support.py | from __future__ import unicode_literals
import base64
import sys
import time
from datetime import datetime
from hashlib import sha1
from django.conf import settings
from django.contrib.auth.models import User
from djblets.siteconfig.models import SiteConfiguration
from reviewboard import get_package_version
def get_install_key():
"""Returns the installation key for this server."""
return sha1(settings.SECRET_KEY).hexdigest()
def serialize_support_data(request=None, force_is_admin=False):
"""Serializes support data into a base64-encoded string."""
siteconfig = SiteConfiguration.objects.get_current()
is_admin = (force_is_admin or
(request is not None and request.user.is_staff))
return base64.b64encode('\t'.join([
get_install_key(),
'%d' % is_admin,
siteconfig.site.domain,
siteconfig.get('site_admin_name'),
siteconfig.get('site_admin_email'),
get_package_version(),
'%d' % User.objects.filter(is_active=True).count(),
'%d' % int(time.mktime(datetime.now().timetuple())),
siteconfig.get('company'),
'%s.%s.%s' % sys.version_info[:3],
]))
def get_default_support_url(request=None, force_is_admin=False):
"""Returns the URL for the default Review Board support page."""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('send_support_usage_stats'):
support_data = serialize_support_data(request, force_is_admin)
else:
support_data = ''
return settings.DEFAULT_SUPPORT_URL % {
'support_data': support_data,
}
def get_register_support_url(request=None, force_is_admin=False):
"""Returns the URL for registering the Review Board support page."""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('send_support_usage_stats'):
support_data = serialize_support_data(request, force_is_admin)
else:
support_data = ''
return settings.REGISTER_SUPPORT_URL % {
'support_data': support_data,
}
def get_support_url(request):
"""Returns the URL for the configured support page."""
siteconfig = SiteConfiguration.objects.get_current()
return (siteconfig.get('support_url') or
get_default_support_url(request))
| from __future__ import unicode_literals
import base64
import time
from datetime import datetime
from hashlib import sha1
from django.conf import settings
from django.contrib.auth.models import User
from djblets.siteconfig.models import SiteConfiguration
from reviewboard import get_package_version
def get_install_key():
"""Returns the installation key for this server."""
return sha1(settings.SECRET_KEY).hexdigest()
def serialize_support_data(request=None, force_is_admin=False):
"""Serializes support data into a base64-encoded string."""
siteconfig = SiteConfiguration.objects.get_current()
is_admin = (force_is_admin or
(request is not None and request.user.is_staff))
return base64.b64encode('\t'.join([
get_install_key(),
'%d' % is_admin,
siteconfig.site.domain,
siteconfig.get('site_admin_name'),
siteconfig.get('site_admin_email'),
get_package_version(),
'%d' % User.objects.filter(is_active=True).count(),
'%d' % int(time.mktime(datetime.now().timetuple())),
siteconfig.get('company'),
]))
def get_default_support_url(request=None, force_is_admin=False):
"""Returns the URL for the default Review Board support page."""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('send_support_usage_stats'):
support_data = serialize_support_data(request, force_is_admin)
else:
support_data = ''
return settings.DEFAULT_SUPPORT_URL % {
'support_data': support_data,
}
def get_register_support_url(request=None, force_is_admin=False):
"""Returns the URL for registering the Review Board support page."""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('send_support_usage_stats'):
support_data = serialize_support_data(request, force_is_admin)
else:
support_data = ''
return settings.REGISTER_SUPPORT_URL % {
'support_data': support_data,
}
def get_support_url(request):
"""Returns the URL for the configured support page."""
siteconfig = SiteConfiguration.objects.get_current()
return (siteconfig.get('support_url') or
get_default_support_url(request))
| Python | 0 |
64d5847f058dec81a288482665a1d8208f0f4e17 | Fix security issue in singularity + misc cleanups (#8657) | var/spack/repos/builtin/packages/singularity/package.py | var/spack/repos/builtin/packages/singularity/package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Singularity(AutotoolsPackage):
"""Singularity is a container platform focused on supporting 'Mobility of
Compute'"""
homepage = "https://www.sylabs.io/singularity/"
url = "https://github.com/singularityware/singularity/releases/download/2.5.2/singularity-2.5.2.tar.gz"
# Versions before 2.5.2 suffer from a serious security problem.
# https://nvd.nist.gov/vuln/detail/CVE-2018-12021
version('2.5.2', '2edc1a8ac9a4d7d26fba6244f1c5fd95')
version('develop', git='https://github.com/singularityware/singularity.git', branch='master')
depends_on('libarchive', when='@2.5.2:')
# these are only needed if we're grabbing the unreleased tree
depends_on('m4', type='build', when='@develop')
depends_on('autoconf', type='build', when='@develop')
depends_on('automake', type='build', when='@develop')
depends_on('libtool', type='build', when='@develop')
# When installing as root, the copy has to run before chmod runs
def install(self, spec, prefix):
make('install', parallel=False)
| ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Singularity(AutotoolsPackage):
"""Singularity is a container platform focused on supporting 'Mobility of
Compute'"""
homepage = "http://singularity.lbl.gov/"
url = "https://github.com/singularityware/singularity/archive/2.4.tar.gz"
version('2.4.5', '9afa903ee019448104b4f40be77a46e7')
version('2.4', 'd357ce68ef2f8149edd84155731531465dbe74148c37719f87f168fc39384377')
version('2.3.1', '292ff7fe3db09c854b8accf42f763f62')
version('develop', git='https://github.com/singularityware/singularity.git', branch='master')
depends_on('m4', type='build')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
| Python | 0 |
346e6c45b3c2a49168eac9fd1b703bd5c05503b1 | fix sql дата начала-создания направления из подверждения исследования | rmis_integration/sql_func.py | rmis_integration/sql_func.py | from django.db import connection
from laboratory.settings import TIME_ZONE
def get_confirm_direction(d_s, d_e, limit):
with connection.cursor() as cursor:
cursor.execute(
"""WITH
t_all_direction AS (
SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya
WHERE time_confirmation AT TIME ZONE %(tz)s <= %(d_end)s),
t_not_confirm_direction AS (
SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya
WHERE napravleniye_id IN (SELECT napravleniye_id FROM t_all_direction) AND time_confirmation IS NULL),
t_only_confirm_direction AS (
SELECT napravleniye_id FROM t_all_direction
WHERE napravleniye_id NOT IN (SELECT napravleniye_id FROM t_not_confirm_direction)),
t_istochnik_f_rmis_auto_send AS (
SELECT id FROM directions_istochnikifinansirovaniya
WHERE rmis_auto_send = false)
SELECT id FROM directions_napravleniya
WHERE id IN (SELECT napravleniye_id FROM t_only_confirm_direction)
AND
data_sozdaniya AT TIME ZONE %(tz)s >= %(d_start)s
AND
rmis_number != ANY(ARRAY['NONERMIS', '', NULL])
AND
result_rmis_send = false
AND
NOT (imported_from_rmis = True and imported_directions_rmis_send = False)
AND
NOT (istochnik_f_id IN (SELECT id FROM t_istochnik_f_rmis_auto_send) and force_rmis_send = False)
ORDER BY data_sozdaniya
LIMIT %(limit)s
""",
params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'limit': limit},
)
row = cursor.fetchall()
return row
| from django.db import connection
from laboratory.settings import TIME_ZONE
def get_confirm_direction(d_s, d_e, limit):
with connection.cursor() as cursor:
cursor.execute(
"""WITH
t_all_direction AS (
SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya
WHERE time_confirmation AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s),
t_not_confirm_direction AS (
SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya
WHERE napravleniye_id IN (SELECT napravleniye_id FROM t_all_direction) AND time_confirmation IS NULL),
t_only_confirm_direction AS (
SELECT napravleniye_id FROM t_all_direction
WHERE napravleniye_id NOT IN (SELECT napravleniye_id FROM t_not_confirm_direction)),
t_istochnik_f_rmis_auto_send AS (
SELECT id FROM directions_istochnikifinansirovaniya
WHERE rmis_auto_send = false)
SELECT id FROM directions_napravleniya
WHERE id IN (SELECT napravleniye_id FROM t_only_confirm_direction)
AND
rmis_number != ANY(ARRAY['NONERMIS', '', NULL])
AND
result_rmis_send = false
AND
NOT (imported_from_rmis = True and imported_directions_rmis_send = False)
AND
NOT (istochnik_f_id IN (SELECT id FROM t_istochnik_f_rmis_auto_send) and force_rmis_send = False)
ORDER BY data_sozdaniya
LIMIT %(limit)s
""",
params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'limit': limit},
)
row = cursor.fetchall()
return row
| Python | 0.000076 |
77ffc94a0439dd7309df0630cddba7daf60bc5ee | Add imported requests to Audit context. | src/ggrc/converters/requests.py | src/ggrc/converters/requests.py | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: silas@reciprocitylabs.com
# Maintained By: silas@reciprocitylabs.com
from .base import *
from ggrc.models import Audit, Request
from .base_row import *
from collections import OrderedDict
class RequestRowConverter(BaseRowConverter):
model_class = Request
def find_by_slug(self, slug):
return self.model_class.query.filter_by(slug=slug).first()
def setup_object(self):
self.obj = self.setup_object_by_slug(self.attrs)
if self.obj.id is not None:
self.add_warning('slug', "Request already exists and will be updated")
def reify(self):
self.handle('objective_id', ObjectiveHandler)
self.handle('request_type', RequestTypeColumnHandler, is_required=True)
self.handle('status', StatusColumnHandler, valid_states=Request.VALID_STATES, default_value='Draft')
self.handle_date('requested_on', is_required=True)
self.handle_date('due_on', is_required=True)
self.handle_text_or_html('description')
self.handle_text_or_html('test')
self.handle_text_or_html('notes')
self.handle_raw_attr('auditor_contact') # default to audit lead
self.handle(
'assignee', AssigneeHandler, is_required=True,
person_must_exist=True)
def save_object(self, db_session, **options):
audit = options.get('audit')
if audit:
self.obj.audit = audit
self.obj.context = audit.context
db_session.add(self.obj)
class RequestsConverter(BaseConverter):
metadata_map = OrderedDict([
('Type', 'type'),
('Program Code', 'slug')
])
object_map = OrderedDict([
('Request Type', 'request_type'),
('Request Description', 'description'),
('Objective Code', 'objective_id'),
('Notes', 'notes'),
('Test', 'test'),
('Assignee', 'assignee'),
('Audit Contact', 'auditor_contact'),
('Requested On', 'requested_on'),
('Due On', 'due_on'),
('Status', 'status'),
])
row_converter = RequestRowConverter
# Overwrite validate functions since they assume a program rather than a directive
def validate_code(self, attrs):
if not attrs.get('slug'):
self.errors.append('Missing Program Code heading')
elif attrs['slug'] != self.program().slug:
self.errors.append('Program Code must be {}'.format(self.program().slug))
def validate_metadata(self, attrs):
self.validate_metadata_type(attrs, "Requests")
self.validate_code(attrs)
def program(self):
return self.options['program']
def do_export_metadata(self):
yield self.metadata_map.keys()
yield ['Requests', self.program().slug]
yield[]
yield[]
yield self.object_map.keys()
| # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: silas@reciprocitylabs.com
# Maintained By: silas@reciprocitylabs.com
from .base import *
from ggrc.models import Request
from .base_row import *
from collections import OrderedDict
class RequestRowConverter(BaseRowConverter):
model_class = Request
def find_by_slug(self, slug):
return self.model_class.query.filter_by(slug=slug).first()
def setup_object(self):
self.obj = self.setup_object_by_slug(self.attrs)
if self.obj.id is not None:
self.add_warning('slug', "Request already exists and will be updated")
def reify(self):
self.handle('objective_id', ObjectiveHandler)
self.handle('request_type', RequestTypeColumnHandler, is_required=True)
self.handle('status', StatusColumnHandler, valid_states=Request.VALID_STATES, default_value='Draft')
self.handle_date('requested_on', is_required=True)
self.handle_date('due_on', is_required=True)
self.handle_text_or_html('description')
self.handle_text_or_html('test')
self.handle_text_or_html('notes')
self.handle_raw_attr('auditor_contact') # default to audit lead
self.handle(
'assignee', AssigneeHandler, is_required=True,
person_must_exist=True)
def save_object(self, db_session, **options):
if options.get('audit'):
self.obj.audit_id = options.get('audit').id
db_session.add(self.obj)
class RequestsConverter(BaseConverter):
metadata_map = OrderedDict([
('Type', 'type'),
('Program Code', 'slug')
])
object_map = OrderedDict([
('Request Type', 'request_type'),
('Request Description', 'description'),
('Objective Code', 'objective_id'),
('Notes', 'notes'),
('Test', 'test'),
('Assignee', 'assignee'),
('Audit Contact', 'auditor_contact'),
('Requested On', 'requested_on'),
('Due On', 'due_on'),
('Status', 'status'),
])
row_converter = RequestRowConverter
# Overwrite validate functions since they assume a program rather than a directive
def validate_code(self, attrs):
if not attrs.get('slug'):
self.errors.append('Missing Program Code heading')
elif attrs['slug'] != self.program().slug:
self.errors.append('Program Code must be {}'.format(self.program().slug))
def validate_metadata(self, attrs):
self.validate_metadata_type(attrs, "Requests")
self.validate_code(attrs)
def program(self):
return self.options['program']
def do_export_metadata(self):
yield self.metadata_map.keys()
yield ['Requests', self.program().slug]
yield[]
yield[]
yield self.object_map.keys()
| Python | 0 |
d4c98e176833a4b37b7edf1d68741bcfa8c50213 | Return the name_sha of each flaky test in build_flaky_tests | changes/api/build_flaky_tests.py | changes/api/build_flaky_tests.py | from __future__ import absolute_import
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models.build import Build
from changes.models.job import Job
from changes.models.phabricatordiff import PhabricatorDiff
from changes.models.source import Source
from changes.models.test import TestCase
# This constant must match MAX_TESTS_TO_ADD in citools' quarantine keeper
MAX_TESTS_TO_ADD = 2
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
if jobs:
flaky_tests_query = db.session.query(
TestCase.id,
TestCase.name,
TestCase.name_sha,
TestCase.job_id
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc()).all()
else:
flaky_tests_query = []
flaky_tests = []
for test in flaky_tests_query:
item = {
'id': test.id,
'name': test.name,
'name_sha': test.name_sha,
'job_id': test.job_id,
}
# Quarantine Keeper only needs the author if there are at most
# MAX_TESTS_TO_ADD to add. If there are less, it will only send
# an alert and we don't want to waste time querying the DB
if len(flaky_tests_query) <= MAX_TESTS_TO_ADD:
first_build = self._get_first_build(build.project_id, test.name_sha)
last_test = self._get_last_testcase(build.project_id, test.name_sha)
possible_authors = [
last_test.owner,
first_build.author.email,
]
for author in possible_authors:
if author:
item['author'] = {'email': author}
break
phab_diff = PhabricatorDiff.query.filter(
Source.id == first_build.source.id,
).first()
if phab_diff:
item['diff_id'] = phab_diff.revision_id
flaky_tests.append(item)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
@staticmethod
def _get_first_build(project_id, test_name_sha):
"""Get the first build (by date created) containing a test case.
Args:
:param project_id: string
:param test_name_sha: string
Returns:
Build
"""
first_test = TestCase.query.filter(
TestCase.project_id == project_id,
TestCase.name_sha == test_name_sha,
).order_by(TestCase.date_created.asc()).limit(1).first()
if first_test is None:
return None
first_build = Build.query.options(
joinedload('author'),
joinedload('source'),
).filter(
Build.id == first_test.job.build_id,
).first()
return first_build
@staticmethod
def _get_last_testcase(project_id, test_name_sha):
"""Get the most recent TestCase instance for the specified name.
Args:
:param project_id: string
:param test_name_sha: string
Returns:
TestCase
"""
most_recent_test = TestCase.query.filter(
TestCase.project_id == project_id,
TestCase.name_sha == test_name_sha,
).order_by(TestCase.date_created.desc()).limit(1).first()
return most_recent_test
| from __future__ import absolute_import
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.config import db
from changes.constants import Result
from changes.models.build import Build
from changes.models.job import Job
from changes.models.phabricatordiff import PhabricatorDiff
from changes.models.source import Source
from changes.models.test import TestCase
# This constant must match MAX_TESTS_TO_ADD in citools' quarantine keeper
MAX_TESTS_TO_ADD = 2
class BuildFlakyTestsAPIView(APIView):
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return '', 404
jobs = list(Job.query.filter(
Job.build_id == build.id,
))
if jobs:
flaky_tests_query = db.session.query(
TestCase.id,
TestCase.name,
TestCase.name_sha,
TestCase.job_id
).filter(
TestCase.job_id.in_([j.id for j in jobs]),
TestCase.result == Result.passed,
TestCase.reruns > 1
).order_by(TestCase.name.asc()).all()
else:
flaky_tests_query = []
flaky_tests = []
for test in flaky_tests_query:
item = {
'id': test.id,
'name': test.name,
'job_id': test.job_id,
}
# Quarantine Keeper only needs the author if there are at most
# MAX_TESTS_TO_ADD to add. If there are less, it will only send
# an alert and we don't want to waste time querying the DB
if len(flaky_tests_query) <= MAX_TESTS_TO_ADD:
first_build = self._get_first_build(build.project_id, test.name_sha)
last_test = self._get_last_testcase(build.project_id, test.name_sha)
possible_authors = [
last_test.owner,
first_build.author.email,
]
for author in possible_authors:
if author:
item['author'] = {'email': author}
break
phab_diff = PhabricatorDiff.query.filter(
Source.id == first_build.source.id,
).first()
if phab_diff:
item['diff_id'] = phab_diff.revision_id
flaky_tests.append(item)
context = {
'projectSlug': build.project.slug,
'repositoryUrl': build.project.repository.url,
'flakyTests': {
'count': len(flaky_tests),
'items': flaky_tests
}
}
return self.respond(context)
@staticmethod
def _get_first_build(project_id, test_name_sha):
"""Get the first build (by date created) containing a test case.
Args:
:param project_id: string
:param test_name_sha: string
Returns:
Build
"""
first_test = TestCase.query.filter(
TestCase.project_id == project_id,
TestCase.name_sha == test_name_sha,
).order_by(TestCase.date_created.asc()).limit(1).first()
if first_test is None:
return None
first_build = Build.query.options(
joinedload('author'),
joinedload('source'),
).filter(
Build.id == first_test.job.build_id,
).first()
return first_build
@staticmethod
def _get_last_testcase(project_id, test_name_sha):
"""Get the most recent TestCase instance for the specified name.
Args:
:param project_id: string
:param test_name_sha: string
Returns:
TestCase
"""
most_recent_test = TestCase.query.filter(
TestCase.project_id == project_id,
TestCase.name_sha == test_name_sha,
).order_by(TestCase.date_created.desc()).limit(1).first()
return most_recent_test
| Python | 0.05876 |
3b950782e0f4140715084ebace2820db4f61c600 | Fix reindent-rst.py: it works on binary files | Tools/scripts/reindent-rst.py | Tools/scripts/reindent-rst.py | #!/usr/bin/env python
# Make a reST file compliant to our pre-commit hook.
# Currently just remove trailing whitespace.
import sys, re, shutil
ws_re = re.compile(br'\s+(\r?\n)$')
def main(argv=sys.argv):
rv = 0
for filename in argv[1:]:
try:
with open(filename, 'rb') as f:
lines = f.readlines()
new_lines = [ws_re.sub(br'\1', line) for line in lines]
if new_lines != lines:
print('Fixing %s...' % filename)
shutil.copyfile(filename, filename + '.bak')
with open(filename, 'wb') as f:
f.writelines(new_lines)
except Exception as err:
print('Cannot fix %s: %s' % (filename, err))
rv = 1
return rv
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Make a reST file compliant to our pre-commit hook.
# Currently just remove trailing whitespace.
import sys, re, shutil
ws_re = re.compile(r'\s+(\r?\n)$')
def main(argv=sys.argv):
rv = 0
for filename in argv[1:]:
try:
with open(filename, 'rb') as f:
lines = f.readlines()
new_lines = [ws_re.sub(r'\1', line) for line in lines]
if new_lines != lines:
print('Fixing %s...' % filename)
shutil.copyfile(filename, filename + '.bak')
with open(filename, 'wb') as f:
f.writelines(new_lines)
except Exception as err:
print('Cannot fix %s: %s' % (filename, err))
rv = 1
return rv
if __name__ == '__main__':
sys.exit(main())
| Python | 0.000008 |
4eb100414a139d15d55bc752965d81e96bf5404d | Refactor from review | src/globus_cli/login_manager.py | src/globus_cli/login_manager.py | import functools
import click
from .tokenstore import token_storage_adapter
class LoginManager:
def __init__(self):
self._token_storage = token_storage_adapter()
def has_login(self, resource_server: str):
"""
Determines if the user has a refresh token for the given
resource server
"""
tokens = self._token_storage.get_token_data(resource_server)
return tokens is not None and "refresh_token" in tokens
def requires_login(*args: str, pass_manager: bool = False):
"""
Command decorator for specifying a resource server that the user must have
tokens for in order to run the command.
Simple usage for commands that have static resource needs: simply list all
needed resource servers as args:
@requries_login("auth.globus.org")
@requires_login("auth.globus.org", "transfer.api.globus.org")
Usage for commands which have dynamic resource servers depending
on the arguments passed to the command (e.g. commands for the GCS API)
@requies_login(pass_manager=True)
def command(login_manager, endpoint_id)
login_manager.<do the thing>(endpoint_id)
"""
resource_servers = args
def inner(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
manager = LoginManager()
# determine the set of resource servers missing logins
missing_servers = set()
for server_name in resource_servers:
if not manager.has_login(server_name):
missing_servers.add(server_name)
# if we are missing logins, assemble error text
# text is slightly different for 1, 2, or 3+ missing servers
if missing_servers:
if len(missing_servers) == 1:
plural_string = ""
server_string = missing_servers.pop()
elif len(missing_servers) == 2:
plural_string = "s"
server_string = "{} and {}".format(
missing_servers.pop(), missing_servers.pop()
)
else:
plural_string = "s"
single_server = missing_servers.pop()
server_string = ", ".join(missing_servers) + ", and {}".format(
single_server
)
raise click.ClickException(
"Missing login{} for {}, please run 'globus login'".format(
plural_string, server_string
)
)
# if pass_manager is True, pass it as an additional positional arg
if pass_manager:
return func(*args, manager, **kwargs)
else:
return func(*args, **kwargs)
return wrapper
return inner
| import functools
import click
from .tokenstore import token_storage_adapter
class LoginManager:
def __init__(self):
self._token_storage = token_storage_adapter()
def has_login(self, resource_server: str):
"""
Determines if the user has a refresh token for the given
resource server
"""
tokens = self._token_storage.get_token_data(resource_server)
if tokens is None or "refresh_token" not in tokens:
return False
return True
def requires_login(*args: str, pass_manager: bool = False):
"""
Command decorator for specifying a resource server that the user must have
tokens for in order to run the command.
Simple usage for commands that have static resource needs: simply list all
needed resource servers as args:
@requries_login("auth.globus.org")
@requires_login("auth.globus.org", "transfer.api.globus.org")
Usage for commands which have dynamic resource servers depending
on the arguments passed to the command (e.g. commands for the GCS API)
@requies_login(pass_manager=True)
def command(login_manager, endpoint_id)
login_manager.<do the thing>(endpoint_id)
"""
resource_servers = args
def inner(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
manager = LoginManager()
# determine the set of resource servers missing logins
missing_servers = set()
for server_name in resource_servers:
if not manager.has_login(server_name):
missing_servers.add(server_name)
# if we are missing logins, assemble error text
# text is slightly different for 1, 2, or 3+ missing servers
if missing_servers:
if len(missing_servers) == 1:
plural_string = ""
server_string = missing_servers.pop()
elif len(missing_servers) == 2:
plural_string = "s"
server_string = "{} and {}".format(
missing_servers.pop(), missing_servers.pop()
)
else:
plural_string = "s"
single_server = missing_servers.pop()
server_string = ", ".join(missing_servers) + ", and {}".format(
single_server
)
raise click.ClickException(
"Missing login{} for {}, please run 'globus login'".format(
plural_string, server_string
)
)
# if pass_manager is True, pass it as an additional positional arg
if pass_manager:
return func(*args, manager, **kwargs)
else:
return func(*args, **kwargs)
return wrapper
return inner
| Python | 0 |
5221630769f53853fea490e04301c2a19711894c | Fix #296. Generate name according to profile's sex. Thanks @Dutcho for the report | faker/providers/profile/__init__.py | faker/providers/profile/__init__.py | # coding=utf-8
from .. import BaseProvider
import itertools
class Provider(BaseProvider):
"""
This provider is a collection of functions to generate personal profiles and identities.
"""
def simple_profile(self):
"""
Generates a basic profile with personal informations
"""
sex = self.random_element(["F", "M"])
if sex == 'F':
name = self.generator.name_female()
elif sex == 'M':
name = self.generator.name_male()
return {
"username": self.generator.user_name(),
"name": name,
"sex": sex,
"address": self.generator.address(),
"mail": self.generator.free_email(),
#"password":self.generator.password()
"birthdate": self.generator.date(),
}
def profile(self, fields=None):
"""
Generates a complete profile.
If "fields" is not empty, only the fields in the list will be returned
"""
if fields is None:
fields = []
d = {
"job": self.generator.job(),
"company": self.generator.company(),
"ssn": self.generator.ssn(),
"residence": self.generator.address(),
"current_location": (self.generator.latitude(), self.generator.longitude()),
"blood_group": "".join(self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))),
"website": [self.generator.url() for i in range(1, self.random_int(2, 5))]
}
d = dict(d, **self.generator.simple_profile())
#field selection
if len(fields) > 0:
d = dict((k, v) for (k, v) in d.items() if k in fields)
return d
| # coding=utf-8
from .. import BaseProvider
import itertools
class Provider(BaseProvider):
"""
This provider is a collection of functions to generate personal profiles and identities.
"""
def simple_profile(self):
"""
Generates a basic profile with personal informations
"""
return {
"username": self.generator.user_name(),
"name": self.generator.name(),
"sex": self.random_element(["M", "F"]),
"address": self.generator.address(),
"mail": self.generator.free_email(),
#"password":self.generator.password()
"birthdate": self.generator.date(),
}
def profile(self, fields=None):
"""
Generates a complete profile.
If "fields" is not empty, only the fields in the list will be returned
"""
if fields is None:
fields = []
d = {
"job": self.generator.job(),
"company": self.generator.company(),
"ssn": self.generator.ssn(),
"residence": self.generator.address(),
"current_location": (self.generator.latitude(), self.generator.longitude()),
"blood_group": "".join(self.random_element(list(itertools.product(["A", "B", "AB", "0"], ["+", "-"])))),
"website": [self.generator.url() for i in range(1, self.random_int(2, 5))]
}
d = dict(d, **self.generator.simple_profile())
#field selection
if len(fields) > 0:
d = dict((k, v) for (k, v) in d.items() if k in fields)
return d
| Python | 0.000669 |
981a74b116081f3ce1d97262c3c88104a953cdf4 | Use numpy's float supporting range | saau/sections/misc/header.py | saau/sections/misc/header.py | import matplotlib.pyplot as plt
from operator import itemgetter
from lxml.etree import fromstring, XMLSyntaxError
import numpy as np
def parse_lines(lines):
for line in lines:
try:
xml_line = fromstring(line.encode('utf-8'))
except XMLSyntaxError:
attrs = []
else:
attrs = [thing.tag for thing in xml_line.getiterator()]
line = list(xml_line.getiterator())[-1].text
yield line, attrs
def render_header_to(ax, sy, lines, sx=0.5):
calc = lambda q: q / 20
y_points = map(calc, np.arange(sy, 0, -1))
parsed = list(parse_lines(lines))
lines = map(itemgetter(0), parsed)
line_attrs = map(itemgetter(1), parsed)
lines = [
ax.figure.text(sx, y, text, ha='center')
for y, text in zip(y_points, lines)
]
for idx, attrs in enumerate(line_attrs):
if 'b' in attrs:
lines[idx].set_weight('extra bold')
if 'i' in attrs:
lines[idx].set_style('italic')
return ax
class Header:
__init__ = lambda self, _, a: None
has_required_data = lambda _: True
def build_image(self):
ax = plt.axes()
render_header_to(ax)
plt.show()
return ax
| import matplotlib.pyplot as plt
from operator import gt, lt, itemgetter
from lxml.etree import fromstring, XMLSyntaxError
def frange(start, stop, step):
cur = start
op = gt if start > stop else lt
while op(cur, stop):
yield cur
cur += step
def parse_lines(lines):
for line in lines:
try:
xml_line = fromstring(line.encode('utf-8'))
except XMLSyntaxError:
attrs = []
else:
attrs = [thing.tag for thing in xml_line.getiterator()]
line = list(xml_line.getiterator())[-1].text
yield line, attrs
def render_header_to(ax, sy, lines, sx=0.5):
calc = lambda q: q / 20
y_points = map(calc, frange(sy, 0, -1))
parsed = list(parse_lines(lines))
lines = map(itemgetter(0), parsed)
line_attrs = map(itemgetter(1), parsed)
lines = [
ax.figure.text(sx, y, text, ha='center')
for y, text in zip(y_points, lines)
]
for idx, attrs in enumerate(line_attrs):
if 'b' in attrs:
lines[idx].set_weight('extra bold')
if 'i' in attrs:
lines[idx].set_style('italic')
return ax
class Header:
__init__ = lambda self, _, a: None
has_required_data = lambda _: True
def build_image(self):
ax = plt.axes()
render_header_to(ax)
plt.show()
return ax
| Python | 0.000001 |
4e2fa3a249b9027f9e50f0f957163cad3bdc28bf | Fix typo | src/gramcore/features/points.py | src/gramcore/features/points.py | """Functions for extracting interest points.
These are applied to numpy.arrays representing images.
"""
import numpy
from skimage import feature
def harris(parameters):
"""Harris interest point operator.
It wraps `skimage.feature.harris`. The `threshold`, `eps` and
`gaussian_deviation` options are not supported.
This function returns an array of 0s and 1s. Harris points are marked
with 1s. This way the result can be easily transformed to an image. It
works on RGB and greyscale images.
The wrapped function returns a set of point coordinates in a list. For
some reason it is not possible to do something like:
>>> points = feature.harris(data, min_distance=5)
>>> data[points] = 1
so a for loop is used.
.. note::
The coordinates returned are not directly on the corner, but a pixel
inside the object (TODO: is this expected?).
:param parameters['data'][0]: input array
:type parameters['data'][0]: numpy.array
:param parameters['min_distance']: minimum number of pixels separating
interest points and image boundary,
defaults to 10
:type parameters['min_distance']: float
:return: numpy.array, it contains 1s where points were found, otherwise 0
"""
data = parameters['data'][0]
min_distance = parameters.get('min_distance', 10)
points = feature.harris(data, min_distance=min_distance)
result = numpy.zeros((data.shape[0], data.shape[1]), dtype='uint8')
for point in points:
result[point[0], point[1]] = 1
return result
| """Functions for extracting interest points.
These are applied to numpy.arrays representing images.
"""
import numpy
from skimage import feature
def harris(parameters):
"""Harris interest point operator.
It wraps `skimage.feature.harris`. The `threshold`, `eps` and
`gaussian_deviation` options are not supported.
This function returns an array of 0s and 1s. Harris points are marked
with 1s. This way the result can be easily transformed to an image. It
works on RGB and greyscale images.
The wrapped function returns a set of point coordinates in a list. For
some reason it is not possible to do something like:
>>> points = feature.harris(data, min_distance=5)
>>> data[points] = 1
Instead a for loop is used.
:param parameters['data'][0]: input array
:type parameters['data'][0]: numpy.array
:param parameters['min_distance']: minimum number of pixels separating
interest points and image boundary,
defaults to 10
:type parameters['min_distance']: float
:return: numpy.array, it contains 1s where points were found, otherwise 0
"""
data = parameters['data'][0]
min_distance = parameters.get('min_distance', 10)
points = feature.harris(data, min_distance=pmin_distance)
result = numpy.zeros((data.shape[0], data.shape[1]), dtype='uint8')
for point in points:
result[point[0], point[1]] = 1
return result
| Python | 0.999999 |
f4bbb244716f9471b520f53ebffaf34a31503cd1 | Remove unused imports (besides they are Py 2.x only) | Web/scripts/CPWeb/__init__.py | Web/scripts/CPWeb/__init__.py | """
CPWeb - A collection of commonly used routines to produce CoolProp's online documentation
=====
"""
from __future__ import division, absolute_import, print_function
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
| """
CPWeb - A collection of commonly used routines to produce CoolProp's online documentation
=====
"""
from __future__ import division, absolute_import, print_function
import codecs
import csv
import cStringIO
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
| Python | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.