commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
5c35a57d703b2627f9f8903841e8115c2e1606e4 | add io | methane/minefield,methane/minefield,methane/minefield,methane/minefield | meinheld/middleware.py | meinheld/middleware.py | from meinheld import server
import greenlet
CLIENT_KEY = 'meinheld.client'
CONTINUATION_KEY = 'meinheld.continuation'
IO_KEY = 'wsgix.io'
class Continuation(object):
    """Suspend/resume handle for the greenlet serving one meinheld client."""
    def __init__(self, client):
        # The meinheld client whose greenlet this continuation controls.
        self.client = client
    def suspend(self):
        # Park the client's greenlet inside the server until resume() is called.
        return server._suspend_client(self.client)
    def resume(self, *args, **kwargs):
        # Wake the suspended client; args/kwargs are forwarded as-is.
        return server._resume_client(self.client, args, kwargs)
class SpawnMiddleware(object):
    """WSGI middleware that runs the wrapped app inside a per-client greenlet."""
    def __init__(self, app):
        # The downstream WSGI application to execute.
        self.app = app
    def __call__(self, environ, start_response):
        client = environ[CLIENT_KEY]
        g = client.get_greenlet()
        if not g:
            # new greenlet
            # First call for this client: create the app greenlet, remember it
            # on the client so subsequent switches reuse it, and expose the
            # suspend/resume handle plus a raw socket built from the client's
            # file descriptor (the 'wsgix.io' extension).
            g = greenlet.greenlet(self.app)
            client.set_greenlet(g)
            c = Continuation(client)
            environ[CONTINUATION_KEY] = c
            s = server._get_socket_fromfd(client.get_fd())
            environ[IO_KEY] = s
        # Transfer control to the app greenlet (new or previously suspended).
        return g.switch(environ, start_response)
| from meinheld import server
import greenlet
CLIENT_KEY = 'meinheld.client'
CONTINUATION_KEY = 'meinheld.continuation'
class Continuation(object):
    """Suspend/resume handle for the greenlet serving one meinheld client."""
    def __init__(self, client):
        # The meinheld client whose greenlet this continuation controls.
        self.client = client
    def suspend(self):
        # Park the client's greenlet inside the server until resume() is called.
        return server._suspend_client(self.client)
    def resume(self, *args, **kwargs):
        # Wake the suspended client; args/kwargs are forwarded as-is.
        return server._resume_client(self.client, args, kwargs)
class SpawnMiddleware(object):
    """WSGI middleware that executes the wrapped app inside a greenlet."""
    def __init__(self, app):
        # The downstream WSGI application to execute.
        self.app = app
    def __call__(self, environ, start_response):
        # A fresh greenlet per call; it is attached to the client and a
        # Continuation handle is exposed so the app can suspend/resume itself.
        g = greenlet.greenlet(self.app)
        client = environ[CLIENT_KEY]
        client.set_greenlet(g)
        c = Continuation(client)
        environ[CONTINUATION_KEY] = c
        return g.switch(environ, start_response)
| bsd-3-clause | Python |
996f110415cfb1f84ec6171f0b1a362b537685d8 | Fix "invalid escape sequence \s" deprecation warning | andersinno/django-form-designer-ai,andersinno/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,kcsry/django-form-designer | form_designer/email.py | form_designer/email.py | import re
import django
from django.core.mail import EmailMessage
from django.utils.encoding import force_text
from form_designer.utils import string_template_replace
DJANGO_18 = django.VERSION[:2] >= (1, 8)
def _template_replace_list(input_str, context_dict):
    """
    Split *input_str* on commas/semicolons and template-replace each piece.

    Falsy input values yield empty lists.

    :param input_str: Comma-or-semicolon-separated list of values
    :type input_str: str|None
    :param context_dict: The context for template replacement
    :return: List of strings
    :rtype: list[str]
    """
    if not input_str:
        return []
    # Separators may carry surrounding whitespace; collapse runs of , and ;
    pieces = re.split(r'\s*[,;]+\s*', force_text(input_str))
    return [string_template_replace(piece, context_dict) for piece in pieces]
def build_form_mail(form_definition, form, files=None):
    """
    Build a form-submission email based on the given form definition and associated submitted form

    :param form_definition: Form definition object
    :param form: The freshly submitted form
    :param files: Associated files (paths to attach when mail_uploaded_files is set)
    :return: Django email message
    """
    if not files:
        files = []
    form_data = form_definition.get_form_data(form)
    message = form_definition.compile_message(form_data)
    context_dict = form_definition.get_form_data_context(form_data)
    # Address fields may contain template variables and multiple
    # comma/semicolon separated addresses.
    mail_to = _template_replace_list(form_definition.mail_to, context_dict)
    if form_definition.mail_from:
        from_email = string_template_replace(form_definition.mail_from, context_dict)
    else:
        from_email = None
    reply_to = _template_replace_list(form_definition.mail_reply_to, context_dict)
    # Fall back to the form's title when no explicit subject is configured.
    mail_subject = string_template_replace(
        (form_definition.mail_subject or form_definition.title),
        context_dict
    )
    kwargs = {
        'subject': mail_subject,
        'body': message,
        'from_email': from_email,
        'to': mail_to,
    }
    if DJANGO_18:  # the reply_to kwarg is only supported in Django 1.8+ . . .
        kwargs['reply_to'] = reply_to
    message = EmailMessage(**kwargs)
    if not DJANGO_18:  # so do it manually when not on Django 1.8
        message.extra_headers['Reply-To'] = ', '.join(map(force_text, reply_to))
    if form_definition.is_template_html:
        message.content_subtype = "html"
    if form_definition.mail_uploaded_files:
        for file_path in files:
            message.attach_file(file_path)
    return message
| import re
import django
from django.core.mail import EmailMessage
from django.utils.encoding import force_text
from form_designer.utils import string_template_replace
DJANGO_18 = django.VERSION[:2] >= (1, 8)
def _template_replace_list(input_str, context_dict):
    """
    Split the input string by commas or semicolons, then template-replace.

    Falsy input values yield empty lists.

    :param input_str: Comma-or-semicolon-separated list of values
    :type input_str: str|None
    :param context_dict: The context for template replacement
    :return: List of strings
    :rtype: list[str]
    """
    if not input_str:
        return []
    return [
        string_template_replace(email, context_dict)
        for email
        # Raw string literal: '\s' in a plain string is an invalid escape
        # sequence (DeprecationWarning since Python 3.6, an error later).
        in re.compile(r'\s*[,;]+\s*').split(force_text(input_str))
    ]
def build_form_mail(form_definition, form, files=None):
    """
    Build a form-submission email based on the given form definition and associated submitted form

    :param form_definition: Form definition object
    :param form: The freshly submitted form
    :param files: Associated files (paths to attach when mail_uploaded_files is set)
    :return: Django email message
    """
    if not files:
        files = []
    form_data = form_definition.get_form_data(form)
    message = form_definition.compile_message(form_data)
    context_dict = form_definition.get_form_data_context(form_data)
    # Address fields may contain template variables and multiple
    # comma/semicolon separated addresses.
    mail_to = _template_replace_list(form_definition.mail_to, context_dict)
    if form_definition.mail_from:
        from_email = string_template_replace(form_definition.mail_from, context_dict)
    else:
        from_email = None
    reply_to = _template_replace_list(form_definition.mail_reply_to, context_dict)
    # Fall back to the form's title when no explicit subject is configured.
    mail_subject = string_template_replace(
        (form_definition.mail_subject or form_definition.title),
        context_dict
    )
    kwargs = {
        'subject': mail_subject,
        'body': message,
        'from_email': from_email,
        'to': mail_to,
    }
    if DJANGO_18:  # the reply_to kwarg is only supported in Django 1.8+ . . .
        kwargs['reply_to'] = reply_to
    message = EmailMessage(**kwargs)
    if not DJANGO_18:  # so do it manually when not on Django 1.8
        message.extra_headers['Reply-To'] = ', '.join(map(force_text, reply_to))
    if form_definition.is_template_html:
        message.content_subtype = "html"
    if form_definition.mail_uploaded_files:
        for file_path in files:
            message.attach_file(file_path)
    return message
| bsd-3-clause | Python |
a6362b702b5494146a6c9e1d5accf03797fee981 | Reduce duplication of settings name | Rypac/sublime-format | src/settings.py | src/settings.py | import sublime
class Settings():
    """Static helpers wrapping the Sublime Text settings API for Format."""
    # Settings file name; also used as the on_change registration key.
    FORMAT_SETTINGS = 'Format.sublime-settings'
    @staticmethod
    def load():
        # Return the live sublime.Settings object for this plugin.
        return sublime.load_settings(Settings.FORMAT_SETTINGS)
    @staticmethod
    def save():
        # Persist any pending changes to disk.
        sublime.save_settings(Settings.FORMAT_SETTINGS)
    @staticmethod
    def on_change(callback):
        # Registering with the same key replaces any previous callback.
        Settings.load().add_on_change(Settings.FORMAT_SETTINGS, callback)
    @staticmethod
    def formatters():
        # Mapping of formatter name -> per-formatter settings dict.
        return Settings.load().get('formatters', default={})
    @staticmethod
    def paths():
        # Extra search paths for locating formatter executables.
        return Settings.load().get('paths', default=[])
    @staticmethod
    def update_formatter(name, value):
        # Read-modify-write of the whole 'formatters' mapping, then save.
        settings = Settings.load()
        formatters = Settings.formatters()
        formatters[name] = value
        settings.set('formatters', formatters)
        Settings.save()
class FormatterSettings():
    """Read/write access to the settings of a single named formatter."""

    def __init__(self, formatter):
        """Load the stored settings dict for *formatter* (empty if unknown)."""
        self.__formatter = formatter
        self.__settings = Settings.formatters().get(formatter, {})

    def get(self, value, default=None):
        """Return the setting named *value*, or *default* when absent."""
        return self.__settings.get(value, default)

    def set(self, key, value):
        """Store *key* = *value* and persist it through ``Settings``."""
        self.__settings[key] = value
        Settings.update_formatter(self.__formatter, self.__settings)

    @property
    def format_on_save(self):
        """Whether this formatter runs automatically on save (default False)."""
        return self.get('format_on_save', False)

    @format_on_save.setter
    def format_on_save(self, value):
        return self.set('format_on_save', value)

    @property
    def sources(self):
        """Source selectors this formatter applies to (default empty list)."""
        return self.get('sources', [])

    @property
    def args(self):
        """Extra command-line arguments for the formatter (default empty list)."""
        return self.get('args', [])
| import sublime
class Settings():
    """Static helpers wrapping the Sublime Text settings API for Format."""

    # Single source of truth for the settings file name (previously
    # duplicated as a string literal in load/save/on_change).
    FORMAT_SETTINGS = 'Format.sublime-settings'

    @staticmethod
    def load():
        # Return the live sublime.Settings object for this plugin.
        return sublime.load_settings(Settings.FORMAT_SETTINGS)
    @staticmethod
    def save():
        # Persist any pending changes to disk.
        sublime.save_settings(Settings.FORMAT_SETTINGS)
    @staticmethod
    def on_change(callback):
        # Registering with the same key replaces any previous callback.
        Settings.load().add_on_change(Settings.FORMAT_SETTINGS, callback)
    @staticmethod
    def formatters():
        # Mapping of formatter name -> per-formatter settings dict.
        return Settings.load().get('formatters', default={})
    @staticmethod
    def paths():
        # Extra search paths for locating formatter executables.
        return Settings.load().get('paths', default=[])
    @staticmethod
    def update_formatter(name, value):
        # Read-modify-write of the whole 'formatters' mapping, then save.
        settings = Settings.load()
        formatters = Settings.formatters()
        formatters[name] = value
        settings.set('formatters', formatters)
        Settings.save()
class FormatterSettings():
    """Read/write access to the settings of a single named formatter."""
    def __init__(self, formatter):
        # Snapshot of this formatter's settings dict (empty if not stored).
        self.__formatter = formatter
        self.__settings = Settings.formatters().get(formatter, {})
    def get(self, value, default=None):
        # Lookup within the snapshot only; does not re-read Settings.
        return self.__settings.get(value, default)
    def set(self, key, value):
        # Update the snapshot and persist it back through Settings.
        self.__settings[key] = value
        Settings.update_formatter(self.__formatter, self.__settings)
    @property
    def format_on_save(self):
        # Whether this formatter runs automatically on save.
        return self.get('format_on_save', default=False)
    @format_on_save.setter
    def format_on_save(self, value):
        return self.set('format_on_save', value)
    @property
    def sources(self):
        # Source selectors this formatter applies to.
        return self.get('sources', default=[])
    @property
    def args(self):
        # Extra command-line arguments passed to the formatter.
        return self.get('args', default=[])
| mit | Python |
cb9e234600c3f06c7dec084d2055f4a52b1f5975 | Update urls.py | nleng/django-monit-collector,nleng/django-monit-collector,nleng/django-monit-collector,nleng/django-monit-collector | monitcollector/urls.py | monitcollector/urls.py | """monitcollector URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
# URL table for the monit collector app.
# NOTE(review): string dotted-path view arguments are deprecated in
# Django 1.8/1.9 and removed in 1.10 -- import the views module and pass
# callables when upgrading.
urlpatterns = [
    url(r'^$', 'monitcollector.views.dashboard', name='dashboard'),
    url(r'^dashboard/$', 'monitcollector.views.dashboard', name='dashboard'),
    url(r'^collector$', 'monitcollector.views.collector', name='collector'),
    url(r'^server/(?P<server_id>\d+)/$', 'monitcollector.views.server', name='server'),
    url(r'^server/(?P<server_id>\w+)/process/(?P<process_name>[^/]+)/$', 'monitcollector.views.process', name='process'),
    url(r'^process_action/(?P<server_id>\d+)/$', 'monitcollector.views.process_action', name='process_action'),
    url(r'^confirm_delete/(?P<server_id>\d+)/$', 'monitcollector.views.confirm_delete', name='confirm_delete'),
    url(r'^delete_server/(?P<server_id>\d+)/$', 'monitcollector.views.delete_server', name='delete_server'),
    url(r'^load_system_data/(?P<server_id>\d+)/$', 'monitcollector.views.load_system_data', name='load_system_data'),
    url(r'^load_process_data/(?P<server_id>\d+)/(?P<process_name>[^/]+)/$', 'monitcollector.views.load_process_data', name='load_process_data'),
    url(r'^load_dashboard_table/$', 'monitcollector.views.load_dashboard_table', name='load_dashboard_table'),
    url(r'^load_system_table/(?P<server_id>\d+)/$', 'monitcollector.views.load_system_table', name='load_system_table'),
    url(r'^load_process_table/(?P<server_id>\d+)/(?P<process_name>[^/]+)/$', 'monitcollector.views.load_process_table', name='load_process_table'),
]
| """prettymonit URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
# URL table for the monit collector app.
# NOTE(review): string dotted-path view arguments are deprecated in
# Django 1.8/1.9 and removed in 1.10 -- import the views module and pass
# callables when upgrading.
urlpatterns = [
    url(r'^$', 'monitcollector.views.dashboard', name='dashboard'),
    url(r'^dashboard/$', 'monitcollector.views.dashboard', name='dashboard'),
    url(r'^collector$', 'monitcollector.views.collector', name='collector'),
    url(r'^server/(?P<server_id>\d+)/$', 'monitcollector.views.server', name='server'),
    url(r'^server/(?P<server_id>\w+)/process/(?P<process_name>[^/]+)/$', 'monitcollector.views.process', name='process'),
    url(r'^process_action/(?P<server_id>\d+)/$', 'monitcollector.views.process_action', name='process_action'),
    url(r'^confirm_delete/(?P<server_id>\d+)/$', 'monitcollector.views.confirm_delete', name='confirm_delete'),
    url(r'^delete_server/(?P<server_id>\d+)/$', 'monitcollector.views.delete_server', name='delete_server'),
    url(r'^load_system_data/(?P<server_id>\d+)/$', 'monitcollector.views.load_system_data', name='load_system_data'),
    url(r'^load_process_data/(?P<server_id>\d+)/(?P<process_name>[^/]+)/$', 'monitcollector.views.load_process_data', name='load_process_data'),
    url(r'^load_dashboard_table/$', 'monitcollector.views.load_dashboard_table', name='load_dashboard_table'),
    url(r'^load_system_table/(?P<server_id>\d+)/$', 'monitcollector.views.load_system_table', name='load_system_table'),
    url(r'^load_process_table/(?P<server_id>\d+)/(?P<process_name>[^/]+)/$', 'monitcollector.views.load_process_table', name='load_process_table'),
]
| bsd-3-clause | Python |
3144b548d58f219b573d82bbb2d1bfee171061d9 | Fix history view after merge | PressLabs/gitfs,rowhit/gitfs,PressLabs/gitfs,bussiere/gitfs,ksmaheshkumar/gitfs | gitfs/views/history.py | gitfs/views/history.py | import os
from stat import S_IFDIR
from errno import ENOENT
from fuse import FuseOSError
from gitfs.log import log
from .read_only import ReadOnlyView
class HistoryView(ReadOnlyView):
    """Read-only FUSE view exposing repository history as date directories."""
    def getattr(self, path, fh=None):
        '''
        Returns a dictionary with keys identical to the stat C structure of
        stat(2).

        st_atime, st_mtime and st_ctime should be floats.

        NOTE: There is an incombatibility between Linux and Mac OS X
        concerning st_nlink of directories. Mac OS X counts all files inside
        the directory, while Linux counts only the subdirectories.
        '''
        # Only the root and known commit-date directories exist in this view.
        if path not in self.repo.get_commit_dates() and path != '/':
            raise FuseOSError(ENOENT)
        attrs = super(HistoryView, self).getattr(path, fh)
        # Every entry is presented as a read-only directory.
        attrs.update({
            'st_mode': S_IFDIR | 0555,
            'st_nlink': 2
        })
        return attrs
    def access(self, path, amode):
        # When a specific date is selected, validate the date itself (root)
        # or the commit directory inside it; otherwise only '/' is valid.
        if getattr(self, 'date', None):
            log.info('PATH: %s', path)
            if path == '/':
                available_dates = self.repo.get_commit_dates()
                if self.date not in available_dates:
                    raise FuseOSError(ENOENT)
            else:
                commits = self.repo.get_commits_by_date(self.date)
                dirname = os.path.split(path)[1]
                if dirname not in commits:
                    raise FuseOSError(ENOENT)
        else:
            if path != '/':
                raise FuseOSError(ENOENT)
        return 0
    def readdir(self, path, fh):
        # Yield commits for a selected date, otherwise the list of dates.
        if getattr(self, 'date', None):
            additional_entries = self.repo.get_commits_by_date(self.date)
        else:
            additional_entries = self.repo.get_commit_dates()
        dir_entries = ['.', '..'] + additional_entries
        for entry in dir_entries:
            yield entry
| import os
from stat import S_IFDIR
from errno import ENOENT
from fuse import FuseOSError
from gitfs.log import log
from .read_only import ReadOnlyView
class HistoryView(ReadOnlyView):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
if path not in self._get_commit_dates() and path != '/':
raise FuseOSError(ENOENT)
attrs = super(HistoryView, self).getattr(path, fh)
attrs.update({
'st_mode': S_IFDIR | 0555,
'st_nlink': 2
})
return attrs
def access(self, path, amode):
if getattr(self, 'date', None):
log.info('PATH: %s', path)
if path == '/':
available_dates = self.repo.get_commit_dates()
if self.date not in available_dates:
raise FuseOSError(ENOENT)
else:
commits = self.repo.get_commits_by_date(self.date)
dirname = os.path.split(path)[1]
if dirname not in commits:
raise FuseOSError(ENOENT)
else:
if path != '/':
raise FuseOSError(ENOENT)
return 0
def readdir(self, path, fh):
if getattr(self, 'date', None):
additional_entries = self.repo.get_commits_by_date(self.date)
else:
additional_entries = self.repo.get_commit_dates()
dir_entries = ['.', '..'] + additional_entries
for entry in dir_entries:
yield entry
| apache-2.0 | Python |
e3e52d63a05d0373a91a4cac746ad09dbbc42f13 | Remove filesystem signal for stopping speech | MycroftAI/mycroft-core,forslund/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core | mycroft/audio/utils.py | mycroft/audio/utils.py | # Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
from mycroft.util.signal import check_for_signal
def is_speaking():
    """Determine if Text to Speech is occurring

    Returns:
        bool: True while still speaking
    """
    # "isSpeaking" is a filesystem signal maintained by the TTS subsystem.
    # The -1 lifetime presumably checks the signal's presence without
    # expiring it -- TODO confirm against mycroft.util.signal docs.
    return check_for_signal("isSpeaking", -1)
def wait_while_speaking():
    """Pause as long as Text to Speech is still happening

    Pause while Text to Speech is still happening.  This always pauses
    briefly to ensure that any preceeding request to speak has time to
    begin.
    """
    time.sleep(0.3)  # Wait briefly in for any queued speech to begin
    # Poll the speech signal until it clears.
    while is_speaking():
        time.sleep(0.1)
def stop_speaking():
    """Stop any ongoing TTS output and block until it has actually stopped."""
    # TODO: Less hacky approach to this once Audio Manager is implemented
    # Skills should only be able to stop speech they've initiated
    from mycroft.messagebus.send import send
    send('mycroft.audio.speech.stop')
    # Block until stopped
    while check_for_signal("isSpeaking", -1):
        time.sleep(0.25)
| # Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
from mycroft.util.signal import check_for_signal, create_signal
def is_speaking():
    """Determine if Text to Speech is occurring

    Returns:
        bool: True while still speaking
    """
    # "isSpeaking" is a filesystem signal maintained by the TTS subsystem.
    # The -1 lifetime presumably checks the signal's presence without
    # expiring it -- TODO confirm against mycroft.util.signal docs.
    return check_for_signal("isSpeaking", -1)
def wait_while_speaking():
    """Pause as long as Text to Speech is still happening

    Pause while Text to Speech is still happening.  This always pauses
    briefly to ensure that any preceeding request to speak has time to
    begin.
    """
    time.sleep(0.3)  # Wait briefly in for any queued speech to begin
    # Poll the speech signal until it clears.
    while is_speaking():
        time.sleep(0.1)
def stop_speaking():
    """Stop any ongoing TTS output and block until it has actually stopped."""
    # TODO: Less hacky approach to this once Audio Manager is implemented
    # Skills should only be able to stop speech they've initiated
    from mycroft.messagebus.send import send
    # Filesystem flag telling the audio service a stop is in progress.
    create_signal('stoppingTTS')
    send('mycroft.audio.speech.stop')
    # Block until stopped
    while check_for_signal("isSpeaking", -1):
        time.sleep(0.25)
    # This consumes the signal
    check_for_signal('stoppingTTS')
| apache-2.0 | Python |
c9c2cb0661de4a0fd955068acc5a1c7dbc2de790 | update tests | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | test/test_fasta.py | test/test_fasta.py | import os
from sequana import FastA, sequana_data
from sequana.fasta import is_fasta
from easydev import TempFile
def test_format_contigs_denovo():
    """Smoke-test contig formatting plus the basic FastA accessors/metrics."""
    # test with a custom fasta
    filename = sequana_data("test_fasta.fasta")
    contigs = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        contigs.format_contigs_denovo(fh.name)
    # Exercised for coverage only; values are not asserted here.
    contigs.names
    contigs.lengths
    contigs.comments
    contigs.GC_content()
    contigs.GC_content_sequence(contigs.sequences[0])
    contigs.summary()
def test_fasta_filtering():
    """Filtering by excluded/kept names must write the expected records."""
    filename = sequana_data("test_fasta_filtering.fa")
    ff = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        ff.to_fasta(fh.name)
        ff.save_ctg_to_fasta("A", fh.name)
    with TempFile(suffix='.fasta') as fh:
        # Excluding A and B must leave exactly C and D.
        ff.filter(fh.name, names_to_exclude=["A", "B"])
        reader = FastA(fh.name)
        assert set(reader.names) == set(["C", "D"])
    ff = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        # Keeping only A must drop every other record.
        ff.filter(fh.name, names_to_keep=["A",])
        reader = FastA(fh.name)
        assert set(reader.names) == set(['A'])
def test_others():
    """Exercise length/stats helpers, random sampling and export formats."""
    filename = sequana_data("test_fasta.fasta")
    ff = FastA(filename)
    assert len(ff) == 16
    assert len(ff.comments) == 16
    assert len(ff.names) == 16
    assert len(ff.sequences) == 16
    assert is_fasta(filename) == True
    ff.get_lengths_as_dict()
    with TempFile(suffix='.fasta') as fh:
        ff.select_random_reads(4, output_filename=fh.name)
        ff.select_random_reads([1,2,3], output_filename=fh.name)
        ff.select_random_reads({1,2,3}, output_filename=fh.name)
        # Requesting more reads than available must also be handled.
        ff.select_random_reads(100000, output_filename=fh.name)
    assert ff.get_stats()['N'] == 16
    assert ff.get_stats()['mean_length'] > 454
    with TempFile(suffix='.fasta') as fh:
        ff.reverse_and_save(fh.name)
        ff.to_fasta(fh.name)
        ff.to_igv_chrom_size(fh.name)
| import os
from sequana import FastA, sequana_data
from sequana.fasta import is_fasta
from easydev import TempFile
def test_format_contigs_denovo():
    """Smoke-test contig formatting plus the basic FastA accessors."""
    # test with a custom fasta
    filename = sequana_data("test_fasta.fasta")
    contigs = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        contigs.format_contigs_denovo(fh.name)
    # Exercised for coverage only; values are not asserted here.
    contigs.names
    contigs.lengths
    contigs.comments
def test_fasta_filtering():
    """Filtering by excluded/kept names must write the expected records."""
    filename = sequana_data("test_fasta_filtering.fa")
    ff = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        ff.to_fasta(fh.name)
        ff.save_ctg_to_fasta("A", fh.name)
    with TempFile(suffix='.fasta') as fh:
        # Excluding A and B must leave exactly C and D.
        ff.filter(fh.name, names_to_exclude=["A", "B"])
        reader = FastA(fh.name)
        assert set(reader.names) == set(["C", "D"])
    ff = FastA(filename)
    with TempFile(suffix='.fasta') as fh:
        # Keeping only A must drop every other record.
        ff.filter(fh.name, names_to_keep=["A",])
        reader = FastA(fh.name)
        assert set(reader.names) == set(['A'])
def test_others():
    """Exercise length/stats helpers, random sampling and export formats."""
    filename = sequana_data("test_fasta.fasta")
    ff = FastA(filename)
    assert len(ff) == 16
    assert len(ff.comments) == 16
    assert len(ff.names) == 16
    assert len(ff.sequences) == 16
    assert is_fasta(filename) == True
    ff.get_lengths_as_dict()
    with TempFile(suffix='.fasta') as fh:
        ff.select_random_reads(4, output_filename=fh.name)
        ff.select_random_reads([1,2,3], output_filename=fh.name)
        ff.select_random_reads({1,2,3}, output_filename=fh.name)
    assert ff.get_stats()['N'] == 16
    assert ff.get_stats()['mean_length'] > 454
    with TempFile(suffix='.fasta') as fh:
        ff.reverse_and_save(fh.name)
        ff.to_fasta(fh.name)
        ff.to_igv_chrom_size(fh.name)
| bsd-3-clause | Python |
e7954822d9c941b84917c4ec4a0a2f5357c9751b | Fix test to make it compatible with further changes | ahitrin/carlo | test/test_model.py | test/test_model.py | # coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import Model, ModelException, eq, int_val, string_val
import pytest
def test_minimal_model():
    """A single entity with one constant-valued parameter round-trips."""
    m = Model(const={'int': int_val(42)}).build()
    assert [('const', {'int': 42})] == m.create()
    m = Model(const2={'str': string_val('hello')}).build()
    assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
    """Two independent entities are both created."""
    m = Model(first={'name': string_val('elves')},
              second={'name': string_val('humans')}).build()
    assert sorted([('first', {'name': 'elves'}),
                   ('second', {'name': 'humans'})]) ==\
        sorted(m.create())
def test_model_with_multiple_params():
    """An entity may carry several parameters of mixed types."""
    m = Model(human={
        'head': int_val(1),
        'hands': int_val(2),
        'name': string_val('Hurin'),
    }).build()
    assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
# restrictions
def test_restriction_must_override_parameter_definition():
    """eq() forces the follower's unconstrained value to match the leader."""
    m = Model(leader={'direction': string_val('north')},
              follower={'direction': string_val()},
              ).restricted_by(eq('leader.direction', 'follower.direction')).build()
    assert sorted([('leader', {'direction': 'north'}),
                   ('follower', {'direction': 'north'})]) == \
        sorted(m.create())
# error handling
def test_fields_with_different_type_could_not_be_eq():
    """Restricting fields of different types must fail at build()."""
    m = Model(leader={'direction': string_val('north')},
              follower={'direction': int_val(13)}
              ).restricted_by(eq('leader.direction', 'follower.direction'))
    with pytest.raises(ModelException):
        m.build()
| # coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import Model, ModelException, eq, int_val, string_val
import pytest
def test_minimal_model():
    """A single entity with one constant-valued parameter round-trips."""
    m = Model(const={'int': int_val(42)}).build()
    assert [('const', {'int': 42})] == m.create()
    m = Model(const2={'str': string_val('hello')}).build()
    assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
    """Two independent entities are both created."""
    m = Model(first={'name': string_val('elves')},
              second={'name': string_val('humans')}).build()
    assert sorted([('first', {'name': 'elves'}),
                   ('second', {'name': 'humans'})]) ==\
        sorted(m.create())
def test_model_with_multiple_params():
    """An entity may carry several parameters of mixed types."""
    m = Model(human={
        'head': int_val(1),
        'hands': int_val(2),
        'name': string_val('Hurin'),
    }).build()
    assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
# restrictions
def test_restriction_must_override_parameter_definition():
    """eq() overrides the follower's own declared value with the leader's."""
    m = Model(leader={'direction': string_val('north')},
              follower={'direction': string_val('west')},
              ).restricted_by(eq('leader.direction', 'follower.direction')).build()
    assert sorted([('leader', {'direction': 'north'}),
                   ('follower', {'direction': 'north'})]) == \
        sorted(m.create())
# error handling
def test_fields_with_different_type_could_not_be_eq():
    """Restricting fields of different types must fail at build()."""
    m = Model(leader={'direction': string_val('north')},
              follower={'direction': int_val(13)}
              ).restricted_by(eq('leader.direction', 'follower.direction'))
    with pytest.raises(ModelException):
        m.build()
| mit | Python |
2933ac6a0ecdd18763a528559d2813512d309873 | Add Unit Tests for utils.py | Alignak-monitoring-contrib/alignak-app,Alignak-monitoring-contrib/alignak-app | test/test_utils.py | test/test_utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import os
import unittest2
import alignak_app.core.utils as utils
class TestUtils(unittest2.TestCase):
    """
    This file test methods of `utils.py` file.
    """

    def test_get_app_root(self):
        """Get Alignak-App Root Folder"""
        # App root is expected under the user's ~/.local prefix.
        expected_home = os.environ['HOME'] + '/.local'
        home = utils.get_app_root()
        self.assertEqual(home, expected_home)

    def test_app_config(self):
        """Set and Get app_config"""
        # Reset app_config to None
        utils.app_config = None
        self.assertIsNone(utils.app_config)
        utils.init_config()
        self.assertIsNotNone(utils.app_config)

    def test_set_app_config(self):
        """set_app_config() updates a value readable via get_app_config()."""
        # Reset and Init "app_config"
        utils.app_config = None
        self.assertIsNone(utils.app_config)
        utils.init_config()
        # Get current url
        under_test = utils.get_app_config('Backend', 'alignak_url')
        self.assertEqual('http://94.76.229.155', under_test)
        # Change url
        utils.set_app_config('Backend', 'alignak_url', 'http://127.0.0.1')
        new_under_test = utils.get_app_config('Backend', 'alignak_url')
        self.assertEqual('http://127.0.0.1', new_under_test)
        # Back url to normal
        utils.set_app_config('Backend', 'alignak_url', 'http://94.76.229.155')
        last_under_test = utils.get_app_config('Backend', 'alignak_url')
        self.assertEqual('http://94.76.229.155', last_under_test)

    def test_reload_config(self):
        """init_config() must build a brand-new config object on each call."""
        # Reset and Init "app_config"
        utils.app_config = None
        self.assertIsNone(utils.app_config)
        utils.init_config()
        cur_config = utils.app_config
        utils.init_config()
        new_config = utils.app_config
        self.assertFalse(cur_config is new_config)
        self.assertTrue(utils.app_config is new_config)

    def test_get_image_path(self):
        """Get Right Image Path"""
        utils.init_config()
        expected_img = utils.get_app_root() + '/alignak_app/images/icon.svg'
        under_test = utils.get_image_path('icon')
        self.assertEqual(under_test, expected_img)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016:
# Matthieu Estrada, ttamalfor@gmail.com
#
# This file is part of (AlignakApp).
#
# (AlignakApp) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (AlignakApp) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (AlignakApp). If not, see <http://www.gnu.org/licenses/>.
import os
import unittest2
import alignak_app.core.utils as utils
class TestUtils(unittest2.TestCase):
    """
    This file test methods of `utils.py` file.
    """

    def test_get_app_root(self):
        """Get Alignak-App Root Folder"""
        # App root is expected under the user's ~/.local prefix.
        expected_home = os.environ['HOME'] + '/.local'
        home = utils.get_app_root()
        self.assertEqual(home, expected_home)

    def test_app_config(self):
        """Set and Get app_config"""
        # Reset app_config to None
        utils.app_config = None
        self.assertIsNone(utils.app_config)
        utils.init_config()
        self.assertIsNotNone(utils.app_config)

    def test_get_image_path(self):
        """Get Right Image Path"""
        utils.init_config()
        expected_img = utils.get_app_root() + '/alignak_app/images/icon.svg'
        under_test = utils.get_image_path('icon')
        self.assertEqual(under_test, expected_img)
| agpl-3.0 | Python |
75379d0510961c8a88251de38c87ea3fe4f31542 | Fix CORS headers configuration. | ironweb/lesfeuxverts-backend | greenlight/settings.py | greenlight/settings.py | # Django settings for greenlight project.
# NOTE(review): DEBUG must be disabled for production deployments.
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)

MANAGERS = ADMINS

TIME_ZONE = 'America/Montreal'

USE_I18N = False

# NOTE(review): a secret key committed to the repository should be rotated
# and loaded from the environment instead of being hard-coded here.
SECRET_KEY = '9&@r*3hth4m=ml5t5tme0*(9^x@2xqo-ua^s+wg_ws(-^4-7@v'

# BUG FIX: django-cors-headers requires CorsMiddleware to be placed before
# any middleware that can generate responses (CommonMiddleware in
# particular); otherwise CORS headers are missing from those responses.
# The previous order was inverted.
MIDDLEWARE_CLASSES = (
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'greenlight.urls'

WSGI_APPLICATION = 'greenlight.wsgi.application'

INSTALLED_APPS = (
    'django_extensions',
    'corsheaders',
)

# Accept cross-origin requests from any origin.
CORS_ORIGIN_ALLOW_ALL = True

# Mail unhandled-request errors to ADMINS when DEBUG is off.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| # Django settings for greenlight project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
TIME_ZONE = 'America/Montreal'
USE_I18N = False
SECRET_KEY = '9&@r*3hth4m=ml5t5tme0*(9^x@2xqo-ua^s+wg_ws(-^4-7@v'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'greenlight.urls'
WSGI_APPLICATION = 'greenlight.wsgi.application'
INSTALLED_APPS = (
'django_extensions'
)
CORS_ORIGIN_ALLOW_ALL = True
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| mit | Python |
2b4ccb7b7dd9bf24d74fc686c7f4296cfe34db51 | Add registration functions to `reg` | DudLab/nanshe_workflow,nanshe-org/nanshe_workflow | nanshe_workflow/reg.py | nanshe_workflow/reg.py | from builtins import range as irange
import numpy
import scipy.ndimage
import dask
import dask.array
def fourier_shift_wrap(array, shift):
    """Apply a per-frame Fourier-domain shift to a stack of frames.

    Frame ``i`` of ``array`` is run through ``scipy.ndimage.fourier_shift``
    with the shift vector ``shift[0][i]``.  The result has the same shape
    and dtype as ``array``.
    """
    shifted = numpy.empty_like(array)
    for idx, frame in enumerate(array):
        shifted[idx] = scipy.ndimage.fourier_shift(frame, shift[0][idx])
    return shifted
def find_best_match(matches):
    """Return the candidate displacement closest to the origin.

    ``matches`` holds one candidate per column of a ``(n_dims, n_cand)``
    array.  With no candidates, a zero vector of length ``n_dims`` and the
    same dtype is returned instead.
    """
    if not matches.size:
        return numpy.zeros(matches.shape[:1], dtype=matches.dtype)
    norms_sq = (matches * matches).sum(axis=0)
    return matches[:, norms_sq.argmin()]


def compute_offset(match_mask):
    """Best integer registration offset for each frame of a match mask.

    Nonzero coordinates past the half-way point of a frame are wrapped to
    negative offsets, and the candidate nearest the origin is kept per
    frame.  A (possibly nested) single-element list wrapper around the
    mask is unwrapped first.  Returns an ``(n_frames, n_dims)`` int array.
    """
    while type(match_mask) is list:
        match_mask = match_mask[0]
    offsets = numpy.empty((len(match_mask), match_mask.ndim - 1), dtype=int)
    for frame_idx, frame in enumerate(match_mask):
        shape = numpy.array(frame.shape)
        half_shape = shape // 2
        coords = numpy.array(frame.nonzero())
        # Coordinates in the upper half alias negative displacements.
        wrapped = (coords > half_shape[:, None]).astype(coords.dtype)
        coords -= wrapped * shape[:, None]
        offsets[frame_idx] = find_best_match(coords)
    return offsets
def roll_frames_chunk(frames, shifts):
    """Circularly shift every frame of a chunk by its own offset.

    Frame ``i`` is rolled by ``shifts[i]`` along each of its axes.  The
    input is copied up front -- Dask may share the buffer with other
    tasks (and a sole reference lets the old buffer be freed) -- so the
    caller's array is never mutated.
    """
    rolled = numpy.copy(frames)
    frame_axes = tuple(range(frames.ndim - 1))
    for idx in range(len(rolled)):
        rolled[idx] = numpy.roll(rolled[idx], tuple(shifts[idx]), axis=frame_axes)
    return rolled
def roll_frames(frames, shifts):
    """Lazily apply per-frame circular shifts to a Dask array stack.

    Parameters
    ----------
    frames : dask.array.Array
        Stack of frames; rechunked below so each frame is one chunk.
    shifts : dask.array.Array
        Integer offsets, one row per frame; rechunked to whole rows.

    Returns
    -------
    dask.array.Array
        The frames rolled by their offsets (evaluated lazily).
    """
    # One chunk per frame so each task receives whole frames.
    frames = frames.rechunk({
        k: v for k, v in enumerate(frames.shape[1:], 1)
    })
    # Each shift row must arrive intact alongside its frame.
    shifts = shifts.rechunk({1: shifts.shape[1]})
    # NOTE(review): dask.array.atop was renamed to blockwise in newer Dask
    # releases -- confirm the pinned Dask version still exports it.
    rolled_frames = dask.array.atop(
        roll_frames_chunk, tuple(irange(frames.ndim)),
        frames, tuple(irange(frames.ndim)),
        shifts, (0, frames.ndim),
        dtype=frames.dtype,
        concatenate=True
    )
    return rolled_frames
| apache-2.0 | Python | |
7ee30ea1606122e97119ca7190f080c2ed105f6a | fix support title handling + better find budget items for bad takanas | omerbartal/open-budget-data,OpenBudget/open-budget-data,omerbartal/open-budget-data,OpenBudget/open-budget-data | processors/fix_support_budget_titles.py | processors/fix_support_budget_titles.py | import json
import logging
class fix_support_budget_titles(object):
    """Repair support records against the budget reference data.

    ``process()`` reads two JSON-lines files -- support records and budget
    records -- and writes corrected support records to ``output``.  A
    support record with a known "<year>/<code>" gets the budget's title;
    one with an unknown code gets its code recovered from a unique title
    match (directly in the budget, or via other support records sharing
    the title).
    """

    def process(self, inputs, output):
        """Correct titles/codes of support records.

        :param inputs: ``(supports_jsons, budget_jsons)`` file paths.
        :param output: path of the corrected JSON-lines file to write.
        """
        supports_jsons, budget_jsons = inputs

        # Index the budget: exact "<year>/<code>" -> title, plus a reverse
        # "<year>/<title>" -> [codes] map for records with a bad code.
        budgets = {}
        budgets2 = {}
        with open(budget_jsons) as budget_file:
            for raw in budget_file:
                record = json.loads(raw.strip())
                budgets["%(year)s/%(code)s" % record] = record['title']
                # BUG FIX: lists have no .add(); the original raised
                # AttributeError here.  Use append().
                budgets2.setdefault("%(year)s/%(title)s" % record, []).append(record['code'])

        # All support codes seen per "<year>/<title>" -- a fallback pool
        # when the reverse budget lookup is ambiguous.
        supports = {}
        with open(supports_jsons) as support_file:
            for raw in support_file:
                record = json.loads(raw.strip())
                # BUG FIX: list.add() -> list.append().
                supports.setdefault("%(year)s/%(title)s" % record, []).append(record['code'])

        changed_num = 0
        with open(supports_jsons) as support_file, open(output, "w") as outfile:
            for raw in support_file:
                line = json.loads(raw.strip())
                year = line['year']
                data = [line]
                for datum in data:
                    key_code = "%s/%s" % (year, datum['code'])
                    title = budgets.get(key_code)
                    if title is not None:
                        # Known code: trust the budget's title.
                        if title != line.get('title', ''):
                            datum['title'] = title
                            changed_num += 1
                    else:
                        # Unknown code: try to recover it from the title.
                        key_title = "%s/%s" % (year, datum['title'])
                        possible_codes = budgets2.get(key_title, [])
                        if len(possible_codes) == 1:
                            datum['code'] = possible_codes[0]
                            changed_num += 1
                        else:
                            # Last resort: codes other supports used for
                            # this title, filtered to valid budget codes.
                            all_codes_for_title = supports.get(key_title, [])
                            all_valid_codes = [
                                x for x in all_codes_for_title
                                if "%s/%s" % (year, x) in budgets
                            ]
                            if len(all_valid_codes) == 1:
                                code = all_valid_codes[0]
                                datum['code'] = code
                                datum['title'] = budgets["%s/%s" % (year, code)]
                                changed_num += 1
                            else:
                                logging.error("Failed to find title for change with key %s" % key_code)
                outfile.write(json.dumps(line, sort_keys=True) + "\n")
        logging.info("updated %d entries" % changed_num)


if __name__ == "__main__":
    # BUG FIX: the original guard ran before this class was defined,
    # referenced a class from another module
    # (fix_changeline_budget_titles), used ``sys`` without importing it,
    # and passed arguments in a shape process() does not accept.
    import sys  # local import: only needed for CLI use

    # argv: <supports.jsonl> <budgets.jsonl> <output.jsonl>
    fix_support_budget_titles().process((sys.argv[1], sys.argv[2]), sys.argv[3])
| import json
import logging
if __name__ == "__main__":
input = sys.argv[1]
output = sys.argv[2]
processor = fix_changeline_budget_titles().process(input,output,[])
class fix_support_budget_titles(object):
def process(self,inputs,output):
out = []
budgets = {}
supports_jsons, budget_jsons = inputs
for line in file(budget_jsons):
line = json.loads(line.strip())
budgets["%(year)s/%(code)s" % line] = line['title']
outfile = file(output,"w")
changed_num = 0
for line in file(supports_jsons):
line = json.loads(line.strip())
year = line['year']
data = [line]
for datum in data:
key = "%s/%s" % (year, datum['code'])
title = budgets.get(key)
if title != None:
if title != line.get('title',''):
datum['title'] = title
changed_num += 1
else:
logging.error("Failed to find title for change with key %s" % key)
outfile.write(json.dumps(line,sort_keys=True)+"\n")
logging.info("updated %d entries" % changed_num)
| mit | Python |
28d0f14a172821657ec7f17cfaf0ffb95f5ff8f8 | Make sure the right python path is used for plugins. | kamitchell/py2app,kamitchell/py2app,kamitchell/py2app,kamitchell/py2app | py2app/bundletemplate/plist_template.py | py2app/bundletemplate/plist_template.py | import sys
import py2app
__all__ = ['infoPlistDict']
def infoPlistDict(CFBundleExecutable, plist={}):
    """Build the Info.plist dictionary for a py2app bundle template.

    ``CFBundleExecutable`` names the bundle executable; entries in
    ``plist`` override the generated defaults.  Returns a plain dict
    ready for plist serialization.

    NOTE(review): the mutable ``plist={}`` default is safe only because
    the argument is read, never mutated, below.
    """
    CFBundleExecutable = unicode(CFBundleExecutable)
    NSPrincipalClass = u''.join(CFBundleExecutable.split())
    version = sys.version[:3]
    pdict = dict(
        CFBundleDevelopmentRegion=u'English',
        CFBundleDisplayName=plist.get('CFBundleName', CFBundleExecutable),
        CFBundleExecutable=CFBundleExecutable,
        CFBundleIconFile=CFBundleExecutable,
        CFBundleIdentifier=u'org.pythonmac.unspecified.%s' % (NSPrincipalClass,),
        CFBundleInfoDictionaryVersion=u'6.0',
        CFBundleName=CFBundleExecutable,
        CFBundlePackageType=u'BNDL',
        CFBundleShortVersionString=plist.get('CFBundleVersion', u'0.0'),
        CFBundleSignature=u'????',
        CFBundleVersion=u'0.0',
        LSHasLocalizedDisplayName=False,
        NSAppleScriptEnabled=False,
        NSHumanReadableCopyright=u'Copyright not specified',
        NSMainNibFile=u'MainMenu',
        NSPrincipalClass=NSPrincipalClass,
        PyMainFileNames=[u'__boot__'],
        PyResourcePackages=[ (s % version) for s in [
            u'lib/python%s',
            u'lib/python%s/lib-dynload',
            u'lib/python%s/site-packages.zip',
        ]],
        PyRuntimeLocations=[(s % version) for s in [
            u'@executable_path/../Frameworks/Python.framework/Versions/%s/Python',
            u'~/Library/Frameworks/Python.framework/Versions/%s/Python',
            u'/Library/Frameworks/Python.framework/Versions/%s/Python',
            u'/Network/Library/Frameworks/Python.framework/Versions/%s/Python',
            u'/System/Library/Frameworks/Python.framework/Versions/%s/Python',
        ]],
    )
    # Caller-supplied keys win over the generated defaults.
    pdict.update(plist)
    pythonInfo = pdict.setdefault(u'PythonInfoDict', {})
    pythonInfo.update(dict(
        PythonLongVersion=unicode(sys.version),
        PythonShortVersion=unicode(sys.version[:3]),
        PythonExecutable=unicode(sys.executable),
    ))
    # FIX: dict.update() returns None, so binding its result to the old
    # ``py2appInfo`` local was meaningless; call it for its side effect.
    pythonInfo.setdefault(u'py2app', {}).update(dict(
        version=unicode(py2app.__version__),
        template=u'bundle',
    ))
    return pdict
| import sys
import py2app
__all__ = ['infoPlistDict']
def infoPlistDict(CFBundleExecutable, plist={}):
CFBundleExecutable = unicode(CFBundleExecutable)
NSPrincipalClass = u''.join(CFBundleExecutable.split())
version = sys.version[:3]
pdict = dict(
CFBundleDevelopmentRegion=u'English',
CFBundleDisplayName=plist.get('CFBundleName', CFBundleExecutable),
CFBundleExecutable=CFBundleExecutable,
CFBundleIconFile=CFBundleExecutable,
CFBundleIdentifier=u'org.pythonmac.unspecified.%s' % (NSPrincipalClass,),
CFBundleInfoDictionaryVersion=u'6.0',
CFBundleName=CFBundleExecutable,
CFBundlePackageType=u'BNDL',
CFBundleShortVersionString=plist.get('CFBundleVersion', u'0.0'),
CFBundleSignature=u'????',
CFBundleVersion=u'0.0',
LSHasLocalizedDisplayName=False,
NSAppleScriptEnabled=False,
NSHumanReadableCopyright=u'Copyright not specified',
NSMainNibFile=u'MainMenu',
NSPrincipalClass=NSPrincipalClass,
PyMainFileNames=[u'__boot__'],
PyResourcePackages=[],
PyRuntimeLocations=[(s % version) for s in [
u'@executable_path/../Frameworks/Python.framework/Versions/%s/Python',
u'~/Library/Frameworks/Python.framework/Versions/%s/Python',
u'/Library/Frameworks/Python.framework/Versions/%s/Python',
u'/Network/Library/Frameworks/Python.framework/Versions/%s/Python',
u'/System/Library/Frameworks/Python.framework/Versions/%s/Python',
]],
)
pdict.update(plist)
pythonInfo = pdict.setdefault(u'PythonInfoDict', {})
pythonInfo.update(dict(
PythonLongVersion=unicode(sys.version),
PythonShortVersion=unicode(sys.version[:3]),
PythonExecutable=unicode(sys.executable),
))
py2appInfo = pythonInfo.setdefault(u'py2app', {}).update(dict(
version=unicode(py2app.__version__),
template=u'bundle',
))
return pdict
| mit | Python |
3adfc96892b0f45a5d95164f81cea75d02b2df0d | add missing dependency | csulmone/skia,csulmone/skia,csulmone/skia,csulmone/skia | gyp/SimpleCocoaApp.gyp | gyp/SimpleCocoaApp.gyp | {
'targets': [
{
'target_name': 'SimpleCocoaApp',
'type': 'executable',
'mac_bundle' : 1,
'include_dirs' : [
'../experimental/SimpleCocoaApp/',
],
'sources': [
'../src/views/mac/SkEventNotifier.h',
'../src/views/mac/SkEventNotifier.mm',
'../src/views/mac/skia_mac.mm',
'../src/views/mac/SkNSView.h',
'../src/views/mac/SkNSView.mm',
'../src/views/mac/SkOptionsTableView.h',
'../src/views/mac/SkOptionsTableView.mm',
'../src/views/mac/SkOSWindow_Mac.mm',
'../src/views/mac/SkTextFieldCell.h',
'../src/views/mac/SkTextFieldCell.m',
'../experimental/SimpleCocoaApp/SimpleApp-Info.plist',
'../experimental/SimpleCocoaApp/SimpleApp.h',
'../experimental/SimpleCocoaApp/SimpleApp.mm',
],
'dependencies': [
'core.gyp:core',
'opts.gyp:opts',
'ports.gyp:ports',
'utils.gyp:utils',
'views.gyp:views',
'xml.gyp:xml',
],
'conditions' : [
# Only supports Mac currently
[ 'skia_os == "mac"', {
'sources': [
'../include/utils/mac/SkCGUtils.h',
'../src/utils/mac/SkCreateCGImageRef.cpp',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
},
'xcode_settings' : {
'INFOPLIST_FILE' : '../experimental/SimpleCocoaApp/SimpleApp-Info.plist',
},
'mac_bundle_resources' : [
'../experimental/SimpleCocoaApp/SimpleApp.xib',
],
}],
],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| {
'targets': [
{
'target_name': 'SimpleCocoaApp',
'type': 'executable',
'mac_bundle' : 1,
'include_dirs' : [
'../experimental/SimpleCocoaApp/',
],
'sources': [
'../src/views/mac/SkEventNotifier.h',
'../src/views/mac/SkEventNotifier.mm',
'../src/views/mac/skia_mac.mm',
'../src/views/mac/SkNSView.h',
'../src/views/mac/SkNSView.mm',
'../src/views/mac/SkOptionsTableView.h',
'../src/views/mac/SkOptionsTableView.mm',
'../src/views/mac/SkOSWindow_Mac.mm',
'../src/views/mac/SkTextFieldCell.h',
'../src/views/mac/SkTextFieldCell.m',
'../experimental/SimpleCocoaApp/SimpleApp-Info.plist',
'../experimental/SimpleCocoaApp/SimpleApp.h',
'../experimental/SimpleCocoaApp/SimpleApp.mm',
],
'dependencies': [
'core.gyp:core',
'opts.gyp:opts',
'utils.gyp:utils',
'views.gyp:views',
'xml.gyp:xml',
],
'conditions' : [
# Only supports Mac currently
[ 'skia_os == "mac"', {
'sources': [
'../include/utils/mac/SkCGUtils.h',
'../src/utils/mac/SkCreateCGImageRef.cpp',
],
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
],
},
'xcode_settings' : {
'INFOPLIST_FILE' : '../experimental/SimpleCocoaApp/SimpleApp-Info.plist',
},
'mac_bundle_resources' : [
'../experimental/SimpleCocoaApp/SimpleApp.xib',
],
}],
],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| bsd-3-clause | Python |
50647e0c5033fb397d0acd5440427c6002bab510 | implement merging by process | morgenst/PyAnalysisTools,morgenst/PyAnalysisTools,morgenst/PyAnalysisTools | PyAnalysisTools/ROOTUtils/ObjectHandle.py | PyAnalysisTools/ROOTUtils/ObjectHandle.py | __author__ = 'marcusmorgenstern'
__mail__ = 'marcus.matthias.morgenstern@cern.ch'
# helper methods to deal with objects
def get_objects_from_canvas(canvas):
    """Return every primitive drawn on a ROOT canvas, in drawing order.

    Each entry of the canvas' primitive list is resolved by name via
    ``GetPrimitive``.
    """
    # todo: add logger warning for empty canvas
    primitives = []
    for key in canvas.GetListOfPrimitives():
        primitives.append(canvas.GetPrimitive(key.GetName()))
    return primitives


def get_objects_from_canvas_by_type(canvas, typename):
    """Primitives on ``canvas`` that inherit from ROOT class ``typename``."""
    candidates = get_objects_from_canvas(canvas)
    return [candidate for candidate in candidates
            if candidate.InheritsFrom(typename)]
def merge_objects_by_process_type(canvas, process_config, merge_type):
    """Sum the TH1 histograms on ``canvas`` whose process is of ``merge_type``.

    Histogram names follow the "<variable>_<process>" convention; the
    process suffix is looked up in ``process_config`` to decide
    membership.  Returns the summed histogram named
    "<variable>_<merge_type>", or ``None`` when the canvas holds no
    histograms.
    """
    objects = get_objects_from_canvas_by_type(canvas, "TH1")
    if len(objects) == 0:
        return None
    first_object = objects[0]
    variable = "_".join(first_object.GetName().split("_")[0:-1])
    merged_hist = first_object.Clone("_".join([variable, merge_type]))
    # BUG FIX: the clone starts out holding first_object's bin contents,
    # so the original double-counted the first histogram (or polluted the
    # sum when its process type did not match).  Clear it before summing.
    merged_hist.Reset()
    for obj in objects:
        process_name = obj.GetName().split("_")[-1]
        # NOTE(review): raises KeyError for a process suffix missing from
        # process_config -- confirm all histogram names are configured.
        if not process_config[process_name].type == merge_type:
            continue
        merged_hist.Add(obj)
    return merged_hist
| __author__ = 'marcusmorgenstern'
__mail__ = 'marcus.matthias.morgenstern@cern.ch'
# helper methods to deal with objects
def get_objects_from_canvas(canvas):
# todo: add logger warning for empty canvas
obj = [canvas.GetPrimitive(key.GetName()) for key in canvas.GetListOfPrimitives()]
return obj
def get_objects_from_canvas_by_type(canvas, typename):
obj = get_objects_from_canvas(canvas)
obj = filter(lambda o: o.InheritsFrom(typename), obj)
return obj
| mit | Python |
a032f92300802db664bdc7c21d6d300b72dab3a2 | change so that it works with split base/root url | Ogaday/sapi-python-client,Ogaday/sapi-python-client | tests/test_base.py | tests/test_base.py | import unittest
import os
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
    """
    Test Endpoint functionality.

    NOTE(review): these are integration tests -- they issue real HTTP
    requests against the deployment named by the KBC_TEST_API_URL
    environment variable, which must be set for the suite to pass.
    """
    def setUp(self):
        # Root URL of the API under test; the token is a dummy because no
        # case below needs a successful authenticated call.
        self.root = os.getenv('KBC_TEST_API_URL')
        self.token = 'some-token'

    def test_get(self):
        # An empty path still yields the canonical /v2/storage/ base URL.
        endpoint = Endpoint(self.root, '', self.token)
        self.assertEqual(os.getenv('KBC_TEST_API_URL'), endpoint.root_url)
        self.assertEqual(os.getenv('KBC_TEST_API_URL') + '/v2/storage/',
                         endpoint.base_url)
        self.assertEqual('some-token',
                         endpoint.token)

    def test_get_404(self):
        # GET on a nonexistent endpoint path must surface the HTTP 404.
        endpoint = Endpoint(self.root, 'not-a-url', self.token)
        self.assertEqual(os.getenv('KBC_TEST_API_URL') +
                         '/v2/storage/not-a-url',
                         endpoint.base_url)
        with self.assertRaises(HTTPError):
            endpoint.get(endpoint.base_url)

    def test_get_404_2(self):
        # Same as above, but the bad suffix is appended at request time.
        endpoint = Endpoint(self.root, '', self.token)
        self.assertEqual(os.getenv('KBC_TEST_API_URL') +
                         '/v2/storage/',
                         endpoint.base_url)
        with self.assertRaises(HTTPError):
            endpoint.get('{}/not-a-url'.format(endpoint.base_url))

    def test_post_404(self):
        """
        Post to inexistent resource raises HTTPError.
        """
        endpoint = Endpoint(self.root, '', self.token)
        with self.assertRaises(HTTPError):
            endpoint.post('{}/not-a-url'.format(endpoint.base_url))

    def test_delete_404(self):
        """
        Delete inexistent resource raises HTTPError.
        """
        endpoint = Endpoint(self.root, 'delete', self.token)
        with self.assertRaises(HTTPError):
            endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
| import unittest
from requests import HTTPError
from kbcstorage.base import Endpoint
class TestEndpoint(unittest.TestCase):
"""
Test Endpoint functionality.
"""
def setUp(self):
self.root = 'https://httpbin.org'
self.token = ''
def test_get(self):
"""
Simple get works.
"""
endpoint = Endpoint(self.root, 'get', self.token)
requested_url = endpoint.get(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/get'
def test_get_404(self):
"""
Get inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'get', self.token)
with self.assertRaises(HTTPError):
endpoint.get('{}/not-a-url'.format(endpoint.base_url))
def test_post(self):
"""
Simple post works.
"""
endpoint = Endpoint(self.root, 'post', self.token)
requested_url = endpoint.post(endpoint.base_url)['url']
assert requested_url == 'https://httpbin.org/post'
def test_post_404(self):
"""
Post to inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'post', self.token)
with self.assertRaises(HTTPError):
endpoint.post('{}/not-a-url'.format(endpoint.base_url))
def test_delete(self):
"""
Simple delete works.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
resp = endpoint.delete(endpoint.base_url)
assert resp is None
def test_delete_404(self):
"""
Delete inexistent resource raises HTTPError.
"""
endpoint = Endpoint(self.root, 'delete', self.token)
with self.assertRaises(HTTPError):
endpoint.delete('{}/not-a-url'.format(endpoint.base_url))
| mit | Python |
5ba72c9877d2f6e8400fae81bd88254d5b1db657 | bump version | sanoma/dynamic-rest,AltSchool/dynamic-rest,sanoma/dynamic-rest,AltSchool/dynamic-rest | dynamic_rest/__init__.py | dynamic_rest/__init__.py | __version__ = "1.3.3"
| __version__ = "1.3.2"
| mit | Python |
688783ec544616d177540ae024b90e3c18d32680 | disable heartbeat | hiidef/hiispider,hiidef/hiispider | hiispider/amqp/amqp.py | hiispider/amqp/amqp.py | import specs
from twisted.internet.protocol import ClientCreator
from twisted.internet import reactor
from txamqp.client import TwistedDelegate
from txamqp.protocol import AMQClient
import txamqp.spec
import sys
def createClient(amqp_host, amqp_vhost, amqp_port=5672):
    """Open a Twisted AMQP 0-8 client connection.

    :param amqp_host: broker hostname or IP.
    :param amqp_vhost: AMQP virtual host to attach to.
    :param amqp_port: broker port (default 5672).
    :returns: Deferred firing with the connected AMQClient.
    """
    amqp_spec = txamqp.spec.loadString(specs.v0_8)
    amqp_delegate = TwistedDelegate()
    # heartbeat=0 disables AMQP heartbeats entirely: a silently dead
    # broker connection will no longer be torn down automatically.
    # NOTE(review): sys.maxint is Python-2-only; the effectively-infinite
    # connect timeout would need sys.maxsize on Python 3.
    client = ClientCreator(reactor,
                           AMQClient,
                           delegate=amqp_delegate,
                           vhost=amqp_vhost,
                           spec=amqp_spec,
                           heartbeat=0).connectTCP(amqp_host, amqp_port, timeout=sys.maxint)
    return client
| import specs
from twisted.internet.protocol import ClientCreator
from twisted.internet import reactor
from txamqp.client import TwistedDelegate
from txamqp.protocol import AMQClient
import txamqp.spec
import sys
def createClient(amqp_host, amqp_vhost, amqp_port=5672):
amqp_spec = txamqp.spec.loadString(specs.v0_8)
amqp_delegate = TwistedDelegate()
client = ClientCreator(reactor,
AMQClient,
delegate=amqp_delegate,
vhost=amqp_vhost,
spec=amqp_spec,
heartbeat=10).connectTCP(amqp_host, amqp_port, timeout=sys.maxint)
return client
| mit | Python |
45c4c1f627f224f36c24acebbec43a17a5c59fcb | Print out file being processed, need to do to other modules, add -v flag | jreese/nib | nib/plugins/lesscss.py | nib/plugins/lesscss.py | from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
    """Compile ``.less`` resources to CSS via the external ``lessc`` tool."""

    def resource(self, resource):
        # lessc wants a file path, so rebuild it from the resource's
        # location under the configured resource directory.
        filepath = path.join(self.options['resource_path'],
                             resource.path + resource.extension)
        # NOTE(review): print with two args inserts a space, producing a
        # double space after the colon; per the commit note this should
        # eventually be gated behind a -v/verbose flag as well.
        print("Processing: ", filepath)
        # Replace the raw LESS content with the compiled CSS bytes.
        resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
        resource.extension = '.css'
        return resource
| from __future__ import absolute_import, division, print_function, unicode_literals
from os import path
import sh
from nib import Processor, resource
@resource('.less')
class LessCSSProcessor(Processor):
def resource(self, resource):
filepath = path.join(self.options['resource_path'],
resource.path + resource.extension)
resource.content = bytearray(str(sh.lessc(filepath)), 'utf-8')
resource.extension = '.css'
return resource
| mit | Python |
cd79906dbdb9758acdacdf6a251b21a54634ce29 | Update version for v0.10.dev | einvalentin/elasticutils,einvalentin/elasticutils,einvalentin/elasticutils,mozilla/elasticutils,mozilla/elasticutils,mozilla/elasticutils | elasticutils/_version.py | elasticutils/_version.py | # follow pep-386
# Examples:
# * 0.3 - released version
# * 0.3a1 - alpha version
# * 0.3.dev - version in development
__version__ = '0.10.dev'
__releasedate__ = ''
| # follow pep-386
# Examples:
# * 0.3 - released version
# * 0.3a1 - alpha version
# * 0.3.dev - version in development
__version__ = '0.9'
__releasedate__ = '20140403'
| bsd-3-clause | Python |
ec9258dcf3a2ca648c87c595752ea57eb9f9b943 | insert doSth() at current position | kamilsmuga/nodejs-autocomplete-sublime,kamilsmuga/nodejs-autocomplete-sublime | nodejs_autocomplete.py | nodejs_autocomplete.py | import sublime
import sublime_plugin
class NodejsAutocompleteCommand(sublime_plugin.TextCommand):
    """Sublime Text command that inserts a stub completion at the cursor.

    Inserts ".doSth()" at the first cursor of the active view; does
    nothing when the view has no selection/cursor.
    """

    def run(self, edit):
        view = self.view
        view_sel = view.sel()
        if not view_sel:
            # No cursor -> nothing sensible to insert.
            return
        pos = view_sel[0].begin()
        # Placeholder text -- presumably to be replaced by real Node.js
        # completions later (TODO confirm).
        self.view.insert(edit, pos, ".doSth()")
| import sublime
import sublime_plugin
class NodejsAutocompleteCommand(sublime_plugin.TextCommand):
def run(self, edit):
self.view.insert(edit, 0, "Hello, World!")
| mit | Python |
d28134b62ee4fdfb7676a65de54bc0e422c439e9 | add link | rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs,rboman/progs | sandbox/webapi/githubapi_listproject.py | sandbox/webapi/githubapi_listproject.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# see https://developer.github.com/v3/repos/#list-repositories-for-the-authenticated-user

import json
import requests

# OAuth token for authenticated requests; unused while empty (only the
# commented-out authenticated request below would need it).
token =''

# get all the public projects of github.com
#url = 'https://api.github.com/users/rboman/repos'
url = 'https://api.github.com/orgs/ulgltas/repos'
#url = 'https://api.github.com/orgs/math0471/repos'

r = requests.get(url)
#r = requests.get(url, headers={ "Authorization": 'token {}'.format(token) }) #, params={'per_page' : 1000, 'page':1 })

# Transport-level details, for debugging.
print 'r.status_code =', r.status_code
print 'r.headers =', r.headers
print 'r.encoding =', r.encoding
#print 'r.text =', r.text
#print 'r.json() =', r.json()

# The response body is a JSON array of repository description objects.
projects = r.json()
#print type(projects) # => list
print 'r.json() ='
print(json.dumps(projects, sort_keys=True, indent=4))

# One line per repository: name and numeric id.
for p in projects:
    print "%s (id=%d)" % (p["name"], p["id"])
#curl -H "Authorization: token OAUTH-TOKEN" https://api.github.com | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import json
import requests
token =''
# get all the public projects of github.com
#url = 'https://api.github.com/users/rboman/repos'
url = 'https://api.github.com/orgs/ulgltas/repos'
#url = 'https://api.github.com/orgs/math0471/repos'
r = requests.get(url)
#r = requests.get(url, headers={ "Authorization": 'token {}'.format(token) }) #, params={'per_page' : 1000, 'page':1 })
print 'r.status_code =', r.status_code
print 'r.headers =', r.headers
print 'r.encoding =', r.encoding
#print 'r.text =', r.text
#print 'r.json() =', r.json()
projects = r.json()
#print type(projects) # => list
print 'r.json() ='
print(json.dumps(projects, sort_keys=True, indent=4))
for p in projects:
print "%s (id=%d)" % (p["name"], p["id"])
#curl -H "Authorization: token OAUTH-TOKEN" https://api.github.com | apache-2.0 | Python |
d886a8c50f3cd7cb961ae114fa82322f25303481 | Change how backfill_meta_created works | lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server,lalinsky/acoustid-server | acoustid/scripts/backfill_meta_created.py | acoustid/scripts/backfill_meta_created.py | #!/usr/bin/env python
# Copyright (C) 2019 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import logging
logger = logging.getLogger(__name__)
def run_backfill_meta_created(script, opts, args):
    """Backfill ``meta.created`` from the earliest linked track_meta row.

    Processes one batch per invocation: the 10000 ids immediately below
    the highest meta id still missing a created timestamp.  Does nothing
    when this node is not the cluster master.
    """
    if script.config.cluster.role != 'master':
        logger.info('Not running backfill_meta_created in slave mode')
        return
    # Upper bound of the batch: the highest meta id lacking a created date.
    last_meta_id_query = """
        SELECT max(id) FROM meta WHERE created IS NULL
    """
    # For every meta row in (first_meta_id, last_meta_id], take the oldest
    # track_meta.created as the meta creation date.
    update_query = """
        WITH meta_created AS (
            SELECT meta_id, min(created) AS created
            FROM track_meta
            WHERE meta_id > %(first_meta_id)s AND meta_id <= %(last_meta_id)s
            GROUP BY meta_id
        )
        UPDATE meta
        SET created = meta_created.created
        FROM meta_created
        WHERE meta.id = meta_created.meta_id AND meta.created IS NULL AND meta.id > %(first_meta_id)s AND meta.id <= %(last_meta_id)s
    """
    with script.context() as ctx:
        fingerprint_db = ctx.db.get_fingerprint_db()
        last_meta_id = fingerprint_db.execute(last_meta_id_query).scalar()
        if last_meta_id is None:
            # Nothing left to backfill.
            return
        first_meta_id = last_meta_id - 10000
        result = fingerprint_db.execute(update_query, {'first_meta_id': first_meta_id, 'last_meta_id': last_meta_id})
        logger.info('Added create date to %s meta entries', result.rowcount)
        if result.rowcount == 0:
            # No rows touched -> avoid an empty commit.
            return
        ctx.db.session.commit()
| #!/usr/bin/env python
# Copyright (C) 2019 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import logging
logger = logging.getLogger(__name__)
def run_backfill_meta_created(script, opts, args):
if script.config.cluster.role != 'master':
logger.info('Not running backfill_meta_created in slave mode')
return
query = """
WITH meta_created AS (
SELECT meta_id, min(created) AS created
FROM track_meta
WHERE meta_id IN (SELECT id FROM meta WHERE created IS NULL LIMIT 10000)
GROUP BY meta_id
)
UPDATE meta
SET created = meta_created.created
FROM meta_created
WHERE meta.id = meta_created.meta_id AND meta.created IS NULL
"""
for i in range(100):
with script.context() as ctx:
fingerprint_db = ctx.db.get_fingerprint_db()
result = fingerprint_db.execute(query)
logger.info('Added create date to %s meta entries', result.rowcount)
if result.rowcount == 0:
return
ctx.db.session.commit()
| mit | Python |
41ad2bebb3827c1772eddfaa7cf4dc23495f68f2 | document the three abstract methods | pradyunsg/zazo,pradyunsg/zazo | src/zazo/abc.py | src/zazo/abc.py | """
"""
from __future__ import absolute_import
import abc
from six import add_metaclass
if False:
from packaging.requirements import Requirement # NOQA
from packaging.version import Version # NOQA
from typing import List # NOQA
@add_metaclass(abc.ABCMeta)
class Candidate(object):
    """Abstract base for objects that can be tested against a Requirement."""

    @abc.abstractmethod
    def matches(self, requirement):
        # type: (Requirement) -> bool
        """Does this candidate match the given requirement?
        """
        raise NotImplementedError("Method to be overridden in a subclass.")
@add_metaclass(abc.ABCMeta)
class Provider(object):
    """Handles everything related to providing packages and package information
    """

    @abc.abstractmethod
    def get_candidates(self, requirement):
        # type: (Requirement) -> List[Candidate]
        """Get all the candidates that match the given requirement.

        Returns a list of Candidate objects; candidate ordering is left
        to the implementation.
        """
        raise NotImplementedError("Method to be overridden in a subclass.")

    @abc.abstractmethod
    def fetch_dependencies(self, candidate):
        # type: (Candidate) -> List[Requirement]
        """Fetch dependencies of the given candidate

        Returns the candidate's dependencies as Requirement objects.
        """
        raise NotImplementedError("Method to be overridden in a subclass.")
| """
"""
from __future__ import absolute_import
import abc
from six import add_metaclass
if False:
from packaging.requirements import Requirement # NOQA
from packaging.version import Version # NOQA
from typing import List # NOQA
@add_metaclass(abc.ABCMeta)
class Candidate(object):
@abc.abstractmethod
def matches(self, requirement):
# type: (Requirement) -> bool
raise NotImplementedError("Method to be overridden in a subclass.")
@add_metaclass(abc.ABCMeta)
class Provider(object):
"""Handles everything related to providing packages and package information
"""
@abc.abstractmethod
def get_candidates(self, requirement):
# type: (Requirement) -> List[Candidate]
raise NotImplementedError("Method to be overridden in a subclass.")
@abc.abstractmethod
def fetch_dependencies(self, candidate):
# type: (Candidate) -> List[Requirement]
raise NotImplementedError("Method to be overridden in a subclass.")
| mit | Python |
71b511f97a6efa70c00c6dfa827f73f2c0d6b8c7 | update moment2 plot | mdavidsaver/jmbgsddb,mdavidsaver/jmbgsddb,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,frib-high-level-controls/FLAME,mdavidsaver/jmbgsddb,frib-high-level-controls/FLAME,mdavidsaver/jmbgsddb | tools/h5plotsim.py | tools/h5plotsim.py | #!/usr/bin/env python
"""Plot simulation results
"""
from __future__ import print_function
import sys
from matplotlib.pylab import *
from h5py import File
# CLI argument syntax: "<file.h5>[:<group>]"; default group is the file root.
fname, _junk, gname = sys.argv[1].partition(':')
data = File(fname)
grp = data[gname or '/']
# The writer records which simulation engine produced this group.
simtype = grp.attrs['sim_type']
print('sim_type', simtype)
def show_vector(grp, vector='state'):
    'Show envelope size and angle as a function of s position'
    pos = grp['pos']
    state = grp[vector]
    # Columns 0/2/4 are plotted as sizes and 1/3/5 as angles for the
    # x (blue), y (red) and z (green) planes -- presumed layout of the
    # state vector; confirm against the simulation writer.
    subplot(2, 1, 1)
    plot(pos, state[:, 0], '-b',
         pos, state[:, 2], '-r',
         pos, state[:, 4], '-g')
    xlabel('s')
    ylabel('size')
    legend(['x', 'y', 'z'])
    subplot(2, 1, 2)
    plot(pos, state[:, 1], '-b',
         pos, state[:, 3], '-r',
         pos, state[:, 5], '-g')
    xlabel('s')
    ylabel('angle')
def show_moment2(grp):
    """Plot mean +/- RMS envelopes for the six phase-space coordinates."""
    pos = grp['pos'][:]
    avg = grp['moment0'][:]
    rms = grp['moment0_rms'][:]
    # Upper and lower envelope bands around the centroid.
    rmsp, rmsn = avg + rms, avg - rms
    # One panel per coordinate; centroid in red between blue RMS bands.
    for i, L in zip(range(6), ('x', 'px', 'y', 'py', 'z', 'pz')):
        subplot(3, 2, i + 1)
        plot(pos, rmsp[:, i], '-b',
             pos, avg[:, i], '-r',
             pos, rmsn[:, i], '-b')
        xlabel('s')
        ylabel(L)
def show_generic(grp):
    """Fallback when the file's sim_type attribute is not recognized."""
    print("Unknown sim_type")


# Map the file's sim_type attribute to its plotting routine.
showsim = {
    'Vector': show_vector,
    'MomentMatrix2': show_moment2,
}

# BUG FIX: fall back to show_generic for unknown sim_type values; the
# bare .get() returned None and crashed with "'NoneType' object is not
# callable" instead of ever reaching show_generic.
showsim.get(simtype, show_generic)(grp)

show()
| #!/usr/bin/env python
"""Plot simulation results
"""
from __future__ import print_function
import sys
from matplotlib.pylab import *
from h5py import File
fname, _junk, gname = sys.argv[1].partition(':')
data = File(fname)
grp = data[gname or '/']
simtype = grp.attrs['sim_type']
print('sim_type', simtype)
def show_vector(grp, vector='state'):
'Show envelope size and angle as a function of s position'
pos = grp['pos']
state = grp[vector]
subplot(2,1,1)
plot(pos, state[:,0], '-b',
pos, state[:,2], '-r',
pos, state[:,4], '-g')
xlabel('s')
ylabel('size')
legend(['x','y','z'])
subplot(2,1,2)
plot(pos, state[:,1], '-b',
pos, state[:,3], '-r',
pos, state[:,5], '-g')
xlabel('s')
ylabel('angle')
def show_generic(grp):
print("Unknown sim_type")
showsim = {
'Vector': show_vector,
'MomentMatrix2': lambda x:show_vector(x,'moment0'),
}
showsim.get(simtype)(grp)
show()
| mit | Python |
f547d131a3ad8d9c452a4583c080ac9d29b53709 | bump version | vmalloc/noselog | noselog/__version__.py | noselog/__version__.py | __version__ = "0.0.2"
| __version__ = "0.0.1"
| bsd-3-clause | Python |
c94671faaa2580d05494d770cca5183cde29e55e | bump version | crateio/carrier | conveyor/__init__.py | conveyor/__init__.py | __version__ = "0.1.dev23"
| __version__ = "0.1.dev22"
| bsd-2-clause | Python |
aaf6b2fefec03ec0896662605c3208d26aaab0dd | change i18n | Hybrid-Cloud/conveyor,Hybrid-Cloud/conveyor | conveyor/cmd/plan.py | conveyor/cmd/plan.py | #!/usr/bin/python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Starter script for Conveyor plan."""
import eventlet
import os
import sys
import warnings
from oslo_config import cfg
from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr
from conveyor import i18n
from conveyor import service
from conveyor import utils
from conveyor import version
# Need to register global_opts
from conveyor.common import config # noqa
from conveyor.conveyorheat.common import config as heat_config
from conveyor.conveyorheat.engine import template
from conveyor.i18n import _LC
if os.name == 'nt':
# eventlet monkey patching the os module causes subprocess.Popen to fail
# on Windows when using pipes due to missing non-blocking IO support.
eventlet.monkey_patch(os=False)
else:
eventlet.monkey_patch()
warnings.simplefilter('once', DeprecationWarning)
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'conveyor', '__init__.py')):
sys.path.insert(0, possible_topdir)
i18n.enable_lazy()
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def init_heat():
heat_config.startup_sanity_check()
mgr = None
try:
mgr = template._get_template_extension_manager()
except template.TemplatePluginNotRegistered as ex:
LOG.critical(_LC("%s"), ex)
if not mgr or not mgr.names():
sys.exit("ERROR: No template format plugins registered")
gmr.TextGuruMeditation.setup_autorun(version)
def main():
logging.register_options(CONF)
CONF(sys.argv[1:], project='conveyor',
version=version.version_string())
logging.setup(CONF, "conveyor")
init_heat()
utils.monkey_patch()
server = service.Service.create(binary='conveyor-plan')
service.serve(server)
service.wait()
| #!/usr/bin/python
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Starter script for Conveyor plan."""
import eventlet
import os
import sys
import warnings
from oslo_config import cfg
from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr
from conveyor import i18n
from conveyor import service
from conveyor import utils
from conveyor import version
# Need to register global_opts
from conveyor.common import config # noqa
from conveyor.conveyorheat.common import config as heat_config
from conveyor.conveyorheat.common.i18n import _LC
from conveyor.conveyorheat.engine import template
if os.name == 'nt':
# eventlet monkey patching the os module causes subprocess.Popen to fail
# on Windows when using pipes due to missing non-blocking IO support.
eventlet.monkey_patch(os=False)
else:
eventlet.monkey_patch()
warnings.simplefilter('once', DeprecationWarning)
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'conveyor', '__init__.py')):
sys.path.insert(0, possible_topdir)
i18n.enable_lazy()
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def init_heat():
heat_config.startup_sanity_check()
mgr = None
try:
mgr = template._get_template_extension_manager()
except template.TemplatePluginNotRegistered as ex:
LOG.critical(_LC("%s"), ex)
if not mgr or not mgr.names():
sys.exit("ERROR: No template format plugins registered")
gmr.TextGuruMeditation.setup_autorun(version)
def main():
logging.register_options(CONF)
CONF(sys.argv[1:], project='conveyor',
version=version.version_string())
logging.setup(CONF, "conveyor")
init_heat()
utils.monkey_patch()
server = service.Service.create(binary='conveyor-plan')
service.serve(server)
service.wait()
| apache-2.0 | Python |
08f40c4e0c9dca21f6f7ff108868970b43725716 | change mimetype of get-video by ID endpoint as well | techytux/oertube,techytux/oertube,techytux/oertube | oertube/controllers.py | oertube/controllers.py | import os
from flask import Flask, request, Response
from flask import render_template, url_for, redirect, send_from_directory, jsonify
from flask import send_file, make_response, abort
from oertube import app
from bson import json_util
# routing for API endpoints, generated from the models designated as API_MODELS
from oertube.core import api_manager
from oertube.models import *
for model_name in app.config['API_MODELS']:
model_class = app.config['API_MODELS'][model_name]
api_manager.create_api(model_class, methods=['GET', 'POST'])
session = api_manager.session
# routing for basic pages (pass routing onto the Angular app)
@app.route('/')
@app.route('/about')
@app.route('/blog')
def basic_pages(**kwargs):
return make_response(open('oertube/templates/index.html').read())
# routing for CRUD-style endpoints
# passes routing onto the angular frontend if the requested resource exists
from sqlalchemy.sql import exists
crud_url_models = app.config['CRUD_URL_MODELS']
@app.route('/<model_name>/')
@app.route('/<model_name>/<item_id>')
def rest_pages(model_name, item_id=None):
if model_name in crud_url_models:
model_class = crud_url_models[model_name]
if item_id is None or session.query(exists().where(
model_class.id == item_id)).scalar():
return make_response(open(
'oertube/templates/index.html').read())
abort(404)
@app.route('/lists')
def lists(item_id=None):
lists_json = get_lists_json()
return jsonify(lists_json) # lists_json
@app.route('/list/<list_name>')
def list(list_name, item_id=None):
list_json = get_list_json(list_name)
return jsonify(list_json) # lists_json
@app.route('/rate/<video_id>/<rating>')
def rate(video_id, rating, item_id=None):
list_json = save_rating(video_id, rating)
return jsonify(list_json) # lists_json
@app.route('/ratings')
def ratings():
ratings_json = get_ratings()
return Response(json_util.dumps(ratings_json), mimetype="application/json") # lists_json
# might not be used anymore
@app.route('/get-video/<video_id>')
def get_video(video_id):
video_dict = get_video_by_id(video_id)
result_json = {"result": "success", 'data': video_dict['online']}
return Response(json_util.dumps(result_json), mimetype="application/json") # lists_json
# special file handlers and error handlers
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'img/favicon.ico')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| import os
from flask import Flask, request, Response
from flask import render_template, url_for, redirect, send_from_directory, jsonify
from flask import send_file, make_response, abort
from oertube import app
from bson import json_util
# routing for API endpoints, generated from the models designated as API_MODELS
from oertube.core import api_manager
from oertube.models import *
for model_name in app.config['API_MODELS']:
model_class = app.config['API_MODELS'][model_name]
api_manager.create_api(model_class, methods=['GET', 'POST'])
session = api_manager.session
# routing for basic pages (pass routing onto the Angular app)
@app.route('/')
@app.route('/about')
@app.route('/blog')
def basic_pages(**kwargs):
return make_response(open('oertube/templates/index.html').read())
# routing for CRUD-style endpoints
# passes routing onto the angular frontend if the requested resource exists
from sqlalchemy.sql import exists
crud_url_models = app.config['CRUD_URL_MODELS']
@app.route('/<model_name>/')
@app.route('/<model_name>/<item_id>')
def rest_pages(model_name, item_id=None):
if model_name in crud_url_models:
model_class = crud_url_models[model_name]
if item_id is None or session.query(exists().where(
model_class.id == item_id)).scalar():
return make_response(open(
'oertube/templates/index.html').read())
abort(404)
@app.route('/lists')
def lists(item_id=None):
lists_json = get_lists_json()
return jsonify(lists_json) # lists_json
@app.route('/list/<list_name>')
def list(list_name, item_id=None):
list_json = get_list_json(list_name)
return jsonify(list_json) # lists_json
@app.route('/rate/<video_id>/<rating>')
def rate(video_id, rating, item_id=None):
list_json = save_rating(video_id, rating)
return jsonify(list_json) # lists_json
@app.route('/ratings')
def ratings():
ratings_json = get_ratings()
return Response(json_util.dumps(ratings_json), mimetype="application/json") # lists_json
# might not be used anymore
@app.route('/get-video/<video_id>')
def get_video(video_id):
video_dict = get_video_by_id(video_id)
result_json = {"result": "success", 'data': video_dict['online']}
return json_util.dumps(result_json)
# special file handlers and error handlers
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'img/favicon.ico')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| mit | Python |
3266ecee8f9a6601d8678a91e8923c6d7137adb3 | Make ilock lock name user specific | OGGM/oggm,OGGM/oggm,bearecinos/oggm,bearecinos/oggm,juliaeis/oggm,TimoRoth/oggm,anoukvlug/oggm,TimoRoth/oggm,juliaeis/oggm,anoukvlug/oggm | oggm/tests/conftest.py | oggm/tests/conftest.py | import pytest
import logging
import getpass
from oggm import cfg, utils
logger = logging.getLogger(__name__)
def pytest_configure(config):
if config.pluginmanager.hasplugin('xdist'):
try:
from ilock import ILock
utils.lock = ILock("oggm_xdist_download_lock_" + getpass.getuser())
logger.info("ilock locking setup successfully for xdist tests")
except:
logger.warning("could not setup ilock locking for distributed tests")
| import pytest
import logging
import multiprocessing as mp
from oggm import cfg, utils
import pickle
logger = logging.getLogger(__name__)
def pytest_configure(config):
if config.pluginmanager.hasplugin('xdist'):
try:
from ilock import ILock
utils.lock = ILock("oggm_xdist_download_lock")
logger.info("ilock locking setup successfully for xdist tests")
except:
logger.warning("could not setup ilock locking for distributed tests")
| bsd-3-clause | Python |
05f0d841bf1bd6b440d21f66df4bd18cb797ec24 | increment version | mikedh/trimesh,mikedh/trimesh,mikedh/trimesh,mikedh/trimesh | trimesh/version.py | trimesh/version.py | __version__ = '3.10.3'
| __version__ = '3.10.2'
| mit | Python |
cc298348cceece3b5ff64bdfb030d3885cc2e53f | fix filter | znick/anytask,znick/anytask,znick/anytask,znick/anytask | anytask/courses/templatetags/form_func.py | anytask/courses/templatetags/form_func.py | from BeautifulSoup import BeautifulSoup, Comment
from django import template
from django.http import QueryDict
register = template.Library()
@register.filter(name='field_in_data')
def form_selected_value(data, field):
if isinstance(data, QueryDict):
return data.getlist(field.name)
return []
@register.filter(name='selected')
def form_selected_value(data, val):
if str(val) in data:
return 'selected'
return ''
| from BeautifulSoup import BeautifulSoup, Comment
from django import template
from django.utils.translation import ugettext as _
from issues.models import Issue
from issues.model_issue_field import IssueStatusField
register = template.Library()
@register.filter(name='field_in_data')
def form_selected_value(data, field):
print data
return data.getlist(field.name)
@register.filter(name='selected')
def form_selected_value(data, val):
if str(val) in data:
return 'selected'
return ''
| mit | Python |
6ed303ae43b2313f6425d34f08697121a4f7b0cd | remove unused var accidents_file | hasadna/anyway,hasadna/anyway,hasadna/anyway,hasadna/anyway | anyway/parsers/preprocessing_cbs_files.py | anyway/parsers/preprocessing_cbs_files.py | from six import iteritems
import os
CBS_FILES_HEBREW = {'sadot': 'Fields',
'zmatim_ironiim': 'IntersectUrban',
'zmatim_lo_ironiim': 'IntersectNonUrban',
'rehev': 'VehData',
'milon':'Dictionary',
'meoravim': 'InvData',
'klali': 'AccData',
'rechovot':'DicStreets'}
def update_cbs_files_names(directory):
files = sorted([path for path in os.listdir(directory)])
for file in files:
file_path = os.path.join(directory,file)
for hebrew_file_name, english_file_name in iteritems(CBS_FILES_HEBREW):
if hebrew_file_name in file.lower() and english_file_name.lower() not in file.lower():
os.rename(file_path,file_path.replace('.csv', '_' + english_file_name + '.csv'))
def get_accidents_file_data(directory):
for file_path in os.listdir(directory):
if file_path.endswith("{0}{1}".format(CBS_FILES_HEBREW['klali'], '.csv')):
return os.path.join(directory, file_path)
| from six import iteritems
import os
CBS_FILES_HEBREW = {'sadot': 'Fields',
'zmatim_ironiim': 'IntersectUrban',
'zmatim_lo_ironiim': 'IntersectNonUrban',
'rehev': 'VehData',
'milon':'Dictionary',
'meoravim': 'InvData',
'klali': 'AccData',
'rechovot':'DicStreets'}
def update_cbs_files_names(directory):
files = sorted([path for path in os.listdir(directory)])
accidents_file = None
for file in files:
file_path = os.path.join(directory,file)
for hebrew_file_name, english_file_name in iteritems(CBS_FILES_HEBREW):
if hebrew_file_name in file.lower() and english_file_name.lower() not in file.lower():
os.rename(file_path,file_path.replace('.csv', '_' + english_file_name + '.csv'))
def get_accidents_file_data(directory):
for file_path in os.listdir(directory):
if file_path.endswith("{0}{1}".format(CBS_FILES_HEBREW['klali'], '.csv')):
return os.path.join(directory, file_path)
# def main(path):
# update_cbs_files_names(path)
| mit | Python |
c6095cd72e5a0c60a837214e5408feb42baf939d | Update config.py | kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io | core/nginx/config.py | core/nginx/config.py | #!/usr/bin/python
import jinja2
import os
convert = lambda src, dst, args: open(dst, "w").write(jinja2.Template(open(src).read()).render(**args))
args = os.environ.copy()
# Get the first DNS server
with open("/etc/resolv.conf") as handle:
content = handle.read().split()
args["RESOLVER"] = content[content.index("nameserver") + 1]
if "HOST_WEBMAIL" not in args:
args["HOST_WEBMAIL"] = "webmail"
if "HOST_ADMIN" not in args:
args["HOST_ADMIN"] = "admin"
if "HOST_WEBDAV" not in args:
args["HOST_WEBDAV"] = "webdav:5232"
if "HOST_ANTISPAM" not in args:
args["HOST_ANTISPAM"] = "antispam:11334"
# TLS configuration
cert_name = os.getenv("TLS_CERT_FILENAME", default="cert.pem")
keypair_name = os.getenv("TLS_KEYPAIR_FILENAME", default="key.pem")
args["TLS"] = {
"cert": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name),
"letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem",
"/certs/letsencrypt/live/mailu/privkey.pem"),
"mail": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name),
"mail-letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem",
"/certs/letsencrypt/live/mailu/privkey.pem"),
"notls": None
}[args["TLS_FLAVOR"]]
if args["TLS"] and not all(os.path.exists(file_path) for file_path in args["TLS"]):
print("Missing cert or key file, disabling TLS")
args["TLS_ERROR"] = "yes"
# Build final configuration paths
convert("/conf/tls.conf", "/etc/nginx/tls.conf", args)
convert("/conf/proxy.conf", "/etc/nginx/proxy.conf", args)
convert("/conf/nginx.conf", "/etc/nginx/nginx.conf", args)
if os.path.exists("/var/run/nginx.pid"):
os.system("nginx -s reload")
| #!/usr/bin/python
import jinja2
import os
convert = lambda src, dst, args: open(dst, "w").write(jinja2.Template(open(src).read()).render(**args))
args = os.environ.copy()
# Get the first DNS server
with open("/etc/resolv.conf") as handle:
content = handle.read().split()
args["RESOLVER"] = content[content.index("nameserver") + 1]
if "HOST_WEBMAIL" not in args:
args["HOST_WEBMAIL"] = "webmail"
if "HOST_ADMIN" not in args:
args["HOST_ADMIN"] = "admin"
if "HOST_WEBDAV" not in args:
args["HOST_WEBDAV"] = "webdav:5232"
if "HOST_ANTISPAM" not in args:
args["HOST_ANTISPAM"] = "antispam:11334"
# TLS configuration
cert_name = os.getenv("TLS_CERT_FILENAME", default="cert.pem")
keypair_name = os.getenv("TLS_KEYPAIR_FILENAME", default="key.pem")
args["TLS"] = {
"cert": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name),
"letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem",
"/certs/letsencrypt/live/mailu/privkey.pem"),
"mail": ("/certs/%s" % cert_name, "/certs/%s" % keypair_name),
"mail-letsencrypt": ("/certs/letsencrypt/live/mailu/fullchain.pem",
"/certs/letsencrypt/live/mailu/privkey.pem"),
"notls": None
}[args["TLS_FLAVOR"]]
if args["TLS"] and not all(os.path.exists(file_path) for file_path in args["TLS"]):
print("Missing cert or key file, disabling TLS")
args["TLS_ERROR"] = "yes"
# Build final configuration paths
convert("/conf/tls.conf", "/etc/nginx/tls.conf", args)
convert("/conf/proxy.conf", "/etc/nginx/proxy.conf", args)
convert("/conf/nginx.conf", "/etc/nginx/nginx.conf", args)
if os.path.exists("/var/log/nginx.pid"):
os.system("nginx -s reload")
| mit | Python |
1ed08e4bd93c713d8a31a387265057f1d4d7ae8f | Increment the version | DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd | txircd/__init__.py | txircd/__init__.py | __version__ = '0.2.5' | __version__ = '0.2.4' | bsd-3-clause | Python |
712ea23eec78df7650e0af47e3361313aa4c3901 | clean set context data, articles utils | williamroot/opps,williamroot/opps,jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,opps/opps | opps/articles/utils.py | opps/articles/utils.py | # -*- coding: utf-8 -*-
from django.conf import settings
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
if len(self.article) >= 1:
article = self.article[0]
context['articleboxes'] = ArticleBox.objects.filter(
channel=article.channel)
if len(self.article) == 1:
context['articleboxes'] = context['articleboxes'].filter(
article=article)
context['opps_channel'] = article.channel
context['opps_channel_conf'] = settings.OPPS_CHANNEL_CONF.get(
article.channel.slug, '')
return context
| # -*- coding: utf-8 -*-
from django.conf import settings
from opps.articles.models import ArticleBox
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
if len(self.article) >= 1:
article = self.article[0]
context['articleboxes'] = ArticleBox.objects.filter(
channel=article.channel)
if len(self.article) == 1:
context['articleboxes'] = context['articleboxes'].filter(
article=article)
context['opps_channel'] = article.channel
context['opps_channel_conf'] = settings.OPPS_CHANNEL_CONF.get(
article.channel.slug, '')
return context
| mit | Python |
06fb3fa4b4850dced647eaaf699e05f006a31817 | Change rating strings | DLance96/nativx-survey,DLance96/nativx-survey,DLance96/nativx-survey | survey/forms.py | survey/forms.py | from django import forms
class ActivityRatingForm(forms.Form):
RATING_CHOICES = (
('0', 'No shot'),
('1', 'Probably would not go'),
('2', 'Probably would go'),
('3', 'Heck yea!')
)
rating = forms.ChoiceField(choices=RATING_CHOICES, required=False, label="", label_suffix='')
def __init__(self, *args, **kwargs):
label = kwargs.pop('label', "")
super(ActivityRatingForm, self).__init__(*args, **kwargs)
if label:
self.fields['rating'].label = label
def __str__(self):
return self.fields['rating'].label
class TextInputForm(forms.Form):
textInput = forms.CharField(required=False, label="",
widget=forms.Textarea(attrs=
{'rows': 8, 'style': 'width: 100%', 'class': 'form-control',
'placeholder': 'Custom Text Input'}))
| from django import forms
class ActivityRatingForm(forms.Form):
RATING_CHOICES = (
('0', 'Would not go'),
('1', 'Probably would not go'),
('2', 'Probably would go'),
('3', 'Would go')
)
rating = forms.ChoiceField(choices=RATING_CHOICES, required=False, label="", label_suffix='')
def __init__(self, *args, **kwargs):
label = kwargs.pop('label', "")
super(ActivityRatingForm, self).__init__(*args, **kwargs)
if label:
self.fields['rating'].label = label
def __str__(self):
return self.fields['rating'].label
class TextInputForm(forms.Form):
textInput = forms.CharField(required=False, label="",
widget=forms.Textarea(attrs=
{'rows': 8, 'style': 'width: 100%', 'class': 'form-control',
'placeholder': 'Custom Text Input'}))
| mit | Python |
6135699ccf19d8c5f801b1a73794f2414f7200d7 | test to check recording a survey actually records something | mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey,mysociety/manchester-survey | survey/tests.py | survey/tests.py | from django.core.urlresolvers import reverse
from django.test import TestCase
from survey.models import User, Item
class StartPageTest(TestCase):
def test_front_page_displays(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
def test_cannot_complete_survey_twice(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
self.client.post(reverse('survey:record'))
self.assertIsNotNone(self.client.cookies['usercode'])
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "already completed")
def test_completing_survey_creates_user(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
self.client.post(reverse('survey:record'))
self.assertIsNotNone(self.client.cookies['usercode'].value)
usercode = self.client.cookies['usercode']
u = User.objects.get(code=usercode.value)
self.assertIsNotNone(u.id)
def test_survey_is_recorded(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
self.client.post(reverse('survey:record'), {'1': 'a'})
self.assertIsNotNone(self.client.cookies['usercode'])
usercode = self.client.cookies['usercode']
u = User.objects.get(code=usercode.value)
responses = Item.objects.filter(user_id=u.id)
self.assertTrue(len(responses) == 1)
response = Item.objects.filter(user_id=u.id).filter(key='1').filter(value='a')
self.assertTrue(len(responses) == 1)
| from django.core.urlresolvers import reverse
from django.test import TestCase
from survey.models import User
class StartPageTest(TestCase):
def test_front_page_displays(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
def test_cannot_complete_survey_twice(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
self.client.post(reverse('survey:record'))
self.assertIsNotNone(self.client.cookies['usercode'])
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "already completed")
def test_completing_survey_creates_user(self):
response = self.client.get(reverse('survey:survey'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "University of Manchester")
self.client.post(reverse('survey:record'))
self.assertIsNotNone(self.client.cookies['usercode'].value)
usercode = self.client.cookies['usercode']
u = User.objects.get(code=usercode.value)
self.assertIsNotNone(u.id)
| agpl-3.0 | Python |
c73f1674dd57217569009716260af3d13a787052 | Bump version to 0.1.11 | botify-labs/python-simple-workflow,botify-labs/python-simple-workflow | swf/__init__.py | swf/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
version = (0, 1, 11)
__title__ = "python-simple-workflow"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
version = (0, 1, 10)
__title__ = "python-simple-workflow"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
| mit | Python |
a79f422991532cf559d394b608237953aa376bf6 | add admin for project | praekelt/mc2,praekelt/mc2,praekelt/mc2,universalcore/unicore-mc,universalcore/unicore-mc,praekelt/mc2,universalcore/unicore-mc,universalcore/unicore-mc,praekelt/mc2 | unicoremc/admin.py | unicoremc/admin.py | # ensure celery autodiscovery runs
from djcelery import admin as celery_admin
from djcelery.models import (
TaskState, WorkerState, PeriodicTask, IntervalSchedule, CrontabSchedule)
from django.contrib import admin
from django.contrib.sites.models import Site
from unicoremc.models import Project, Localisation
class ProjectAdmin(admin.ModelAdmin):
list_display = (
'app_type', 'country', 'state', 'base_repo_url', 'repo_url')
readonly_fields = (
'app_type', 'base_repo_url', 'country', 'state', 'repo_url', 'owner',
'available_languages')
admin.site.register(Localisation, admin.ModelAdmin)
admin.site.register(Project, ProjectAdmin)
# remove celery from admin
admin.site.unregister(Site)
admin.site.unregister(TaskState)
admin.site.unregister(WorkerState)
admin.site.unregister(IntervalSchedule)
admin.site.unregister(CrontabSchedule)
admin.site.unregister(PeriodicTask)
| # ensure celery autodiscovery runs
from djcelery import admin as celery_admin
from djcelery.models import (
TaskState, WorkerState, PeriodicTask, IntervalSchedule, CrontabSchedule)
from django.contrib import admin
from django.contrib.sites.models import Site
# remove celery from admin
admin.site.unregister(Site)
admin.site.unregister(TaskState)
admin.site.unregister(WorkerState)
admin.site.unregister(IntervalSchedule)
admin.site.unregister(CrontabSchedule)
admin.site.unregister(PeriodicTask)
| bsd-2-clause | Python |
fd50ec469eebafe28938a5d9484a760f67ed154d | set version to 1.6dev | Chris7/cutadapt,marcelm/cutadapt | cutadapt/__init__.py | cutadapt/__init__.py | from __future__ import print_function
import sys
__version__ = '1.6dev'
def check_importability():
try:
import cutadapt._align
except ImportError as e:
if 'undefined symbol' in str(e):
print("""
ERROR: A required extension module could not be imported because it is
incompatible with your system. A quick fix is to recompile the extension
modules with the following command:
{} setup.py build_ext -i
See the README file for alternative ways of installing the program.
The original error message follows.
""".format(sys.executable))
raise
| from __future__ import print_function
import sys
__version__ = '1.5'
def check_importability():
try:
import cutadapt._align
except ImportError as e:
if 'undefined symbol' in str(e):
print("""
ERROR: A required extension module could not be imported because it is
incompatible with your system. A quick fix is to recompile the extension
modules with the following command:
{} setup.py build_ext -i
See the README file for alternative ways of installing the program.
The original error message follows.
""".format(sys.executable))
raise
| mit | Python |
102f7979338b948744b6af06689f928deb72f27c | Fix lemma ordering in test | spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy | spacy/tests/regression/test_issue781.py | spacy/tests/regression/test_issue781.py | # coding: utf-8
from __future__ import unicode_literals
import pytest
# Note: "chromosomes" worked previous the bug fix
@pytest.mark.models('en')
@pytest.mark.parametrize('word,lemmas', [("chromosomes", ["chromosome"]), ("endosomes", ["endosome"]), ("colocalizes", ["colocaliz", "colocalize"])])
def test_issue781(EN, word, lemmas):
lemmatizer = EN.Defaults.create_lemmatizer()
assert lemmatizer(word, 'noun', morphology={'number': 'plur'}) == lemmas
| # coding: utf-8
from __future__ import unicode_literals
import pytest
# Note: "chromosomes" worked previous the bug fix
@pytest.mark.models('en')
@pytest.mark.parametrize('word,lemmas', [("chromosomes", ["chromosome"]), ("endosomes", ["endosome"]), ("colocalizes", ["colocalize", "colocaliz"])])
def test_issue781(EN, word, lemmas):
lemmatizer = EN.Defaults.create_lemmatizer()
assert lemmatizer(word, 'noun', morphology={'number': 'plur'}) == lemmas
| mit | Python |
c74251b3dc92e4188c8a27e5ca4bb13fe7c80df2 | add annotator urls | DeepController/tellina,DeepController/tellina,DeepController/tellina | tellina/urls.py | tellina/urls.py | """django_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from website import annotator, cmd2html, views
urlpatterns = [
url(r'^$', views.index),
url(r'^translate', views.translate),
url(r'^info$', views.info),
url(r'^remember_ip_address$', views.remember_ip_address),
url(r'^vote', views.vote),
url(r'^login', annotator.login),
url(r'^uri_panel', annotator.url_panel),
url(r'^utility_panel', annotator.utility_panel),
url(r'^explain_cmd$', cmd2html.explain_cmd),
url(r'^admin', admin.site.urls)
] | """django_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from website import views
from website import cmd2html
urlpatterns = [
url(r'^$', views.index),
url(r'^translate', views.translate),
url(r'^info$', views.info),
url(r'^explain_cmd$', cmd2html.explain_cmd),
url(r'^remember_ip_address$', views.remember_ip_address),
url(r'^recently_asked$', views.recently_asked),
url(r'^vote', views.vote),
url(r'^admin', admin.site.urls)
]
| mit | Python |
3d836dded0f21afb2d81c0a448149f73f4217fb3 | Bump to rc2. | gregoiresage/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool,pebble/pebble-tool,gregoiresage/pebble-tool,pebble/pebble-tool | pebble_tool/version.py | pebble_tool/version.py | version_base = (4, 0, 1)
version_suffix = 'rc2'
if version_suffix is None:
__version_info__ = version_base
else:
__version_info__ = version_base + (version_suffix,)
__version__ = '{}.{}'.format(*version_base)
if version_base[2] != 0:
__version__ += '.{}'.format(version_base[2])
if version_suffix is not None:
__version__ += '-{}'.format(version_suffix)
| version_base = (4, 0, 1)
version_suffix = 'rc1'
if version_suffix is None:
__version_info__ = version_base
else:
__version_info__ = version_base + (version_suffix,)
__version__ = '{}.{}'.format(*version_base)
if version_base[2] != 0:
__version__ += '.{}'.format(version_base[2])
if version_suffix is not None:
__version__ += '-{}'.format(version_suffix)
| mit | Python |
a2b3d233acb432142e0683217d46d5281649a696 | Update __version__.py | cvium/irc_bot | irc_bot/__version__.py | irc_bot/__version__.py | __version__ = '1.0.36'
| __version__ = '1.0.35'
| mit | Python |
b97a9290dfddd6777187f4acb6c3222b1e346fd8 | Fix defect in argument parser invokation | JIC-CSB/jicirodsmanager,JIC-CSB/jicirodsmanager | jicirodsmanager/cli.py | jicirodsmanager/cli.py | """Manger iRODS storage."""
import argparse
import logging
from jicirodsmanager.irods import IrodsStorageManager
root = logging.getLogger()
root.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
root.addHandler(handler)
def adduser(args):
root.info("Calling adduser")
storage_manager = IrodsStorageManager()
storage_manager.add_user(args.user_name, args.group_name)
def addgroup(args):
root.info("Calling addgroup")
storage_manager = IrodsStorageManager()
storage_manager.add_group(args.group_name, args.quota)
def addproject(args):
root.info("Calling addproject")
storage_manager = IrodsStorageManager()
storage_manager.add_project(args.project_name, args.quota)
def main():
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers()
user = subparsers.add_parser("adduser")
user.add_argument("user_name")
user.add_argument("group_name")
user.set_defaults(func=adduser)
group = subparsers.add_parser("addgroup")
group.add_argument("group_name")
group.add_argument("-q", "--quota", type=int, default=None)
group.set_defaults(func=addgroup)
group = subparsers.add_parser("addproject")
group.add_argument("project_name")
group.add_argument("-q", "--quota", type=int, default=None)
group.set_defaults(func=addproject)
args = parser.parse_args()
# Run it!
args.func(args)
if __name__ == "__main__":
main()
| """Manger iRODS storage."""
import argparse
import logging
from jicirodsmanager.irods import IrodsStorageManager
root = logging.getLogger()
root.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
handler.setLevel(logging.DEBUG)
root.addHandler(handler)
def adduser(args):
root.info("Calling adduser")
storage_manager = IrodsStorageManager()
storage_manager.add_user(args.user_name, args.group_name)
def addgroup(args):
root.info("Calling addgroup")
storage_manager = IrodsStorageManager()
storage_manager.add_group(args.group_name, args.quota)
def addproject(args):
root.info("Calling addproject")
storage_manager = IrodsStorageManager()
storage_manager.add_project(args.project_name, args.quota)
def main():
parser = argparse.ArgumentParser(description=__doc__)
subparsers = parser.add_subparsers()
user = subparsers.add_parser("adduser")
user.add_argument("user_name")
user.add_argument("group_name")
user.set_defaults(func=adduser)
group = subparsers.add_parser("addgroup")
group.add_argument("group_name")
group.add_argument("-q", "--quota", type=int, default=None)
group.set_defaults(func=addgroup)
group = subparsers.add_parser("addproject")
group.add_argument("project_name")
group.add_argument("-q", "--quota", type=int, default=None)
group.set_defaults(func=addgroup)
args = parser.parse_args()
# Run it!
args.func(args)
if __name__ == "__main__":
main()
| mit | Python |
df78c3093e377a6f4f9e7a92653d2273bd8a356d | Add to test per tutorial | jonathanstallings/learning-journal,jonathanstallings/learning-journal | test_journal.py | test_journal.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError
TEST_DATABASE_URL = os.environ.get(
'DATABASE_URL',
'postgresql://jonathan:@localhost:5432/test-learning-journal'
)
os.environ['DATABASE_URL'] = TEST_DATABASE_URL
os.environ['TESTING'] = "True"
import journal
@pytest.fixture(scope='session')
def connection(request):
engine = create_engine(TEST_DATABASE_URL)
journal.Base.metadata.create_all(engine)
connection = engine.connect()
journal.DBSession.registry.clear()
journal.DBSession.configure(bind=connection)
journal.Base.metadata.bind = engine
request.addfinalizer(journal.Base.metadata.drop_all)
return connection
@pytest.fixture()
def db_session(request, connection):
from transaction import abort
trans = connection.begin()
request.addfinalizer(trans.rollback)
request.addfinalizer(abort)
from journal import DBSession
return DBSession
def test_write_entry(db_session):
kwargs = {'title': "Test Title", 'text': "Test entry text"}
kwargs['session'] = db_session
# first, assert that there are no entries in the database:
assert db_session.query(journal.Entry).count() == 0
# now, create an entry using the 'write' class method
entry = journal.Entry.write(**kwargs)
# the entry we get back ought to be an instance of Entry
assert isinstance(entry, journal.Entry)
# id and created are generated automatically, but only on writing to
# the database
auto_fields = ['id', 'created']
for field in auto_fields:
assert getattr(entry, field, None) is None
# flush the session to "write" the data to the database
db_session.flush()
# now, we should have one entry:
assert db_session.query(journal.Entry).count() == 1
for field in kwargs:
if field != 'session':
assert getattr(entry, field, '') == kwargs[field]
# id and created should be set automatically upon writing to db:
for auto in ['id', 'created']:
assert getattr(entry, auto, None) is not None
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import pytest
from sqlalchemy import create_engine
from sqlalchemy.exc import IntegrityError
TEST_DATABASE_URL = os.environ.get(
'DATABASE_URL',
'postgresql://jonathan:@localhost:5432/test-learning-journal'
)
os.environ['DATABASE_URL'] = TEST_DATABASE_URL
os.environ['TESTING'] = "True"
import journal
@pytest.fixture(scope='session')
def connection(request):
engine = create_engine(TEST_DATABASE_URL)
journal.Base.metadata.create_all(engine)
connection = engine.connect()
journal.DBSession.registry.clear()
journal.DBSession.configure(bind=connection)
journal.Base.metadata.bind = engine
request.addfinalizer(journal.Base.metadata.drop_all)
return connection
@pytest.fixture()
def db_session(request, connection):
from transaction import abort
trans = connection.begin()
request.addfinalizer(trans.rollback)
request.addfinalizer(abort)
from journal import DBSession
return DBSession
def test_write_entry(db_session):
kwargs = {'title': "Test Title", 'text': "Test entry text"}
kwargs['session'] = db_session
# first, assert that there are no entries in the database:
assert db_session.query(journal.Entry).count() == 0
# now, create an entry using the 'write' class method
entry = journal.Entry.write(**kwargs)
# the entry we get back ought to be an instance of Entry
assert isinstance(entry, journal.Entry)
# id and created are generated automatically, but only on writing to
# the database
auto_fields = ['id', 'created']
for field in auto_fields:
assert getattr(entry, field, None) is None
| mit | Python |
52d4b82a2dff35b22cea389274aaf3731570fe80 | add numpy import | AustereCuriosity/astropy,saimn/astropy,funbaker/astropy,pllim/astropy,pllim/astropy,aleksandr-bakanov/astropy,AustereCuriosity/astropy,pllim/astropy,kelle/astropy,dhomeier/astropy,lpsinger/astropy,astropy/astropy,larrybradley/astropy,mhvk/astropy,DougBurke/astropy,AustereCuriosity/astropy,stargaser/astropy,tbabej/astropy,lpsinger/astropy,pllim/astropy,MSeifert04/astropy,MSeifert04/astropy,StuartLittlefair/astropy,saimn/astropy,dhomeier/astropy,kelle/astropy,saimn/astropy,joergdietrich/astropy,aleksandr-bakanov/astropy,astropy/astropy,tbabej/astropy,tbabej/astropy,DougBurke/astropy,tbabej/astropy,kelle/astropy,mhvk/astropy,tbabej/astropy,kelle/astropy,bsipocz/astropy,lpsinger/astropy,bsipocz/astropy,kelle/astropy,MSeifert04/astropy,joergdietrich/astropy,mhvk/astropy,bsipocz/astropy,astropy/astropy,pllim/astropy,StuartLittlefair/astropy,joergdietrich/astropy,stargaser/astropy,dhomeier/astropy,AustereCuriosity/astropy,dhomeier/astropy,mhvk/astropy,StuartLittlefair/astropy,mhvk/astropy,dhomeier/astropy,larrybradley/astropy,joergdietrich/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,aleksandr-bakanov/astropy,funbaker/astropy,StuartLittlefair/astropy,larrybradley/astropy,funbaker/astropy,joergdietrich/astropy,DougBurke/astropy,lpsinger/astropy,larrybradley/astropy,bsipocz/astropy,stargaser/astropy,funbaker/astropy,stargaser/astropy,astropy/astropy,saimn/astropy,larrybradley/astropy,DougBurke/astropy,MSeifert04/astropy,lpsinger/astropy,StuartLittlefair/astropy,astropy/astropy,saimn/astropy | astropy/cosmology/tests/test_cosmology.py | astropy/cosmology/tests/test_cosmology.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .. import cosmology
import numpy as np
def test_cosmology():
cosmo = cosmology.Cosmology(H0=70, Om=0.27 ,Ol=0.73)
z = 1
# Test values were taken from the following web cosmology
# calculators on 27th Feb 2012:
# Wright: http://www.astro.ucla.edu/~wright/CosmoCalc.html
# Kempner: http://www.kempner.net/cosmic.php
# iCosmos: http://www.icosmos.co.uk/index.html
# The order of values below is Wright, Kempner, iCosmos'
assert np.allclose(cosmo.dc(z), [3364.5, 3364.8, 3364.7988], rtol=1e-4)
assert np.allclose(cosmo.da(z), [1682.3, 1682.4, 1682.3994], rtol=1e-4)
assert np.allclose(cosmo.dl(z), [6729.2, 6729.6, 6729.5976], rtol=1e-4)
assert np.allclose(cosmo.tl(z), [7.841, 7.84178, 7.843], rtol=1e-3)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from .. import cosmology
def test_cosmology():
cosmo = cosmology.Cosmology(H0=70, Om=0.27 ,Ol=0.73)
z = 1
# Test values were taken from the following web cosmology
# calculators on 27th Feb 2012:
# Wright: http://www.astro.ucla.edu/~wright/CosmoCalc.html
# Kempner: http://www.kempner.net/cosmic.php
# iCosmos: http://www.icosmos.co.uk/index.html
# The order of values below is Wright, Kempner, iCosmos'
assert np.allclose(cosmo.dc(z), [3364.5, 3364.8, 3364.7988], rtol=1e-4)
assert np.allclose(cosmo.da(z), [1682.3, 1682.4, 1682.3994], rtol=1e-4)
assert np.allclose(cosmo.dl(z), [6729.2, 6729.6, 6729.5976], rtol=1e-4)
assert np.allclose(cosmo.tl(z), [7.841, 7.84178, 7.843], rtol=1e-3)
| bsd-3-clause | Python |
182fa87ebb013c71744566bc0fe693ccef8df251 | add unit test suite | ContinuumIO/pycosat,ContinuumIO/pycosat,sandervandorsten/pycosat,sandervandorsten/pycosat | test_pycosat.py | test_pycosat.py | import unittest
import pycosat
tests = []
class TestSolver(unittest.TestCase):
def test_sat_1(self):
res = pycosat.solve(5, [[1, -5, 4],
[-1, 5, 3, 4],
[-3, -4]])
self.assertEqual(res, [True, False, False, False, True])
def test_unsat_2(self):
res = pycosat.solve(2, [[-1],
[1]])
self.assertEqual(res, False)
tests.append(TestSolver)
# ------------------------------------------------------------------------
def run(verbosity=1, repeat=1):
suite = unittest.TestSuite()
for cls in tests:
for _ in range(repeat):
suite.addTest(unittest.makeSuite(cls))
runner = unittest.TextTestRunner(verbosity=verbosity)
return runner.run(suite)
if __name__ == '__main__':
run()
| import pycosat
print pycosat.solve(
5, [
[1, -5, 4],
[-1, 5, 3, 4],
[-3, -4],
],
# True
)
print pycosat.solve(
2, [
[-1],
[1],
],
# True
)
| mit | Python |
68989a94b738a9e5ab7aed000523950da99029ec | update cloto db | Fiware/cloud.Facts,Fiware/cloud.Facts,telefonicaid/fiware-facts,telefonicaid/fiware-facts,Fiware/cloud.Facts,telefonicaid/fiware-facts | facts/cloto_db_client.py | facts/cloto_db_client.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright 2014 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
#
__author__ = 'gjp'
import logging.config
from config import config
import MySQLdb as mysql
from keystoneclient.exceptions import NotFound
class cloto_db_client():
"""This class provides methods to provide connection with Cloto database.
"""
conn = None
def get_window_size(self, tenantId):
"""
This method is in charge of retrieve the window size of a tenantId from cloto database.
:param tenantId: the id of the tenant to request the windowsize
:return: the window size
"""
try:
db = config.get('mysql', 'db')
if self.conn == None:
self.conn = mysql.connect(charset=config.get('mysql', 'charset'), use_unicode=True,
host=config.get('mysql', 'host'),
user=config.get('mysql', 'user'), passwd=config.get('mysql', 'password'),
db=db)
cursor = self.conn.cursor()
cursor.execute('SELECT * FROM cloto_tenantinfo WHERE tenantId="{0}"'.format(tenantId))
data = cursor.fetchall()
if len(data) == 0:
raise NotFound('{"error": "TenantID %s not found in database"}' % tenantId)
else:
tenant_information = data[0]
window_size = tenant_information[1]
except Exception, e:
logging.error("Error %s" % e.message)
raise e
finally:
if self.conn:
self.conn.close()
self.conn = None
return window_size
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright 2014 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
#
__author__ = 'gjp'
import logging.config
from config import config
import MySQLdb as mysql
from keystoneclient.exceptions import NotFound
class cloto_db_client():
"""This class provides methods to provide connection with Cloto database.
"""
conn = None
def get_window_size(self, tenantId):
"""
This method is in charge of retrieve the window size of a tenantId from cloto database.
:param tenantId: the id of the tenant to request the windowsize
:return: the window size
"""
try:
db = config.get('mysql', 'db')
if self.conn == None:
self.conn = mysql.connect(charset=config.get('mysql', 'charset'), use_unicode=True,
host=config.get('mysql', 'host'),
user=config.get('mysql', 'user'), passwd=config.get('mysql', 'password'),
db=db)
cursor = self.conn.cursor()
cursor.execute('SELECT * FROM {0}.cloto_tenantinfo WHERE tenantId="{1}"'.format(db, tenantId))
data = cursor.fetchall()
if len(data) == 0:
raise NotFound('{"error": "TenantID %s not found in database"}' % tenantId)
else:
tenant_information = data[0]
window_size = tenant_information[1]
except Exception, e:
logging.error("Error %s" % e.message)
raise e
finally:
if self.conn:
self.conn.close()
self.conn = None
return window_size
| apache-2.0 | Python |
0ae9b232b82285f2fa275b8ffa5dced6b9377b0e | Add equality operator to EnvironCredential | jaraco/keyring | keyring/credentials.py | keyring/credentials.py | import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def __eq__(self, other: object) -> bool:
if not isinstance(other, EnvironCredential):
return NotImplemented
return (
self.user_env_var == other.user_env_var
and self.pwd_env_var == other.pwd_env_var
)
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| import os
import abc
class Credential(metaclass=abc.ABCMeta):
"""Abstract class to manage credentials"""
@abc.abstractproperty
def username(self):
return None
@abc.abstractproperty
def password(self):
return None
class SimpleCredential(Credential):
"""Simple credentials implementation"""
def __init__(self, username, password):
self._username = username
self._password = password
@property
def username(self):
return self._username
@property
def password(self):
return self._password
class EnvironCredential(Credential):
"""Source credentials from environment variables.
Actual sourcing is deferred until requested.
"""
def __init__(self, user_env_var, pwd_env_var):
self.user_env_var = user_env_var
self.pwd_env_var = pwd_env_var
def _get_env(self, env_var):
"""Helper to read an environment variable"""
value = os.environ.get(env_var)
if not value:
raise ValueError('Missing environment variable:%s' % env_var)
return value
@property
def username(self):
return self._get_env(self.user_env_var)
@property
def password(self):
return self._get_env(self.pwd_env_var)
| mit | Python |
af572475c4ce2a2d5eafbf95dcbeff5759401cb3 | Fix typo in docstring | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | byceps/services/authentication/service.py | byceps/services/authentication/service.py | """
byceps.services.authentication.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from ..user import service as user_service
from ..user.transfer.models import User
from .exceptions import AuthenticationFailed
from .password import service as password_service
def authenticate(screen_name_or_email_address: str, password: str) -> User:
"""Try to authenticate the user.
Return the user object on success, or raise an exception on failure.
"""
# Look up user.
user = _find_user_by_screen_name_or_email_address(
screen_name_or_email_address
)
if user is None:
# Screen name/email address is unknown.
raise AuthenticationFailed()
_require_user_account_is_active(user)
# Verify credentials.
if not password_service.is_password_valid_for_user(user.id, password):
# Password does not match.
raise AuthenticationFailed()
return user.to_dto()
def _find_user_by_screen_name_or_email_address(
screen_name_or_email_address: str
) -> Optional[User]:
if '@' in screen_name_or_email_address:
return user_service.find_user_by_email_address(
screen_name_or_email_address
)
else:
return user_service.find_user_by_screen_name(
screen_name_or_email_address, case_insensitive=True
)
def _require_user_account_is_active(user: User) -> None:
"""Raise exception if user account has not been initialized, is
suspended, or has been deleted.
"""
if (not user.initialized) or user.suspended or user.deleted:
raise AuthenticationFailed()
| """
byceps.services.authentication.service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from ..user import service as user_service
from ..user.transfer.models import User
from .exceptions import AuthenticationFailed
from .password import service as password_service
def authenticate(screen_name_or_email_address: str, password: str) -> User:
"""Try to authenticate the user.
Return the user object on success, or raise an exception on failure.
"""
# Look up user.
user = _find_user_by_screen_name_or_email_address(
screen_name_or_email_address
)
if user is None:
# Screen name/email address is unknown.
raise AuthenticationFailed()
_require_user_account_is_active(user)
# Verify credentials.
if not password_service.is_password_valid_for_user(user.id, password):
# Password does not match.
raise AuthenticationFailed()
return user.to_dto()
def _find_user_by_screen_name_or_email_address(
screen_name_or_email_address: str
) -> Optional[User]:
if '@' in screen_name_or_email_address:
return user_service.find_user_by_email_address(
screen_name_or_email_address
)
else:
return user_service.find_user_by_screen_name(
screen_name_or_email_address, case_insensitive=True
)
def _require_user_account_is_active(user: User) -> None:
"""Raise exception if user account has not been initialized, is
suspeded, or has been deleted.
"""
if (not user.initialized) or user.suspended or user.deleted:
raise AuthenticationFailed()
| bsd-3-clause | Python |
b1de92ea458c5207620b999454ebb0e42716b629 | print eta only when fps > 0 | chainer/chainercv,pfnet/chainercv,yuyu2172/chainercv,yuyu2172/chainercv,chainer/chainercv | chainercv/utils/iterator/progress_hook.py | chainercv/utils/iterator/progress_hook.py | from __future__ import division
import sys
import time
class ProgressHook(object):
"""A hook class reporting the progress of iteration.
This is a hook class designed for
:func:`~chainercv.utils.apply_prediction_to_iterator`.
Args:
n_total (int): The number of images. This argument is optional.
"""
def __init__(self, n_total=None):
self.n_total = n_total
self.start = time.time()
self.n_processed = 0
def __call__(self, in_values, out_values, rest_values):
self.n_processed += len(in_values[0])
fps = self.n_processed / (time.time() - self.start)
if self.n_total is not None and fps > 0:
eta = int((self.n_total - self.n_processed) / fps)
sys.stdout.write(
'\r{:d} of {:d} samples, {:.2f} samples/sec,'
' {:4d}:{:02d}:{:02d}'.format(
self.n_processed, self.n_total, fps,
eta // 60 // 60, (eta // 60) % 60, eta % 60))
else:
sys.stdout.write(
'\r{:d} samples, {:.2f} samples/sec'.format(
self.n_processed, fps))
sys.stdout.flush()
| from __future__ import division
import sys
import time
class ProgressHook(object):
"""A hook class reporting the progress of iteration.
This is a hook class designed for
:func:`~chainercv.utils.apply_prediction_to_iterator`.
Args:
n_total (int): The number of images. This argument is optional.
"""
def __init__(self, n_total=None):
self.n_total = n_total
self.start = time.time()
self.n_processed = 0
def __call__(self, in_values, out_values, rest_values):
self.n_processed += len(in_values[0])
fps = self.n_processed / (time.time() - self.start)
if self.n_total is not None:
eta = int((self.n_total - self.n_processed) / fps)
sys.stdout.write(
'\r{:d} of {:d} samples, {:.2f} samples/sec,'
' {:4d}:{:02d}:{:02d}'.format(
self.n_processed, self.n_total, fps,
eta // 60 // 60, (eta // 60) % 60, eta % 60))
else:
sys.stdout.write(
'\r{:d} samples, {:.2f} samples/sec'.format(
self.n_processed, fps))
sys.stdout.flush()
| mit | Python |
9f76d65e4f372faa0f96d873d3ef07ca027038b9 | Use openssl pkey to convert EC key into printable form before comparing, because different openssl versions use different privkey formats | vbwagner/ctypescrypto | tests/testec.py | tests/testec.py | from ctypescrypto.oid import Oid
from ctypescrypto.ec import create
from base64 import b16decode
from subprocess import Popen, PIPE
import unittest
def dump_key(key):
""" Convert key into printable form using openssl utility
Used to compare keys which can be stored in different
format by different OpenSSL versions
"""
return Popen(["openssl","pkey","-text","-noout"],stdin=PIPE,stdout=PIPE).communicate(key)[0]
def dump_pub_key(key):
""" Convert key into printable form using openssl utility
Used to compare keys which can be stored in different
format by different OpenSSL versions
"""
return Popen(["openssl","pkey","-text_pub","-noout"],stdin=PIPE,stdout=PIPE).communicate(key)[0]
class TestEcCreation(unittest.TestCase):
ec1priv="""-----BEGIN PRIVATE KEY-----
MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgKnG6neqZvB98EEuuxnHs
fv+L/5abuNNG20wzUqRpncOhRANCAARWKXWeUZ6WiCKZ2kHx87jmJyx0G3ZB1iQC
+Gp2AJYswbQPhGPigKolzIbZYfwnn7QOca6N8QDhPAn3QQK8trZI
-----END PRIVATE KEY-----
"""
bigkey="""-----BEGIN PRIVATE KEY-----
MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgAAAAAAAAAAAAAAAAAAAA
AUVRIxlQt1/EQC2hcy/Jvr6hRANCAASRZsKJufkF5V+ePfn2nX81a0oiCV+JT0cV
cUqktWYGr/GB65Zr5Ky1z/nha2bYCb6U4hTwbJP9CRCZr5hJklXn
-----END PRIVATE KEY-----
"""
def test_keyone(self):
key=create(Oid("secp256k1"),b16decode("2A71BA9DEA99BC1F7C104BAEC671EC7EFF8BFF969BB8D346DB4C3352A4699DC3",True))
out=key.exportpriv()
self.assertEqual(dump_key(out),dump_key(self.ec1priv))
def test_bignum(self):
keyval='\xff'*32
key=create(Oid("secp256k1"),keyval)
self.assertEqual(dump_key(key.exportpriv()),dump_key(self.bigkey))
if __name__ == "__main__":
unittest.main()
| from ctypescrypto.oid import Oid
from ctypescrypto.ec import create
from base64 import b16decode
import unittest
class TestEcCreation(unittest.TestCase):
ec1priv="""-----BEGIN PRIVATE KEY-----
MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgKnG6neqZvB98EEuuxnHs
fv+L/5abuNNG20wzUqRpncOhRANCAARWKXWeUZ6WiCKZ2kHx87jmJyx0G3ZB1iQC
+Gp2AJYswbQPhGPigKolzIbZYfwnn7QOca6N8QDhPAn3QQK8trZI
-----END PRIVATE KEY-----
"""
bigkey="""-----BEGIN PRIVATE KEY-----
MIGEAgEAMBAGByqGSM49AgEGBSuBBAAKBG0wawIBAQQgAAAAAAAAAAAAAAAAAAAA
AUVRIxlQt1/EQC2hcy/Jvr6hRANCAASRZsKJufkF5V+ePfn2nX81a0oiCV+JT0cV
cUqktWYGr/GB65Zr5Ky1z/nha2bYCb6U4hTwbJP9CRCZr5hJklXn
-----END PRIVATE KEY-----
"""
def test_keyone(self):
key=create(Oid("secp256k1"),b16decode("2A71BA9DEA99BC1F7C104BAEC671EC7EFF8BFF969BB8D346DB4C3352A4699DC3",True))
out=key.exportpriv()
self.assertEqual(out,self.ec1priv)
def test_bignum(self):
keyval='\xff'*32
key=create(Oid("secp256k1"),keyval)
self.assertEqual(key.exportpriv(),self.bigkey)
if __name__ == "__main__":
unittest.main()
| mit | Python |
97312b55525f81ecba3c0df1d6c2e55fa217ce83 | Make tests run on sqlite by default | dabapps/django-db-queue | testsettings.py | testsettings.py | import os
import dj_database_url
DATABASE_URL = os.environ.get("DATABASE_URL", "sqlite:///:memory:")
DATABASES = {
"default": dj_database_url.parse(DATABASE_URL),
}
INSTALLED_APPS = ("django_dbq",)
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
)
SECRET_KEY = "abcde12345"
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"handlers": {"console": {"level": "DEBUG", "class": "logging.StreamHandler",},},
"root": {"handlers": ["console"], "level": "INFO",},
"loggers": {"django_dbq": {"level": "CRITICAL", "propagate": True,},},
}
| import os
import dj_database_url
DATABASES = {
"default": dj_database_url.parse(os.environ["DATABASE_URL"]),
}
INSTALLED_APPS = ("django_dbq",)
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
)
SECRET_KEY = "abcde12345"
LOGGING = {
"version": 1,
"disable_existing_loggers": True,
"handlers": {"console": {"level": "DEBUG", "class": "logging.StreamHandler",},},
"root": {"handlers": ["console"], "level": "INFO",},
"loggers": {"django_dbq": {"level": "CRITICAL", "propagate": True,},},
}
| bsd-2-clause | Python |
3a3dd951bc4b3fe4c54cbc542cb63f84d5b0fdcc | Bump version number to 2.2alpha for continued development. | mkuiack/tkp,transientskp/tkp,bartscheers/tkp,bartscheers/tkp,mkuiack/tkp,transientskp/tkp | tkp/__init__.py | tkp/__init__.py | """
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "2.2a0"
| """
This package contains the Python modules used by the LOFAR Transients Pipeline
(TraP). This includes:
- Pipeline configuration management;
- Task distribution;
- Image loading and quality control;
- Source detection and measurement;
- Storing and associating sources in the database.
For details, see http://docs.transientskp.org/.
"""
__version__ = "2.1.0"
| bsd-2-clause | Python |
328d40f8f166f6744db94332d5620500cac73f02 | fix bug, required not allowed for positionals | ixdy/kubernetes-release,ixdy/kubernetes-release,kubernetes/release,kubernetes/release | defs/gcs_uploader.py | defs/gcs_uploader.py | #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import atexit
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
def main(argv):
scratch = tempfile.mkdtemp(prefix="bazel-gcs.")
atexit.register(lambda: shutil.rmtree(scratch))
with open(argv.manifest) as manifest:
for artifact in manifest:
artifact = artifact.strip()
try:
os.makedirs(os.path.join(scratch, os.path.dirname(artifact)))
except (OSError):
# skip directory already exists errors
pass
os.symlink(os.path.join(argv.root, artifact), os.path.join(scratch, artifact))
sys.exit(subprocess.call(["gsutil", "-m", "rsync", "-C", "-r", scratch, argv.gcs_path]))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Upload build targets to GCS.')
parser.add_argument("--manifest", required=True, help="path to manifest of targets")
parser.add_argument("--root", required=True, help="path to root of workspace")
parser.add_argument("gcs_path", help="path in gcs to push targets")
main(parser.parse_args())
| #!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import atexit
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
def main(argv):
scratch = tempfile.mkdtemp(prefix="bazel-gcs.")
atexit.register(lambda: shutil.rmtree(scratch))
with open(argv.manifest) as manifest:
for artifact in manifest:
artifact = artifact.strip()
try:
os.makedirs(os.path.join(scratch, os.path.dirname(artifact)))
except (OSError):
# skip directory already exists errors
pass
os.symlink(os.path.join(argv.root, artifact), os.path.join(scratch, artifact))
sys.exit(subprocess.call(["gsutil", "-m", "rsync", "-C", "-r", scratch, argv.gcs_path]))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Upload build targets to GCS.')
parser.add_argument("--manifest", required=True, help="path to manifest of targets")
parser.add_argument("--root", required=True, help="path to root of workspace")
parser.add_argument("gcs_path", required=True, help="path in gcs to push targets")
main(parser.parse_args())
| apache-2.0 | Python |
a8246967f239938251d754bd934823f56f3d4df1 | update try_lightGBM.py | hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice | try_lightGBM.py | try_lightGBM.py | import numpy as np
import pandas as pd
from pandas import Series, DataFrame
import lightgbm as lgb
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder,OneHotEncoder
from sklearn.model_selection import train_test_split
data=pd.read_csv('adult.csv',header=None)
# assign column names to the data
data.columns=['age','workclass','fnlwgt','education','education-num','marital_Status','occupation','relationship','race','sex','capital_gain','capital_loss','hours_per_week','native_country','Income']
data.head()
# encode label
l=LabelEncoder()
l.fit(data.Income)
l.classes_
data.Income=Series(l.transform(data.Income))
data.Income.value_counts() # label has been encoded as 0, 1
# convert categorical data into one-hot, and drop original categorical data
one_hot_workclass=pd.get_dummies(data.workclass)
one_hot_education=pd.get_dummies(data.education)
one_hot_marital_Status=pd.get_dummies(data.marital_Status)
one_hot_occupation=pd.get_dummies(data.occupation)
one_hot_relationship=pd.get_dummies(data.relationship)
one_hot_race=pd.get_dummies(data.race)
one_hot_sex=pd.get_dummies(data.sex)
one_hot_native_country=pd.get_dummies(data.native_country)
data.drop(['workclass','education','marital_Status','occupation','relationship','race','sex','native_country'],axis=1,inplace=True)
data=pd.concat([data,one_hot_workclass,one_hot_education,one_hot_marital_Status,one_hot_occupation,one_hot_relationship,one_hot_race,one_hot_sex,one_hot_native_country],axis=1)
#removing dulpicate columns
i = np.unique(data.columns, return_index=True)
i[1] # index of unique columns
data=data.iloc[:, i[1]] # use the index of unique columns
data.head()
# seperate features and the label
features = data.drop('Income',axis=1)
label = data.Income
label.mode()[0]
label.fillna(label.mode()[0],inplace=True) # impute missing data with mode
label.value_counts()
# split into training and testing data
features_train,features_test,label_train,label_test=train_test_split(features,label,test_size=.3)
# Method 1 - XGBOOST, single thread
dtrain=xgb.DMatrix(features_train,label=label_train)
dtest=xgb.DMatrix(features_test)
## xgboost params
parameters={'max_depth':7, 'eta':1, 'silent':1,'objective':'binary:logistic','eval_metric':'auc','learning_rate':.05}
from datetime import datetime
num_round=50
start = datetime.now()
xg=xgb.train(parameters,dtrain,num_round) # train the model
stop = datetime.now()
execution_time_xgb = stop-start
print(execution_time_xgb) # 0:00:06.377225
ypred=xg.predict(dtest)
print(ypred)
#Converting probabilities into 1 or 0
for i in range(0,9769):
if ypred[i]>=.5: # setting threshold to .5
ypred[i]=1
else:
ypred[i]=0
from sklearn.metrics import accuracy_score
accuracy_xgb = accuracy_score(label_test,ypred)
accuracy_xgb # 0.86713071962329824
from sklearn.metrics import confusion_matrix, roc_auc_score
cm = confusion_matrix(label_test, ypred)
TP = cm[0][0]
FP = cm[0][1]
FN = cm[1][0]
TN = cm[1][1]
accuracy = (TP + TN)/(TP+FP+FN+TN) # accuracy: 0.867130719623
auc_score = roc_auc_score(label_test, ypred) # AUC: 0.775170828432
precision = TP/(TP+FP) # 0.952047413793
specificity = TN/(TN+FP) # 0.797612279704
recall = TP/(TP+TN) # 0.834376106717
| import numpy as np
import pandas as pd
from pandas import Series, DataFrame
import lightgbm as lgb
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder,OneHotEncoder
from sklearn.model_selection import train_test_split
data=pd.read_csv('adult.csv',header=None)
# assign column names to the data
data.columns=['age','workclass','fnlwgt','education','education-num','marital_Status','occupation','relationship','race','sex','capital_gain','capital_loss','hours_per_week','native_country','Income']
data.head()
# encode label
l=LabelEncoder()
l.fit(data.Income)
l.classes_
data.Income=Series(l.transform(data.Income))
data.Income.value_counts() # label has been encoded as 0, 1
# convert categorical data into one-hot, and drop original categorical data
one_hot_workclass=pd.get_dummies(data.workclass)
one_hot_education=pd.get_dummies(data.education)
one_hot_marital_Status=pd.get_dummies(data.marital_Status)
one_hot_occupation=pd.get_dummies(data.occupation)
one_hot_relationship=pd.get_dummies(data.relationship)
one_hot_race=pd.get_dummies(data.race)
one_hot_sex=pd.get_dummies(data.sex)
one_hot_native_country=pd.get_dummies(data.native_country)
data.drop(['workclass','education','marital_Status','occupation','relationship','race','sex','native_country'],axis=1,inplace=True)
data=pd.concat([data,one_hot_workclass,one_hot_education,one_hot_marital_Status,one_hot_occupation,one_hot_relationship,one_hot_race,one_hot_sex,one_hot_native_country],axis=1)
#removing dulpicate columns
i = np.unique(data.columns, return_index=True)
i[1] # index of unique columns
data=data.iloc[:, i[1]] # use the index of unique columns
data.head()
# seperate features and the label
features = data.drop('Income',axis=1)
label = data.Income
label.mode()[0]
label.fillna(label.mode()[0],inplace=True) # impute missing data with mode
label.value_counts()
# split into training and testing data
features_train,features_test,label_train,label_test=train_test_split(features,label,test_size=.3)
| mit | Python |
ef638d679a87cb33975fb29fea652cc84aa71663 | Rename /atom.xml to /feed.atom | jarus/flask-rst | flaskrst/modules/atom.py | flaskrst/modules/atom.py | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app, url_for
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/feed.atom")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)
@app.before_request
def inject_atom_feed():
atom_feed = (url_for('atom.atom_feed'), 'application/atom+xml',
app.config.get("SITE_NAME") + " Atom Feed")
if app.config['FEEDS'].count(atom_feed) < 1:
app.config['FEEDS'].append(atom_feed) | # -*- coding: utf-8 -*-
"""
flask-rst.modules.atom
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2011 by Christoph Heer.
:license: BSD, see LICENSE for more details.
"""
from flask import Blueprint, request, current_app, url_for
from werkzeug.contrib.atom import AtomFeed, FeedEntry
from flaskrst.modules.blog import get_posts
atom = Blueprint('atom', __name__)
@atom.route("/atom.xml")
def atom_feed():
feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"),
feed_url=request.url, url=request.host_url,
subtitle=current_app.config.get('SITE_SUBTITLE', None))
for post in get_posts():
entry = FeedEntry(post.title,
url=post.external_url,
updated=post.pub_date,
content=post.body,
summary=post.config.get('summary', None),
author={
'name': current_app.config.get('AUTHOR_NAME'),
'email': current_app.config.get('AUTHOR_EMAIL')
})
feed.add(entry)
return feed.to_string(), 200, {}, "application/atom+xml"
def setup(app, cfg):
app.register_blueprint(atom)
@app.before_request
def inject_atom_feed():
atom_feed = (url_for('atom.atom_feed'), 'application/atom+xml',
app.config.get("SITE_NAME") + " Atom Feed")
if app.config['FEEDS'].count(atom_feed) < 1:
app.config['FEEDS'].append(atom_feed) | bsd-3-clause | Python |
6350c312891b91aea16eebc8a9cb1502d1a058a4 | Bump version to 1.1.3 | edoburu/django-fluent-utils | fluent_utils/__init__.py | fluent_utils/__init__.py | # following PEP 386
__version__ = "1.1.3"
| # following PEP 386
__version__ = "1.1.2"
| apache-2.0 | Python |
1b05421a58b8b6d62d62bfd7323e91df2fbc496d | patch auth.User to have utility methods | gizmag/django-generic-follow,pombredanne/django-generic-follow,gizmag/django-generic-follow | generic_follow/models.py | generic_follow/models.py | from django.conf import settings
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from .managers import FollowManager
class Follow(models.Model):
user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'))
created = models.DateTimeField(auto_now_add=True)
# generic foreign key to target
target_content_type = models.ForeignKey(ContentType)
target_object_id = models.PositiveIntegerField()
target = generic.GenericForeignKey('target_content_type', 'target_object_id')
objects = FollowManager()
# apply user model mixins to auth.User model
if getattr(settings, 'AUTH_USER_MODEL', 'auth.User') == 'auth.User':
from .model_mixins import UserFollowMixin
from django.contrib.auth.models import User
for name, method in UserFollowMixin.__dict__:
if not name.startswith('__'):
User.add_to_class(name, method)
| from django.conf import settings
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from .managers import FollowManager
class Follow(models.Model):
user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'))
created = models.DateTimeField(auto_now_add=True)
# generic foreign key to target
target_content_type = models.ForeignKey(ContentType)
target_object_id = models.PositiveIntegerField()
target = generic.GenericForeignKey('target_content_type', 'target_object_id')
objects = FollowManager()
| mit | Python |
d88bb6542373f248f6792bd064ad84a17efe9414 | fix teacher and student model | crike/crike,crike/crike,crike/crike,crike/crike | src/crike_django/crike_django/models.py | src/crike_django/crike_django/models.py | #coding:utf-8
from django.db import models
from django.contrib.auth.models import User
from mongoengine import *
# 数据库基本模型分为word、dict、user、course、voice、image、game、video
# 当前目标:实现word、dict、user、course,其余皆往后排
# dict包含多个word,course包含多个word,user包含多个course
class Word(Document):
name = StringField(required=True, max_length=50)
phonetics = StringField(required=True, max_length=80)
mean = ListField(StringField(max_length=80), required=True)
#pos = ListField(StringField(max_length=20), required=True)
#audio = FileField(required=True)
#audio = StringField()
image = FileField()
class Lesson(EmbeddedDocument):
name = StringField(required=True)
words = ListField(ReferenceField(Word))
class Dict(Document):
name = StringField(required=True)
lessons = ListField(EmbeddedDocumentField(Lesson))
meta = {'allow_inheritance': True}
class CET4Dict(Dict):
pass
class CET6Dict(Dict):
pass
class WebsterDict(Dict):
pass
# Accounts area
# This class is to keep compability with other apps
# which use original User model.
class Profile(models.Model):
user = models.ForeignKey(User)
@property
def is_student(self):
try:
self.student
return True
except Student.DoesNotExist:
return False
@property
def is_teacher(self):
try:
self.teacher
return True
except Student.DoesNotExist:
return False
class Teacher(Profile):
pass
class Student(Profile):
pass
class TeachingAssistant(Profile):
pass
class Course(models.Model):
pass
| #coding:utf-8
from django.db import models
from django.contrib.auth.models import User
from mongoengine import *
# 数据库基本模型分为word、dict、user、course、voice、image、game、video
# 当前目标:实现word、dict、user、course,其余皆往后排
# dict包含多个word,course包含多个word,user包含多个course
class Word(Document):
name = StringField(required=True, max_length=50)
phonetics = StringField(required=True, max_length=80)
mean = ListField(StringField(max_length=80), required=True)
#pos = ListField(StringField(max_length=20), required=True)
#audio = FileField(required=True)
#audio = StringField()
image = FileField()
class Lesson(EmbeddedDocument):
name = StringField(required=True)
words = ListField(ReferenceField(Word))
class Dict(Document):
name = StringField(required=True)
lessons = ListField(EmbeddedDocumentField(Lesson))
meta = {'allow_inheritance': True}
class CET4Dict(Dict):
pass
class CET6Dict(Dict):
pass
class WebsterDict(Dict):
pass
# Accounts area
# This class is to keep compability with other apps
# which use original User model.
class Profile(models.Model):
user = models.ForeignKey(User)
@property
def is_student(self):
try:
self.student
return True
except Student.DoesNotExist:
return False
@property
def is_teacher(self):
try:
self.teacher
return True
except Student.DoesNotExist:
return False
class Teacher(Profile):
profile = models.ForeignKey(Profile)
class Meta:
db_table = 'teacher_user'
class Student(Profile):
profile = models.ForeignKey(Profile)
class Meta:
db_table = 'student_user'
class TeachingAssistant(Profile):
pass
class Course(models.Model):
pass
| apache-2.0 | Python |
b5a92b53e95bef1858b3dfa1ab6b1828dd085ed4 | disable blue logs | rr-/dotfiles,rr-/dotfiles,rr-/dotfiles | libdotfiles/logging.py | libdotfiles/logging.py | # pylint: disable=unused-import,protected-access,invalid-name
import logging
import os
from logging import (
DEBUG,
ERROR,
INFO,
WARNING,
debug,
error,
getLogger,
info,
warning,
)
import coloredlogs
def _add_custom_level(number, name):
logging.addLevelName(number, name.upper())
def member(self, message, *args, **kwargs):
if self.isEnabledFor(number):
self._log(number, message, args, **kwargs)
def function(message, *args, **kwargs):
if len(logging.Logger.root.handlers) == 0:
logging.basicConfig()
logging.Logger.root._log(number, message, args, **kwargs)
setattr(logging.Logger, name, member)
return (number, function)
SUCCESS, success = _add_custom_level(29, "success")
def setup_colored_logs(fmt: str = "%(message)s") -> None:
coloredlogs.install(
fmt=fmt,
level_styles={
"warning": {"color": "yellow"},
"success": {"color": "green", "bold": True},
"error": {"color": "red", "bold": True},
"info": {},
},
isatty=True if "COLORED_LOGS" in os.environ else None,
)
| # pylint: disable=unused-import,protected-access,invalid-name
import logging
import os
from logging import (
DEBUG,
ERROR,
INFO,
WARNING,
debug,
error,
getLogger,
info,
warning,
)
import coloredlogs
def _add_custom_level(number, name):
logging.addLevelName(number, name.upper())
def member(self, message, *args, **kwargs):
if self.isEnabledFor(number):
self._log(number, message, args, **kwargs)
def function(message, *args, **kwargs):
if len(logging.Logger.root.handlers) == 0:
logging.basicConfig()
logging.Logger.root._log(number, message, args, **kwargs)
setattr(logging.Logger, name, member)
return (number, function)
SUCCESS, success = _add_custom_level(29, "success")
def setup_colored_logs(fmt: str = "%(message)s") -> None:
coloredlogs.install(
fmt=fmt,
level_styles={
"warning": {"color": "yellow"},
"success": {"color": "green", "bold": True},
"error": {"color": "red", "bold": True},
"info": {"color": "blue", "bold": True},
},
isatty=True if "COLORED_LOGS" in os.environ else None,
)
| mit | Python |
fed687bb6a5b4685b04b3533920288c8e5ae4faf | Use db.session directly rather than relying on convenience methods | eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,eXcomm/gratipay.com,bountysource/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,studio666/gratipay.com,studio666/gratipay.com,gratipay/gratipay.com,MikeFair/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,eXcomm/gratipay.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com | gittip/authentication.py | gittip/authentication.py | """Defines website authentication helpers.
"""
import datetime
import rfc822
import time
import pytz
from aspen import Response
from gittip.orm import db
from gittip.models import User
BEGINNING_OF_EPOCH = rfc822.formatdate(0)
TIMEOUT = 60 * 60 * 24 * 7 # one week
def inbound(request):
"""Authenticate from a cookie.
"""
if 'session' in request.headers.cookie:
token = request.headers.cookie['session'].value
user = User.from_session_token(token)
else:
user = User()
request.context['user'] = user
def outbound(response):
if 'user' in response.request.context:
user = response.request.context['user']
if not isinstance(user, User):
raise Response(400, "If you define 'user' in a simplate it has to "
"be a User instance.")
else:
user = User()
if user.ANON: # user is anonymous
if 'session' not in response.request.headers.cookie:
# no cookie in the request, don't set one on response
return
else:
# expired cookie in the request, instruct browser to delete it
response.headers.cookie['session'] = ''
expires = 0
else: # user is authenticated
user = User.from_session_token(user.session_token)
response.headers['Expires'] = BEGINNING_OF_EPOCH # don't cache
response.headers.cookie['session'] = user.session_token
expires = time.time() + TIMEOUT
user.session_expires = datetime.datetime.fromtimestamp(expires)\
.replace(tzinfo=pytz.utc)
db.session.add(user)
db.session.commit()
cookie = response.headers.cookie['session']
# I am not setting domain, because it is supposed to default to what we
# want: the domain of the object requested.
#cookie['domain']
cookie['path'] = '/'
cookie['expires'] = rfc822.formatdate(expires)
cookie['httponly'] = "Yes, please." | """Defines website authentication helpers.
"""
import datetime
import rfc822
import time
import pytz
from aspen import Response
from gittip.models import User
BEGINNING_OF_EPOCH = rfc822.formatdate(0)
TIMEOUT = 60 * 60 * 24 * 7 # one week
def inbound(request):
"""Authenticate from a cookie.
"""
if 'session' in request.headers.cookie:
token = request.headers.cookie['session'].value
user = User.from_session_token(token)
else:
user = User()
request.context['user'] = user
def outbound(response):
if 'user' in response.request.context:
user = response.request.context['user']
if not isinstance(user, User):
raise Response(400, "If you define 'user' in a simplate it has to "
"be a User instance.")
else:
user = User()
if user.ANON: # user is anonymous
if 'session' not in response.request.headers.cookie:
# no cookie in the request, don't set one on response
return
else:
# expired cookie in the request, instruct browser to delete it
response.headers.cookie['session'] = ''
expires = 0
else: # user is authenticated
user = User.from_session_token(user.session_token)
response.headers['Expires'] = BEGINNING_OF_EPOCH # don't cache
response.headers.cookie['session'] = user.session_token
expires = time.time() + TIMEOUT
user.session_expires = datetime.datetime.fromtimestamp(expires)\
.replace(tzinfo=pytz.utc)
user.save()
cookie = response.headers.cookie['session']
# I am not setting domain, because it is supposed to default to what we
# want: the domain of the object requested.
#cookie['domain']
cookie['path'] = '/'
cookie['expires'] = rfc822.formatdate(expires)
cookie['httponly'] = "Yes, please." | mit | Python |
383a9d6d74c55b8fb037e43c8236d0dc7ff38e07 | add argv argument to the process function (which defaults to sys.argv) | stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis | Synopsis/process.py | Synopsis/process.py | # $Id: process.py,v 1.5 2003/12/11 04:38:59 stefan Exp $
#
# Copyright (C) 2003 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#
from Processor import Processor
import AST
from getoptions import getoptions
import sys
def error(msg):
"""Write an error message and exit."""
sys.stderr.write(msg)
sys.stderr.write('\n')
sys.exit(-1)
def process(argv = sys.argv, **commands):
"""Accept a set of commands and process according to command line options.
The typical call will start with the name of the processor to be executed,
followed by a set of parameters, followed by non-parameter arguments.
All parameters are either of the form 'name=value', or '--name=value'.
The first form expects 'value' to be valid python, the second a string.
The remaining non-parameter arguments are associated with the 'input'
parameter.
Once this initialization is done, the named command's 'process' method
is executed.
"""
#first make sure the function was called with the correct argument types
for c in commands:
if not isinstance(commands[c], Processor):
error("command '%s' isn't a valid processor"%c)
if len(argv) < 2:
error("Usage : %s <command> [args] [input files]"%argv[0])
elif argv[1] == '--help':
print "Usage: %s --help"%argv[0]
print " or: %s <command> --help"%argv[0]
print " or: %s <command> [parameters]"%argv[0]
print ""
print "Available commands:"
for c in commands:
print " %s"%c
sys.exit(0)
command = argv[1]
args = argv[2:]
if '--help' in args:
print "Parameters for command '%s'"%command
parameters = commands[command].get_parameters()
tab = max(map(lambda x:len(x), parameters.keys()))
for p in parameters:
print " %-*s %s"%(tab, p, parameters[p].doc)
sys.exit(0)
props = {}
# process all option arguments...
for o, a in getoptions(args): props[o] = a
# ...and keep remaining (non-option) arguments as 'input'
if args: props['input'] = args
if command in commands:
ast = AST.AST()
try:
commands[command].process(ast, **props)
except KeyError, e:
error('missing argument "%s"'%e)
else:
error('no command "%s"'%command)
| # $Id: process.py,v 1.5 2003/12/11 04:38:59 stefan Exp $
#
# Copyright (C) 2003 Stefan Seefeld
# All rights reserved.
# Licensed to the public under the terms of the GNU LGPL (>= 2),
# see the file COPYING for details.
#
from Processor import Processor
import AST
from getoptions import getoptions
import sys
def error(msg):
"""Write an error message and exit."""
sys.stderr.write(msg)
sys.stderr.write('\n')
sys.exit(-1)
def process(**commands):
"""Accept a set of commands and process according to command line options.
The typical call will start with the name of the processor to be executed,
followed by a set of parameters, followed by non-parameter arguments.
All parameters are either of the form 'name=value', or '--name=value'.
The first form expects 'value' to be valid python, the second a string.
The remaining non-parameter arguments are associated with the 'input'
parameter.
Once this initialization is done, the named command's 'process' method
is executed.
"""
#first make sure the function was called with the correct argument types
for c in commands:
if not isinstance(commands[c], Processor):
error("command '%s' isn't a valid processor"%c)
if len(sys.argv) < 2:
error("Usage : %s <command> [args] [input files]"%sys.argv[0])
elif sys.argv[1] == '--help':
print "Usage: %s --help"%sys.argv[0]
print " or: %s <command> --help"%sys.argv[0]
print " or: %s <command> [parameters]"%sys.argv[0]
print ""
print "Available commands:"
for c in commands:
print " %s"%c
sys.exit(0)
command = sys.argv[1]
args = sys.argv[2:]
if '--help' in args:
print "Parameters for command '%s'"%command
parameters = commands[command].get_parameters()
tab = max(map(lambda x:len(x), parameters.keys()))
for p in parameters:
print " %-*s %s"%(tab, p, parameters[p].doc)
sys.exit(0)
props = {}
# process all option arguments...
for o, a in getoptions(args): props[o] = a
# ...and keep remaining (non-option) arguments as 'input'
if args: props['input'] = args
if command in commands:
ast = AST.AST()
try:
commands[command].process(ast, **props)
except KeyError, e:
error('missing argument "%s"'%e)
else:
error('no command "%s"'%command)
| lgpl-2.1 | Python |
1de2ebab77eba6bdf35b7db004af42804a3984d1 | Fix admin file widget for cases where no file exists | google-code-export/marinemap,Alwnikrotikz/marinemap,Alwnikrotikz/marinemap,Alwnikrotikz/marinemap,google-code-export/marinemap,google-code-export/marinemap,Alwnikrotikz/marinemap,google-code-export/marinemap | lingcod/array/forms.py | lingcod/array/forms.py | #from django.contrib.admin.widgets import AdminFileWidget
from django.forms import ModelForm
from django.utils.safestring import mark_safe
from django import forms
from lingcod.array.models import MpaArray
from lingcod.rest.forms import UserForm
from os.path import splitext,split
class AdminFileWidget(forms.FileInput):
"""
A FileField Widget that shows its current value if it has one.
"""
def __init__(self, attrs={}):
super(AdminFileWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = []
if value and hasattr(value, "name"):
filename = split(value.name)[-1]
output.append('<p>%s %s</p> <p>%s ' % \
('Currently:', filename, 'Change:'))
output.append(super(AdminFileWidget, self).render(name, value, attrs))
output.append("</p>")
return mark_safe(u''.join(output))
# http://www.neverfriday.com/sweetfriday/2008/09/-a-long-time-ago.html
class FileValidationError(forms.ValidationError):
def __init__(self):
super(FileValidationError, self).__init__('Document types accepted: ' + ', '.join(ValidFileField.valid_file_extensions))
class ValidFileField(forms.FileField):
"""A validating document upload field"""
valid_file_extensions = ['odt', 'pdf', 'doc', 'xls', 'txt', 'csv', 'kml', 'kmz', 'jpeg', 'jpg', 'png', 'gif', 'zip']
def __init__(self, *args, **kwargs):
super(ValidFileField, self).__init__(*args, **kwargs)
def clean(self, data, initial=None):
f = super(ValidFileField, self).clean(data, initial)
if f:
ext = splitext(f.name)[1][1:].lower()
if ext in ValidFileField.valid_file_extensions:
# check data['content-type'] ?
return f
raise FileValidationError()
class ArrayForm(UserForm):
supportfile1 = ValidFileField(widget=AdminFileWidget,required=False,label="Support File",
help_text="e.g. Narrative Summary or other document associated with this array.")
supportfile2 = ValidFileField(widget=AdminFileWidget,required=False,label="Additional Support File")
class Meta:
model = MpaArray
| #from django.contrib.admin.widgets import AdminFileWidget
from django.forms import ModelForm
from django.utils.safestring import mark_safe
from django import forms
from lingcod.array.models import MpaArray
from lingcod.rest.forms import UserForm
from os.path import splitext,split
class AdminFileWidget(forms.FileInput):
"""
A FileField Widget that shows its current value if it has one.
"""
def __init__(self, attrs={}):
super(AdminFileWidget, self).__init__(attrs)
def render(self, name, value, attrs=None):
output = []
filename = split(value.name)[-1]
if value and hasattr(value, "url"):
output.append('<p>%s %s</p> <p>%s ' % \
('Currently:', filename, 'Change:'))
output.append(super(AdminFileWidget, self).render(name, value, attrs))
output.append("</p>")
return mark_safe(u''.join(output))
# http://www.neverfriday.com/sweetfriday/2008/09/-a-long-time-ago.html
class FileValidationError(forms.ValidationError):
def __init__(self):
super(FileValidationError, self).__init__('Document types accepted: ' + ', '.join(ValidFileField.valid_file_extensions))
class ValidFileField(forms.FileField):
"""A validating document upload field"""
valid_file_extensions = ['odt', 'pdf', 'doc', 'xls', 'txt', 'csv', 'kml', 'kmz', 'jpeg', 'jpg', 'png', 'gif', 'zip']
def __init__(self, *args, **kwargs):
super(ValidFileField, self).__init__(*args, **kwargs)
def clean(self, data, initial=None):
f = super(ValidFileField, self).clean(data, initial)
if f:
ext = splitext(f.name)[1][1:].lower()
if ext in ValidFileField.valid_file_extensions:
# check data['content-type'] ?
return f
raise FileValidationError()
class ArrayForm(UserForm):
supportfile1 = ValidFileField(widget=AdminFileWidget,required=False,label="Support File",
help_text="e.g. Narrative Summary or other document associated with this array.")
supportfile2 = ValidFileField(widget=AdminFileWidget,required=False,label="Additional Support File")
class Meta:
model = MpaArray
| bsd-3-clause | Python |
074b09f7a0831dd34e00727951fba89bfa559c42 | Update ipc_lista1.2.py | any1m1c/ipc20161 | lista1/ipc_lista1.2.py | lista1/ipc_lista1.2.py | #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| apache-2.0 | Python |
468c7496109dcd23f02b6407bd10a0d758229c2a | Update ipc_lista1.5.py | any1m1c/ipc20161 | lista1/ipc_lista1.5.py | lista1/ipc_lista1.5.py | #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em centímetros: ")
centimetros = metros * 100
print
| #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em centímetros: ")
centimetros = metros * 100
print
| apache-2.0 | Python |
f2b30562b76ed7ab52a1d52c065dd01de9edaef0 | Update ipc_lista1.7.py | any1m1c/ipc20161 | lista1/ipc_lista1.7.py | lista1/ipc_lista1.7.py | #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura do quadrado em metros: ")
| #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
largura = input("Digite a largura do quadrado em metros:
| apache-2.0 | Python |
768c18f1b0bb8a974ffc4323dc302b03c96cbf86 | Bump version | mitsuhiko/logbook | logbook/__version__.py | logbook/__version__.py | __version__ = "1.5.3"
| __version__ = "1.5.2"
| bsd-3-clause | Python |
e8c180e65dda3422ab472a6580183c715ef325c3 | Update error message of UnsupportedOperationException. | KarlGong/easyium-python,KarlGong/easyium | easyium/decorator.py | easyium/decorator.py | __author__ = 'karl.gong'
from .exceptions import UnsupportedOperationException
def SupportedBy(*web_driver_types):
def handle_func(func):
def handle_args(*args, **kwargs):
wd_types = []
for wd_type in web_driver_types:
if isinstance(wd_type, list):
wd_types += wd_type
else:
wd_types += [wd_type]
current_web_driver_type = args[0].get_web_driver_type()
if current_web_driver_type not in wd_types:
raise UnsupportedOperationException(
"Operation [%s] is not supported by web driver [%s]." % (func.__name__, current_web_driver_type))
return func(*args, **kwargs)
return handle_args
return handle_func
| __author__ = 'karl.gong'
from .exceptions import UnsupportedOperationException
def SupportedBy(*web_driver_types):
def handle_func(func):
def handle_args(*args, **kwargs):
wd_types = []
for wd_type in web_driver_types:
if isinstance(wd_type, list):
wd_types += wd_type
else:
wd_types += [wd_type]
current_web_driver_type = args[0].get_web_driver_type()
if current_web_driver_type not in wd_types:
raise UnsupportedOperationException(
"This operation is not supported by web driver [%s]." % current_web_driver_type)
return func(*args, **kwargs)
return handle_args
return handle_func
| apache-2.0 | Python |
3e8b3dc25d82d4d70d5c8068b12bc814723b7b08 | Make the spam stop | gnmerritt/casino,gnmerritt/casino,gnmerritt/casino,gnmerritt/casino | matchmaker/__init__.py | matchmaker/__init__.py | import logging
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import slacker_log_handler as slh
# Flask application object; configuration comes from the package's default
# settings module, optionally overridden by the file named in $CASINO_SETTINGS.
app = Flask(__name__, static_folder='static')
app.config.from_object('matchmaker.default_settings')
app.config.from_envvar('CASINO_SETTINGS', silent=True)
# In non-debug runs, forward INFO-and-above log records to a Slack channel.
if not app.debug:
    handler = slh.SlackerLogHandler(
        app.config['SLACK_API_KEY'],
        app.config['SLACK_CHANNEL'],
        username=app.config['SLACK_USERNAME']
    )
    handler.setLevel(logging.INFO)
    app.logger.addHandler(handler)
# Shared SQLAlchemy handle used by the modules imported below.
db = SQLAlchemy(app)
# Imported for side effects: these modules register models, routes and API
# endpoints against ``app``/``db``, so they must come after both exist.
import models
import login
import views
import api
| import logging
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import slacker_log_handler as slh
app = Flask(__name__, static_folder='static')
app.config.from_object('matchmaker.default_settings')
app.config.from_envvar('CASINO_SETTINGS', silent=True)
if not app.debug:
handler = slh.SlackerLogHandler(
app.config['SLACK_API_KEY'],
app.config['SLACK_CHANNEL'],
username=app.config['SLACK_USERNAME']
)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
app.logger.error("Casino Matchmaker Webapp restarted")
db = SQLAlchemy(app)
import models
import login
import views
import api
| mit | Python |
b5c2bc906194e9be69d1895b6290bc918e5a040b | Increment version | ines/wasabi | wasabi/about.py | wasabi/about.py | __title__ = "wasabi"
__version__ = "0.5.0"
__summary__ = "A lightweight console printing and formatting toolkit"
__uri__ = "https://ines.io"
__author__ = "Ines Montani"
__email__ = "ines@explosion.ai"
__license__ = "MIT"
| __title__ = "wasabi"
__version__ = "0.4.2"
__summary__ = "A lightweight console printing and formatting toolkit"
__uri__ = "https://ines.io"
__author__ = "Ines Montani"
__email__ = "ines@explosion.ai"
__license__ = "MIT"
| mit | Python |
750dd5437314908985d5241a3b5483037adfd73f | change set comp in case python 2.6 | RiskIQ/pyyamlcfg | yamlcfg/util.py | yamlcfg/util.py | #!/usr/bin/env python
''' yamlcfg.util
Cross module utilities
'''
import os
def normalize(var, type=None, **kwargs):
    """Coerce *var* according to the *type* marker.

    :param var: value to normalize; ``None`` is passed through unchanged.
    :param type: one of ``None`` (no conversion), ``int`` (plain integer),
        ``hex``/``bin``/``oct`` (parse *var* as an integer in base 16/2/8),
        or ``basestring`` (convert *var* to ``str``).
    :raises ValueError: if *type* is not one of the recognized markers.
    """
    if var is None:
        return None
    if type is None:
        return var
    # BUG FIX: the conversions below previously operated on ``type`` (the
    # marker object) instead of ``var`` (the value being normalized), e.g.
    # ``return str(type)`` / ``int(type, 16)``.
    if type is int:
        return int(var)
    elif type is hex:
        return int(var, 16)
    elif type is bin:
        return int(var, 2)
    elif type is oct:
        return int(var, 8)
    elif type is basestring:
        return str(var)
    else:
        raise ValueError('Unrecognized type argument. '
            'Cannot normalize variable.')
def validate_ext(path, valid_ext):
    """Return True if *path*'s file extension matches *valid_ext*.

    The comparison is case-insensitive and ignores the leading dot.

    :param path: file path whose extension is checked.
    :param valid_ext: either a single extension string or an iterable of
        extension strings.
    :raises ValueError: if *valid_ext* is neither a string nor a container.
    """
    path, ext = os.path.splitext(path)
    if ext.startswith('.'):
        ext = ext[1:].lower()
    if isinstance(valid_ext, basestring):
        return ext == valid_ext.lower()
    elif hasattr(valid_ext, '__contains__'):
        # Lower-case every candidate so the membership test is
        # case-insensitive on both sides.
        return ext in [x.lower() for x in valid_ext]
    else:
        raise ValueError('valid_ext must be either a string or implement '
            '__contains__')
| #!/usr/bin/env python
''' yamlcfg.util
Cross module utilities
'''
import os
def normalize(var, type=None, **kwargs):
    """Coerce *var* according to the *type* marker.

    :param var: value to normalize; ``None`` is passed through unchanged.
    :param type: one of ``None`` (no conversion), ``int`` (plain integer),
        ``hex``/``bin``/``oct`` (parse *var* as an integer in base 16/2/8),
        or ``basestring`` (convert *var* to ``str``).
    :raises ValueError: if *type* is not one of the recognized markers.
    """
    if var is None:
        return None
    if type is None:
        return var
    # BUG FIX: the conversions below previously operated on ``type`` (the
    # marker object) instead of ``var`` (the value being normalized), e.g.
    # ``return str(type)`` / ``int(type, 16)``.
    if type is int:
        return int(var)
    elif type is hex:
        return int(var, 16)
    elif type is bin:
        return int(var, 2)
    elif type is oct:
        return int(var, 8)
    elif type is basestring:
        return str(var)
    else:
        raise ValueError('Unrecognized type argument. '
            'Cannot normalize variable.')
def validate_ext(path, valid_ext):
path, ext = os.path.splitext(path)
if ext.startswith('.'):
ext = ext[1:].lower()
if isinstance(valid_ext, basestring):
return ext == valid_ext.lower()
elif hasattr(valid_ext, '__contains__'):
return ext in {x.lower() for x in valid_ext}
else:
raise ValueError('valid_ext must be either a string or implement '
'__contains__')
| bsd-2-clause | Python |
07750d05ad92aade63accb2629f21180282c4be0 | Fix location of dashboard and license in setup script | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus | lib/pegasus/python/pegasus-setup.py | lib/pegasus/python/pegasus-setup.py | """
Installation script for Pegasus Python library
Author: Dan Gunter <dkgunter@lbl.gov>
"""
try:
from setuptools import setup
except:
from distutils.core import setup
import os
VERSION = os.environ.get('PEGASUS_VERSION','trunk')
setup(name = "Pegasus",
version=VERSION,
packages = [
"Pegasus",
"Pegasus.monitoring",
"Pegasus.dashboard",
"Pegasus.plots_stats",
"Pegasus.plots_stats.plots",
"Pegasus.plots_stats.stats",
"Pegasus.test",
"Pegasus.tools"
],
ext_modules = [],
package_data = {},
scripts = [],
install_requires=[ ],
author = "Pegasus Team",
author_email = "pegasus-support@isi.edu",
maintainer = "Karan Vahi",
maintainer_email = "vahi@isi.edu",
description = "Pegasus Python library",
long_description = "",
license = "Apache 2.0",
keywords = "workflow",
url = "http://pegasus.isi.edu",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: No Input/Output (Daemon)",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Database",
"Topic :: Workflow",
"Topic :: System :: Monitoring",
],
)
| """
Installation script for Pegasus Python library
Author: Dan Gunter <dkgunter@lbl.gov>
"""
try:
from setuptools import setup
except:
from distutils.core import setup
from glob import glob
import os
import sys
VERSION = os.environ.get('PEGASUS_VERSION','trunk')
# Main function
# -------------
setup(name = "Pegasus",
version=VERSION,
packages = ["Pegasus",
"Pegasus.monitoring",
"Pegasus.monitoring.dashboard",
"Pegasus.plots_stats",
"Pegasus.plots_stats.plots",
"Pegasus.plots_stats.stats",
"Pegasus.test",
"Pegasus.tools"],
ext_modules = [],
package_data = {},
scripts = [ ],
install_requires=[ ],
# metadata for upload to PyPI
author = "Pegasus Team",
author_email = "deelman@isi.edu",
maintainer = "Karan Vahi",
maintainer_email = "vahi@isi.edu",
description = "Pegasus Python library",
long_description = "",
license = "LBNL Open-Source",
keywords = "workflow",
url = "https://confluence.pegasus.isi.edu/display/pegasus/Home",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: No Input/Output (Daemon)",
"Intended Audience :: Science/Research",
"Intended Audience :: System Administrators",
"License :: Other/Proprietary License",
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Database",
"Topic :: Workflow",
"Topic :: System :: Monitoring",
],
)
| apache-2.0 | Python |
b70d06ac546b0c9736818600f52a3341f1e5fffc | Fix import | wjimenez5271/coastguard | coastguard/config.py | coastguard/config.py | import ConfigParser
from lib.hostfilter import HostFilter
import os
def load_config(configfile):
    """
    Load config from ini formatted text file
    :param configfile: str. path to file
    :return: dict. configuration attributes
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read(configfile)
    config = {}
    config['mail_server'] = parser.get('settings', 'mail_server')
    config['mail_recipient'] = parser.get('settings', 'mail_recipient')
    config['mail_sender'] = parser.get('settings', 'mail_sender')
    config['subject'] = parser.get('settings', 'subject')
    config['mail_alert_address'] = parser.get('settings', 'mail_alert_address')
    config['email_alert'] = parser.get('settings', 'email_alert')
    config['uptime_threshold'] = parser.get('settings', 'uptime_threshold')
    config['terminate_long_running'] = parser.get('settings', 'terminate_long_running')
    # BUG FIX: ``.lower`` was previously compared without being called
    # (method object vs. string), so neither branch could ever match and
    # terminate_long_running always ended up None.
    if config['terminate_long_running'].lower() == "false":
        config['terminate_long_running'] = False
    elif config['terminate_long_running'].lower() == "true":
        config['terminate_long_running'] = True
    else:
        # Unrecognized value: signal "undecided" to the caller.
        config['terminate_long_running'] = None
    # One-liner because Daniel's fancy that way.
    config['DO_TOKEN'] = os.environ.get('DO_TOKEN', None) or parser.get('DigitalOcean', 'DO_TOKEN')
    config['HostFilter'] = HostFilter.fromConfig(parser)
    return config
| import ConfigParser
from lib.HostFilter import HostFilter
import os
def load_config(configfile):
    """
    Load config from ini formatted text file
    :param configfile: str. path to file
    :return: dict. configuration attributes
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read(configfile)
    config = {}
    config['mail_server'] = parser.get('settings', 'mail_server')
    config['mail_recipient'] = parser.get('settings', 'mail_recipient')
    config['mail_sender'] = parser.get('settings', 'mail_sender')
    config['subject'] = parser.get('settings', 'subject')
    config['mail_alert_address'] = parser.get('settings', 'mail_alert_address')
    config['email_alert'] = parser.get('settings', 'email_alert')
    config['uptime_threshold'] = parser.get('settings', 'uptime_threshold')
    config['terminate_long_running'] = parser.get('settings', 'terminate_long_running')
    # BUG FIX: ``.lower`` was previously compared without being called
    # (method object vs. string), so neither branch could ever match and
    # terminate_long_running always ended up None.
    if config['terminate_long_running'].lower() == "false":
        config['terminate_long_running'] = False
    elif config['terminate_long_running'].lower() == "true":
        config['terminate_long_running'] = True
    else:
        # Unrecognized value: signal "undecided" to the caller.
        config['terminate_long_running'] = None
    # One-liner because Daniel's fancy that way.
    config['DO_TOKEN'] = os.environ.get('DO_TOKEN', None) or parser.get('DigitalOcean', 'DO_TOKEN')
    config['HostFilter'] = HostFilter.fromConfig(parser)
    return config
| apache-2.0 | Python |
67ad00b96be8cbaf8cc7a9cb6770fb9e685fe182 | add skeleton of Data.Traversable | billpmurphy/hask,silky/hask | hask/Data/Traversable.py | hask/Data/Traversable.py | from ..lang import build_instance
from ..lang import sig
from ..lang import H
from ..lang import t
from ..Control.Applicative import Applicative
from ..Control.Monad import Monad
from Foldable import Foldable
from Functor import Functor
class Traversable(Foldable, Functor):
    """
    Functors representing data structures that can be traversed from left to
    right.
    Dependencies:
        Foldable, Functor
    Attributes:
        traverse, sequenceA, mapM, sequence
    Minimal complete definition:
        traverse
    """
    @classmethod
    def make_instance(typeclass, cls, traverse, sequenceA=None, mapM=None,
            sequence=None):
        """Register *cls* as an instance of Traversable.

        Only ``traverse`` is mandatory; the remaining attributes default to
        None and are recorded as-is for the instance.
        """
        # Bundle the supplied implementations and hand them to the generic
        # instance-registration machinery.
        attrs = {"traverse":traverse, "sequenceA":sequenceA, "mapM":mapM,
                "sequence":sequence}
        build_instance(Traversable, cls, attrs)
        return
@sig(H[(Applicative, "f"), (Traversable, "t")]/
(H/ "a" >> t("f", "b")) >> t("t", "a") >> t("f", t("t", "b")))
def traverse(f, t):
"""
traverse :: (Traversable t, Applicative f) => (a -> f b) -> t a -> f (t b)
Map each element of a structure to an action, evaluate these these actions
from left to right, and collect the results. actions from left to right,
and collect the results. For a version that ignores the results see
traverse_.
"""
raise NotImplementedError()
@sig(H[(Applicative, "f"), (Traversable, "t")]/
t("t", t("f", "a")) >> t("f", t("t", "a")))
def sequenceA(t):
"""
sequenceA :: (Traversable t, Applicative f) => t (f a) -> f (t a)
Evaluate each action in the structure from left to right, and and collect
the results. For a version that ignores the results see sequenceA_.
"""
raise NotImplementedError()
@sig(H[(Monad, "m"), (Traversable, "t")]/
(H/ "a" >> t("m", "b")) >> t("t", "a") >> t("m", t("t", "b")))
def mapM(f, m):
"""
mapM :: (Traversable t, Monad m) => (a -> m b) -> t a -> m (t b)
Map each element of a structure to a monadic action, evaluate these actions
from left to right, and collect the results. For a version that ignores the
results see mapM_.
"""
raise NotImplementedError()
@sig(H[(Monad, "m"), (Traversable, "t")]/
t("t", t("m", "a")) >> t("m", t("t", "a")))
def sequence(t):
"""
sequence :: (Traversable t, Monad m) => t (m a) -> m (t a)
Evaluate each monadic action in the structure from left to right, and
collect the results. For a version that ignores the results see sequence_.
"""
raise NotImplementedError()
#=============================================================================#
# Utility functions
@sig(H[(Applicative, "f"), (Traversable, "t")]/
t("t", "a") >> (H/ "a" >> t("f", "b")) >> t("f", t("t", "b")))
def for1(t, f):
"""
for1 :: (Traversable t, Applicative f) => t a -> (a -> f b) -> f (t b)
for1 is traverse with its arguments flipped. For a version that ignores the
results see for1_.
"""
raise NotImplementedError()
@sig(H[(Monad, "m"), (Traversable, "t")]/
t("t", "a") >> (H/ "a" >> t("m", "b")) >> t("m", t("t", "b")))
def forM(t, f):
"""
forM :: (Traversable t, Monad m) => t a -> (a -> m b) -> m (t b)
forM is mapM with its arguments flipped. For a version that ignores the
results see forM_.
"""
raise NotImplementedError()
@sig(H[(Traversable, "t")]/ (H/ "a" >> "b" >> ("a", "c")) >> "a" >> t("t", "b")
>> ("a", t("t", "c")))
def mapAccumL(f, a, tb):
"""
mapAccumL :: Traversable t => (a -> b -> (a, c)) -> a -> t b -> (a, t c)
The mapAccumL function behaves like a combination of fmap and foldl; it
applies a function to each element of a structure, passing an accumulating
parameter from left to right, and returning a final value of this
accumulator together with the new structure.
"""
raise NotImplementedError()
@sig(H[(Traversable, "t")]/ (H/ "a" >> "b" >> ("a", "c")) >> "a" >> t("t", "b")
>> ("a", t("t", "c")))
def mapAccumR(f, a, tb):
"""
mapAccumR :: Traversable t => (a -> b -> (a, c)) -> a -> t b -> (a, t c)
The mapAccumR function behaves like a combination of fmap and foldr; it
applies a function to each element of a structure, passing an accumulating
parameter from right to left, and returning a final value of this
accumulator together with the new structure.
"""
raise NotImplementedError()
| from ..lang import build_instance
from ..Control.Applicative import Applicative
from ..Control.Monad import Monad
from Foldable import Foldable
from Functor import Functor
class Traversable(Foldable, Functor):
"""
Functors representing data structures that can be traversed from left to
right.
Dependencies:
Foldable, Functor
Attributes:
traverse, sequenceA, mapM, sequence
Minimal complete definition:
traverse
"""
@classmethod
def make_instance(typeclass, cls, traverse, sequenceA=None, mapM=None,
sequence=None):
attrs = {"traverse":traverse, "sequenceA":sequenceA, "mapM":mapM,
"sequence":sequence}
build_instance(Traversable, cls, attrs)
return
| bsd-2-clause | Python |
e893ac40de2dda19196c7de7d9f7767b20b23884 | Use a better function (lambda) name | myint/cppclean,myint/cppclean,myint/cppclean,myint/cppclean | headers/cpp/functions.py | headers/cpp/functions.py | #!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Find and print the functions in a source file."""
import sys
from cpp import ast
def main(argv):
    """Print all function identifiers found in the source files in argv[1:]."""
    # TODO(nnorwitz): need to ignore friend method declarations.
    # PEP 8 (E731): use a named def instead of assigning a lambda to a name.
    def is_function(node):
        """Select AST nodes representing function declarations."""
        return isinstance(node, ast.Function)
    ast.PrintAllIndentifiers(argv[1:], is_function)
if __name__ == '__main__':
main(sys.argv)
| #!/usr/bin/env python
#
# Copyright 2007 Neal Norwitz
# Portions Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Find and print the functions in a source file."""
import sys
from cpp import ast
def main(argv):
# TODO(nnorwitz): need to ignore friend method declarations.
condition = lambda node: isinstance(node, ast.Function)
ast.PrintAllIndentifiers(argv[1:], condition)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | Python |
19756b4ed4ccc684a69af275b0ff43bc8392e50d | Remove unimplemented methods. | borg-project/borg | src/python/borg/domains/sat/__init__.py | src/python/borg/domains/sat/__init__.py | """@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import contextlib
import borg
from . import features
from . import instance
from . import solvers
class SatisfiabilityTask(object):
    """Lightweight handle for a SAT instance stored in a CNF file."""

    def __init__(self, path):
        # Path to the CNF file backing this task.
        self.path = path

    def clean(self):
        """Release cached task resources; currently nothing is cached."""
        pass
@borg.named_domain
class Satisfiability(object):
    """Problem domain for boolean satisfiability (SAT) instances."""

    # Domain name and the file patterns that identify its instances.
    name = "sat"
    extensions = ["*.cnf"]

    @contextlib.contextmanager
    def task_from_path(self, task_path):
        """Clean up cached task resources on context exit."""
        task = SatisfiabilityTask(task_path)
        yield task
        task.clean()

    def compute_features(self, task):
        # Delegate feature extraction to the features module.
        return features.get_features_for(task.path)

    def is_final(self, task, answer):
        """Is the answer definitive for the task?"""
        return answer is not None

    def show_answer(self, task, answer):
        # Print a DIMACS-style result line and return the exit code used
        # here: 0 unknown, 10 satisfiable, 20 unsatisfiable.
        if answer is None:
            print "s UNKNOWN"
            return 0
        elif answer:
            # A truthy answer is the satisfying assignment (list of
            # literals); print it as a "v" line terminated by 0.
            print "s SATISFIABLE"
            print "v", " ".join(map(str, answer)), "0"
            return 10
        else:
            print "s UNSATISFIABLE"
            return 20
| """@author: Bryan Silverthorn <bcs@cargo-cult.org>"""
import contextlib
import borg
from . import features
from . import instance
from . import solvers
class SatisfiabilityTask(object):
def __init__(self, path):
self.path = path
def clean(self):
pass
@borg.named_domain
class Satisfiability(object):
name = "sat"
extensions = ["*.cnf"]
@contextlib.contextmanager
def task_from_path(self, task_path):
"""Clean up cached task resources on context exit."""
task = SatisfiabilityTask(task_path)
yield task
task.clean()
def compute_features(self, task):
return features.get_features_for(task.path)
def is_final(self, task, answer):
"""Is the answer definitive for the task?"""
return answer is not None
def write_answer(self, file_, answer):
pass
def read_answer(self, file_):
pass
def show_answer(self, task, answer):
if answer is None:
print "s UNKNOWN"
return 0
elif answer:
print "s SATISFIABLE"
print "v", " ".join(map(str, answer)), "0"
return 10
else:
print "s UNSATISFIABLE"
return 20
| mit | Python |
17e58978b5e1b787b15cae817e510913333bd23d | comment out failing test | chamilad/breadpool | src/python/breadpool/tests/test_pool.py | src/python/breadpool/tests/test_pool.py | # Copyright 2015 Chamila de Alwis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import datetime
import time
from ..pool import *
import util
"""
ThreadPool
"""
def test_thread_pool_value_exception():
with pytest.raises(ValueError):
ThreadPool(0, "TestThreadPoolValException")
def test_thread_pool_value_property():
thread_pool = ThreadPool(5, "TestThreadPoolValueProperty", polling_timeout=1)
assert thread_pool.get_pool_size() == 5
thread_pool.terminate()
def test_thread_pool_pool_thread_size():
thread_pool = ThreadPool(5, "TestThreadPoolSize", polling_timeout=1)
live_threads = util.get_threads_with_name("TestThreadPoolSize")
assert len(live_threads.keys()) == 5
thread_pool.terminate()
# def test_thread_pool_polling_timeout():
# thread_pool = ThreadPool(5, "TestThreadPoolPolling", polling_timeout=5)
# while len(util.get_threads_with_name("TestThreadPoolPolling").keys()) < 5:
# time.sleep(1)
#
# before_time = datetime.datetime.now()
# print before_time.time()
# thread_pool.terminate()
# while len(util.get_threads_with_name("TestThreadPoolPolling").keys()) > 4:
# time.sleep(1)
#
# after_time = datetime.datetime.now()
# print after_time.time()
# diff_time = after_time - before_time
# print diff_time.seconds
# # assert False
# assert diff_time.seconds == 5
def test_thread_pool_daemon_flag():
thread_pool = ThreadPool(2, "TestThreadPoolDaemonFlag", daemon=True, polling_timeout=1)
created_threads = util.get_threads_with_name("TestThreadPoolDaemonFlag")
# print len(created_threads)
thread_name, thread_obj = created_threads.popitem()
assert thread_obj.daemon is True
thread_pool.terminate()
def test_thread_pool_thread_limitation():
global counter_queue
thread_pool = ThreadPool(5, "TestThreadPoolLimitation", polling_timeout=1)
i = 0
counter_queue = Queue()
while i < 10:
thread_pool.enqueue(util.TestTask(lambda(l): counter_queue.put(l), "Test%s" % i))
i += 1
assert len(util.get_threads_with_name("TestThreadPoolLimitation")) == 5
thread_pool.terminate()
assert counter_queue.qsize() == 10
| # Copyright 2015 Chamila de Alwis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import datetime
import time
from ..pool import *
import util
"""
ThreadPool
"""
def test_thread_pool_value_exception():
with pytest.raises(ValueError):
ThreadPool(0, "TestThreadPoolValException")
def test_thread_pool_value_property():
thread_pool = ThreadPool(5, "TestThreadPoolValueProperty", polling_timeout=1)
assert thread_pool.get_pool_size() == 5
thread_pool.terminate()
def test_thread_pool_pool_thread_size():
thread_pool = ThreadPool(5, "TestThreadPoolSize", polling_timeout=1)
live_threads = util.get_threads_with_name("TestThreadPoolSize")
assert len(live_threads.keys()) == 5
thread_pool.terminate()
def test_thread_pool_polling_timeout():
thread_pool = ThreadPool(5, "TestThreadPoolPolling", polling_timeout=5)
while len(util.get_threads_with_name("TestThreadPoolPolling").keys()) < 5:
time.sleep(1)
before_time = datetime.datetime.now()
print before_time.time()
thread_pool.terminate()
while len(util.get_threads_with_name("TestThreadPoolPolling").keys()) > 4:
time.sleep(1)
after_time = datetime.datetime.now()
print after_time.time()
diff_time = after_time - before_time
print diff_time.seconds
# assert False
assert diff_time.seconds == 5
def test_thread_pool_daemon_flag():
thread_pool = ThreadPool(2, "TestThreadPoolDaemonFlag", daemon=True, polling_timeout=1)
created_threads = util.get_threads_with_name("TestThreadPoolDaemonFlag")
# print len(created_threads)
thread_name, thread_obj = created_threads.popitem()
assert thread_obj.daemon is True
thread_pool.terminate()
def test_thread_pool_thread_limitation():
global counter_queue
thread_pool = ThreadPool(5, "TestThreadPoolLimitation", polling_timeout=1)
i = 0
counter_queue = Queue()
while i < 10:
thread_pool.enqueue(util.TestTask(lambda(l): counter_queue.put(l), "Test%s" % i))
i += 1
assert len(util.get_threads_with_name("TestThreadPoolLimitation")) == 5
thread_pool.terminate()
assert counter_queue.qsize() == 10
| apache-2.0 | Python |
723b874eaf4945ffb1a8ac9db55099810271f8b0 | add reboot function. | Hybrid-Cloud/badam,Hybrid-Cloud/badam,Hybrid-Cloud/badam | engineering/utils.py | engineering/utils.py | __author__ = 'nash.xiejun'
import subprocess
def reboot():
subprocess.call("reboot")
| __author__ = 'nash.xiejun'
| apache-2.0 | Python |
60ee6a5d2ad9e85fefd973c020ef65bd212e687c | Fix management command to send notification about new blog post. | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin | astrobin/management/commands/notify_new_blog_entry.py | astrobin/management/commands/notify_new_blog_entry.py | from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
import persistent_messages
from zinnia.models import Entry
class Command(BaseCommand):
    """Management command: send every user a message about the latest blog entry."""

    help = "Notifies all users about most recent blog entry."

    def handle(self, *args, **options):
        # NOTE(review): assumes Entry's default queryset ordering puts the
        # most recent entry first -- confirm against zinnia's Meta ordering.
        entry = Entry.objects.all()[0]
        for u in User.objects.all():
            # Persistent in-app message linking to the entry, sent from the
            # "astrobin" system account.
            m = persistent_messages.models.Message(
                user = u,
                from_user = User.objects.get(username = 'astrobin'),
                message = '<a href="' + entry.get_absolute_url() + '">New blog entry: <strong>' + entry.title + '</strong></a>',
                level = persistent_messages.INFO,
            )
            m.save()
| from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from zinnia.models import Entry
from astrobin.notifications import push_notification
class Command(BaseCommand):
help = "Notifies all users about most recent blog entry."
def handle(self, *args, **options):
entry = Entry.objects.all()[0]
push_notification(
User.objects.all(),
'new_blog_entry',
{
'object': entry.title,
'object_url': entry.get_absolute_url()
}
)
| agpl-3.0 | Python |
fee5170a4b947d7ab3755fcb246f59e29f2842e8 | Fix exception-message-attribute warning | DBuildService/atomic-reactor,projectatomic/atomic-reactor,DBuildService/atomic-reactor,projectatomic/atomic-reactor,fr34k8/atomic-reactor,fr34k8/atomic-reactor | atomic_reactor/plugins/exit_remove_worker_metadata.py | atomic_reactor/plugins/exit_remove_worker_metadata.py | """
Copyright (c) 2017 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import
from atomic_reactor.plugin import ExitPlugin
from atomic_reactor.constants import PLUGIN_REMOVE_WORKER_METADATA_KEY
from osbs.exceptions import OsbsResponseException
def defer_removal(workflow, cf_map, osbs):
    """Queue (cf_map, osbs) so the exit plugin deletes the ConfigMap later."""
    workspace = workflow.plugin_workspace.setdefault(
        RemoveWorkerMetadataPlugin.key, {})
    pending = workspace.setdefault('cf_maps_to_remove', set())
    pending.add((cf_map, osbs))
class RemoveWorkerMetadataPlugin(ExitPlugin):
    """
    Remove worker metadata for each platform.
    """
    key = PLUGIN_REMOVE_WORKER_METADATA_KEY

    def run(self):
        """
        Run the plugin.

        Deletes every ConfigMap previously queued via defer_removal();
        failures are logged and skipped rather than aborting the run.
        """
        workspace = self.workflow.plugin_workspace.get(self.key, {})
        # Set of (config-map name, osbs client) pairs queued for deletion.
        cf_maps_to_remove = workspace.get('cf_maps_to_remove', [])
        for cm_key, osbs in cf_maps_to_remove:
            try:
                osbs.delete_config_map(cm_key)
                self.log.debug("ConfigMap %s deleted", cm_key)
            except OsbsResponseException as ex:
                # Non-fatal: log and continue with the remaining ConfigMaps.
                self.log.warning("Failed to delete ConfigMap %s: %s", cm_key, ex)
| """
Copyright (c) 2017 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import absolute_import
from atomic_reactor.plugin import ExitPlugin
from atomic_reactor.constants import PLUGIN_REMOVE_WORKER_METADATA_KEY
from osbs.exceptions import OsbsResponseException
def defer_removal(workflow, cf_map, osbs):
key = RemoveWorkerMetadataPlugin.key
workspace = workflow.plugin_workspace.setdefault(key, {})
workspace.setdefault('cf_maps_to_remove', set())
workspace['cf_maps_to_remove'].add((cf_map, osbs))
class RemoveWorkerMetadataPlugin(ExitPlugin):
"""
Remove worker metadata for each platform.
"""
key = PLUGIN_REMOVE_WORKER_METADATA_KEY
def run(self):
"""
Run the plugin.
"""
workspace = self.workflow.plugin_workspace.get(self.key, {})
cf_maps_to_remove = workspace.get('cf_maps_to_remove', [])
for cm_key, osbs in cf_maps_to_remove:
try:
osbs.delete_config_map(cm_key)
self.log.debug("ConfigMap %s deleted", cm_key)
except OsbsResponseException as ex:
self.log.warning("Failed to delete ConfigMap %s: %s", cm_key, ex.message)
| bsd-3-clause | Python |
01646541cb6639e9b41050c6e5466222071364bc | Add "api" prefix to api urls. | franekp/PlanIt,franekp/PlanIt,franekp/PlanIt,franekp/PlanIt,franekp/PlanIt | src/web/web/urls.py | src/web/web/urls.py | """web URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib.admin import site as admin_site
import PlanIt.urls
urlpatterns = [
url(r'^admin/', admin_site.urls),
url(r'^api/', include('PlanIt.urls')),
url(r'^api-auth/', include(
'rest_framework.urls', namespace='rest_framework'
)),
]
| """web URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib.admin import site as admin_site
import PlanIt.urls
urlpatterns = [
url(r'^admin/', admin_site.urls),
url(r'^', include('PlanIt.urls')),
url(r'^api-auth/', include(
'rest_framework.urls', namespace='rest_framework'
)),
]
| agpl-3.0 | Python |
f21362be79974914f476be7bc85e74880e70ebf9 | Revert "Put gunicorn in debug mode according to DEBUG" | ovidner/bitket,ovidner/bitket,ovidner/bitket,ovidner/bitket | _conf/gunicorn.py | _conf/gunicorn.py | # -*- coding: utf-8 -*-
import multiprocessing
pythonpath = '/home/sof15/app'
bind = '0.0.0.0:8080'
workers = multiprocessing.cpu_count() * 2 + 1 # todo: optimize
threads = 1 # todo: optimize
user = None
loglevel = 'debug'
errorlog = '-' # stderr | # -*- coding: utf-8 -*-
import multiprocessing
import os
pythonpath = '/home/sof15/app'
bind = '0.0.0.0:8080'
workers = multiprocessing.cpu_count() * 2 + 1 # todo: optimize
threads = 1 # todo: optimize
user = None
loglevel = 'debug'
errorlog = '-' # stderr
debug = os.getenv('DEBUG', False)
| mit | Python |
0a08fe53c8f7a208828779575452f0d543782d33 | Update transactions.py | Chibuzor-IN/python-paystack | python_paystack/objects/transactions.py | python_paystack/objects/transactions.py | '''
transactions.py
'''
import math, uuid
from datetime import datetime
import validators
from .base import Base
from .errors import InvalidEmailError
class Transaction(Base):
    '''
    Transactions class

    Represents a single Paystack transaction; amounts are in kobo.
    Class attributes below are placeholder defaults until the
    transaction is created/initialized.
    '''
    # Unique transaction reference.
    reference = None
    # Transaction amount in kobo.
    amount = None
    # Customer email address.
    email = None
    # Optional subscription plan.
    plan = None
    # Extra charge applied to the transaction, if any.
    transaction_charge = None
    # Arbitrary additional data attached to the transaction.
    metadata = None
    # Card location: 'LOCAL' or 'INTERNATIONAL'.
    card_locale = 'LOCAL'
    # Values populated after initializing the transaction with the API.
    authorization_url = None
    authorization_code = None

    def __init__(self, amount: int, email):
        '''
        :param amount: transaction amount in kobo; coerced with int().
        :param email: customer email address; validated before storing.
        :raises ValueError: if amount cannot be converted to an integer.
        :raises InvalidEmailError: if the email address is invalid.
        '''
        super().__init__()
        try:
            amount = int(amount)
        except ValueError:
            raise ValueError("Invalid amount. Amount(in kobo) should be an integer")
        else:
            if validators.email(email):
                self.amount = amount
                self.email = email
            else:
                raise InvalidEmailError

    def generate_reference_code(self):
        '''
        Generates a unique transaction reference code
        (returns a random UUID4 instance).
        '''
        return uuid.uuid4()

    def full_transaction_cost(self, locale, local_cost, intl_cost):
        '''
        Adds on paystack transaction charges and returns updated cost
        Arguments:
            locale : Card location (LOCAL or INTERNATIONAL)
            local_cost : fractional fee rate applied to local cards
            intl_cost : fractional fee rate applied to international cards
        :raises ValueError: if locale is not LOCAL or INTERNATIONAL.
        :raises AttributeError: if amount has not been set.
        '''
        if self.amount:
            if locale not in ('LOCAL', 'INTERNATIONAL'):
                raise ValueError("Invalid locale, locale should be 'LOCAL' or 'INTERNATIONAL'")
            else:
                locale_cost = {'LOCAL' : local_cost, 'INTERNATIONAL' : intl_cost}
                # Gross-up so self.amount is what remains after the
                # percentage fee is deducted from the charged cost.
                cost = self.amount / (1 - locale_cost[locale])
                # NOTE(review): thresholds appear to be kobo amounts
                # (N2,500 / N2,000) -- confirm against Paystack's fee
                # schedule; above the first threshold a flat 100-kobo fee
                # is added before grossing up.
                if cost > 250000:
                    cost = (self.amount + 100)/ (1 - locale_cost[locale])
                paystack_charge = locale_cost[locale] * cost
                #Paystack_charge is capped at N2000
                if paystack_charge > 200000:
                    cost = self.amount + 200000
                return math.ceil(cost)
        else:
            raise AttributeError("Amount not set")
| '''
transactions.py
'''
import math
from datetime import datetime
import validators
from .base import Base
from .errors import InvalidEmailError
class Transaction(Base):
    '''
    Represents a single Paystack transaction (amount is in kobo).
    '''

    reference = None
    amount = None
    email = None
    plan = None
    transaction_charge = None
    metadata = None
    card_locale = 'LOCAL'
    authorization_url = None
    authorization_code = None

    def __init__(self, amount: int, email):
        '''
        :param amount: transaction amount in kobo; must be coercible to int
        :param email: customer email address
        :raises ValueError: if amount cannot be converted to an integer
        :raises InvalidEmailError: if the email address fails validation
        '''
        super().__init__()
        try:
            amount = int(amount)
        except ValueError:
            raise ValueError("Invalid amount. Amount(in kobo) should be an integer")
        else:
            if validators.email(email):
                self.amount = amount
                self.email = email
            else:
                raise InvalidEmailError

    def generate_reference_code(self):
        '''
        Generates a transaction reference code of the form
        YYYYMMDD<email-hash>HHMMSS.

        Bug fix: the hour/minute/second components are now zero-padded;
        previously e.g. 01:02:03 and 10:20:3 could produce ambiguous or
        colliding codes.  Note hash(self.email) is only stable within a
        single interpreter run (PYTHONHASHSEED randomisation).
        '''
        date = datetime.now()
        year = date.year
        month = str(date.month).zfill(2)
        day = str(date.day).zfill(2)
        date_stamp = "%s%s%s" % (year, month, day)
        reference_code = "%s%s" % (date_stamp, hash(self.email))
        # Renamed from `time` to avoid shadowing a common module name.
        now = date.time()
        hour = str(now.hour).zfill(2)
        minute = str(now.minute).zfill(2)
        second = str(now.second).zfill(2)
        reference_code += "%s%s%s" % (hour, minute, second)
        return reference_code

    def full_transaction_cost(self, locale, local_cost, intl_cost):
        '''
        Adds on paystack transaction charges and returns the updated cost.

        :param locale: card location, 'LOCAL' or 'INTERNATIONAL'
        :param local_cost: fractional fee for local cards
        :param intl_cost: fractional fee for international cards
        :raises ValueError: for an unknown locale
        :raises AttributeError: if the amount has not been set
        '''
        if self.amount:
            if locale not in ('LOCAL', 'INTERNATIONAL'):
                raise ValueError("Invalid locale, locale should be 'LOCAL' or 'INTERNATIONAL'")
            else:
                locale_cost = {'LOCAL' : local_cost, 'INTERNATIONAL' : intl_cost}
                # Gross-up so that (cost - fee) equals self.amount.
                cost = self.amount / (1 - locale_cost[locale])
                if cost > 250000:
                    # An extra flat 100 applies above the threshold.
                    cost = (self.amount + 100)/ (1 - locale_cost[locale])
                paystack_charge = locale_cost[locale] * cost
                #Paystack_charge is capped at N2000
                if paystack_charge > 200000:
                    cost = self.amount + 200000
                return math.ceil(cost)
        else:
            raise AttributeError("Amount not set")
| mit | Python |
1c0980b5e8d442df77424f7d471df9fe293571ca | build elf_loader_nexe with --pnacl-bias | cohortfsllc/cohort-cocl2-sandbox,cohortfsllc/cohort-cocl2-sandbox,cohortfsllc/cohort-cocl2-sandbox,cohortfsllc/cohort-cocl2-sandbox,cohortfsllc/cohort-cocl2-sandbox,cohortfsllc/cohort-cocl2-sandbox | src/untrusted/elf_loader/elf_loader.gyp | src/untrusted/elf_loader/elf_loader.gyp | # -*- gyp -*-
# Copyright (c) 2015 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../../build/common.gypi',
],
'targets' : [
{
'target_name': 'elf_loader_nexe',
'type': 'none',
'variables': {
'nexe_target': 'elf_loader',
'build_glibc': 0,
'build_newlib': 1,
'build_pnacl_newlib': 0,
},
'sources': [
'elf_loader.c',
],
'dependencies': [
'<(DEPTH)/native_client/src/untrusted/nacl/nacl.gyp:nacl_lib_newlib',
],
'conditions': [
['target_arch=="mipsel"', {
'compile_flags': [
'--pnacl-bias=mips32',
],
}],
],
},
],
}
| # -*- gyp -*-
# Copyright (c) 2015 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../../build/common.gypi',
],
'targets' : [
{
'target_name': 'elf_loader_nexe',
'type': 'none',
'variables': {
'nexe_target': 'elf_loader',
'build_glibc': 0,
'build_newlib': 1,
'build_pnacl_newlib': 0,
},
'sources': [
'elf_loader.c',
],
'dependencies': [
'<(DEPTH)/native_client/src/untrusted/nacl/nacl.gyp:nacl_lib_newlib',
],
},
],
}
| bsd-3-clause | Python |
9126fa0afef42a8c7ecd12d1f0a882da97974fa9 | revert root-level imports | materialsproject/MPContribs,materialsproject/MPContribs,materialsproject/MPContribs,materialsproject/MPContribs | mpcontribs/__init__.py | mpcontribs/__init__.py | """The Materials Project's Community Contribution Framework"""
__version__ = '0.1.1'
__url__ = 'https://github.com/materialsproject/MPContribs'
__author__ = 'Patrick Huck'
__email__ = 'phuck@lbl.gov'
__copyright__ = 'Copyright 2014-2015, The Materials Project'
__maintainer__ = 'Patrick Huck'
| """The Materials Project's Community Contribution Framework"""
__version__ = '0.1.1'
__url__ = 'https://github.com/materialsproject/MPContribs'
__author__ = 'Patrick Huck'
__email__ = 'phuck@lbl.gov'
__copyright__ = 'Copyright 2014-2015, The Materials Project'
__maintainer__ = 'Patrick Huck'
from mpcontribs.io.archieml.mpfile import MPFile
from mpcontribs.rest.rester import MPContribsRester
from mpcontribs.io.core.recdict import RecursiveDict
from mpcontribs.config import mp_level01_titles
| mit | Python |
ab16ae6ecd1915981ceceeef8fb1c0c1c41b4089 | Add proper index route | CROSoftware/pyramid_object_dispatch | pyramid_dispatch_protocol/__init__.py | pyramid_dispatch_protocol/__init__.py | # encoding: utf-8
from pyramid.response import Response
from collections import deque
from collections.abc import Iterable
from webob.exc import HTTPNotFound
'''
An implementation of web.dispatch.object for pyramid.
'''
class Context(object):
    """Minimal stand-in for WebCore's context object.

    Bundles the current request/response pair so ``web.dispatch.object``
    style dispatchers can be used unchanged under Pyramid.
    """

    def __init__(self, request, response):
        # Expose the pair as plain attributes, mirroring WebCore's context.
        self.request, self.response = request, response
def add_controller(self, route_name, pattern, controller, dispatcher, **kw):
    '''
    :params route_name:
        Specify a route_name for this controller
    :param pattern
        The url pattern to match for this controller, any sub urls will be routed
        through this controller's attributes until a match is found or HTTPNotFound
        Will be thrown.
    :param controller
        A class or function that will act as the controller class
    Additional parameters will be passed to the internal 'config.add_route()' call.
    '''
    # Normalise the pattern: no None, no trailing slash.
    if pattern is None:
        pattern = ''
    if pattern.endswith('/'):
        pattern = pattern[:-1]
    dispatch = dispatcher
    def controllerInternalView(request):
        # Remaining URL below `pattern`; empty for the index route, which
        # has no `controller_path` match (hence .get with a default).
        url = request.matchdict.get('controller_path', '')
        path = url.split('/')
        path = deque(path)
        response = Response()
        context = Context(request, response)
        # Walk the controller's attributes segment by segment until the
        # dispatcher reports an endpoint.
        for segment, handler, endpoint, *meta in dispatch(context, controller, path):
            if(endpoint and not callable(handler)):
                raise HTTPNotFound('No endpoint found.')
            if endpoint:
                # Any path segments left over are passed as positional args.
                view_output = handler(*path)
                if isinstance(view_output, str):
                    response.text = view_output
                elif isinstance(view_output, Iterable):
                    response.app_iter = view_output
                return response
        # Dispatcher exhausted without reaching an endpoint.
        response.status_code = 404
        response.status = '404 Not Found'
        return response
    # Two routes: one for sub-paths and one for the bare index URL.
    self.add_route(route_name, pattern + '/{controller_path:.*}', **kw)
    self.add_route(route_name+'-index', pattern, **kw)
    self.add_view(view=controllerInternalView, route_name=route_name)
    self.add_view(view=controllerInternalView, route_name=route_name+'-index')
def includeme(config):
    """Pyramid include hook: exposes ``config.add_controller(...)``."""
    config.add_directive('add_controller', add_controller)
| # encoding: utf-8
from pyramid.response import Response
from collections import deque
from collections.abc import Iterable
from webob.exc import HTTPNotFound
'''
An implementation of web.dispatch.object for pyramid.
'''
class Context(object):
    '''
    A pseudo implementation of WebCore's context object to better emulate
    the usage of web.dispatch.object.
    '''
    def __init__(self, request, response):
        # Expose the request/response pair as plain attributes.
        self.request = request
        self.response = response
def add_controller(self, route_name, pattern, controller, dispatcher, **kw):
    '''
    :params route_name:
        Specify a route_name for this controller
    :param pattern
        The url pattern to match for this controller, any sub urls will be routed
        through this controller's attributes until a match is found or HTTPNotFound
        Will be thrown.
    :param controller
        A class or function that will act as the controller class
    Additional parameters will be passed to the internal 'config.add_route()' call.
    '''
    # Normalise the pattern: no None, no trailing slash.
    if pattern is None:
        pattern = ''
    if pattern.endswith('/'):
        pattern = pattern[:-1]
    dispatch = dispatcher
    def controllerInternalView(request):
        # NOTE(review): bare matchdict access raises KeyError if the view
        # is ever attached to a route without `controller_path` — confirm.
        url = request.matchdict['controller_path']
        path = url.split('/')
        path = deque(path)
        response = Response()
        context = Context(request, response)
        # Walk the controller's attributes segment by segment until the
        # dispatcher reports an endpoint.
        for segment, handler, endpoint, *meta in dispatch(context, controller, path):
            if(endpoint and not callable(handler)):
                raise HTTPNotFound('No endpoint found.')
            if endpoint:
                # Any path segments left over are passed as positional args.
                view_output = handler(*path)
                if isinstance(view_output, str):
                    response.text = view_output
                elif isinstance(view_output, Iterable):
                    response.app_iter = view_output
                return response
        # Dispatcher exhausted without reaching an endpoint.
        response.status_code = 404
        response.status = '404 Not Found'
        return response
    self.add_route(route_name, pattern + '/{controller_path:.*}', **kw)
    self.add_view(view=controllerInternalView, route_name=route_name)
def includeme(config):
    """Pyramid include hook: exposes ``config.add_controller(...)``."""
    config.add_directive('add_controller', add_controller)
| mit | Python |
7e6215ababc356a6e5f5789fab106d2322977ac2 | enable 'real' wikidata | legoktm/pywikipedia-rewrite | pywikibot/families/wikidata_family.py | pywikibot/families/wikidata_family.py | # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata families
class Family(family.Family):
    """Family definition for the Wikidata sites."""

    def __init__(self):
        family.Family.__init__(self)
        self.name = 'wikidata'
        # Live site plus the two Wikidata test wikis.
        self.langs = {
            'wikidata': 'wikidata.org',
            'repo': 'wikidata-test-repo.wikimedia.de',
            'client': 'wikidata-test-client.wikimedia.de',
        }

    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple.

        This enables testing whether the site object is the repository
        itself, see Site.is_data_repository().
        """
        if code == 'wikidata':
            return ('wikidata', 'wikidata')
        return ('repo', 'wikidata')
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata families
class Family(family.Family):
    """Family definition for the Wikidata test sites."""
    def __init__(self):
        family.Family.__init__(self)
        self.name = 'wikidata'
        # Live site plus the two Wikidata test wikis.
        self.langs = {
            'wikidata': 'wikidata.org',
            'repo': 'wikidata-test-repo.wikimedia.de',
            'client': 'wikidata-test-client.wikimedia.de',
        }
    def shared_data_repository(self, code):
        # For here and now we just use the test repo;
        # for wikimedia families the method can return wikidata itself.
        return ('repo', 'wikidata') if code == 'client' else (None, None)
| mit | Python |
726da12888f47114008977fe228c4f78f920a057 | Remove old import line | laslabs/odoo-base | res_partner_multi_address/__init__.py | res_partner_multi_address/__init__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley <dave@laslabs.com>
# Copyright: 2015 LasLabs, Inc [https://laslabs.com]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Dave Lasley <dave@laslabs.com>
# Copyright: 2015 LasLabs, Inc [https://laslabs.com]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from carepoint.db import Carepoint
| agpl-3.0 | Python |
fe8cceed09ed1f8aadb5a1a6b5a3c85504b2f142 | clean compat_drf | chibisov/drf-extensions | rest_framework_extensions/compat_drf.py | rest_framework_extensions/compat_drf.py | """
The `compat` module provides support for backwards compatibility with older
versions of Django REST Framework.
"""
from rest_framework_extensions.utils import get_rest_framework_features
| """
The `compat` module provides support for backwards compatibility with older
versions of Django REST Framework.
"""
from rest_framework_extensions.utils import get_rest_framework_features
def add_trailing_slash_if_needed(regexp_string):
# todo: test me
if get_rest_framework_features()['router_trailing_slash']:
return regexp_string[:-2] + '{trailing_slash}$'
else:
return regexp_string
def get_lookup_allowed_symbols(kwarg_name='pk', force_dot=False):
# todo: test me
if get_rest_framework_features()['use_dot_in_lookup_regex_by_default'] or force_dot:
return '(?P<{0}>[^/.]+)'.format(kwarg_name)
else:
return '(?P<{0}>[^/]+)'.format(kwarg_name) | mit | Python |
ed738c7962c2b8b144fbee7b382a8f5053b40f3b | Deal with changes made in f9ccf06 | mineo/lala,mineo/lala | lala/plugins/last.py | lala/plugins/last.py | import lala.config
from datetime import datetime
from lala.util import command, msg, regex
lala.config.set_default_options(max_lines="30",
datetime_format="%Y-%m-%d %H:%M:%S")
class _LogEntryBuffer(list):
"""A list with a restricted length."""
def __init__(self, maxentries):
"""
:param maxentries: The amount of entries that can be stored in this list
:type maxentries: Integer
"""
list.__init__(self)
self._maxentries = maxentries
def __add__(self, *args, **kwargs):
raise NotImplemented
def __iadd__(self, *args, **kwargs):
raise NotImplemented
def append(self, item):
if len(self) >= self._maxentries:
self.pop(0)
list.append(self, item)
_chatlog = _LogEntryBuffer(lala.config.get_int("max_lines"))
@command
def last(user, channel, text):
    """Show the last lines from the log"""
    limit = lala.config.get_int("max_lines")
    args = text.split()
    if args:
        # First token, when given, is the requested line count.
        count = min(limit, int(args[0]))
    else:
        count = limit
    count = min(count, len(_chatlog))
    msg(user, _chatlog[-count:], log=False)
@regex(".*")
def chatlog(user, channel, text, match_obj):
    """Record every incoming line into the in-memory chat log."""
    stamp = datetime.now().strftime(lala.config.get("datetime_format"))
    _chatlog.append("[%s] %s: %s" % (stamp, user, text))
| import lala.config
from datetime import datetime
from lala.util import command, msg, regex
lala.config.set_default_options(max_lines="30",
datetime_format="%Y-%m-%d %H:%M:%S")
class _LogEntryBuffer(list):
"""A list with a restricted length."""
def __init__(self, maxentries):
"""
:param maxentries: The amount of entries that can be stored in this list
:type maxentries: Integer
"""
list.__init__(self)
self._maxentries = maxentries
def __add__(self, *args, **kwargs):
raise NotImplemented
def __iadd__(self, *args, **kwargs):
raise NotImplemented
def append(self, item):
if len(self) >= self._maxentries:
self.pop(0)
list.append(self, item)
_chatlog = _LogEntryBuffer(lala.config.get_int("max_lines"))
@command
def last(user, channel, text):
    """Show the last lines from the log"""
    max_lines = lala.config.get_int("max_lines")
    s_text = text.split()
    try:
        # NOTE(review): uses the *second* token as the line count —
        # presumably the first token is the command name here; confirm
        # how lala passes `text` to @command handlers.
        num_lines = min(max_lines, int(s_text[1]))
    except IndexError:
        num_lines = max_lines
    num_lines = min(num_lines, len(_chatlog))
    # log=False — presumably keeps the replay itself out of the chat
    # log; confirm in lala.util.msg.
    msg(user, _chatlog[-num_lines:], log=False)
@regex(".*")
def chatlog(user, channel, text, match_obj):
    # Matches every incoming line and records it with a timestamp.
    now = datetime.now().strftime(lala.config.get("datetime_format"))
    _chatlog.append("[%s] %s: %s" % (now, user, text))
| mit | Python |
6e3a23edf55e5f153d4390d04c6867696d29ca36 | fix CSV export of unicode chars | hackatbrown/2015.hackatbrown.org,hackatbrown/2015.hackatbrown.org,hackatbrown/2015.hackatbrown.org | hack-at-brown-2015/csv_export.py | hack-at-brown-2015/csv_export.py | import csv
import webapp2
from registration import Hacker, hacker_keys, personal_info_keys
from config import onTeam, isAdmin
import logging
class CsvExport(webapp2.RequestHandler):
    """Streams all Hacker records as UTF-8 CSV (Python 2 / webapp2)."""
    def get(self):
        # Only team members may export; everyone else is bounced home.
        if not onTeam(): return self.redirect('/')
        keys = list(hacker_keys) + ['status', 'admit_priority', 'rsvpd', 'checked_in', 'has_resume']
        # Strip personally identifiable columns unless an admin explicitly
        # asked for them.
        if (not self.request.get('include_personally_identifiable_info')) or not isAdmin():
            for key in personal_info_keys:
                keys.remove(key)
        self.response.headers['Content-Type'] = 'text/csv; charset=utf-8'
        writer = csv.DictWriter(self.response, fieldnames=keys)
        writer.writeheader()
        for hacker in Hacker.query():
            # Encode every value to UTF-8 bytes so the Python 2 csv module
            # can write non-ASCII characters without raising.
            writer.writerow({key: unicode(val).encode('utf-8') for key, val in hacker.asDict(keys).iteritems()})
| import csv
import webapp2
from registration import Hacker, hacker_keys, personal_info_keys
from config import onTeam, isAdmin
import logging
class CsvExport(webapp2.RequestHandler):
    """Streams all Hacker records as CSV (Python 2 / webapp2)."""
    def get(self):
        # Only team members may export; everyone else is bounced home.
        if not onTeam(): return self.redirect('/')
        keys = list(hacker_keys) + ['status', 'admit_priority', 'rsvpd', 'checked_in', 'has_resume']
        # Strip personally identifiable columns unless an admin explicitly
        # asked for them.
        if (not self.request.get('include_personally_identifiable_info')) or not isAdmin():
            for key in personal_info_keys:
                keys.remove(key)
        self.response.headers['Content-Type'] = 'text/csv'
        writer = csv.DictWriter(self.response, fieldnames=keys)
        writer.writeheader()
        for hacker in Hacker.query():
            try:
                writer.writerow(hacker.asDict(keys))
            except UnicodeEncodeError:
                # NOTE(review): rows with non-ASCII values are silently
                # dropped from the CSV; print() only hits the server log.
                logging.error('could not encode\n')
                print(hacker)
| mit | Python |
9f9184a17b0aa5f58a104cb606cdca157386b32a | fix undefined variable | emschimmel/CameraPi,emschimmel/CameraPi | led_test/led_test.py | led_test/led_test.py | import ConfigParser
from time import sleep
try:
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
except ImportError:
print('GPIO unavailable')
class led_test:
    # Self-test: walks all 16 configured LEDs on, waits, then off again.
    # (Python 2 module: backtick repr syntax is used throughout.)
    config = ConfigParser.RawConfigParser()
    config.read('config.properties')
    def __init__(self):
        for lednumber in range(1, 17):
            self.setup_led(lednumber)
        for lednumber in range(1, 17):
            self.turn_led_on(lednumber, True)
            sleep(0.25)
        sleep(5)
        for lednumber in range(1, 17):
            self.turn_led_on(lednumber, False)
            sleep(0.25)
    def setup_led(self, lednumber):
        # Configure the GPIO pin mapped to led_<n> in config.properties.
        print("setup led "+`lednumber`)
        # try:
        int_led_value = int(self.config.get('Leds', 'led_'+`lednumber`))
        GPIO.setup(int_led_value, GPIO.OUT)
        # except:
        #     print('GPIO unavailable, unable to setup led_'+`lednumber`)
    def turn_led_on(self, lednumber, status):
        # status=True drives the pin high, False drives it low.
        print("turn led "+`lednumber`+' to '+`status`)
        # try:
        int_led_value = int(self.config.get('Leds', 'led_'+`lednumber`))
        GPIO.output(int_led_value,status) #turn on the LED
        # except:
        #     print('GPIO unavailable, unable to turn led_'+`lednumber`+' to '+`status`)
class button_test:
config = ConfigParser.RawConfigParser()
config.read('config.properties')
global run_main_loop
run_main_loop = False
def __init__(self):
for buttonnumber in range(1, 2):
self.setup_button(buttonnumber)
run_button_loops()
def setup_button(self, buttonnumber):
print("setup button "+`buttonnumber`)
# try:
btn_int_value = int(self.config.get('Leds', 'led_'+`buttonnumber`))
GPIO.setup(btn_int_value, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# except:
# print('GPIO unavailable, unable to setup button_'+`buttonnumber`)
def run_button_loops(self):
global run_main_loop
run_main_loop = True
while run_main_loop:
for buttonnumber in range(1, 2):
# try:
btn_int_value = int(self.config.get('Leds', 'led_'+`buttonnumber`))
GPIO.wait_for_edge(btn_int_value, GPIO.FALLING)
print('button pressed')
time.sleep(0.3) #debounce
# except:
# run_main_loop = False
# print('GPIO unavailable, unable to setup button_'+`buttonnumber`)
led_test()
button_test() | import ConfigParser
from time import sleep
try:
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BOARD)
except ImportError:
print('GPIO unavailable')
class led_test:
    # Self-test: walks all 16 configured LEDs on, waits, then off again.
    # (Python 2 module: backtick repr syntax is used throughout.)
    config = ConfigParser.RawConfigParser()
    config.read('config.properties')
    def __init__(self):
        for lednumber in range(1, 17):
            self.setup_led(lednumber)
        for lednumber in range(1, 17):
            self.turn_led_on(lednumber, True)
            sleep(0.25)
        sleep(5)
        for lednumber in range(1, 17):
            self.turn_led_on(lednumber, False)
            sleep(0.25)
    def setup_led(self, lednumber):
        # Configure the GPIO pin mapped to led_<n> in config.properties.
        print("setup led "+`lednumber`)
        # try:
        int_led_value = int(self.config.get('Leds', 'led_'+`lednumber`))
        GPIO.setup(int_led_value, GPIO.OUT)
        # except:
        #     print('GPIO unavailable, unable to setup led_'+`lednumber`)
    def turn_led_on(self, lednumber, status):
        # status=True drives the pin high, False drives it low.
        print("turn led "+`lednumber`+' to '+`status`)
        # try:
        int_led_value = int(self.config.get('Leds', 'led_'+`lednumber`))
        GPIO.output(int_led_value,status) #turn on the LED
        # except:
        #     print('GPIO unavailable, unable to turn led_'+`lednumber`+' to '+`status`)
class button_test:
    # Polls a configured GPIO button (Python 2 module, backtick repr syntax).
    config = ConfigParser.RawConfigParser()
    config.read('config.properties')
    global run_main_loop
    run_main_loop = False
    def __init__(self):
        global run_main_loop
        for buttonnumber in range(1, 2):
            self.setup_button(buttonnumber)
        # NOTE(review): set to True only *after* setup_button returns, yet
        # setup_button's while-loop below is guarded by run_main_loop — so
        # that loop can never run from here; looks unintended.
        run_main_loop = True
    def setup_button(self, buttonnumber):
        print("setup button "+`buttonnumber`)
        # try:
        btn_int_value = int(self.config.get('Leds', 'led_'+`buttonnumber`))
        GPIO.setup(btn_int_value, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        # except:
        #     print('GPIO unavailable, unable to setup button_'+`buttonnumber`)
        while run_main_loop:
            # try:
            # NOTE(review): `btn_pin` is undefined (presumably should be
            # btn_int_value) and `time.sleep` needs `import time`; only
            # `sleep` is imported.  Both raise NameError if this loop runs.
            GPIO.wait_for_edge(btn_pin, GPIO.FALLING)
            print('button pressed')
            time.sleep(0.3) #debounce
            # except:
            #     run_main_loop = False
            #     print('GPIO unavailable, unable to setup button_'+`buttonnumber`)
led_test()
button_test() | apache-2.0 | Python |
52d4d8283eaf7e899398b6539df80551ec3437ae | FIX version | ingadhoc/odoo-legal | legal/__openerp__.py | legal/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': 'Legal',
'version': '8.0.0.6.0',
'description': 'Legal',
'category': 'base.module_category_knowledge_management',
'author': 'ADHOC',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'depends': ['calendar',
'portal', 'report_aeroo',
'partner_person', 'calendar_state', 'account'],
'data': [
'security/legal_group.xml',
'security/ir.model.access.csv',
'view/legal_menuitem.xml',
'view/res_partner_view.xml',
'view/prosecution_view.xml',
'view/claims_view.xml',
'view/radication_view.xml',
'view/auxiliary_field_view.xml',
'view/models_view.xml',
'view/office_view.xml',
'view/regulation_view.xml',
'view/news_view.xml',
'view/parts_view.xml',
'view/evidence_view.xml',
'view/negotiation_view.xml',
'view/claims_type_view.xml',
'view/responsibility_view.xml',
'view/news_view.xml',
'view/calendar_view.xml',
'view/expertise_view.xml',
'view/expertise_detail_view.xml',
'view/expertise_detail_type_view.xml',
'view/account_invoice_view.xml',
'prosecution_data.xml',
'wizard/stock_case_wizard.xml',
'report/legal_report.xml'
],
'installable': True,
'auto_install': False,
'application': True,
'demo': [
'data/demo/res.partner.csv',
'data/demo/res_users.xml',
'data/demo/res_company.xml',
'data/demo/legal.office.csv',
'data/demo/legal.expertise.type.csv',
'data/demo/legal.expertise.detail_type.csv',
'data/demo/legal.status.csv',
'data/demo/legal.substate.csv',
'data/demo/legal.office.csv',
'data/demo/legal.role.csv',
'data/demo/legal.claim.type.csv',
'data/demo/legal.claim.category.csv',
'data/demo/legal.prosecution_type.csv',
'data/demo/legal.prosecution.csv',
],
}
| # -*- coding: utf-8 -*-
{
'name': 'Legal',
'version': '8.0.0.5.0',
'description': 'Legal',
'category': 'base.module_category_knowledge_management',
'author': 'ADHOC',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'depends': ['calendar',
'portal', 'report_aeroo',
'partner_person', 'calendar_state', 'account'],
'data': [
'security/legal_group.xml',
'security/ir.model.access.csv',
'view/legal_menuitem.xml',
'view/res_partner_view.xml',
'view/prosecution_view.xml',
'view/claims_view.xml',
'view/radication_view.xml',
'view/auxiliary_field_view.xml',
'view/models_view.xml',
'view/office_view.xml',
'view/regulation_view.xml',
'view/news_view.xml',
'view/parts_view.xml',
'view/evidence_view.xml',
'view/negotiation_view.xml',
'view/claims_type_view.xml',
'view/responsibility_view.xml',
'view/news_view.xml',
'view/calendar_view.xml',
'view/expertise_view.xml',
'view/expertise_detail_view.xml',
'view/expertise_detail_type_view.xml',
'view/account_invoice_view.xml',
'prosecution_data.xml',
'wizard/stock_case_wizard.xml',
'report/legal_report.xml'
],
'installable': True,
'auto_install': False,
'application': True,
'demo': [
'data/demo/res.partner.csv',
'data/demo/res_users.xml',
'data/demo/res_company.xml',
'data/demo/legal.office.csv',
'data/demo/legal.expertise.type.csv',
'data/demo/legal.expertise.detail_type.csv',
'data/demo/legal.status.csv',
'data/demo/legal.substate.csv',
'data/demo/legal.office.csv',
'data/demo/legal.role.csv',
'data/demo/legal.claim.type.csv',
'data/demo/legal.claim.category.csv',
'data/demo/legal.prosecution_type.csv',
'data/demo/legal.prosecution.csv',
],
}
| agpl-3.0 | Python |
0e02c451f9d6f0c0af29eaef5f212efefc9d7eef | introduce the concept of an 'ultra' role, which is a role not even admins have unless it is explicitly declared | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj | portality/authorise.py | portality/authorise.py | from portality.core import app
class Authorise(object):
    """Role-based authorisation checks backed by app.config settings."""

    @classmethod
    def has_role(cls, role, reference):
        """Return True if the roles in ``reference`` grant ``role``.

        Roles prefixed with ``ultra_`` are never granted implicitly via
        the super-user role; they must be held explicitly or granted
        through the role map.
        """
        is_ultra = role.startswith("ultra_")
        # Super users hold every role except the "ultra" ones.
        if not is_ultra and app.config["SUPER_USER_ROLE"] in reference:
            return True
        # Explicitly held role.
        if role in reference:
            return True
        # Role granted indirectly through the configured role map.
        if role in cls.get_roles(reference):
            return True
        return False

    @classmethod
    def get_roles(cls, reference):
        """Expand ``reference`` via ROLE_MAP and return a de-duplicated list."""
        role_map = app.config.get("ROLE_MAP", {})
        expanded = []
        for held in reference:
            expanded.extend(role_map.get(held, []))
        return list(set(expanded))

    @classmethod
    def top_level_roles(cls):
        """Return the configured list of top-level roles."""
        return app.config.get("TOP_LEVEL_ROLES", [])
| from portality.core import app
class Authorise(object):
    """Role-based authorisation checks backed by app.config settings."""
    @classmethod
    def has_role(cls, role, reference):
        """Return True if the roles in ``reference`` grant ``role``."""
        # if we are the super user we can do anything
        if app.config["SUPER_USER_ROLE"] in reference:
            return True
        # if the user's role list contains the role explicitly then do it
        if role in reference:
            return True
        # get the de-duplicated list of roles that the user has
        full = cls.get_roles(reference)
        if role in full:
            return True
        return False
    @classmethod
    def get_roles(cls, reference):
        """Expand ``reference`` via the configured ROLE_MAP (de-duplicated)."""
        role_map = app.config.get("ROLE_MAP", {})
        roles = []
        for r in reference:
            roles += role_map.get(r, [])
        return list(set(roles))
    @classmethod
    def top_level_roles(cls):
        """Return the configured list of top-level roles."""
        return app.config.get("TOP_LEVEL_ROLES", [])
| apache-2.0 | Python |
35ed0d75715a000c343de7e04a6c34dd2c14b2b9 | Split lines correctly, add leading 0 to patent number if length == 7 | yngcan/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor | test/goldstandard/benchmark_confirm.py | test/goldstandard/benchmark_confirm.py | import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
split_lines = line_read.split(', ')
# Strip out weird characters/formatting
# Need to add leading "0" to Patent if not Design/Util/etc..
patent_to_match = split_lines[0].strip(' \t\n\r')
if len(patent_to_match) == 7:
patent_to_match = "0" + patent_to_match
last_name = split_lines[1].strip(' \t\n\r')
first_name = split_lines[2].strip(' \t\n\r')
| import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
# Input: one benchmark record per line (Python 2 script).
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
# Truncate the log file before the run starts.
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
    con_cur = con.cursor()
    logging.info("Beginning to query database")
    # Indexes speed up the per-record lookups.
    con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
    con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
    con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
# Process the benchmark file one record at a time.
while True:
    line_read = opened_file.readline()
    # print line_read
    if not line_read:
        print "EXITING"
        break
    count = count + 1
    if count%100 == 0:
        print "starting patent", count
| bsd-2-clause | Python |
589e1b3587a555983b764338d86a08596961247c | fix formatting of help text | TheReverend403/Pyper,TheReverend403/Pyper | commands/cmd_hash.py | commands/cmd_hash.py | import hashlib
from lib.command import Command
class HashCommand(Command):
    """Chat command that hashes arbitrary text with a chosen algorithm."""

    name = 'hash'
    description = 'Hashes text.'

    def run(self, message, args):
        """Handle ``/hash <algorithm> <text...>``.

        Replies with the hex digest of the UTF-8 encoded text, or with
        usage/error help when the arguments are invalid.
        """
        # hashlib may list the same algorithm under several spellings;
        # lower-case and de-duplicate before presenting choices.
        available_algorithms = list(set(x.lower() for x in hashlib.algorithms_available))
        # Require at least an algorithm plus one word of text.  Using "< 2"
        # (rather than the previous "!= 2") accepts multi-word input, which
        # the ' '.join(args[1:]) below is already written for.
        if not args or len(args) < 2:
            self.reply(message, '<b>/{0} [algorithm] [text]</b>, where [algorithm] is one of {1}'.format(
                self.name, ', '.join(available_algorithms)), parse_mode='HTML')
            return
        algorithm = args[0].lower()
        if algorithm not in available_algorithms:
            self.reply(message, 'Invalid algorithm. Please choose one of {0}'.format(
                ', '.join(available_algorithms)))
            return
        text = ' '.join(args[1:]).encode('utf-8')
        hash_object = hashlib.new(algorithm)
        hash_object.update(text)
        self.reply(message, hash_object.hexdigest())
| import hashlib
from lib.command import Command
class HashCommand(Command):
    """Chat command that hashes text with a chosen hashlib algorithm."""
    name = 'hash'
    description = 'Hashes text.'
    def run(self, message, args):
        # Remove duplicates
        available_algorithms = list(set(x.lower() for x in hashlib.algorithms_available))
        # NOTE(review): len(args) != 2 rejects multi-word text even though
        # the join(args[1:]) below is written for it — confirm intent.
        if not args or len(args) != 2:
            self.reply(message, '<pre>/{0} [algorithm] [text]</pre>, where [algorithm] is one of {1}'.format(
                self.name, ', '.join(available_algorithms)), parse_mode='HTML')
            return
        algorithm = args[0].lower()
        if algorithm not in [x for x in available_algorithms]:
            self.reply(message, 'Invalid algorithm. Please choose one of {0}'.format(
                ', '.join(available_algorithms)))
            return
        # Hash the UTF-8 encoded text and reply with the hex digest.
        text = ' '.join(args[1:]).encode('utf-8')
        hash_object = hashlib.new(algorithm)
        hash_object.update(text)
        self.reply(message, hash_object.hexdigest())
| agpl-3.0 | Python |
5527b7cd687f6b3b80c3b171d14a4031a01b995e | Add signal handler | phsmit/kaldi-recipes,psmit/kaldi-recipes,phsmit/kaldi-recipes,psmit/kaldi-recipes,psmit/kaldi-recipes | common/filter_lex.py | common/filter_lex.py | #!/usr/bin/env python3
import argparse
from signal import signal, SIGPIPE, SIG_DFL
#Ignore SIG_PIPE and don't throw exceptions on it... (http://docs.python.org/library/signal.html)
signal(SIGPIPE,SIG_DFL)
def main(in_lex, vocab, out_lex, oov):
    """Filter a pronunciation lexicon down to a vocabulary.

    Entries of ``in_lex`` whose head word occurs in ``vocab`` are written
    to ``out_lex`` (word, TAB, pronunciation); vocabulary words with no
    pronunciation are written one per line to ``oov``.
    """
    # word -> set of pronunciations seen in the input lexicon
    pronunciations = {}
    for entry in in_lex:
        word, pron = entry.strip().split(None, 1)
        pronunciations.setdefault(word, set()).add(pron)

    for vocab_line in vocab:
        word = vocab_line.strip().split()[0]
        prons = pronunciations.get(word)
        if prons is None:
            print(word, file=oov)
        else:
            for pron in prons:
                print("{}\t{}".format(word, pron), file=out_lex)
if __name__ == "__main__":
    # CLI: filter_lex.py <in_lex> <in_vocab> <out_lex> <oov_list>
    parser = argparse.ArgumentParser()
    parser.add_argument('inlex', type=argparse.FileType('r', encoding='utf-8'))
    parser.add_argument('invocab', type=argparse.FileType('r', encoding='utf-8'))
    parser.add_argument('outlex', type=argparse.FileType('w', encoding='utf-8'))
    parser.add_argument('oovlist', type=argparse.FileType('w', encoding='utf-8'))
    args = parser.parse_args()
    main(args.inlex, args.invocab, args.outlex, args.oovlist)
| #!/usr/bin/env python3
import argparse
def main(in_lex, vocab, out_lex, oov):
    """Filter a pronunciation lexicon down to a vocabulary.

    Entries of ``in_lex`` whose head word occurs in ``vocab`` are written
    to ``out_lex`` (word, TAB, pronunciation); vocabulary words with no
    pronunciation are written one per line to ``oov``.
    """
    # word -> set of pronunciations seen in the input lexicon
    pronunciations = {}
    for entry in in_lex:
        word, pron = entry.strip().split(None, 1)
        pronunciations.setdefault(word, set()).add(pron)

    for vocab_line in vocab:
        word = vocab_line.strip().split()[0]
        prons = pronunciations.get(word)
        if prons is None:
            print(word, file=oov)
        else:
            for pron in prons:
                print("{}\t{}".format(word, pron), file=out_lex)
if __name__ == "__main__":
    # CLI: filter_lex.py <in_lex> <in_vocab> <out_lex> <oov_list>
    parser = argparse.ArgumentParser()
    parser.add_argument('inlex', type=argparse.FileType('r', encoding='utf-8'))
    parser.add_argument('invocab', type=argparse.FileType('r', encoding='utf-8'))
    parser.add_argument('outlex', type=argparse.FileType('w', encoding='utf-8'))
    parser.add_argument('oovlist', type=argparse.FileType('w', encoding='utf-8'))
    args = parser.parse_args()
    main(args.inlex, args.invocab, args.outlex, args.oovlist)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.