commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
78064948169914aa2fc8290bba04e0bc76bbf98c | Fix typing [roku] (#66397) | rohitranjan1991/home-assistant,toddeye/home-assistant,rohitranjan1991/home-assistant,nkgilley/home-assistant,toddeye/home-assistant,mezz64/home-assistant,GenericStudent/home-assistant,w1ll1am23/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,mezz64/home-assistant,GenericStudent/home-assistant,nkgilley/home-assistant | homeassistant/components/roku/remote.py | homeassistant/components/roku/remote.py | """Support for the Roku remote."""
from __future__ import annotations
from collections.abc import Iterable
from typing import Any
from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import roku_exception_handler
from .const import DOMAIN
from .coordinator import RokuDataUpdateCoordinator
from .entity import RokuEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Load Roku remote based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
unique_id = coordinator.data.info.serial_number
async_add_entities([RokuRemote(unique_id, coordinator)], True)
class RokuRemote(RokuEntity, RemoteEntity):
"""Device that sends commands to an Roku."""
def __init__(self, unique_id: str, coordinator: RokuDataUpdateCoordinator) -> None:
"""Initialize the Roku device."""
super().__init__(
device_id=unique_id,
coordinator=coordinator,
)
self._attr_name = coordinator.data.info.name
self._attr_unique_id = unique_id
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return not self.coordinator.data.state.standby
@roku_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the device on."""
await self.coordinator.roku.remote("poweron")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the device off."""
await self.coordinator.roku.remote("poweroff")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_send_command(self, command: Iterable[str], **kwargs: Any) -> None:
"""Send a command to one device."""
num_repeats = kwargs[ATTR_NUM_REPEATS]
for _ in range(num_repeats):
for single_command in command:
await self.coordinator.roku.remote(single_command)
await self.coordinator.async_request_refresh()
| """Support for the Roku remote."""
from __future__ import annotations
from typing import Any
from homeassistant.components.remote import ATTR_NUM_REPEATS, RemoteEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import roku_exception_handler
from .const import DOMAIN
from .coordinator import RokuDataUpdateCoordinator
from .entity import RokuEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Load Roku remote based on a config entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
unique_id = coordinator.data.info.serial_number
async_add_entities([RokuRemote(unique_id, coordinator)], True)
class RokuRemote(RokuEntity, RemoteEntity):
"""Device that sends commands to an Roku."""
def __init__(self, unique_id: str, coordinator: RokuDataUpdateCoordinator) -> None:
"""Initialize the Roku device."""
super().__init__(
device_id=unique_id,
coordinator=coordinator,
)
self._attr_name = coordinator.data.info.name
self._attr_unique_id = unique_id
@property
def is_on(self) -> bool:
"""Return true if device is on."""
return not self.coordinator.data.state.standby
@roku_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the device on."""
await self.coordinator.roku.remote("poweron")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the device off."""
await self.coordinator.roku.remote("poweroff")
await self.coordinator.async_request_refresh()
@roku_exception_handler
async def async_send_command(self, command: list, **kwargs: Any) -> None:
"""Send a command to one device."""
num_repeats = kwargs[ATTR_NUM_REPEATS]
for _ in range(num_repeats):
for single_command in command:
await self.coordinator.roku.remote(single_command)
await self.coordinator.async_request_refresh()
| apache-2.0 | Python |
59a0ce1473632ea5417efa5b8f18ee195c96e524 | Document data import instructions | skylines-project/skylines,Turbo87/skylines,skylines-project/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,Turbo87/skylines,Turbo87/skylines | skylines/model/timezone.py | skylines/model/timezone.py | from pytz import timezone
from sqlalchemy.types import Integer, String
from geoalchemy2.types import Geometry
from skylines.database import db
from skylines.lib.string import unicode_to_str
# Instructions
#
# - download raw data from http://efele.net/maps/tz/world/tz_world.zip
# - shp2pgsql -D -s 4326 tz_world.shp > dump.sql
# - psql skylines -f dump.sql
class TimeZone(db.Model):
__tablename__ = 'tz_world'
id = db.Column('gid', Integer, autoincrement=True, primary_key=True)
tzid = db.Column(String(30))
the_geom = db.Column(Geometry('MULTIPOLYGON', srid=4326))
def __unicode__(self):
return self.tzid
def __repr__(self):
return unicode_to_str('<TimeZone: id=%d tzid=\'%s\'>' % (self.id, self.tzid))
@classmethod
def by_location(cls, location):
location = location.make_point(srid=None)
filter = db.func.ST_Contains(cls.the_geom, location)
zone = db.session.query(cls.tzid).filter(filter).scalar()
if zone is None:
return None
return timezone(unicode(zone))
| from pytz import timezone
from sqlalchemy.types import Integer, String
from geoalchemy2.types import Geometry
from skylines.database import db
from skylines.lib.string import unicode_to_str
class TimeZone(db.Model):
__tablename__ = 'tz_world'
id = db.Column('gid', Integer, autoincrement=True, primary_key=True)
tzid = db.Column(String(30))
the_geom = db.Column(Geometry('MULTIPOLYGON', srid=4326))
def __unicode__(self):
return self.tzid
def __repr__(self):
return unicode_to_str('<TimeZone: id=%d tzid=\'%s\'>' % (self.id, self.tzid))
@classmethod
def by_location(cls, location):
location = location.make_point(srid=None)
filter = db.func.ST_Contains(cls.the_geom, location)
zone = db.session.query(cls.tzid).filter(filter).scalar()
if zone is None:
return None
return timezone(unicode(zone))
| agpl-3.0 | Python |
ef05a6c51be615b7df38221235dda0a88704b67c | add vimeo settings to dev-settings.py, http://bugzilla.pculture.org/show_bug.cgi?id=15989 | wevoice/wesub,ujdhesa/unisubs,pculture/unisubs,ReachingOut/unisubs,norayr/unisubs,eloquence/unisubs,ReachingOut/unisubs,pculture/unisubs,ujdhesa/unisubs,norayr/unisubs,ofer43211/unisubs,norayr/unisubs,ujdhesa/unisubs,eloquence/unisubs,wevoice/wesub,ujdhesa/unisubs,ofer43211/unisubs,wevoice/wesub,eloquence/unisubs,ReachingOut/unisubs,ReachingOut/unisubs,pculture/unisubs,pculture/unisubs,eloquence/unisubs,norayr/unisubs,ofer43211/unisubs,ofer43211/unisubs,wevoice/wesub | dev-settings.py | dev-settings.py | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from settings import *
import logging
from django.contrib.sites.models import Site
SITE_ID = 4
SITE_NAME = 'mirosubs-dev'
TWITTER_CONSUMER_KEY = '6lHYqtxzQBD3lQ55Chi6Zg'
TWITTER_CONSUMER_SECRET = 'ApkJPIIbBKp3Wph0JBoAg2Nsk1Z5EG6PFTevNpd5Y00'
MEDIA_URL = "http://{0}/site_media/".format(Site.objects.get(id=SITE_ID).domain)
# MIDDLEWARE_CLASSES += ('middleware.SqlPrintingMiddleware',)
# Uncomment following line when you want to work with compiled JS.
# JS_USE_COMPILED = True
VIMEO_API_KEY = 'e1a46f832f8dfa99652781ee0b39df12'
VIMEO_API_SECRET = 'bdaeb531298eeee1'
try:
from settings_local import *
except ImportError:
pass
| # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2010 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
from settings import *
import logging
from django.contrib.sites.models import Site
SITE_ID = 4
SITE_NAME = 'mirosubs-dev'
TWITTER_CONSUMER_KEY = '6lHYqtxzQBD3lQ55Chi6Zg'
TWITTER_CONSUMER_SECRET = 'ApkJPIIbBKp3Wph0JBoAg2Nsk1Z5EG6PFTevNpd5Y00'
MEDIA_URL = "http://{0}/site_media/".format(Site.objects.get(id=SITE_ID).domain)
# MIDDLEWARE_CLASSES += ('middleware.SqlPrintingMiddleware',)
# Uncomment following line when you want to work with compiled JS.
# JS_USE_COMPILED = True
try:
from settings_local import *
except ImportError:
pass
| agpl-3.0 | Python |
0e2bee5e651d5ba8c3afe817a8cf49bc7143e019 | Remove unnecessary import | niemmi/algolib | tests/graph/test_bipartite.py | tests/graph/test_bipartite.py | import unittest
from .context import Undirected, bipartite
EDGES = [
[8, 4],
[4, 1],
[1, 0],
[1, 3],
[1, 5],
[0, 2],
[2, 2],
[2, 6],
[2, 7]
]
CASES = [
[[], True],
[[[3, 7]], False],
[[[3, 5]], False],
[[[7, 9], [9, 3]], True],
[[[8, 1]], False]
]
class TestBipartite(unittest.TestCase):
def test_bipartite(self):
graph = Undirected()
for x, y in EDGES:
graph.insert_edge(x, y)
for case, expected in CASES:
copy = graph.copy()
for x, y in case:
copy.insert_edge(x, y)
self.assertEqual(expected, bipartite(copy), str(case) + ' fails')
| import unittest
from .context import Undirected, BFS, bipartite
EDGES = [
[8, 4],
[4, 1],
[1, 0],
[1, 3],
[1, 5],
[0, 2],
[2, 2],
[2, 6],
[2, 7]
]
CASES = [
[[], True],
[[[3, 7]], False],
[[[3, 5]], False],
[[[7, 9], [9, 3]], True],
[[[8, 1]], False]
]
class TestBipartite(unittest.TestCase):
def test_bipartite(self):
graph = Undirected()
for x, y in EDGES:
graph.insert_edge(x, y)
for case, expected in CASES:
copy = graph.copy()
for x, y in case:
copy.insert_edge(x, y)
self.assertEqual(expected, bipartite(copy), str(case) + ' fails')
| bsd-3-clause | Python |
096f3c203d8f8c3f66b5ddf6b32ee582789412c6 | Fix the docstring of the RichTextInline | matthiask/feincms3,matthiask/feincms3,matthiask/feincms3 | feincms3/plugins/richtext.py | feincms3/plugins/richtext.py | """
Provides a rich text area whose content is automatically cleaned using a
very restrictive allowlist of tags and attributes.
Depends on django-ckeditor and `html-sanitizer
<https://pypi.org/project/html-sanitizer>`__.
"""
from content_editor.admin import ContentEditorInline
from django.db import models
from django.utils.html import mark_safe, strip_tags
from django.utils.text import Truncator
from django.utils.translation import gettext_lazy as _
from feincms3.cleanse import CleansedRichTextField
__all__ = ("RichText", "RichTextInline", "render_richtext")
class RichText(models.Model):
"""
Rich text plugin
To use this, a `django-ckeditor
<https://github.com/django-ckeditor/django-ckeditor>`_ configuration named
``richtext-plugin`` is required. See the section :mod:`HTML cleansing
<feincms3.cleanse>` for the recommended configuration.
"""
text = CleansedRichTextField(_("text"), config_name="richtext-plugin")
class Meta:
abstract = True
verbose_name = _("rich text")
verbose_name_plural = _("rich texts")
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=" ...")
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin-ckeditor.css`` file which adjusts the
width of the django-ckeditor widget inside the content editor.
"""
class Media:
css = {"screen": ["feincms3/plugin-ckeditor.css"]}
def render_richtext(plugin, **kwargs):
"""
Return the text of the rich text plugin as a safe string (``mark_safe``)
"""
return mark_safe(plugin.text)
| """
Provides a rich text area whose content is automatically cleaned using a
very restrictive allowlist of tags and attributes.
Depends on django-ckeditor and `html-sanitizer
<https://pypi.org/project/html-sanitizer>`__.
"""
from content_editor.admin import ContentEditorInline
from django.db import models
from django.utils.html import mark_safe, strip_tags
from django.utils.text import Truncator
from django.utils.translation import gettext_lazy as _
from feincms3.cleanse import CleansedRichTextField
__all__ = ("RichText", "RichTextInline", "render_richtext")
class RichText(models.Model):
"""
Rich text plugin
To use this, a `django-ckeditor
<https://github.com/django-ckeditor/django-ckeditor>`_ configuration named
``richtext-plugin`` is required. See the section :mod:`HTML cleansing
<feincms3.cleanse>` for the recommended configuration.
"""
text = CleansedRichTextField(_("text"), config_name="richtext-plugin")
class Meta:
abstract = True
verbose_name = _("rich text")
verbose_name_plural = _("rich texts")
def __str__(self):
# Return the first few words of the content (with tags stripped)
return Truncator(strip_tags(self.text)).words(10, truncate=" ...")
class RichTextInline(ContentEditorInline):
"""
The only difference with the standard ``ContentEditorInline`` is that this
inline adds the ``feincms3/plugin_ckeditor.js`` file which handles the
CKEditor widget activation and deactivation inside the content editor.
"""
class Media:
css = {"screen": ["feincms3/plugin-ckeditor.css"]}
def render_richtext(plugin, **kwargs):
"""
Return the text of the rich text plugin as a safe string (``mark_safe``)
"""
return mark_safe(plugin.text)
| bsd-3-clause | Python |
fbefeb72035d5bf06dfd04a1c309a6292116d8d9 | customize errors | munisisazade/developer_portal,munisisazade/developer_portal,munisisazade/developer_portal | develop/urls.py | develop/urls.py | """develop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include, handler404,handler403,handler404,han
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from news.views import NotFound404
#
# handler400 = NotFound404
# handler403 = NotFound404
# handler404 = NotFound404
# handler500 = NotFound404
urlpatterns = [
url(r'^Adminqaqalar.aspx', admin.site.urls),
url(r'^',include('news.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
| """develop URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include, handler404,handler403,handler404,handler500
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from news.views import NotFound404
handler400 = NotFound404
handler403 = NotFound404
handler404 = NotFound404
handler500 = NotFound404
urlpatterns = [
url(r'^Adminqaqalar.aspx', admin.site.urls),
url(r'^',include('news.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
| mit | Python |
8cd55326f8b06ad26ffb66136715592ef3b5da68 | Check for report_file | philippjfr/bokeh,jakirkham/bokeh,quasiben/bokeh,philippjfr/bokeh,ericmjl/bokeh,dennisobrien/bokeh,DuCorey/bokeh,DuCorey/bokeh,msarahan/bokeh,clairetang6/bokeh,timsnyder/bokeh,draperjames/bokeh,percyfal/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,aiguofer/bokeh,phobson/bokeh,azjps/bokeh,schoolie/bokeh,mindriot101/bokeh,philippjfr/bokeh,stonebig/bokeh,schoolie/bokeh,azjps/bokeh,phobson/bokeh,philippjfr/bokeh,aiguofer/bokeh,rs2/bokeh,jakirkham/bokeh,ericmjl/bokeh,rs2/bokeh,quasiben/bokeh,dennisobrien/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,bokeh/bokeh,mindriot101/bokeh,phobson/bokeh,aiguofer/bokeh,philippjfr/bokeh,mindriot101/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,ericmjl/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,bokeh/bokeh,percyfal/bokeh,stonebig/bokeh,rs2/bokeh,bokeh/bokeh,clairetang6/bokeh,msarahan/bokeh,phobson/bokeh,justacec/bokeh,schoolie/bokeh,justacec/bokeh,jakirkham/bokeh,azjps/bokeh,timsnyder/bokeh,rs2/bokeh,ericmjl/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,KasperPRasmussen/bokeh,aavanian/bokeh,azjps/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,clairetang6/bokeh,quasiben/bokeh,rs2/bokeh,jakirkham/bokeh,msarahan/bokeh,bokeh/bokeh,msarahan/bokeh,timsnyder/bokeh,timsnyder/bokeh,aiguofer/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,ptitjano/bokeh,dennisobrien/bokeh,stonebig/bokeh,timsnyder/bokeh,mindriot101/bokeh,DuCorey/bokeh,DuCorey/bokeh,aavanian/bokeh,azjps/bokeh,justacec/bokeh,percyfal/bokeh,draperjames/bokeh,percyfal/bokeh,aiguofer/bokeh,clairetang6/bokeh,ptitjano/bokeh,phobson/bokeh,draperjames/bokeh,draperjames/bokeh,aavanian/bokeh,stonebig/bokeh,schoolie/bokeh,dennisobrien/bokeh,dennisobrien/bokeh,DuCorey/bokeh,percyfal/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,ptitjano/bokeh,bokeh/bokeh | tests/integration/conftest.py | tests/integration/conftest.py | from __future__ import absolute_import, print_function
import boto
import os
import pytest
from boto.s3.key import Key as S3Key
from boto.exception import NoAuthHandlerFound
from bokeh.io import output_file
from os.path import isfile, join
from .webserver import SimpleWebServer
from ..constants import s3, s3_bucket, build_id
def pytest_sessionfinish(session, exitstatus):
report_file = session.config.option.htmlpath
if report_file:
try_upload = os.environ.get("UPLOAD_PYTEST_HTML", "False") == "True"
report_ready = isfile(report_file)
if try_upload and report_ready:
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(s3_bucket)
upload = True
except NoAuthHandlerFound:
print("Upload was requested but could not connect to S3.")
upload = False
if upload is True:
with open(report_file, "r") as f:
html = f.read()
filename = join(build_id, "report.html")
key = S3Key(bucket, filename)
key.set_metadata("Content-Type", "text/html")
key.set_contents_from_string(html, policy="public-read")
print("\n%s Access report at: %s" % ("---", join(s3, filename)))
@pytest.fixture
def selenium(selenium):
# Give items a chance to load
selenium.implicitly_wait(10)
selenium.set_window_size(width=600, height=600)
return selenium
@pytest.fixture(scope='session', autouse=True)
def server(request):
server = SimpleWebServer()
server.start()
request.addfinalizer(server.stop)
return server
@pytest.fixture(scope='session')
def base_url(request, server):
return 'http://%s:%s' % (server.host, server.port)
@pytest.fixture
def output_file_url(request, base_url):
filename = request.function.__name__ + '.html'
file_obj = request.fspath.dirpath().join(filename)
file_path = file_obj.strpath
output_file(file_path, mode='inline')
def tearDown():
if file_obj.isfile():
file_obj.remove()
request.addfinalizer(tearDown)
return '%s/%s' % (base_url, file_path)
@pytest.fixture(scope="session")
def capabilities(capabilities):
capabilities["browserName"] = "firefox"
capabilities["tunnel-identifier"] = os.environ.get("TRAVIS_JOB_NUMBER")
return capabilities
| from __future__ import absolute_import, print_function
import boto
import os
import pytest
from boto.s3.key import Key as S3Key
from boto.exception import NoAuthHandlerFound
from bokeh.io import output_file
from os.path import isfile, join
from .webserver import SimpleWebServer
from ..constants import s3, s3_bucket, build_id
def pytest_sessionfinish(session, exitstatus):
report_file = session.config.option.htmlpath
try_upload = os.environ.get("UPLOAD_PYTEST_HTML", "False") == "True"
report_ready = isfile(report_file)
if try_upload and report_ready:
try:
conn = boto.connect_s3()
bucket = conn.get_bucket(s3_bucket)
upload = True
except NoAuthHandlerFound:
print("Upload was requested but could not connect to S3.")
upload = False
if upload is True:
with open(report_file, "r") as f:
html = f.read()
filename = join(build_id, "report.html")
key = S3Key(bucket, filename)
key.set_metadata("Content-Type", "text/html")
key.set_contents_from_string(html, policy="public-read")
print("\n%s Access report at: %s" % ("---", join(s3, filename)))
@pytest.fixture
def selenium(selenium):
# Give items a chance to load
selenium.implicitly_wait(10)
selenium.set_window_size(width=600, height=600)
return selenium
@pytest.fixture(scope='session', autouse=True)
def server(request):
server = SimpleWebServer()
server.start()
request.addfinalizer(server.stop)
return server
@pytest.fixture(scope='session')
def base_url(request, server):
return 'http://%s:%s' % (server.host, server.port)
@pytest.fixture
def output_file_url(request, base_url):
filename = request.function.__name__ + '.html'
file_obj = request.fspath.dirpath().join(filename)
file_path = file_obj.strpath
output_file(file_path, mode='inline')
def tearDown():
if file_obj.isfile():
file_obj.remove()
request.addfinalizer(tearDown)
return '%s/%s' % (base_url, file_path)
@pytest.fixture(scope="session")
def capabilities(capabilities):
capabilities["browserName"] = "firefox"
capabilities["tunnel-identifier"] = os.environ.get("TRAVIS_JOB_NUMBER")
return capabilities
| bsd-3-clause | Python |
effe769e1a3274291adb03238ef800d31d3468f5 | add creating message objects on process payment | v0y/django-fortumo | fortumo/views.py | fortumo/views.py | from django.conf import settings
from django.http import HttpResponse
from django.http.response import HttpResponseForbidden
from fortumo.models import Message
def payment_processor(request):
if (
settings.FORTUMO_ENABLE_IP_VALIDATION and
not request.META['REMOTE_ADDR'] in settings.FORTUMO_IPS
):
return HttpResponseForbidden('403')
# TODO: check signature
Message.objects.create(
message=request.GET['message'],
sender=request.GET['sender'],
country=request.GET['country'],
price=request.GET['price'],
price_wo_vat=request.GET['price_wo_vat'],
currency=request.GET['currency'],
service_id=request.GET['service_id'],
message_id=request.GET['message_id'],
keyword=request.GET['keyword'],
shortcode=request.GET['shortcode'],
operator=request.GET['operator'],
billing_type=request.GET['billing_type'],
status=request.GET['status'],
test=request.GET['test'],
sig=request.GET['sig'],
)
return HttpResponse('dummy')
| from django.conf import settings
from django.http import HttpResponse
from django.http.response import HttpResponseForbidden
def payment_processor(request):
if (
settings.FORTUMO_ENABLE_IP_VALIDATION and
not request.META['REMOTE_ADDR'] in settings.FORTUMO_IPS
):
return HttpResponseForbidden('403')
return HttpResponse('dummy')
| mit | Python |
2f23cfd28aa1a010cbccf27299831a895fd71ecf | Validate interface ipv4 address format #42 | openwisp/netconfig-gen,openwisp/netconfig-gen | tests/openwrt/test_formats.py | tests/openwrt/test_formats.py | import unittest
from netjsonconfig import OpenWrt
from netjsonconfig.exceptions import ValidationError
from netjsonconfig.utils import _TabsMixin
class TestFormats(unittest.TestCase, _TabsMixin):
maxDiff = None
def test_general_hostname(self):
o = OpenWrt({"general": {"hostname": "invalid hostname"}})
with self.assertRaises(ValidationError):
o.validate()
o.config['general']['hostname'] = 'valid'
o.validate()
def test_interface_ipv4(self):
o = OpenWrt({
"interfaces": [
{
"name": "eth0",
"type": "ethernet",
"addresses": [
{
"family": "ipv4",
"proto": "static",
"address": "10.0.0.1",
"mask": 28
}
]
}
]
})
o.validate()
# invalid ipv4
o.config['interfaces'][0]['addresses'][0]['address'] = '127_0_0_1'
with self.assertRaises(ValidationError):
o.validate()
| import unittest
from netjsonconfig import OpenWrt
from netjsonconfig.exceptions import ValidationError
from netjsonconfig.utils import _TabsMixin
class TestFormats(unittest.TestCase, _TabsMixin):
maxDiff = None
def test_general_hostname(self):
o = OpenWrt({"general": {"hostname": "invalid hostname"}})
with self.assertRaises(ValidationError):
o.validate()
o.config['general']['hostname'] = 'valid'
o.validate()
| mit | Python |
c81b07f93253acc49cbc5028ec83e5334fb47ed9 | Add default type formatters for Enum | jschneier/flask-admin,jschneier/flask-admin,jschneier/flask-admin,jmagnusson/flask-admin,likaiguo/flask-admin,quokkaproject/flask-admin,flask-admin/flask-admin,lifei/flask-admin,likaiguo/flask-admin,ArtemSerga/flask-admin,iurisilvio/flask-admin,flask-admin/flask-admin,flask-admin/flask-admin,jschneier/flask-admin,jmagnusson/flask-admin,betterlife/flask-admin,closeio/flask-admin,closeio/flask-admin,lifei/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,lifei/flask-admin,quokkaproject/flask-admin,lifei/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,ArtemSerga/flask-admin,closeio/flask-admin,ArtemSerga/flask-admin,likaiguo/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,flask-admin/flask-admin,ArtemSerga/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,rochacbruno/flask-admin,iurisilvio/flask-admin,betterlife/flask-admin,rochacbruno/flask-admin | flask_admin/model/typefmt.py | flask_admin/model/typefmt.py | from jinja2 import Markup
from flask_admin._compat import text_type
try:
from enum import Enum
except ImportError:
Enum = None
def null_formatter(view, value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(view, value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(view, value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
glyph = 'ok-circle' if value else 'minus-sign'
fa = 'check-circle' if value else 'minus-circle'
return Markup('<span class="fa fa-%s glyphicon glyphicon-%s icon-%s"></span>' % (fa, glyph, glyph))
def list_formatter(view, values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(text_type(v) for v in values)
def enum_formatter(view, value):
"""
Return the name of the enumerated member.
:param value:
Value to check
"""
return value.name
BASE_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
EXPORT_FORMATTERS = {
type(None): empty_formatter,
list: list_formatter,
}
if Enum is not None:
BASE_FORMATTERS[Enum] = enum_formatter
EXPORT_FORMATTERS[Enum] = enum_formatter
| from jinja2 import Markup
from flask_admin._compat import text_type
def null_formatter(view, value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(view, value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(view, value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
glyph = 'ok-circle' if value else 'minus-sign'
fa = 'check-circle' if value else 'minus-circle'
return Markup('<span class="fa fa-%s glyphicon glyphicon-%s icon-%s"></span>' % (fa, glyph, glyph))
def list_formatter(view, values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(text_type(v) for v in values)
BASE_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
EXPORT_FORMATTERS = {
type(None): empty_formatter,
list: list_formatter,
}
| bsd-3-clause | Python |
9af25d1ee342f6d8e9205912bb66a99595e767f8 | Add python dll. | mrlitong/fpsgame,mrlitong/fpsgame,mrlitong/Game-Engine-Development-Usage,mrlitong/fpsgame | fpsgame/tests.py | fpsgame/tests.py | from ctypes import *
import sys
import os
import xml.etree.ElementTree as ET
binaries = '../../../binaries'
# Work out the platform-dependent library filename
dll_filename = {
'posix': './libCollada_dbg.so',
'nt': 'Collada_dbg.dll',
}[os.name]
# The DLL may need other DLLs which are in its directory, so set the path to that
# (Don't care about clobbering the old PATH - it doesn't have anything important)
os.environ['PATH'] = '%s/system/' % binaries | from ctypes import *
import sys
import os
import xml.etree.ElementTree as ET
binaries = '../../../binaries'
# Work out the platform-dependent library filename
dll_filename = {
'posix': './libCollada_dbg.so',
'nt': 'Collada_dbg.dll',
}[os.name] | mit | Python |
87f606e4a03f5afdaaa004a173588a754be4a444 | fix import | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | packages/syft/src/syft/core/node/common/node_manager/setup_manager.py | packages/syft/src/syft/core/node/common/node_manager/setup_manager.py | # stdlib
from typing import Any
from typing import List
# third party
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
# relative
from ..node_table.setup import SetupConfig
# from ..exceptions import SetupNotFoundError
from .database_manager import DatabaseManager
class SetupManager(DatabaseManager):
schema = SetupConfig
def __init__(self, database: Engine) -> None:
super().__init__(db=database, schema=SetupManager.schema)
@property
def node_name(self) -> str:
setup = super().all()[0]
return setup.domain_name
@property
def id(self) -> int:
setup = super().all()[0]
return setup.id
def first(self, **kwargs: Any) -> SetupConfig:
result = super().first(**kwargs)
if not result:
# raise SetupNotFoundError
raise Exception
return result
def query(self, **kwargs: Any) -> List[SetupConfig]:
results = super().query(**kwargs)
if len(results) == 0:
# raise SetupNotFoundError
raise Exception
return results
def update(self, **kwargs: Any) -> None:
session_local = sessionmaker(autocommit=False, autoflush=False, bind=self.db)()
session_local.query(self._schema).first().update(**kwargs)
session_local.commit()
session_local.close()
| # stdlib
from typing import Any
from typing import List
# third party
from sqlalchemy.engine import Engine
# relative
from ..node_table.setup import SetupConfig
# from ..exceptions import SetupNotFoundError
from .database_manager import DatabaseManager
class SetupManager(DatabaseManager):
schema = SetupConfig
def __init__(self, database: Engine) -> None:
super().__init__(db=database, schema=SetupManager.schema)
@property
def node_name(self) -> str:
setup = super().all()[0]
return setup.domain_name
@property
def id(self) -> int:
setup = super().all()[0]
return setup.id
def first(self, **kwargs: Any) -> SetupConfig:
result = super().first(**kwargs)
if not result:
# raise SetupNotFoundError
raise Exception
return result
def query(self, **kwargs: Any) -> List[SetupConfig]:
results = super().query(**kwargs)
if len(results) == 0:
# raise SetupNotFoundError
raise Exception
return results
def update(self, **kwargs: Any) -> None:
session_local = sessionmaker(autocommit=False, autoflush=False, bind=self.db)()
session_local.query(self._schema).first().update(**kwargs)
session_local.commit()
session_local.close()
| apache-2.0 | Python |
d537dd609f5aaabc7abcabf1ab0dcdb4540c2bd9 | refactor exception printing | toonst/RIOT,smlng/RIOT,Josar/RIOT,roberthartung/RIOT,rfuentess/RIOT,kaspar030/RIOT,gebart/RIOT,toonst/RIOT,cladmi/RIOT,mtausig/RIOT,mfrey/RIOT,kbumsik/RIOT,biboc/RIOT,rfuentess/RIOT,miri64/RIOT,A-Paul/RIOT,authmillenon/RIOT,mfrey/RIOT,rfuentess/RIOT,adrianghc/RIOT,neiljay/RIOT,x3ro/RIOT,kYc0o/RIOT,neiljay/RIOT,immesys/RiSyn,immesys/RiSyn,yogo1212/RIOT,jasonatran/RIOT,immesys/RiSyn,BytesGalore/RIOT,roberthartung/RIOT,miri64/RIOT,roberthartung/RIOT,LudwigKnuepfer/RIOT,authmillenon/RIOT,yogo1212/RIOT,LudwigKnuepfer/RIOT,josephnoir/RIOT,kYc0o/RIOT,jasonatran/RIOT,ant9000/RIOT,kaspar030/RIOT,gebart/RIOT,jasonatran/RIOT,toonst/RIOT,LudwigKnuepfer/RIOT,roberthartung/RIOT,immesys/RiSyn,kbumsik/RIOT,kbumsik/RIOT,Josar/RIOT,A-Paul/RIOT,miri64/RIOT,cladmi/RIOT,josephnoir/RIOT,Josar/RIOT,kaspar030/RIOT,avmelnikoff/RIOT,cladmi/RIOT,OTAkeys/RIOT,jasonatran/RIOT,kbumsik/RIOT,RIOT-OS/RIOT,ks156/RIOT,BytesGalore/RIOT,avmelnikoff/RIOT,ks156/RIOT,avmelnikoff/RIOT,RIOT-OS/RIOT,LudwigOrtmann/RIOT,aeneby/RIOT,LudwigOrtmann/RIOT,RIOT-OS/RIOT,yogo1212/RIOT,mfrey/RIOT,miri64/RIOT,cladmi/RIOT,ant9000/RIOT,smlng/RIOT,OlegHahm/RIOT,kYc0o/RIOT,lazytech-org/RIOT,basilfx/RIOT,josephnoir/RIOT,adrianghc/RIOT,yogo1212/RIOT,jasonatran/RIOT,ant9000/RIOT,OTAkeys/RIOT,BytesGalore/RIOT,avmelnikoff/RIOT,OTAkeys/RIOT,OTAkeys/RIOT,BytesGalore/RIOT,LudwigKnuepfer/RIOT,Josar/RIOT,LudwigKnuepfer/RIOT,mtausig/RIOT,OlegHahm/RIOT,immesys/RiSyn,basilfx/RIOT,OlegHahm/RIOT,smlng/RIOT,LudwigOrtmann/RIOT,josephnoir/RIOT,neiljay/RIOT,ant9000/RIOT,x3ro/RIOT,kYc0o/RIOT,miri64/RIOT,A-Paul/RIOT,mfrey/RIOT,aeneby/RIOT,mtausig/RIOT,yogo1212/RIOT,lazytech-org/RIOT,lazytech-org/RIOT,biboc/RIOT,adrianghc/RIOT,ks156/RIOT,ant9000/RIOT,OlegHahm/RIOT,kaspar030/RIOT,biboc/RIOT,basilfx/RIOT,Josar/RIOT,yogo1212/RIOT,mfrey/RIOT,mtausig/RIOT,authmillenon/RIOT,ks156/RIOT,adrianghc/RIOT,x3ro/RIOT,authmillenon/RIOT,immesys/RiSyn,x3ro/RIOT,RIOT-OS/RIOT,LudwigOrtmann/RIOT,gebart/RIOT,kaspar030/RIOT,kYc0o/RIOT,rfuentess/RIOT,avmelnikoff/RIOT,A-Paul/RIOT,rfuentess/RIOT,cladmi/RIOT,LudwigOrtmann/RIOT,OTAkeys/RIOT,neiljay/RIOT,aeneby/RIOT,toonst/RIOT,RIOT-OS/RIOT,smlng/RIOT,roberthartung/RIOT,biboc/RIOT,basilfx/RIOT,ks156/RIOT,gebart/RIOT,basilfx/RIOT,aeneby/RIOT,lazytech-org/RIOT,x3ro/RIOT,adrianghc/RIOT,biboc/RIOT,gebart/RIOT,smlng/RIOT,authmillenon/RIOT,mtausig/RIOT,BytesGalore/RIOT,LudwigOrtmann/RIOT,josephnoir/RIOT,lazytech-org/RIOT,aeneby/RIOT,A-Paul/RIOT,neiljay/RIOT,OlegHahm/RIOT,kbumsik/RIOT,toonst/RIOT,authmillenon/RIOT | dist/tools/testrunner/testrunner.py | dist/tools/testrunner/testrunner.py | # Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
# 2014 Martine Lenders <mlenders@inf.fu-berlin.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import signal
import sys
import subprocess
import time
from traceback import extract_tb, print_tb
import pexpect
PEXPECT_PATH = os.path.dirname(pexpect.__file__)
RIOTBASE = os.environ['RIOTBASE'] or \
os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
def list_until(l, cond):
return l[:([i for i, e in enumerate(l) if cond(e)][0])]
def find_exc_origin(exc_info):
pos = list_until(extract_tb(exc_info),
lambda frame: frame.filename.startswith(PEXPECT_PATH)
)[-1]
return pos.line, \
os.path.relpath(os.path.abspath(pos.filename), RIOTBASE), \
pos.lineno
def run(testfunc, timeout=10, echo=True, traceback=False):
env = os.environ.copy()
child = pexpect.spawnu("make term", env=env, timeout=timeout)
# on many platforms, the termprog needs a short while to be ready...
time.sleep(3)
if echo:
child.logfile = sys.stdout
try:
subprocess.check_output(('make', 'reset'), env=env,
stderr=subprocess.PIPE)
except subprocess.CalledProcessError:
# make reset yields error on some boards even if successful
pass
try:
testfunc(child)
except pexpect.TIMEOUT:
line, filename, lineno = find_exc_origin(sys.exc_info()[2])
print("Timeout in expect script at \"%s\" (%s:%d)" %
(line, filename, lineno))
if traceback:
print_tb(sys.exc_info()[2])
return 1
finally:
print("")
os.killpg(os.getpgid(child.pid), signal.SIGKILL)
child.close()
return 0
| # Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
# 2014 Martine Lenders <mlenders@inf.fu-berlin.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import signal
import sys
import subprocess
import time
from traceback import extract_tb, print_tb
import pexpect
PEXPECT_PATH = os.path.dirname(pexpect.__file__)
RIOTBASE = os.environ['RIOTBASE'] or \
os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
def list_until(l, cond):
return l[:([i for i, e in enumerate(l) if cond(e)][0])]
def run(testfunc, timeout=10, echo=True, traceback=False):
env = os.environ.copy()
child = pexpect.spawnu("make term", env=env, timeout=timeout)
# on many platforms, the termprog needs a short while to be ready...
time.sleep(3)
if echo:
child.logfile = sys.stdout
try:
subprocess.check_output(('make', 'reset'), env=env,
stderr=subprocess.PIPE)
except subprocess.CalledProcessError:
# make reset yields error on some boards even if successful
pass
try:
testfunc(child)
except pexpect.TIMEOUT:
timeouted_at = list_until(extract_tb(sys.exc_info()[2]),
lambda frame:
frame.filename.startswith(PEXPECT_PATH))[-1]
print("Timeout in expect script at \"%s\" (%s:%d)" %
(timeouted_at.line,
os.path.relpath(os.path.abspath(timeouted_at.filename), RIOTBASE),
timeouted_at.lineno))
if traceback:
print_tb(sys.exc_info()[2])
return 1
finally:
print("")
os.killpg(os.getpgid(child.pid), signal.SIGKILL)
child.close()
return 0
| lgpl-2.1 | Python |
553cc84a62654df9f7edd4512449144f8874db3d | Remove comment, no longer applicable | amolenaar/gaphor,amolenaar/gaphor | tests/test_undo.py | tests/test_undo.py | from gaphor.tests import TestCase
from gaphor import UML
from gaphor.diagram import items
from gaphor.core import transactional
class UndoTest(TestCase):
services = TestCase.services + ["undo_manager"]
def test_class_association_undo_redo(self):
factory = self.element_factory
undo_manager = self.get_service("undo_manager")
self.assertEqual(0, len(self.diagram.canvas.solver.constraints))
ci1 = self.create(items.ClassItem, UML.Class)
self.assertEqual(2, len(self.diagram.canvas.solver.constraints))
ci2 = self.create(items.ClassItem, UML.Class)
self.assertEqual(4, len(self.diagram.canvas.solver.constraints))
a = self.create(items.AssociationItem)
self.connect(a, a.head, ci1)
self.connect(a, a.tail, ci2)
# Diagram, Association, 2x Class, Property, LiteralSpecification
self.assertEqual(6, len(factory.lselect()))
self.assertEqual(6, len(self.diagram.canvas.solver.constraints))
@transactional
def delete_class():
ci2.unlink()
undo_manager.clear_undo_stack()
self.assertFalse(undo_manager.can_undo())
delete_class()
self.assertTrue(undo_manager.can_undo())
self.assertEqual(ci1, self.get_connected(a.head))
self.assertEqual(None, self.get_connected(a.tail))
for i in range(3):
# Diagram, Class
# self.assertEqual(2, len(factory.lselect()), factory.lselect())
self.assertEqual(3, len(self.diagram.canvas.solver.constraints))
undo_manager.undo_transaction()
self.assertEqual(6, len(self.diagram.canvas.solver.constraints))
self.assertEqual(ci1, self.get_connected(a.head))
self.assertEqual(ci2, self.get_connected(a.tail))
undo_manager.redo_transaction()
| from gaphor.tests import TestCase
from gaphor import UML
from gaphor.diagram import items
from gaphor.core import transactional
class UndoTest(TestCase):
services = TestCase.services + ["undo_manager"]
def test_class_association_undo_redo(self):
factory = self.element_factory
undo_manager = self.get_service("undo_manager")
self.assertEqual(0, len(self.diagram.canvas.solver.constraints))
ci1 = self.create(items.ClassItem, UML.Class)
self.assertEqual(2, len(self.diagram.canvas.solver.constraints))
ci2 = self.create(items.ClassItem, UML.Class)
self.assertEqual(4, len(self.diagram.canvas.solver.constraints))
a = self.create(items.AssociationItem)
self.connect(a, a.head, ci1)
self.connect(a, a.tail, ci2)
# Diagram, Association, 2x Class, Property, LiteralSpecification
self.assertEqual(6, len(factory.lselect()))
self.assertEqual(6, len(self.diagram.canvas.solver.constraints))
@transactional
def delete_class():
ci2.unlink()
undo_manager.clear_undo_stack()
self.assertFalse(undo_manager.can_undo())
delete_class()
# FYI: crashes here, why?
self.assertTrue(undo_manager.can_undo())
self.assertEqual(ci1, self.get_connected(a.head))
self.assertEqual(None, self.get_connected(a.tail))
for i in range(3):
# Diagram, Class
# self.assertEqual(2, len(factory.lselect()), factory.lselect())
self.assertEqual(3, len(self.diagram.canvas.solver.constraints))
undo_manager.undo_transaction()
self.assertEqual(6, len(self.diagram.canvas.solver.constraints))
self.assertEqual(ci1, self.get_connected(a.head))
self.assertEqual(ci2, self.get_connected(a.tail))
undo_manager.redo_transaction()
| lgpl-2.1 | Python |
0c5cc8afaaceb97db30f302c97b80ec9de0979cc | Remove unused imports from tests. | Onapsis/ageofempyres | tests/test_unit.py | tests/test_unit.py | import pytest
from onagame2015.lib import Coordinate
from onagame2015.units import AttackUnit
VALID_MOVES = (
Coordinate(1, 0), Coordinate(-1, 0),
Coordinate(0, 1), Coordinate(0, -1),
)
INVALID_MOVES_FROM_00 = (Coordinate(-1, 0), Coordinate(0, -1))
INVALID_INPUTS = ('UP', 2334, 0.343, {'up', -1})
@pytest.mark.parametrize('invalid_input', INVALID_INPUTS)
def test_attack_unit_move_invalid_input(random_arena, invalid_input):
initial_coordinate = Coordinate(0, 0)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
result = attack_unit.move(invalid_input)
assert result.get('error') and 'invalid' in result.get('error')
assert attack_unit.coordinate == initial_coordinate
@pytest.mark.parametrize('invalid_move', INVALID_MOVES_FROM_00 + (999999, 123))
def test_attack_unit_move_out_of_arena(random_arena, invalid_move):
initial_coordinate = Coordinate(0, 0)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
result = attack_unit.move((99999, 99999))
assert result.get('error') and 'invalid' in result.get('error')
assert attack_unit.coordinate == initial_coordinate
@pytest.mark.parametrize('valid_move', VALID_MOVES)
def test_attack_unit_move(random_arena, valid_move):
initial_coordinate = Coordinate(1, 1)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
expected = Coordinate(
initial_coordinate.latitude + valid_move.latitude,
initial_coordinate.longitude + valid_move.longitude)
result = attack_unit.move(valid_move)
assert not result.get('error')
assert attack_unit.coordinate != initial_coordinate
assert attack_unit.coordinate == expected
assert result['from'] == initial_coordinate
assert result['to'] == expected
def test_attack_unit_cant_move_if_occupied(random_arena):
initial_coordinate = Coordinate(1, 1)
initial_enemy_coordinate = Coordinate(1, 2)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
enemy_unit = AttackUnit(initial_enemy_coordinate, 2, random_arena)
random_arena.set_content_on_tile(initial_coordinate, attack_unit)
random_arena.set_content_on_tile(initial_enemy_coordinate, enemy_unit)
result = attack_unit.move(Coordinate(0, 1))
assert result['error']
assert result['from'] == initial_coordinate
assert result['to'] == initial_coordinate
| from random import randint
import pytest
from onagame2015.lib import Coordinate
from onagame2015.units import AttackUnit
from onagame2015.arena import ArenaGrid, TileContainer
VALID_MOVES = (Coordinate(1, 0), Coordinate(-1, 0),
Coordinate(0, 1), Coordinate(0, -1),
)
INVALID_MOVES_FROM_00 = (Coordinate(-1, 0), Coordinate(0, -1))
INVALID_INPUTS = ('UP', 2334, 0.343, {'up', -1})
@pytest.mark.parametrize('invalid_input', INVALID_INPUTS)
def test_attack_unit_move_invalid_input(random_arena, invalid_input):
initial_coordinate = Coordinate(0, 0)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
result = attack_unit.move(invalid_input)
assert result.get('error') and 'invalid' in result.get('error')
assert attack_unit.coordinate == initial_coordinate
@pytest.mark.parametrize('invalid_move', INVALID_MOVES_FROM_00 + (999999, 123))
def test_attack_unit_move_out_of_arena(random_arena, invalid_move):
initial_coordinate = Coordinate(0, 0)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
result = attack_unit.move((99999, 99999))
assert result.get('error') and 'invalid' in result.get('error')
assert attack_unit.coordinate == initial_coordinate
@pytest.mark.parametrize('valid_move', VALID_MOVES)
def test_attack_unit_move(random_arena, valid_move):
initial_coordinate = Coordinate(1, 1)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
expected = Coordinate(initial_coordinate.latitude + valid_move.latitude, initial_coordinate.longitude + valid_move.longitude)
result = attack_unit.move(valid_move)
assert not result.get('error')
assert attack_unit.coordinate != initial_coordinate
assert attack_unit.coordinate == expected
assert result['from'] == initial_coordinate
assert result['to'] == expected
def test_attack_unit_cant_move_if_occupied(random_arena):
initial_coordinate = Coordinate(1, 1)
initial_enemy_coordinate = Coordinate(1, 2)
attack_unit = AttackUnit(initial_coordinate, 1, random_arena)
enemy_unit = AttackUnit(initial_enemy_coordinate, 2, random_arena)
random_arena.set_content_on_tile(initial_coordinate, attack_unit)
random_arena.set_content_on_tile(initial_enemy_coordinate, enemy_unit)
result = attack_unit.move(Coordinate(0, 1))
assert result['error']
assert result['from'] == initial_coordinate
assert result['to'] == initial_coordinate
| mit | Python |
1a724e8f655ab1b3e4a8aeb9991a6ef0391b19d9 | test for compare_suffixes | Chris7/cutadapt,marcelm/cutadapt | tests/testalign.py | tests/testalign.py | from __future__ import print_function, division, absolute_import
from cutadapt.align import (locate, compare_prefixes, compare_suffixes,
ALLOW_WILDCARD_SEQ1, ALLOW_WILDCARD_SEQ1)
from cutadapt.adapters import BACK
def test_polya():
s = 'AAAAAAAAAAAAAAAAA'
t = 'ACAGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
result = locate(s, t, 0.0, BACK)
#start_s, stop_s, start_t, stop_t, matches, cost = result
assert result == (0, len(s), 4, 4 + len(s), len(s), 0)
def test_compare_prefixes():
assert compare_prefixes('AAXAA', 'AAAAATTTTTTTTT') == (0, 5, 0, 5, 4, 1)
assert compare_prefixes('AANAA', 'AACAATTTTTTTTT', ALLOW_WILDCARD_SEQ1) == (0, 5, 0, 5, 5, 0)
assert compare_prefixes('AANAA', 'AACAATTTTTTTTT', ALLOW_WILDCARD_SEQ1) == (0, 5, 0, 5, 5, 0)
assert compare_prefixes('XAAAAA', 'AAAAATTTTTTTTT') == (0, 6, 0, 6, 4, 2)
def test_compare_suffixes():
assert compare_suffixes('AAXAA', 'TTTTTTTAAAAA') == (0, 5, 7, 12, 4, 1)
assert compare_suffixes('AANAA', 'TTTTTTTAACAA', ALLOW_WILDCARD_SEQ1) == (0, 5, 7, 12, 5, 0)
assert compare_suffixes('AANAA', 'TTTTTTTAACAA', ALLOW_WILDCARD_SEQ1) == (0, 5, 7, 12, 5, 0)
assert compare_suffixes('AAAAAX', 'TTTTTTTAAAAA') == (0, 6, 6, 12, 4, 2)
| from __future__ import print_function, division, absolute_import
from cutadapt.align import (locate, compare_prefixes,
ALLOW_WILDCARD_SEQ1, ALLOW_WILDCARD_SEQ1)
from cutadapt.adapters import BACK
def test_polya():
s = 'AAAAAAAAAAAAAAAAA'
t = 'ACAGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA'
result = locate(s, t, 0.0, BACK)
#start_s, stop_s, start_t, stop_t, matches, cost = result
assert result == (0, len(s), 4, 4 + len(s), len(s), 0)
def test_compare_prefixes():
assert compare_prefixes('AAXAA', 'AAAAATTTTTTTTT') == (0, 5, 0, 5, 4, 1)
assert compare_prefixes('AANAA', 'AACAATTTTTTTTT', ALLOW_WILDCARD_SEQ1) == (0, 5, 0, 5, 5, 0)
assert compare_prefixes('AANAA', 'AACAATTTTTTTTT', ALLOW_WILDCARD_SEQ1) == (0, 5, 0, 5, 5, 0)
assert compare_prefixes('XAAAAA', 'AAAAATTTTTTTTT') == (0, 6, 0, 6, 4, 2)
| mit | Python |
9015414ed9e2a3b294214d083467ff7946d667c5 | Fix wrong exception name | credativUK/vdirsyncer,untitaker/vdirsyncer,tribut/vdirsyncer,untitaker/vdirsyncer,hobarrera/vdirsyncer,credativUK/vdirsyncer,mathstuf/vdirsyncer,hobarrera/vdirsyncer,tribut/vdirsyncer,untitaker/vdirsyncer,mathstuf/vdirsyncer | tests/storage/dav/conftest.py | tests/storage/dav/conftest.py | # -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.dav.conftest
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
import os
import pytest
import requests
import requests.exceptions
import time
dav_server = os.environ.get('DAV_SERVER', '').strip() or 'radicale_filesystem'
php_sh = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../../../owncloud-testserver/php.sh'
))
def wait():
for i in range(10):
try:
requests.get('http://127.0.0.1:8080/')
except requests.exceptions.ConnectionError:
time.sleep(1)
else:
return True
return False
if dav_server == 'owncloud':
@pytest.fixture(autouse=True)
def start_owncloud_server(xprocess):
def preparefunc(cwd):
return wait, ['sh', php_sh]
xprocess.ensure('owncloud_server', preparefunc)
| # -*- coding: utf-8 -*-
'''
vdirsyncer.tests.storage.dav.conftest
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2014 Markus Unterwaditzer
:license: MIT, see LICENSE for more details.
'''
import os
import pytest
import requests
import requests.exceptions
import time
dav_server = os.environ.get('DAV_SERVER', '').strip() or 'radicale_filesystem'
php_sh = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../../../owncloud-testserver/php.sh'
))
def wait():
for i in range(10):
try:
requests.get('http://127.0.0.1:8080/')
except requests.exceptions.HTTPException:
time.sleep(1)
else:
return True
return False
if dav_server == 'owncloud':
@pytest.fixture(autouse=True)
def start_owncloud_server(xprocess):
def preparefunc(cwd):
return wait, ['sh', php_sh]
xprocess.ensure('owncloud_server', preparefunc)
| mit | Python |
e2ae32aa7ee16eb2362ad54675ef0c8319a2fca2 | Update Homework_Week3_CaseStudy2.py | LamaHamadeh/Harvard-PH526x | Week3-Case-Studies-Part1/Language-Processing/Homework_Week3_CaseStudy2.py | Week3-Case-Studies-Part1/Language-Processing/Homework_Week3_CaseStudy2.py | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 06 13:34:51 2017
@author: ADB3HAMADL
"""
'''
==============================
Case Study 2 -
==============================
'''
#In this case study, we will find and plot the distribution of word frequencies for each translation of Hamlet.
#Perhaps the distribution of word frequencies of Hamlet depends on the translation --- let's find out!
#For these exercises, functions count_words_fast, read_book, and word_stats are already defined as in the Case 2 Videos (Videos 3.2.x).
#----------------------------------------------------------------------------------------------------------------
#Define functions
#------------------
from collections import Counter
def count_words_fast(text):
"""count the number of times each word occurs in text (str).
Return dictionary where keys are unique words and values are
word counts. skip punctuations"""
text = text.lower() #lowercase for the counting letters so the function can cont the same words whether it's capatilised or not
skips = [".", ",", ";", ":", "'", '"'] #skipping all the punctuations to not be counted with the words that come bfore them
for ch in skips:
text = text.replace(ch,"")
word_counts = Counter(text.split(" "))
return word_counts
#------------------
def read_book(title_path):
"""Read a book and return it as a string"""
with open(title_path, "r") as current_file:
text = current_file.read()
text = text.replace("\n","").replace("\r","")
return text
#------------------
def word_stats(word_counts):
"""return the number of unique words and word frequencies"""
num_unique = len(word_counts) #calculate the number of unique words in the text
counts = word_counts.values() #calculate the frequency of each word in the text
return(num_unique,counts)
#----------------------------------------------------------------------------------------------------------------
# Exercise 1
#-----------
#TODO: Write a function word_count_distribution(text) that takes a book string and returns a dictionary with items
#corresponding to the count of times a collection of words appears in the translation, and values corresponding to
#the number of number of words that appear with that frequency.
#TODO: First use count_words_fast(text) to create a dictionary called word_counts with unique words in the dictionary
#as keys and their frequency in the book as values.
#TODO: Next, create and return a new dictionary count_distribution with unique values from word_counts as keys and their
#frequency as values. For example, 'you are what you eat' contains three words that occur once and one word that occurs twice,
#so word_count_distribution('you are what you eat') should return a dictionary {1:3, 2:1}.
#TODO: 'Romeo and Juliet' is preloaded as text. Call word_count_distribution(text), and save the result as distribution.
#------------------------------------------------------------------------------
# Exercise 2
#-----------
#TODO:
#------------------------------------------------------------------------------
| # -*- coding: utf-8 -*-
"""
Created on Mon Mar 06 13:34:51 2017
@author: ADB3HAMADL
"""
'''
==============================
Case Study 2 -
==============================
'''
#In this case study, we will find and plot the distribution of word frequencies for each translation of Hamlet.
#Perhaps the distribution of word frequencies of Hamlet depends on the translation --- let's find out!
#For these exercises, functions count_words_fast, read_book, and word_stats are already defined as in the Case 2 Videos (Videos 3.2.x).
#----------------------------------------------------------------------------------------------------------------
#Define functions
#------------------
from collections import Counter
def count_words_fast(text):
"""count the number of times each word occurs in text (str).
Return dictionary where keys are unique words and values are
word counts. skip punctuations"""
text = text.lower() #lowercase for the counting letters so the function can cont the same words whether it's capatilised or not
skips = [".", ",", ";", ":", "'", '"'] #skipping all the punctuations to not be counted with the words that come bfore them
for ch in skips:
text = text.replace(ch,"")
word_counts = Counter(text.split(" "))
return word_counts
#------------------
def read_book(title_path):
"""Read a book and return it as a string"""
with open(title_path, "r") as current_file:
text = current_file.read()
text = text.replace("\n","").replace("\r","")
return text
#------------------
def word_stats(word_counts):
"""return the number of unique words and word frequencies"""
num_unique = len(word_counts) #calculate the number of unique words in the text
counts = word_counts.values() #calculate the frequency of each word in the text
return(num_unique,counts)
#----------------------------------------------------------------------------------------------------------------
# Exercise 1
#-----------
#TODO:
#------------------------------------------------------------------------------
# Exercise 2
#-----------
#TODO:
#------------------------------------------------------------------------------
| mit | Python |
0360715caa7358e2d069e11b08e00fe70ba25129 | test command not found case | FunTimeCoding/python-utility,FunTimeCoding/python-utility | tests/test_command_process.py | tests/test_command_process.py | import pytest
from python_utility.command_process import CommandProcess, CommandFailed
def test_command_process(capfd) -> None:
process = CommandProcess(arguments=['echo', 'hello'])
assert process.get_return_code() == 0
assert process.get_standard_output() == 'hello'
assert process.get_standard_error() == ''
process.print_output()
standard_output, standard_error = capfd.readouterr()
assert standard_output == 'hello\n'
assert standard_error == ''
def test_command_fails_with_output() -> None:
with pytest.raises(CommandFailed) as exception:
CommandProcess(arguments=['tests/fixture/fails-with-output.sh'])
assert 'test stdout' in str(exception.value)
assert exception.value.get_command() == 'tests/fixture/fails-with-output.sh'
assert exception.value.get_return_code() == 1
assert exception.value.get_standard_output() == 'test stdout'
assert exception.value.get_standard_error() == 'test stderr'
def test_command_fails_without_output() -> None:
with pytest.raises(CommandFailed) as exception:
CommandProcess(arguments=['tests/fixture/fails-without-output.sh'])
assert 'CommandFailed' in str(exception.value)
assert exception.value.get_command() == \
'tests/fixture/fails-without-output.sh'
assert exception.value.get_return_code() == 1
assert exception.value.get_standard_output() == ''
assert exception.value.get_standard_error() == ''
def test_command_not_found() -> None:
with pytest.raises(CommandFailed) as exception:
CommandProcess(arguments=['does-not-exist'])
assert 'File not found: does-not-exist' in str(exception.value)
assert exception.value.get_command() == 'does-not-exist'
assert exception.value.get_return_code() == -1
assert exception.value.get_standard_output() == \
'File not found: does-not-exist'
assert exception.value.get_standard_error() == \
'No such file or directory: \'does-not-exist\''
| import pytest
from python_utility.command_process import CommandProcess, CommandFailed
def test_command_process(capfd) -> None:
process = CommandProcess(arguments=['echo', 'hello'])
assert process.get_return_code() == 0
assert process.get_standard_output() == 'hello'
assert process.get_standard_error() == ''
process.print_output()
standard_output, standard_error = capfd.readouterr()
assert standard_output == 'hello\n'
assert standard_error == ''
def test_command_fails_with_output() -> None:
with pytest.raises(CommandFailed) as exception:
CommandProcess(arguments=['tests/fixture/fails-with-output.sh'])
assert 'test stdout' in str(exception.value)
assert exception.value.get_command() == 'tests/fixture/fails-with-output.sh'
assert exception.value.get_return_code() == 1
assert exception.value.get_standard_output() == 'test stdout'
assert exception.value.get_standard_error() == 'test stderr'
def test_command_fails_without_output() -> None:
with pytest.raises(CommandFailed) as exception:
CommandProcess(arguments=['tests/fixture/fails-without-output.sh'])
assert 'CommandFailed' in str(exception.value)
assert exception.value.get_command() == \
'tests/fixture/fails-without-output.sh'
assert exception.value.get_return_code() == 1
assert exception.value.get_standard_output() == ''
assert exception.value.get_standard_error() == ''
| mit | Python |
0fa1f144e63ee74e31af985b9115ac098e662b45 | add curly brace | Caleydo/caleydo_server,phovea/phovea_server,phovea/phovea_server,phovea/phovea_server,phovea/phovea_server,Caleydo/caleydo_server | tests/test_custom_encoders.py | tests/test_custom_encoders.py |
from phovea_server.util import to_json
class TestCustomEncoders:
def test_nan_values(self):
# single variable
test_var = float('nan')
# simple list
test_list_simple = [13, 5, 7, 12, test_var, 22]
# simple dictionary
test_dict = {'first': [4, 6, 2, test_var], 'second': 3, 'third': [test_var, 3, 78, 6, 3, 2]}
# list that contains dictionary
test_list_nested = [13, 5, 7, 12, test_dict, 22]
# convert with to_json
test_result_simple = to_json(dict(myNum=test_var))
test_result_list_simple = to_json(dict(myNum=test_list_simple))
test_result_list_nested = to_json(dict(myNum=test_list_nested))
# make assertions
assert test_result_simple == '{"myNum": null}'
assert test_result_list_simple == '{"myNum": [13, 5, 7, 12, null, 22]'
assert test_result_list_nested == '{"myNum": [13, 5, 7, 12, "{"first": [4, 6, 2, test_var], "second": 3, "third": [test_var, 3, 78, 6, 3, 2]}", 22]}'
|
from phovea_server.util import to_json
class TestCustomEncoders:
def test_nan_values(self):
# single variable
test_var = float('nan')
# simple list
test_list_simple = [13, 5, 7, 12, test_var, 22]
# simple dictionary
test_dict = {'first': [4, 6, 2, test_var], 'second': 3, 'third': [test_var, 3, 78, 6, 3, 2]}
# list that contains dictionary
test_list_nested = [13, 5, 7, 12, test_dict, 22]
# convert with to_json
test_result_simple = to_json(dict(myNum=test_var))
test_result_list_simple = to_json(dict(myNum=test_list_simple))
test_result_list_nested = to_json(dict(myNum=test_list_nested))
# make assertions
assert test_result_simple == '{"myNum": null}'
assert test_result_list_simple == '{"myNum": [13, 5, 7, 12, null, 22]'
assert test_result_list_nested == '{"myNum": [13, 5, 7, 12, "{"first": [4, 6, 2, test_var], "second": 3, "third": [test_var, 3, 78, 6, 3, 2]}", 22]'
| bsd-3-clause | Python |
718803a7f0de83738043f58987a264cccabfa935 | Update __version__.py | avehtari/GPy,dhhjx880713/GPy,SheffieldML/GPy,avehtari/GPy,mikecroucher/GPy,dhhjx880713/GPy,esiivola/GPYgradients,befelix/GPy,dhhjx880713/GPy,befelix/GPy,befelix/GPy,SheffieldML/GPy,avehtari/GPy,esiivola/GPYgradients,mikecroucher/GPy,dhhjx880713/GPy,SheffieldML/GPy,ysekky/GPy,SheffieldML/GPy,ysekky/GPy,avehtari/GPy,esiivola/GPYgradients,ysekky/GPy,mikecroucher/GPy,befelix/GPy,mikecroucher/GPy,esiivola/GPYgradients,ysekky/GPy | GPy/__version__.py | GPy/__version__.py | __version__ = "1.0.6"
| __version__ = "1.0.5"
| bsd-3-clause | Python |
642472b6b19e95640553ffb82a31cec16b07f0ae | Add support for nested Tooltips inside TooltipNodes | jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow | game/__init__.py | game/__init__.py | # -*- coding: utf-8 -*-
"""
Game module
Contains model logic for the game
"""
# Colors
BLUE = 0x0080ff
CYAN = 0x66bbff
DARKCYAN = 0x88aaff
GOLD = 0xe5cc80
GREEN = 0x1eff00
GREY = 0x9d9d9d
ORANGE = 0xff8000
PURPLE = 0xb048f8
RED = 0xff2020
YELLOW = 0xffd100
WHITE = 0xffffff
class Model(object):
"""
Base Model class for all the game models:
Items, Spells, Quests, Talents, ...
"""
@classmethod
def initProxy(cls, proxy):
cls.proxy = proxy(cls)
def __init__(self, id):
if not hasattr(self, "proxy"):
raise RuntimeError("%s.proxy needs to be initialized with initProxy(proxy)" % (self.__class__.__name__))
self.id = id
self.obj = self.proxy.get(id)
#if not self.obj:
#self = None
def __getattr__(self, attr):
if attr != "obj" and hasattr(self.obj, attr):
return getattr(self.obj, attr)
if attr != "proxy" and hasattr(self.proxy, attr):
func = getattr(self.proxy, attr)
return lambda: func(self.obj)
return super(Model, self).__getattribute__(attr)
def __repr__(self):
if hasattr(self, "name"):
return "<%s #%i: %s>" % (self.__class__.__name__, self.id, self.name)
return "<%s #%i>" % (self.__class__.__name__, self.id)
class Tooltip(object):
LEFT = 0
RIGHT = 1
def __init__(self, obj):
self.obj = obj
self.keys = []
self.values = []
def append(self, name, text, color=WHITE, side=LEFT):
if text:
self.keys.append(name)
self.values.append(TooltipNode(name, text, color, side))
def formatAppend(self, name, text, value, color=WHITE):
if value:
self.append(name, text % (value), color)
def render(self, renderer):
return renderer(self.tooltip())
class TooltipNode(object):
def __init__(self, name, content, color, side):
self.name = name
if isinstance(content, Tooltip):
self.tooltip = content
else:
self.text = content
self.color = color
self.side = side
def __repr__(self):
return repr(self.getText())
def __str__(self):
return self.getText()
def getColor(self):
if self.isTooltip():
return 0
return self.color
def getText(self):
if self.isTooltip():
return ""
return str(self.text)
def isTooltip(self):
return hasattr(self, "tooltip")
| # -*- coding: utf-8 -*-
"""
Game module
Contains model logic for the game
"""
# Colors
BLUE = 0x0080ff
CYAN = 0x66bbff
DARKCYAN = 0x88aaff
GOLD = 0xe5cc80
GREEN = 0x1eff00
GREY = 0x9d9d9d
ORANGE = 0xff8000
PURPLE = 0xb048f8
RED = 0xff2020
YELLOW = 0xffd100
WHITE = 0xffffff
class Model(object):
"""
Base Model class for all the game models:
Items, Spells, Quests, Talents, ...
"""
@classmethod
def initProxy(cls, proxy):
cls.proxy = proxy(cls)
def __init__(self, id):
if not hasattr(self, "proxy"):
raise RuntimeError("%s.proxy needs to be initialized with initProxy(proxy)" % (self.__class__.__name__))
self.id = id
self.obj = self.proxy.get(id)
#if not self.obj:
#self = None
def __getattr__(self, attr):
if attr != "obj" and hasattr(self.obj, attr):
return getattr(self.obj, attr)
if attr != "proxy" and hasattr(self.proxy, attr):
func = getattr(self.proxy, attr)
return lambda: func(self.obj)
return super(Model, self).__getattribute__(attr)
def __repr__(self):
if hasattr(self, "name"):
return "<%s #%i: %s>" % (self.__class__.__name__, self.id, self.name)
return "<%s #%i>" % (self.__class__.__name__, self.id)
class Tooltip(object):
LEFT = 0
RIGHT = 1
def __init__(self, obj):
self.obj = obj
self.keys = []
self.values = []
def append(self, name, text, color=WHITE, side=LEFT):
if text:
self.keys.append(name)
self.values.append(TooltipNode(name, text, color, side))
def formatAppend(self, name, text, value, color=WHITE):
if value:
self.append(name, text % (value), color)
def render(self, renderer):
return renderer(self.tooltip())
class TooltipNode(object):
def __init__(self, name, text, color, side):
self.name = name
self.text = text
self.color = color
self.side = side
def __repr__(self):
return repr(self.text)
def __str__(self):
return str(self.text)
def getColor(self):
return self.color
def getText(self):
return str(self.text)
| cc0-1.0 | Python |
c1fa88016da8365290fa62965f592930ae61c033 | Set __version__ to 2.2.0. | hyperspy/start_jupyter_cm | start_jupyter_cm/__init__.py | start_jupyter_cm/__init__.py | __version__ = "2.2.0"
| __version__ = "2.2.dev"
| bsd-3-clause | Python |
39109678414be9b89c4bcc36c53497b5fe197583 | Add beta.herocomics.kr to ALLOWED_HOSTS in hydrocarbon.settings.production | devunt/hydrocarbon,devunt/hydrocarbon,devunt/hydrocarbon | hydrocarbon/settings/production.py | hydrocarbon/settings/production.py | import os
from hydrocarbon.settings.base import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '***REMOVED***'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
# ALLOWED HOSTS
ALLOWED_HOSTS = ['herocomics.kr', 'beta.herocomics.kr']
# Cache backend
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
'LOCATION': '10.54.45.1:11211',
}
}
# Session backend
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'herocomics',
'USER': 'herocomics',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '5432',
'ATOMIC_REQUESTS': True,
}
}
# Template loaders
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = '/home/herocomics/static'
STATIC_URL = 'http://s.herocomics.kr/'
# Media files
MEDIA_ROOT = '/home/herocomics/media'
MEDIA_URL = 'http://uc.herocomics.kr/'
| import os
from hydrocarbon.settings.base import *
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '***REMOVED***'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
# ALLOWED HOSTS
ALLOWED_HOSTS = ['herocomics.kr']
# Cache backend
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
'LOCATION': '10.54.45.1:11211',
}
}
# Session backend
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'herocomics',
'USER': 'herocomics',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '5432',
'ATOMIC_REQUESTS': True,
}
}
# Template loaders
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = '/home/herocomics/static'
STATIC_URL = 'http://s.herocomics.kr/'
# Media files
MEDIA_ROOT = '/home/herocomics/media'
MEDIA_URL = 'http://uc.herocomics.kr/'
| mit | Python |
8a124fc6d7db89e0a385262fcf02e3b421b33db9 | Add date_published field to Photo model | DZwell/django-imager | imagersite/imager_images/models.py | imagersite/imager_images/models.py | from django.db import models
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
PUBLISHED_CHOICES = (('private', 'private'), ('public', 'public'), ('shared', 'shared'))
PUBLISHED_DEFAULT = PUBLISHED_CHOICES[0][1]
@python_2_unicode_compatible
class Photo(models.Model):
"""Photo class."""
image = models.ImageField(upload_to='media')
title = models.CharField(max_length=250)
description = models.TextField()
uploaded = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
date_published = models.DateTimeField()
album = models.ManyToManyField('Album', related_name='album')
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='photos'
)
published = models.CharField(
max_length=10,
choices=PUBLISHED_CHOICES
)
def __str__(self):
"""Return title."""
return self.title
@python_2_unicode_compatible
class Album(models.Model):
"""Album class."""
photos = models.ManyToManyField('Photo', related_name='photos')
owned_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='albums')
title = models.CharField(max_length=255)
description = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
# cover_photo = models.ForeignKey('Photo', related_name='cover', blank=True)
published = models.CharField(
max_length=10,
choices=PUBLISHED_CHOICES,
default=PUBLISHED_DEFAULT
)
def __str__(self):
"""Return title."""
return self.title
| from django.db import models
from django.conf import settings
from django.utils.encoding import python_2_unicode_compatible
PUBLISHED_CHOICES = (('private', 'private'), ('public', 'public'), ('shared', 'shared'))
PUBLISHED_DEFAULT = PUBLISHED_CHOICES[0][1]
@python_2_unicode_compatible
class Photo(models.Model):
"""Photo class."""
image = models.ImageField(upload_to='media')
title = models.CharField(max_length=250)
description = models.TextField()
uploaded = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
album = models.ManyToManyField('Album', related_name='album')
owner = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='photos'
)
published = models.CharField(
max_length=10,
choices=PUBLISHED_CHOICES
)
def __str__(self):
"""Return title."""
return self.title
@python_2_unicode_compatible
class Album(models.Model):
"""Album class."""
photos = models.ManyToManyField('Photo', related_name='photos')
owned_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='albums')
title = models.CharField(max_length=255)
description = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
# cover_photo = models.ForeignKey('Photo', related_name='cover', blank=True)
published = models.CharField(
max_length=10,
choices=PUBLISHED_CHOICES,
default=PUBLISHED_DEFAULT
)
def __str__(self):
"""Return title."""
return self.title
| mit | Python |
281b7ccad3b649d03dbf394c89f69b772d9048d8 | Exit with result | adamtheturtle/vws-python,adamtheturtle/vws-python | ci/run_script.py | ci/run_script.py | """
Run tests and linters on Travis CI.
"""
import os
import subprocess
import sys
from pathlib import Path
import pytest
def run_test(test_filename: str) -> None:
"""
Run pytest with a given filename.
"""
path = Path('tests') / 'mock_vws' / test_filename
result = pytest.main([
'-vvv',
'--exitfirst',
str(path),
'--cov=src',
'--cov=tests',
])
sys.exit(result)
if __name__ == '__main__':
TEST_FILENAME = os.environ.get('TEST_FILENAME')
if TEST_FILENAME:
run_test(test_filename=TEST_FILENAME)
else:
subprocess.check_call(['make', 'lint'])
| """
Run tests and linters on Travis CI.
"""
import os
import subprocess
from pathlib import Path
import pytest
def run_test(test_filename: str) -> None:
"""
Run pytest with a given filename.
"""
path = Path('tests') / 'mock_vws' / test_filename
pytest.main([
'--exitfirst',
str(path),
'--cov=src',
'--cov=tests',
])
if __name__ == '__main__':
TEST_FILENAME = os.environ.get('TEST_FILENAME')
if TEST_FILENAME:
run_test(test_filename=TEST_FILENAME)
else:
subprocess.check_call(['make', 'lint'])
| mit | Python |
874c4ab2eff39fa14cbce56609cb7e08b4fda815 | Rewrite interface to accept input and provide output on a local unix socket instead of through stdin/stdout. | matslindh/4096 | 4096/interface.py | 4096/interface.py | import engine, sys, uuid, random, subprocess, socket, os
if len(sys.argv) < 3:
sys.stderr.write("Usage: interface.py <randomseed> <executable>\n")
sys.exit()
# set up seed from arguments
random.seed(sys.argv[1])
# helpers for writing and reading to the socket connection
def write(conn, str):
conn.send(str.encode("utf-8"))
def read(conn):
return conn.recv(1024).decode("utf-8").strip()
# create local unix socket for communication with child
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
identifier = str(uuid.uuid4())
s_path = "/tmp/4096-" + identifier
s.bind(s_path)
# launch child
process = subprocess.Popen([sys.argv[2], s_path])
s.listen(1)
conn, addr = s.accept()
# set up engine and game meta information
game = engine.Engine()
move_count = 0
game_name = read(conn)
sys.stderr.write("Game: " + game_name + "\n")
sys.stderr.write("Identifier: " + identifier + "\n")
# give client board and process input until finished
write(conn, game.to_string())
while True:
c = read(conn)
if c == 'u':
game.up()
elif c == 'd':
game.down()
elif c == 'l':
game.left()
elif c == 'r':
game.right()
write(conn, game.to_string())
move_count += 1
if game.is_board_locked():
write(conn, "FIN " + str(game.score) + "\n")
break
# give score
sys.stderr.write("Score: " + str(game.score) + "\n")
sys.stderr.write("Moves: " + str(move_count) + "\n")
# clean up
process.terminate()
os.remove(s_path) | import engine, sys, uuid, random
if len(sys.argv) < 2:
sys.stderr.write("Usage: interface.py <randomseed>\n")
sys.exit()
random.seed(sys.argv[1])
game = engine.Engine()
move_count = 0
game_name = sys.stdin.readline().strip()
identifier = str(uuid.uuid4())
sys.stderr.write("Game: " + game_name + "\n")
sys.stderr.write("Identifier: " + identifier + "\n")
game.print_board()
while True:
c = sys.stdin.readline().strip()
if c == 'u':
game.up()
elif c == 'd':
game.down()
elif c == 'l':
game.left()
elif c == 'r':
game.right()
game.print_board()
move_count += 1
if game.is_board_locked():
break
sys.stderr.write("Score: " + str(game.score) + "\n")
sys.stderr.write("Moves: " + str(move_count) + "\n") | mit | Python |
a2fd2436cb1c0285dfdd18fad43e505d7c246535 | Handle spotify: -type urls Cleanup | rnyberg/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,nigeljonez/newpyfibot,EArmour/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,rnyberg/pyfibot,aapa/pyfibot,aapa/pyfibot | modules/module_spotify.py | modules/module_spotify.py | import re
import urllib
def do_spotify(bot, user, channel, dataurl):
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
def handle_privmsg(bot, user, reply, msg):
"""Grab Spotify URLs from the messages and handle them"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", msg)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
do_spotify(bot, user, reply, dataurl)
|
import re
import urllib
def handle_url(bot, user, channel, url, msg):
"""Handle IMDB urls"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
| bsd-3-clause | Python |
1ba11bb266684c26c0559651592751730bba97b5 | Update appvalidator/constants.py | mozilla/app-validator,diox/app-validator,mstriemer/app-validator,mozilla/app-validator,eviljeff/app-validator,mstriemer/app-validator,mstriemer/app-validator,mattbasta/perfalator,mozilla/app-validator,diox/app-validator,stasm/app-validator,diox/app-validator,stasm/app-validator,mozilla/app-validator,diox/app-validator,mattbasta/perfalator,eviljeff/app-validator,stasm/app-validator,eviljeff/app-validator,stasm/app-validator,mattbasta/perfalator,eviljeff/app-validator | appvalidator/constants.py | appvalidator/constants.py | "Constants that will be used across files."
import json
import os
# Package type constants.
PACKAGE_ANY = 0
PACKAGE_WEBAPP = 8
PACKAGE_PACKAGED_WEBAPP = 9
SPIDERMONKEY_INSTALLATION = os.environ.get("SPIDERMONKEY_INSTALLATION")
DEFAULT_WEBAPP_MRKT_URLS = ["https://marketplace.firefox.com",
"https://marketplace-dev.allizom.org"]
BUGZILLA_BUG = "https://bugzilla.mozilla.org/show_bug.cgi?id=%d"
DEFAULT_TIMEOUT = 60
MAX_RESOURCE_SIZE = 2 * 1024 * 1024
# Graciously provided by @kumar in bug 614574
if (not SPIDERMONKEY_INSTALLATION or
not os.path.exists(SPIDERMONKEY_INSTALLATION)):
for p in os.environ.get("PATH", "").split(":"):
SPIDERMONKEY_INSTALLATION = os.path.join(p, "js")
if os.path.exists(SPIDERMONKEY_INSTALLATION):
break
if not os.path.exists(SPIDERMONKEY_INSTALLATION):
SPIDERMONKEY_INSTALLATION = "/usr/bin/js"
# The fallback is simply to disable JS tests.
if (not os.path.exists(SPIDERMONKEY_INSTALLATION) or
os.environ.get("TRAVIS", "") == "true"):
SPIDERMONKEY_INSTALLATION = None
try:
from constants_local import *
except ImportError:
pass
| "Constants that will be used across files."
import json
import os
# Package type constants.
PACKAGE_ANY = 0
PACKAGE_WEBAPP = 8
PACKAGE_PACKAGED_WEBAPP = 9
SPIDERMONKEY_INSTALLATION = os.environ.get("SPIDERMONKEY_INSTALLATION")
DEFAULT_WEBAPP_MRKT_URLS = ["https://marketplace.mozilla.org",
"https://marketplace-dev.allizom.org"]
BUGZILLA_BUG = "https://bugzilla.mozilla.org/show_bug.cgi?id=%d"
DEFAULT_TIMEOUT = 60
MAX_RESOURCE_SIZE = 2 * 1024 * 1024
# Graciously provided by @kumar in bug 614574
if (not SPIDERMONKEY_INSTALLATION or
not os.path.exists(SPIDERMONKEY_INSTALLATION)):
for p in os.environ.get("PATH", "").split(":"):
SPIDERMONKEY_INSTALLATION = os.path.join(p, "js")
if os.path.exists(SPIDERMONKEY_INSTALLATION):
break
if not os.path.exists(SPIDERMONKEY_INSTALLATION):
SPIDERMONKEY_INSTALLATION = "/usr/bin/js"
# The fallback is simply to disable JS tests.
if (not os.path.exists(SPIDERMONKEY_INSTALLATION) or
os.environ.get("TRAVIS", "") == "true"):
SPIDERMONKEY_INSTALLATION = None
try:
from constants_local import *
except ImportError:
pass
| bsd-3-clause | Python |
912bb4195136764345e24bcb01eb6b0c94176362 | Support hidden_sizes=[] | toslunar/chainerrl,toslunar/chainerrl | links/mlp_bn.py | links/mlp_bn.py | from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import super
from builtins import range
from future import standard_library
standard_library.install_aliases()
import random
import numpy as np
import chainer
from chainer import functions as F
from chainer import links as L
from chainer import cuda
from q_output import DiscreteQOutput
from q_output import ContinuousQOutput
from functions.lower_triangular_matrix import lower_triangular_matrix
class LinearBN(chainer.Chain):
"""Linear layer with BatchNormalization."""
def __init__(self, in_size, out_size):
linear = L.Linear(in_size, out_size)
bn = L.BatchNormalization(out_size)
bn.avg_var[:] = 1
super().__init__(linear=linear, bn=bn)
def __call__(self, x, test=False):
return self.bn(self.linear(x), test=test)
class MLPBN(chainer.Chain):
"""Multi-Layer Perceptron with BatchNormalization."""
def __init__(self, in_size, out_size, hidden_sizes, normalize_input=True):
self.in_size = in_size
self.out_size = out_size
self.hidden_sizes = hidden_sizes
self.normalize_input = normalize_input
layers = {}
if normalize_input:
layers['input_bn'] = L.BatchNormalization(in_size)
layers['input_bn'].avg_var[:] = 1
if hidden_sizes:
hidden_layers = []
hidden_layers.append(LinearBN(in_size, hidden_sizes[0]))
for hin, hout in zip(hidden_sizes, hidden_sizes[1:]):
hidden_layers.append(LinearBN(hin, hout))
layers['hidden_layers'] = chainer.ChainList(*hidden_layers)
layers['output'] = L.Linear(hidden_sizes[-1], out_size)
else:
layers['output'] = L.Linear(in_size, out_size)
super().__init__(**layers)
def __call__(self, x, test=False):
h = x
assert test or x.shape[0] > 1
if self.normalize_input:
h = self.input_bn(h, test=test)
if self.hidden_sizes:
for l in self.hidden_layers:
h = F.relu(l(h, test=test))
return self.output(h)
| from __future__ import division
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from builtins import super
from builtins import range
from future import standard_library
standard_library.install_aliases()
import random
import numpy as np
import chainer
from chainer import functions as F
from chainer import links as L
from chainer import cuda
from q_output import DiscreteQOutput
from q_output import ContinuousQOutput
from functions.lower_triangular_matrix import lower_triangular_matrix
class LinearBN(chainer.Chain):
"""Linear layer with BatchNormalization."""
def __init__(self, in_size, out_size):
linear = L.Linear(in_size, out_size)
bn = L.BatchNormalization(out_size)
bn.avg_var[:] = 1
super().__init__(linear=linear, bn=bn)
def __call__(self, x, test=False):
return self.bn(self.linear(x), test=test)
class MLPBN(chainer.Chain):
"""Multi-Layer Perceptron with BatchNormalization."""
def __init__(self, in_size, out_size, hidden_sizes, normalize_input=True):
self.in_size = in_size
self.out_size = out_size
self.hidden_sizes = hidden_sizes
self.normalize_input = normalize_input
layers = {}
if normalize_input:
layers['input_bn'] = L.BatchNormalization(in_size)
layers['input_bn'].avg_var[:] = 1
if hidden_sizes:
hidden_layers = []
hidden_layers.append(LinearBN(in_size, hidden_sizes[0]))
for hin, hout in zip(hidden_sizes, hidden_sizes[1:]):
hidden_layers.append(LinearBN(hin, hout))
layers['hidden_layers'] = chainer.ChainList(*hidden_layers)
layers['output'] = L.Linear(hidden_sizes[-1], out_size)
else:
layers['output'] = L.Linear(in_size, out_size)
super().__init__(**layers)
def __call__(self, x, test=False):
h = x
assert test or x.shape[0] > 1
if self.normalize_input:
h = self.input_bn(h, test=test)
for l in self.hidden_layers:
h = F.relu(l(h, test=test))
return self.output(h)
| mit | Python |
af97016a0af3807ee6cb3d4db464d637bbe01de3 | Use utils.chainstruct in core | tkf/fillplots,tkf/fillplots | ineqfill/core.py | ineqfill/core.py | from .utils.chainstruct import Struct
class Config(Struct):
# Should be renamed to "Resource?"
def __init__(self, *args, **kwds):
# FIXME: write arguments explicitly
self.line_args = {}
self.fill_args = {}
self.num_direction_arrows = 5
self.direction_arrows_size = 0.03
super(Config, self).__init__(*args, **kwds)
@property
def ax(self):
from matplotlib import pyplot
return pyplot.gca() # FIXME
def set_lim(self):
self.ax.set_xlim(*self.xlim)
self.ax.set_ylim(*self.ylim)
class Configurable(object):
def __init__(self, baseconfig):
self.config = Config(baseconfig)
| class BaseConfig(object):
def __init__(self, **kwds):
self.__dict__.update(kwds)
@property
def ax(self):
from matplotlib import pyplot
return pyplot.gca() # FIXME
def set_lim(self):
self.ax.set_xlim(*self.xlim)
self.ax.set_ylim(*self.ylim)
class Config(BaseConfig):
# Should be renamed to "Resource?"
def __init__(self, **kwds):
# FIXME: write arguments explicitly
self.line_args = {}
self.fill_args = {}
self.num_direction_arrows = 5
self.direction_arrows_size = 0.03
super(Config, self).__init__(**kwds)
class ModifiedConfig(BaseConfig):
def __init__(self, base, **kwds):
self._base = base
"""
Like ``.prototype`` in Javascript.
"""
super(ModifiedConfig, self).__init__(**kwds)
def __getattr__(self, name):
return getattr(self._base, name)
class Configurable(object):
def __init__(self, baseconfig):
self.config = ModifiedConfig(baseconfig)
| bsd-2-clause | Python |
025c6e93e62da5338d651ba37ab942a93f62e635 | Update wsgi.py | fabianvf/scrapi,felliott/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi | api/api/wsgi.py | api/api/wsgi.py | """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
application = get_wsgi_application()
| """
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings")
application = get_wsgi_application()
| apache-2.0 | Python |
bbf5f6ffd9d7bd17e23586efdb339bd08ab60285 | Update settings.py | deccico/gowest,deccico/gowest | gowestapp/gowest/settings.py | gowestapp/gowest/settings.py | """
Django settings for gowest project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'your_secret'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
TEMPLATE_DIRS = (
BASE_DIR + os.sep + 'templates',
)
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'go',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gowest.urls'
WSGI_APPLICATION = 'gowest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
'/var/www/static/',
)
| """
Django settings for gowest project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ugkja!^57&39cp&h6hxi3g^7*dur&lma-f3b=y20+l&$ca_1!='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
TEMPLATE_DIRS = (
BASE_DIR + os.sep + 'templates',
)
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'go',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gowest.urls'
WSGI_APPLICATION = 'gowest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
'/var/www/static/',
) | mit | Python |
8bc25d6b050ba035b4d8bda7f5ad1f07a0c06a5c | Fix bad import | little-dude/monolithe,nuagenetworks/monolithe,little-dude/monolithe,nuagenetworks/monolithe,nuagenetworks/monolithe,little-dude/monolithe | monolithe/lib/__init__.py | monolithe/lib/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__all__ = ["Printer", "SDKUtils", "TaskManager", "apply_extension", "load_language_plugins"]
from .printer import Printer
from .sdkutils import SDKUtils
from .taskmanager import TaskManager
from .utils import apply_extension, load_language_plugins
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__all__ = ["Printer", "SDKLoader", "SDKUtils", "TaskManager", "apply_extension", "load_language_plugins"]
from .printer import Printer
from .sdkloader import SDKLoader
from .sdkutils import SDKUtils
from .taskmanager import TaskManager
from .utils import apply_extension, load_language_plugins
| bsd-3-clause | Python |
99fba41b7392b1e5e4216145f1e8913698b60914 | Remove Python 2 compatibility code | hechtus/mopidy-gmusic,mopidy/mopidy-gmusic | mopidy_gmusic/commands.py | mopidy_gmusic/commands.py | import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow
class GMusicCommand(commands.Command):
def __init__(self):
super().__init__()
self.add_child("login", LoginCommand())
class LoginCommand(commands.Command):
def run(self, args, config):
oauth_info = gmusicapi.Mobileclient._session_class.oauth
flow = OAuth2WebServerFlow(**oauth_info._asdict())
print()
print(
"Go to the following URL to get an initial auth code, "
"then provide it below:"
)
print(flow.step1_get_authorize_url())
print()
initial_code = input("code: ")
credentials = flow.step2_exchange(initial_code)
refresh_token = credentials.refresh_token
print("\nPlease update your config to include the following:")
print()
print("[gmusic]")
print("refresh_token =", refresh_token)
print()
| import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow
class GMusicCommand(commands.Command):
def __init__(self):
super().__init__()
self.add_child("login", LoginCommand())
class LoginCommand(commands.Command):
def run(self, args, config):
oauth_info = gmusicapi.Mobileclient._session_class.oauth
flow = OAuth2WebServerFlow(**oauth_info._asdict())
print()
print(
"Go to the following URL to get an initial auth code, then "
+ "provide it below: "
+ flow.step1_get_authorize_url()
)
print()
try:
initial_code = raw_input("code: ")
except NameError:
# Python 3
initial_code = input("code: ")
credentials = flow.step2_exchange(initial_code)
refresh_token = credentials.refresh_token
print("\nPlease update your config to include the following:")
print()
print("[gmusic]")
print("refresh_token =", refresh_token)
print()
| apache-2.0 | Python |
87916c801168743ed5a675c1161462b9deadea6e | Remove fixed TODO | kingosticks/mopidy-spotify,mopidy/mopidy-spotify,jodal/mopidy-spotify | mopidy_spotify/backend.py | mopidy_spotify/backend.py | from __future__ import unicode_literals
import logging
import os
import threading
from mopidy import backend
import pykka
import spotify
logger = logging.getLogger(__name__)
class SpotifyBackend(pykka.ThreadingActor, backend.Backend):
_logged_in = threading.Event()
_logged_out = threading.Event()
_logged_out.set()
def __init__(self, config, audio):
super(SpotifyBackend, self).__init__()
self._config = config
self._audio = audio
spotify_config = spotify.Config()
spotify_config.load_application_key_file(
os.path.join(os.path.dirname(__file__), 'spotify_appkey.key'))
spotify_config.cache_location = self._config['spotify']['cache_dir']
spotify_config.settings_location = (
self._config['spotify']['settings_dir'])
self._session = spotify.Session(spotify_config)
self._event_loop = spotify.EventLoop(self._session)
self.library = None
self.playback = None
self.playlists = None
self.uri_schemes = ['spotify']
def on_start(self):
self._session.on(
spotify.SessionEvent.CONNECTION_STATE_UPDATED,
SpotifyBackend.on_connection_state_changed)
self._event_loop.start()
self._session.login(
self._config['spotify']['username'],
self._config['spotify']['password'])
def on_stop(self):
logger.debug('Logging out of Spotify')
self._session.logout()
self._logged_out.wait()
self._event_loop.stop()
@classmethod
def on_connection_state_changed(cls, session):
if session.connection.state is spotify.ConnectionState.LOGGED_IN:
logger.info('Connected to Spotify')
cls._logged_in.set()
cls._logged_out.clear()
elif session.connection.state is spotify.ConnectionState.LOGGED_OUT:
logger.debug('Logged out of Spotify')
cls._logged_in.clear()
cls._logged_out.set()
| from __future__ import unicode_literals
import logging
import os
import threading
from mopidy import backend
import pykka
import spotify
logger = logging.getLogger(__name__)
class SpotifyBackend(pykka.ThreadingActor, backend.Backend):
_logged_in = threading.Event()
_logged_out = threading.Event()
_logged_out.set()
def __init__(self, config, audio):
super(SpotifyBackend, self).__init__()
self._config = config
self._audio = audio
spotify_config = spotify.Config()
spotify_config.load_application_key_file(
os.path.join(os.path.dirname(__file__), 'spotify_appkey.key'))
spotify_config.cache_location = self._config['spotify']['cache_dir']
spotify_config.settings_location = (
self._config['spotify']['settings_dir'])
self._session = spotify.Session(spotify_config)
self._event_loop = spotify.EventLoop(self._session)
self.library = None
self.playback = None
self.playlists = None
self.uri_schemes = ['spotify']
def on_start(self):
self._session.on(
spotify.SessionEvent.CONNECTION_STATE_UPDATED,
SpotifyBackend.on_connection_state_changed)
self._event_loop.start()
self._session.login(
self._config['spotify']['username'],
self._config['spotify']['password'])
def on_stop(self):
# TODO Wait for the logout to complete
logger.debug('Logging out of Spotify')
self._session.logout()
self._logged_out.wait()
self._event_loop.stop()
@classmethod
def on_connection_state_changed(cls, session):
if session.connection.state is spotify.ConnectionState.LOGGED_IN:
logger.info('Connected to Spotify')
cls._logged_in.set()
cls._logged_out.clear()
elif session.connection.state is spotify.ConnectionState.LOGGED_OUT:
logger.debug('Logged out of Spotify')
cls._logged_in.clear()
cls._logged_out.set()
| apache-2.0 | Python |
7b3e4623da5341753d1150642c68b46200f79b79 | Drop leading underscore from UTC._ZERO | christophelec/github3.py,icio/github3.py,balloob/github3.py,wbrefvem/github3.py,krxsky/github3.py,jim-minter/github3.py,agamdua/github3.py,h4ck3rm1k3/github3.py,ueg1990/github3.py,sigmavirus24/github3.py,degustaf/github3.py,itsmemattchung/github3.py | github3/utils.py | github3/utils.py | # -*- coding: utf-8 -*-
from collections import Callable
from datetime import datetime, timedelta, tzinfo
from requests.compat import basestring
import re
# with thanks to https://code.google.com/p/jquery-localtime/issues/detail?id=4
ISO_8601 = re.compile("^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[0-1]|0"
"[1-9]|[1-2][0-9])(T(2[0-3]|[0-1][0-9]):([0-5][0-9]):([0"
"-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[0-1][0-9]):[0-5]["
"0-9])?)?$")
def timestamp_parameter(timestamp, allow_none=True):
if timestamp is None:
if allow_none:
return None
raise ValueError("Timestamp value cannot be None")
if isinstance(timestamp, datetime):
return timestamp.isoformat()
if isinstance(timestamp, basestring):
if not ISO_8601.match(timestamp):
raise ValueError(("Invalid timestamp: %s is not a valid ISO-8601"
" formatted date") % timestamp)
return timestamp
raise ValueError("Cannot accept type %s for timestamp" % type(timestamp))
class UTC(tzinfo):
"""Yet another UTC reimplementation, to avoid a dependency on pytz or
dateutil."""
ZERO = timedelta(0)
def __repr__(self):
return 'UTC()'
def dst(self, dt):
return self.ZERO
def tzname(self, dt):
return 'UTC'
def utcoffset(self, dt):
return self.ZERO
def stream_response_to_file(response, path=None):
pre_opened = False
fd = None
if path:
if isinstance(getattr(path, 'write', None), Callable):
pre_opened = True
fd = path
else:
fd = open(path, 'wb')
else:
header = response.headers['content-disposition']
i = header.find('filename=') + len('filename=')
fd = open(header[i:], 'wb')
for chunk in response.iter_content(chunk_size=512):
fd.write(chunk)
if not pre_opened:
fd.close()
| # -*- coding: utf-8 -*-
from collections import Callable
from datetime import datetime, timedelta, tzinfo
from requests.compat import basestring
import re
# with thanks to https://code.google.com/p/jquery-localtime/issues/detail?id=4
ISO_8601 = re.compile("^(-?(?:[1-9][0-9]*)?[0-9]{4})-(1[0-2]|0[1-9])-(3[0-1]|0"
"[1-9]|[1-2][0-9])(T(2[0-3]|[0-1][0-9]):([0-5][0-9]):([0"
"-5][0-9])(\.[0-9]+)?(Z|[+-](?:2[0-3]|[0-1][0-9]):[0-5]["
"0-9])?)?$")
def timestamp_parameter(timestamp, allow_none=True):
if timestamp is None:
if allow_none:
return None
raise ValueError("Timestamp value cannot be None")
if isinstance(timestamp, datetime):
return timestamp.isoformat()
if isinstance(timestamp, basestring):
if not ISO_8601.match(timestamp):
raise ValueError(("Invalid timestamp: %s is not a valid ISO-8601"
" formatted date") % timestamp)
return timestamp
raise ValueError("Cannot accept type %s for timestamp" % type(timestamp))
class UTC(tzinfo):
"""Yet another UTC reimplementation, to avoid a dependency on pytz or
dateutil."""
_ZERO = timedelta(0)
def __repr__(self):
return 'UTC()'
def dst(self, dt):
return self._ZERO
def tzname(self, dt):
return 'UTC'
def utcoffset(self, dt):
return self._ZERO
def stream_response_to_file(response, path=None):
pre_opened = False
fd = None
if path:
if isinstance(getattr(path, 'write', None), Callable):
pre_opened = True
fd = path
else:
fd = open(path, 'wb')
else:
header = response.headers['content-disposition']
i = header.find('filename=') + len('filename=')
fd = open(header[i:], 'wb')
for chunk in response.iter_content(chunk_size=512):
fd.write(chunk)
if not pre_opened:
fd.close()
| bsd-3-clause | Python |
08d3966122f3c7873faf720a660cac99ff0e1ba7 | Fix sparse.info docstring. | kalvdans/scipy,scipy/scipy,Newman101/scipy,rgommers/scipy,sriki18/scipy,trankmichael/scipy,endolith/scipy,matthew-brett/scipy,lhilt/scipy,giorgiop/scipy,ales-erjavec/scipy,maniteja123/scipy,trankmichael/scipy,vberaudi/scipy,FRidh/scipy,fernand/scipy,chatcannon/scipy,sriki18/scipy,aman-iitj/scipy,nmayorov/scipy,haudren/scipy,futurulus/scipy,mikebenfield/scipy,matthew-brett/scipy,fernand/scipy,vberaudi/scipy,mhogg/scipy,witcxc/scipy,dominicelse/scipy,chatcannon/scipy,gfyoung/scipy,mingwpy/scipy,aeklant/scipy,niknow/scipy,zaxliu/scipy,sargas/scipy,aarchiba/scipy,hainm/scipy,nvoron23/scipy,larsmans/scipy,Dapid/scipy,newemailjdm/scipy,lukauskas/scipy,raoulbq/scipy,aeklant/scipy,vhaasteren/scipy,vberaudi/scipy,ogrisel/scipy,Eric89GXL/scipy,pnedunuri/scipy,aman-iitj/scipy,surhudm/scipy,maciejkula/scipy,zaxliu/scipy,Shaswat27/scipy,gef756/scipy,felipebetancur/scipy,josephcslater/scipy,mikebenfield/scipy,pnedunuri/scipy,jseabold/scipy,ndchorley/scipy,endolith/scipy,hainm/scipy,jonycgn/scipy,maniteja123/scipy,niknow/scipy,anielsen001/scipy,e-q/scipy,Stefan-Endres/scipy,maciejkula/scipy,teoliphant/scipy,dch312/scipy,Gillu13/scipy,chatcannon/scipy,sauliusl/scipy,giorgiop/scipy,fredrikw/scipy,endolith/scipy,vanpact/scipy,ortylp/scipy,futurulus/scipy,nvoron23/scipy,felipebetancur/scipy,niknow/scipy,aman-iitj/scipy,endolith/scipy,jamestwebber/scipy,matthewalbani/scipy,Dapid/scipy,befelix/scipy,nvoron23/scipy,mhogg/scipy,fernand/scipy,ortylp/scipy,ChanderG/scipy,Stefan-Endres/scipy,tylerjereddy/scipy,andyfaff/scipy,woodscn/scipy,minhlongdo/scipy,zerothi/scipy,mdhaber/scipy,woodscn/scipy,juliantaylor/scipy,teoliphant/scipy,pbrod/scipy,richardotis/scipy,ortylp/scipy,apbard/scipy,jamestwebber/scipy,perimosocordiae/scipy,pizzathief/scipy,piyush0609/scipy,Srisai85/scipy,petebachant/scipy,minhlongdo/scipy,chatcannon/scipy,juliantaylor/scipy,Dapid/scipy,person142/scipy,pschella/scipy,woodscn/scipy,aman-iitj/scipy,kleskjr/scipy,ogrisel/scipy,ilayn/scipy,jsilter/scipy,gdooper/scipy,gef756/scipy,sriki18/scipy,sauliusl/scipy,jakevdp/scipy,giorgiop/scipy,Stefan-Endres/scipy,ndchorley/scipy,pbrod/scipy,lukauskas/scipy,Srisai85/scipy,argriffing/scipy,WarrenWeckesser/scipy,mdhaber/scipy,pnedunuri/scipy,endolith/scipy,grlee77/scipy,mikebenfield/scipy,Gillu13/scipy,lhilt/scipy,jseabold/scipy,cpaulik/scipy,jakevdp/scipy,gfyoung/scipy,gef756/scipy,mhogg/scipy,vberaudi/scipy,futurulus/scipy,pbrod/scipy,WillieMaddox/scipy,zxsted/scipy,ndchorley/scipy,FRidh/scipy,endolith/scipy,perimosocordiae/scipy,mtrbean/scipy,arokem/scipy,mgaitan/scipy,rgommers/scipy,sonnyhu/scipy,jseabold/scipy,ilayn/scipy,mhogg/scipy,jamestwebber/scipy,trankmichael/scipy,sauliusl/scipy,maniteja123/scipy,mingwpy/scipy,efiring/scipy,ilayn/scipy,surhudm/scipy,matthewalbani/scipy,newemailjdm/scipy,fernand/scipy,andim/scipy,WarrenWeckesser/scipy,e-q/scipy,person142/scipy,tylerjereddy/scipy,petebachant/scipy,surhudm/scipy,woodscn/scipy,anntzer/scipy,FRidh/scipy,chatcannon/scipy,kleskjr/scipy,mortada/scipy,richardotis/scipy,Stefan-Endres/scipy,gertingold/scipy,minhlongdo/scipy,vhaasteren/scipy,jseabold/scipy,kalvdans/scipy,bkendzior/scipy,nmayorov/scipy,bkendzior/scipy,jjhelmus/scipy,nvoron23/scipy,zaxliu/scipy,vberaudi/scipy,sauliusl/scipy,fredrikw/scipy,e-q/scipy,anielsen001/scipy,njwilson23/scipy,mtrbean/scipy,vanpact/scipy,FRidh/scipy,witcxc/scipy,zerothi/scipy,jor-/scipy,ilayn/scipy,anntzer/scipy,newemailjdm/scipy,Newman101/scipy,anntzer/scipy,nonhermitian/scipy,behzadnouri/scipy,cpaulik/scipy,jor-/scipy,mdhaber/scipy,chatcannon/scipy,Shaswat27/scipy,gfyoung/scipy,ndchorley/scipy,Stefan-Endres/scipy,mortonjt/scipy,sargas/scipy,mhogg/scipy,gfyoung/scipy,dch312/scipy,nmayorov/scipy,Eric89GXL/scipy,gef756/scipy,mingwpy/scipy,witcxc/scipy,zaxliu/scipy,minhlongdo/scipy,richardotis/scipy,pbrod/scipy,trankmichael/scipy,apbard/scipy,jakevdp/scipy,jonycgn/scipy,trankmichael/scipy,bkendzior/scipy,jonycgn/scipy,ChanderG/scipy,niknow/scipy,minhlongdo/scipy,woodscn/scipy,FRidh/scipy,jor-/scipy,Srisai85/scipy,Kamp9/scipy,befelix/scipy,mikebenfield/scipy,ogrisel/scipy,giorgiop/scipy,pizzathief/scipy,juliantaylor/scipy,Eric89GXL/scipy,vhaasteren/scipy,ogrisel/scipy,zerothi/scipy,andim/scipy,FRidh/scipy,sriki18/scipy,Shaswat27/scipy,ortylp/scipy,nmayorov/scipy,cpaulik/scipy,surhudm/scipy,mortonjt/scipy,efiring/scipy,rmcgibbo/scipy,gef756/scipy,jsilter/scipy,josephcslater/scipy,jsilter/scipy,mdhaber/scipy,sonnyhu/scipy,vanpact/scipy,zxsted/scipy,mortada/scipy,nonhermitian/scipy,larsmans/scipy,ilayn/scipy,Kamp9/scipy,apbard/scipy,e-q/scipy,anntzer/scipy,hainm/scipy,fredrikw/scipy,nonhermitian/scipy,mtrbean/scipy,matthew-brett/scipy,andim/scipy,perimosocordiae/scipy,Newman101/scipy,scipy/scipy,lukauskas/scipy,befelix/scipy,bkendzior/scipy,bkendzior/scipy,WillieMaddox/scipy,scipy/scipy,lukauskas/scipy,ales-erjavec/scipy,mortada/scipy,aeklant/scipy,andyfaff/scipy,Dapid/scipy,pyramania/scipy,gdooper/scipy,jonycgn/scipy,behzadnouri/scipy,mgaitan/scipy,befelix/scipy,petebachant/scipy,ales-erjavec/scipy,person142/scipy,Gillu13/scipy,giorgiop/scipy,vhaasteren/scipy,apbard/scipy,ales-erjavec/scipy,Srisai85/scipy,behzadnouri/scipy,arokem/scipy,matthew-brett/scipy,andim/scipy,zerothi/scipy,sargas/scipy,argriffing/scipy,lhilt/scipy,Stefan-Endres/scipy,lukauskas/scipy,jonycgn/scipy,newemailjdm/scipy,gdooper/scipy,cpaulik/scipy,aarchiba/scipy,arokem/scipy,scipy/scipy,anntzer/scipy,pyramania/scipy,person142/scipy,gef756/scipy,anntzer/scipy,pbrod/scipy,pizzathief/scipy,njwilson23/scipy,sauliusl/scipy,sriki18/scipy,Eric89GXL/scipy,haudren/scipy,pyramania/scipy,scipy/scipy,nonhermitian/scipy,vanpact/scipy,ogrisel/scipy,rgommers/scipy,jsilter/scipy,piyush0609/scipy,fredrikw/scipy,mortada/scipy,grlee77/scipy,sauliusl/scipy,andyfaff/scipy,vanpact/scipy,dch312/scipy,njwilson23/scipy,ales-erjavec/scipy,rgommers/scipy,dominicelse/scipy,efiring/scipy,pschella/scipy,mingwpy/scipy,gertingold/scipy,WillieMaddox/scipy,felipebetancur/scipy,mtrbean/scipy,Dapid/scipy,gertingold/scipy,argriffing/scipy,ndchorley/scipy,jjhelmus/scipy,teoliphant/scipy,Kamp9/scipy,newemailjdm/scipy,raoulbq/scipy,pyramania/scipy,zaxliu/scipy,kalvdans/scipy,pizzathief/scipy,mhogg/scipy,sriki18/scipy,larsmans/scipy,fernand/scipy,rmcgibbo/scipy,maniteja123/scipy,richardotis/scipy,gertingold/scipy,ilayn/scipy,kalvdans/scipy,tylerjereddy/scipy,Shaswat27/scipy,tylerjereddy/scipy,sonnyhu/scipy,e-q/scipy,grlee77/scipy,Gillu13/scipy,mortonjt/scipy,andim/scipy,kleskjr/scipy,juliantaylor/scipy,Eric89GXL/scipy,sonnyhu/scipy,maciejkula/scipy,niknow/scipy,raoulbq/scipy,matthewalbani/scipy,kalvdans/scipy,witcxc/scipy,Srisai85/scipy,futurulus/scipy,Newman101/scipy,aarchiba/scipy,raoulbq/scipy,zxsted/scipy,ales-erjavec/scipy,person142/scipy,josephcslater/scipy,dominicelse/scipy,petebachant/scipy,haudren/scipy,andim/scipy,perimosocordiae/scipy,richardotis/scipy,larsmans/scipy,kleskjr/scipy,Newman101/scipy,ortylp/scipy,richardotis/scipy,fredrikw/scipy,andyfaff/scipy,anielsen001/scipy,pnedunuri/scipy,minhlongdo/scipy,vigna/scipy,rmcgibbo/scipy,hainm/scipy,njwilson23/scipy,pyramania/scipy,njwilson23/scipy,vanpact/scipy,ChanderG/scipy,vhaasteren/scipy,nmayorov/scipy,trankmichael/scipy,raoulbq/scipy,mgaitan/scipy,andyfaff/scipy,rgommers/scipy,ChanderG/scipy,larsmans/scipy,jjhelmus/scipy,maniteja123/scipy,gdooper/scipy,vigna/scipy,mgaitan/scipy,mortonjt/scipy,larsmans/scipy,andyfaff/scipy,argriffing/scipy,pnedunuri/scipy,WarrenWeckesser/scipy,WarrenWeckesser/scipy,argriffing/scipy,surhudm/scipy,mingwpy/scipy,rmcgibbo/scipy,aarchiba/scipy,arokem/scipy,gfyoung/scipy,apbard/scipy,mortonjt/scipy,argriffing/scipy,haudren/scipy,witcxc/scipy,arokem/scipy,vhaasteren/scipy,mgaitan/scipy,sargas/scipy,Srisai85/scipy,jseabold/scipy,dominicelse/scipy,anielsen001/scipy,pbrod/scipy,dch312/scipy,tylerjereddy/scipy,aarchiba/scipy,kleskjr/scipy,ndchorley/scipy,niknow/scipy,teoliphant/scipy,anielsen001/scipy,Shaswat27/scipy,lukauskas/scipy,josephcslater/scipy,surhudm/scipy,aman-iitj/scipy,jakevdp/scipy,pschella/scipy,woodscn/scipy,piyush0609/scipy,felipebetancur/scipy,cpaulik/scipy,futurulus/scipy,ChanderG/scipy,jor-/scipy,zxsted/scipy,dch312/scipy,haudren/scipy,efiring/scipy,jor-/scipy,jsilter/scipy,felipebetancur/scipy,mortada/scipy,Dapid/scipy,mdhaber/scipy,Kamp9/scipy,efiring/scipy,jjhelmus/scipy,jonycgn/scipy,perimosocordiae/scipy,WarrenWeckesser/scipy,jamestwebber/scipy,ortylp/scipy,vigna/scipy,Newman101/scipy,grlee77/scipy,lhilt/scipy,haudren/scipy,Gillu13/scipy,zxsted/scipy,Shaswat27/scipy,scipy/scipy,anielsen001/scipy,sonnyhu/scipy,hainm/scipy,behzadnouri/scipy,teoliphant/scipy,WillieMaddox/scipy,petebachant/scipy,hainm/scipy,jseabold/scipy,behzadnouri/scipy,fredrikw/scipy,cpaulik/scipy,sargas/scipy,njwilson23/scipy,efiring/scipy,matthew-brett/scipy,nvoron23/scipy,matthewalbani/scipy,jakevdp/scipy,aman-iitj/scipy,pschella/scipy,Kamp9/scipy,Kamp9/scipy,WillieMaddox/scipy,rmcgibbo/scipy,pschella/scipy,dominicelse/scipy,raoulbq/scipy,befelix/scipy,perimosocordiae/scipy,maniteja123/scipy,nvoron23/scipy,gertingold/scipy,mikebenfield/scipy,mtrbean/scipy,petebachant/scipy,mdhaber/scipy,mortada/scipy,vigna/scipy,pizzathief/scipy,fernand/scipy,vigna/scipy,juliantaylor/scipy,kleskjr/scipy,mingwpy/scipy,zerothi/scipy,felipebetancur/scipy,WarrenWeckesser/scipy,piyush0609/scipy,maciejkula/scipy,aeklant/scipy,matthewalbani/scipy,giorgiop/scipy,nonhermitian/scipy,vberaudi/scipy,mtrbean/scipy,mgaitan/scipy,Eric89GXL/scipy,rmcgibbo/scipy,jjhelmus/scipy,gdooper/scipy,pnedunuri/scipy,behzadnouri/scipy,mortonjt/scipy,zxsted/scipy,piyush0609/scipy,jamestwebber/scipy,sonnyhu/scipy,josephcslater/scipy,lhilt/scipy,grlee77/scipy,zaxliu/scipy,aeklant/scipy,WillieMaddox/scipy,ChanderG/scipy,Gillu13/scipy,maciejkula/scipy,newemailjdm/scipy,futurulus/scipy,piyush0609/scipy,zerothi/scipy | Lib/sparse/info.py | Lib/sparse/info.py | """
Sparse matrix
=============
Scipy 2D sparse matrix module.
Original code by Travis Oliphant.
Modified and extended by Ed Schofield and Robert Cimrman.
There are four available sparse matrix types:
(1) csc_matrix: Compressed Sparse Column format
(2) csr_matrix: Compressed Sparse Row format
(3) lil_matrix: List of Lists format
(4) dok_matrix: Dictionary of Keys format
To construct a matrix efficiently, use either lil_matrix (recommended) or
dok_matrix. The lil_matrix class supports basic slicing and fancy
indexing with a similar syntax to NumPy arrays.
To perform manipulations such as multiplication or inversion, first
convert the matrix to either CSC or CSR format. The lil_matrix format is
row-based, so conversion to CSR is efficient, whereas conversion to CSC
is less so.
Example:
Construct a 10x1000 lil_matrix and add some values to it:
>>> from scipy import sparse, linsolve
>>> from numpy import linalg
>>> from numpy.random import rand
>>> A = sparse.lil_matrix((1000, 1000))
>>> A[0, :100] = rand(100)
>>> A[1, 100:200] = A[0, :100]
>>> A.setdiag(rand(1000))
Now convert it to CSR format and solve (A A^T) x = b for x:
>>> A = A.tocsr()
>>> b = rand(1000)
>>> x = linsolve.spsolve(A * A.T, b)
Convert it to a dense matrix and solve, and check that the result
is the same:
>>> A_ = A.todense()
>>> x_ = linalg.solve(A_ * A_.T, b)
>>> err = linalg.norm(x-x_)
Now we can print the error norm with:
print "Norm error =", err
It should be small :)
"""
postpone_import = 1
| """
Sparse matrix
=============
Scipy 2D sparse matrix module.
Original code by Travis Oliphant.
Modified and extended by Ed Schofield and Robert Cimrman.
There are four available sparse matrix types:
(1) csc_matrix: Compressed Sparse Column format
(2) csr_matrix: Compressed Sparse Row format
(3) lil_matrix: List of Lists format
(4) dok_matrix: Dictionary of Keys format
To construct a matrix efficiently, use either lil_matrix (recommended) or
dok_matrix. The lil_matrix class supports basic slicing and fancy
indexing with a similar syntax to NumPy arrays.
To perform manipulations such as multiplication or inversion, first
convert the matrix to either CSC or CSR format. The lil_matrix format is
row-based, so conversion to CSR is efficient, whereas conversion to CSC
is less so.
Example:
Construct a 10x1000 lil_matrix and add some values to it:
>>> from scipy import sparse, linsolve
>>> from numpy import rand, linalg
>>> A = sparse.lil_matrix((1000, 1000))
>>> A[0, :100] = rand(100)
>>> A[1, 100:200] = A[0, :100]
>>> A.setdiag(rand(1000))
Now convert it to CSR format and solve (A A^T) x = b for x:
>>> A = A.tocsr()
>>> b = rand(1000)
>>> x = linsolve.spsolve(A * A.T, b)
Convert it to a dense matrix and solve, and check that the result
is the same:
>>> A_ = A.todense()
>>> x_ = linalg.solve(A_ * A_.T, b)
>>> err = linalg.norm(x-x_)
Now we can print the error norm with:
print "Norm error =", err
It should be small :)
"""
postpone_import = 1
| bsd-3-clause | Python |
46ce07733913dff688bcd6e3e83dc3222f630c07 | fix error by encoding | nishio/jscc,nishio/jscc,nishio/jscc | client/client.py | client/client.py | import re
import json
from datetime import datetime
import urllib2
import urllib
import argparse
parser = argparse.ArgumentParser(description='send info to visualizing server')
parser.add_argument('--port', default=8104, type=int)
parser.add_argument('--server', default="localhost", type=str)
parser.add_argument('--send-detail', action='store_true',
help='send detailed compile error')
args = parser.parse_args()
URL = "http://%s:%s/api/put?" % (args.server, args.port)
# collect lint error
data = {"error": None, "warning": None}
messages = []
for line in open("lint.log"):
if line.startswith("Line"):
# sample: Line 58, E:0002: Missing space after ","
messages.append(line.split(":", 2)[1])
data["lint"] = len(messages)
# collect compile error
success = False
for line in open("compile.log"):
if "error(s)" in line or "warning(s)" in line:
# sample: 44 error(s), 0 warning(s)
err, warn = re.match("(\d+) error.* (\d+) warn", line).groups()
data["error"] = int(err)
data["warning"] = int(warn)
if "closurebuilder.py: JavaScript compilation succeeded" in line:
success = True
if data["error"] == None: data["error"] = 0
if data["warning"] == None: data["warning"] = 0
if args.send_detail:
if data["error"] == data["warning"] == 0:
data["detail"] = file('lint.log').read()
else:
data["detail"] = file('compile.log').read().decode("sjis")
data["when"] = datetime.now().isoformat()
data["success"] = success
urllib2.urlopen(URL + urllib.urlencode({"json": json.dumps(data)}))
| import re
import json
from datetime import datetime
import urllib2
import urllib
import argparse
parser = argparse.ArgumentParser(description='send info to visualizing server')
parser.add_argument('--port', default=8104, type=int)
parser.add_argument('--server', default="localhost", type=str)
parser.add_argument('--send-detail', action='store_true',
help='send detailed compile error')
args = parser.parse_args()
URL = "http://%s:%s/api/put?" % (args.server, args.port)
# collect lint error
data = {"error": None, "warning": None}
messages = []
for line in open("lint.log"):
if line.startswith("Line"):
# sample: Line 58, E:0002: Missing space after ","
messages.append(line.split(":", 2)[1])
data["lint"] = len(messages)
# collect compile error
success = False
for line in open("compile.log"):
if "error(s)" in line or "warning(s)" in line:
# sample: 44 error(s), 0 warning(s)
err, warn = re.match("(\d+) error.* (\d+) warn", line).groups()
data["error"] = int(err)
data["warning"] = int(warn)
if "closurebuilder.py: JavaScript compilation succeeded" in line:
success = True
if data["error"] == None: data["error"] = 0
if data["warning"] == None: data["warning"] = 0
if args.send_detail:
if success:
data["detail"] = file('lint.log').read()
else:
data["detail"] = file('compile.log').read()
data["when"] = datetime.now().isoformat()
data["success"] = success
urllib2.urlopen(URL + urllib.urlencode({"json": json.dumps(data)}))
| mit | Python |
ab395d012e77e863bc78dea0479c07fa0add2049 | use sequential runner (avoids error with process-based ounit runner) | gfxmonk/gup,gfxmonk/gup,timbertson/gup,timbertson/gup,timbertson/gup,timbertson/gup,gfxmonk/gup | run_tests.py | run_tests.py | #!/usr/bin/env python
import os, sys, subprocess
class Object(object): pass
UNIT = '-u'
INTEGRATION = '-i'
actions = (UNIT, INTEGRATION)
action = sys.argv[1]
assert action in actions, "Expected one of %s" % (", ".join(actions),)
action_name = 'unit' if action == UNIT else 'integration'
args = sys.argv[2:]
cwd = os.getcwd()
kind = os.path.basename(cwd)
kinds = ('python', 'ocaml')
if kind not in kinds:
kind = None
root = os.path.abspath(os.path.dirname(__file__))
test_dir = os.path.join(root, 'test')
try:
def run_nose(args):
subprocess.check_call(['make', '-C', root, 'gup-local.xml'])
subprocess.check_call([
'0install', 'run', '--command=' + os.environ.get('TEST_COMMAND', 'test'),
os.path.join(root, 'gup-local.xml')] + args)
subprocess.check_call(['make', '%s-test-pre' % action_name])
def add_to_env(name, val):
vals = os.environ.get(name, '').split(os.pathsep)
vals.insert(0, val)
os.environ[name] = os.pathsep.join(vals)
if action == INTEGRATION:
# run without adding to PATH
if kind is None:
exe = os.pathsep.join([os.path.join(cwd, kind, 'bin', 'gup') for kind in kinds])
else:
exe = os.path.join(cwd, 'bin', 'gup')
os.environ['GUP_EXE'] = exe
run_nose(['-w', test_dir] + args)
else:
assert action == UNIT
add_to_env('PATH', os.path.join(root, 'test/bin'))
if kind == 'ocaml':
subprocess.check_call(['./test.byte', '-runner', 'sequential'] + args)
else:
run_nose(args)
except subprocess.CalledProcessError: sys.exit(1)
| #!/usr/bin/env python
import os, sys, subprocess
class Object(object): pass
UNIT = '-u'
INTEGRATION = '-i'
actions = (UNIT, INTEGRATION)
action = sys.argv[1]
assert action in actions, "Expected one of %s" % (", ".join(actions),)
action_name = 'unit' if action == UNIT else 'integration'
args = sys.argv[2:]
cwd = os.getcwd()
kind = os.path.basename(cwd)
kinds = ('python', 'ocaml')
if kind not in kinds:
kind = None
root = os.path.abspath(os.path.dirname(__file__))
test_dir = os.path.join(root, 'test')
try:
def run_nose(args):
subprocess.check_call(['make', '-C', root, 'gup-local.xml'])
subprocess.check_call([
'0install', 'run', '--command=' + os.environ.get('TEST_COMMAND', 'test'),
os.path.join(root, 'gup-local.xml')] + args)
subprocess.check_call(['make', '%s-test-pre' % action_name])
def add_to_env(name, val):
vals = os.environ.get(name, '').split(os.pathsep)
vals.insert(0, val)
os.environ[name] = os.pathsep.join(vals)
if action == INTEGRATION:
# run without adding to PATH
if kind is None:
exe = os.pathsep.join([os.path.join(cwd, kind, 'bin', 'gup') for kind in kinds])
else:
exe = os.path.join(cwd, 'bin', 'gup')
os.environ['GUP_EXE'] = exe
run_nose(['-w', test_dir] + args)
else:
assert action == UNIT
add_to_env('PATH', os.path.join(root, 'test/bin'))
if kind == 'ocaml':
subprocess.check_call(['./test.byte'] + args)
else:
run_nose(args)
except subprocess.CalledProcessError: sys.exit(1)
| lgpl-2.1 | Python |
34529575057f594e474dff3a1b60edaeacfbfb1f | Fix exit status of the test run script | brainly/check-zonesync | run_tests.py | run_tests.py | #!/usr/bin/env python3
# Copyright (c) 2013 Spotify AB
# Copyright (c) 2014 Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
try:
import coverage
except ImportError:
pass
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
if "coverage" in sys.modules:
cov = coverage.coverage()
cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
res = testRunner.run(tests)
if "coverage" in sys.modules:
cov.stop()
cov.html_report()
if res.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
# Copyright (c) 2013 Spotify AB
# Copyright (c) 2014 Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
try:
import coverage
except ImportError:
pass
import sys
import unittest
import os
def main():
#Cleanup old html report:
for root, dirs, files in os.walk('test/output_coverage_html/'):
for f in files:
if f == '.gitignore' or f == '.empty_dir':
continue
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
#Perform coverage analisys:
if "coverage" in sys.modules:
cov = coverage.coverage()
cov.start()
#Discover the tests and execute them:
loader = unittest.TestLoader()
tests = loader.discover('./test/')
testRunner = unittest.runner.TextTestRunner(descriptions=True, verbosity=1)
testRunner.run(tests)
if "coverage" in sys.modules:
cov.stop()
cov.html_report()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
00fd2ef2bc5987e12eca677110157a07fad49793 | Remove unused imports | bussiere/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs,PressLabs/gitfs,rowhit/gitfs | gitfs/views/history.py | gitfs/views/history.py | import os
from stat import S_IFDIR
from errno import ENOENT
from fuse import FuseOSError
from gitfs.log import log
from .view import View
class HistoryView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
attrs = super(HistoryView, self).getattr(path, fh)
attrs.update({
'st_mode': S_IFDIR | 0775,
'st_nlink': 2
})
return attrs
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
if getattr(self, 'date', None):
log.info('PATH: %s', path)
if path == '/':
available_dates = self.repo.get_commit_dates()
if self.date not in available_dates:
raise FuseOSError(ENOENT)
else:
commits = self.repo.get_commits_by_date(self.date)
dirname = os.path.split(path)[1]
if dirname not in commits:
raise FuseOSError(ENOENT)
else:
if path != '/':
raise FuseOSError(ENOENT)
return 0
def readdir(self, path, fh):
if getattr(self, 'date', None):
additional_entries = self.repo.get_commits_by_date(self.date)
else:
additional_entries = self.repo.get_commit_dates()
dir_entries = ['.', '..'] + additional_entries
for entry in dir_entries:
yield entry
| import os
from datetime import datetime
from stat import S_IFDIR
from errno import ENOENT
from pygit2 import GIT_SORT_TIME
from fuse import FuseOSError
from gitfs.utils import strptime
from gitfs.log import log
from .view import View
class HistoryView(View):
def getattr(self, path, fh=None):
'''
Returns a dictionary with keys identical to the stat C structure of
stat(2).
st_atime, st_mtime and st_ctime should be floats.
NOTE: There is an incombatibility between Linux and Mac OS X
concerning st_nlink of directories. Mac OS X counts all files inside
the directory, while Linux counts only the subdirectories.
'''
attrs = super(HistoryView, self).getattr(path, fh)
attrs.update({
'st_mode': S_IFDIR | 0775,
'st_nlink': 2
})
return attrs
def opendir(self, path):
return 0
def releasedir(self, path, fi):
pass
def access(self, path, amode):
if getattr(self, 'date', None):
log.info('PATH: %s', path)
if path == '/':
available_dates = self.repo.get_commit_dates()
if self.date not in available_dates:
raise FuseOSError(ENOENT)
else:
commits = self.repo.get_commits_by_date(self.date)
dirname = os.path.split(path)[1]
if dirname not in commits:
raise FuseOSError(ENOENT)
else:
if path != '/':
raise FuseOSError(ENOENT)
return 0
def readdir(self, path, fh):
if getattr(self, 'date', None):
additional_entries = self.repo.get_commits_by_date(self.date)
else:
additional_entries = self.repo.get_commit_dates()
dir_entries = ['.', '..'] + additional_entries
for entry in dir_entries:
yield entry
| apache-2.0 | Python |
269fb63409935d85e70b420de1b562280da4f3eb | Update __init__.py file of views module. | kaleidos/django-supertools | supertools/views/__init__.py | supertools/views/__init__.py | from .base import GenericView
from .base import GenericTemplateView
from .ajax import AjaxMixin
from .forms import FormViewMixin
from .paginator import PaginatorMixin
| # -*- coding: utf-8 -*-
| bsd-3-clause | Python |
6b2314ff98bbead0c3a7811fc1429ecc3aec22ce | convert generic form error key from '__all__' to 'generic' | melissiproject/server,melissiproject/server | api/resource.py | api/resource.py | """
Overloading piston resource to provide our own error handling methods
"""
import piston.resource
from piston.utils import rc
from exceptions import APIException
class Resource(piston.resource.Resource):
def form_validation_response(self, e):
resp = rc.BAD_REQUEST
error_list = {}
for key, value in e.form.errors.items():
if key == '__all__':
key = 'generic'
error_list[key] = value
resp._set_content(error_list)
return resp
def error_handler(self, e, request, meth, em_format):
if isinstance(e, APIException):
resp = getattr(rc, e.code)
resp._set_content(e.error)
return resp
else:
return super(Resource, self).error_handler(e, request, meth, em_format)
| """
Overloading piston resource to provide our own error handling methods
"""
import piston.resource
from piston.utils import rc
from exceptions import APIException
class Resource(piston.resource.Resource):
def form_validation_response(self, e):
resp = rc.BAD_REQUEST
error_list = {}
for key, value in e.form.errors.items():
error_list[key] = value
resp._set_content(error_list)
return resp
def error_handler(self, e, request, meth, em_format):
if isinstance(e, APIException):
resp = getattr(rc, e.code)
resp._set_content(e.error)
return resp
else:
return super(Resource, self).error_handler(e, request, meth, em_format)
| agpl-3.0 | Python |
09a615458f5b13b26c6c5891769939f95ef57b20 | Update abusehelper.py | pkug/intelmq,pkug/intelmq,certtools/intelmq,sch3m4/intelmq,certtools/intelmq,pkug/intelmq,sch3m4/intelmq,robcza/intelmq,sch3m4/intelmq,robcza/intelmq,robcza/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq,aaronkaplan/intelmq,sch3m4/intelmq,pkug/intelmq,certtools/intelmq,robcza/intelmq | src/bots/inputs/abusehelper/abusehelper.py | src/bots/inputs/abusehelper/abusehelper.py | import sys
import xmpp
from lib.bot import *
from lib.utils import *
from lib.event import *
from lib.cache import *
# Required parameters:
# - jid
# - password
# - source_room
# - force_tls
class AbuseHelperBot(Bot):
def handle_message(self, xmpp_connection, message):
try:
event = Event.from_unicode(unicode(message.getBody()))
for key in event.keys():
value = event.value(key)
event.clear(key)
key = key.replace(' ','_')
event.add(key, value)
self.send_message(event)
except:
pass
def start(self):
jid = xmpp.JID(self.parameters.jid)
xmpp_connection = xmpp.Client(jid.getDomain(), debug=[])
connection_result = xmpp_connection.connect()
if not connection_result:
# TODO: Log error
return
if self.parameters.force_tls == 'true' and connection_result != 'tls':
# TODO: Log error
return
authentication_result = xmpp_connection.auth(jid.getNode(), self.parameters.password)
if not authentication_result:
# TODO: Log error
return
xmpp_connection.RegisterHandler(name='message', handler=self.handle_message)
xmpp_connection.sendInitPresence()
xmpp_connection.send(xmpp.Presence(to='%s@conference.%s/%s' % (self.parameters.source_room, jid.getDomain(), self.bot_id)))
while True:
if not xmpp_connection.isConnected():
xmpp_connection.reconnectAndReauth()
else:
xmpp_connection.Process()
time.sleep(int(self.parameters.processing_interval))
if __name__ == "__main__":
bot = AbuseHelperBot(sys.argv[1])
bot.start()
| import sys
import xmpp
from lib.bot import *
from lib.utils import *
from lib.event import *
from lib.cache import *
# Required parameters:
# - jid
# - password
# - source_room
# - force_tls
class AbuseHelperBot(Bot):
def handle_message(self, xmpp_connection, message):
try:
event = Event.from_unicode(unicode(message.getBody()))
for key in event.keys():
value = event.value(key)
event.clear(key)
key = key.replace(' ','_')
event.add(key, value)
self.send_message(event)
except:
pass
def start(self):
jid = xmpp.JID(self.parameters.jid)
xmpp_connection = xmpp.Client(jid.getDomain(), debug=[])
connection_result = xmpp_connection.connect()
if not connection_result:
# TODO: Log error
return
if self.parameters.force_tls == 'true' and connection_result != 'tls':
# TODO: Log error
return
authentication_result = xmpp_connection.auth(jid.getNode(), self.parameters.password)
if not authentication_result:
# TODO: Log error
return
xmpp_connection.RegisterHandler(name='message', handler=self.handle_message)
xmpp_connection.sendInitPresence()
xmpp_connection.send(xmpp.Presence(to='%s@conference.%s/%s' % (self.parameters.source_room, jid.getDomain(), self.bot_id)))
while True:
print 'Iteration'
if not xmpp_connection.isConnected():
xmpp_connection.reconnectAndReauth()
else:
xmpp_connection.Process()
time.sleep(int(self.parameters.processing_interval))
if __name__ == "__main__":
bot = AbuseHelperBot(sys.argv[1])
bot.start()
| agpl-3.0 | Python |
a0e8b544569d0aa955dd1698ff020572df9d122f | delete username and name field in registration | codeforamerica/westsac-farm-stand,inaki/farm-stand,codeforamerica/westsac-farm-stand,inaki/farm-stand | app/auth/views.py | app/auth/views.py | from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, current_user
from app import db
from ..email import send_email
from . import auth
from ..models import User
from .forms import LoginForm, RegistrationForm
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account!', 'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.before_app_request
def before_request():
if current_user.is_authenticated():
current_user.ping()
if current_user.is_authenticated() and not current_user.confirmed and request.endpoint[:5] != 'auth.':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous() or current_user.confirmed:
return redirect('main/index')
return render_template('auth/unconfirmed.html')
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
print current_user.email
send_email(current_user.email, 'Confirm Your Account', 'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
| from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, current_user
from app import db
from ..email import send_email
from . import auth
from ..models import User
from .forms import LoginForm, RegistrationForm
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data.lower(),
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account!', 'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.before_app_request
def before_request():
if current_user.is_authenticated():
current_user.ping()
if current_user.is_authenticated() and not current_user.confirmed and request.endpoint[:5] != 'auth.':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous() or current_user.confirmed:
return redirect('main/index')
return render_template('auth/unconfirmed.html')
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
print current_user.email
send_email(current_user.email, 'Confirm Your Account', 'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
| mit | Python |
853d93a18919a7cf5805b44c1a6678ffff92461b | add logging.basicConfig() to tests | n0ano/ganttclient | run_tests.py | run_tests.py | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gettext
import os
import unittest
import sys
from nose import config
from nose import result
from nose import core
from nova import log as logging
class NovaTestResult(result.TextTestResult):
def __init__(self, *args, **kw):
result.TextTestResult.__init__(self, *args, **kw)
self._last_case = None
def getDescription(self, test):
return str(test)
def startTest(self, test):
unittest.TestResult.startTest(self, test)
current_case = test.test.__class__.__name__
if self.showAll:
if current_case != self._last_case:
self.stream.writeln(current_case)
self._last_case = current_case
self.stream.write(
' %s' % str(test.test._testMethodName).ljust(60))
self.stream.flush()
class NovaTestRunner(core.TextTestRunner):
def _makeResult(self):
return NovaTestResult(self.stream,
self.descriptions,
self.verbosity,
self.config)
if __name__ == '__main__':
logging.basicConfig()
c = config.Config(stream=sys.stdout,
env=os.environ,
verbosity=3,
plugins=core.DefaultPluginManager())
runner = NovaTestRunner(stream=c.stream,
verbosity=c.verbosity,
config=c)
sys.exit(not core.run(config=c, testRunner=runner))
| #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gettext
import os
import unittest
import sys
from nose import config
from nose import result
from nose import core
class NovaTestResult(result.TextTestResult):
def __init__(self, *args, **kw):
result.TextTestResult.__init__(self, *args, **kw)
self._last_case = None
def getDescription(self, test):
return str(test)
def startTest(self, test):
unittest.TestResult.startTest(self, test)
current_case = test.test.__class__.__name__
if self.showAll:
if current_case != self._last_case:
self.stream.writeln(current_case)
self._last_case = current_case
self.stream.write(
' %s' % str(test.test._testMethodName).ljust(60))
self.stream.flush()
class NovaTestRunner(core.TextTestRunner):
def _makeResult(self):
return NovaTestResult(self.stream,
self.descriptions,
self.verbosity,
self.config)
if __name__ == '__main__':
c = config.Config(stream=sys.stdout,
env=os.environ,
verbosity=3,
plugins=core.DefaultPluginManager())
runner = NovaTestRunner(stream=c.stream,
verbosity=c.verbosity,
config=c)
sys.exit(not core.run(config=c, testRunner=runner))
| apache-2.0 | Python |
998e3441928c32760ec06e330b2e049b535d7bda | Print the exception type separately to avoid it being cut off | xenserver/xscontainer,olivierlambert/xscontainer,robertbreker/xscontainer,xenserver/xscontainer,robertbreker/xscontainer,olivierlambert/xscontainer | src/xscontainer/util/log.py | src/xscontainer/util/log.py | import logging
import logging.handlers
import signal
import sys
import traceback
def configurelogging():
_LOGGER.setLevel(logging.DEBUG)
streamhandler = logging.StreamHandler(sys.stderr)
streamhandler.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'xscontainer[%(process)d] - %(levelname)s - %(message)s')
streamhandler.setFormatter(formatter)
_LOGGER.addHandler(streamhandler)
handler = logging.handlers.SysLogHandler(
address='/dev/log', facility=logging.handlers.SysLogHandler.LOG_DAEMON)
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
_LOGGER.addHandler(handler)
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def debug(message):
_LOGGER.debug(message)
def info(message):
_LOGGER.info(message)
def warning(message):
_LOGGER.warning(message)
def error(message):
_LOGGER.error(message)
def critical(message):
_LOGGER.critical(message)
def exception(message):
_LOGGER.exception(message)
def handle_unhandled_exceptions(exception_type, exception_value,
exception_traceback):
if not issubclass(exception_type, KeyboardInterrupt):
_LOGGER.error("Nobody caught exception: %s" % (exception_type))
_LOGGER.error(traceback.format_exception(exception_type,
exception_value,
exception_traceback))
sys.__excepthook__(exception_type, exception_value, exception_traceback)
_LOGGER = logging.getLogger()
configurelogging()
sys.excepthook = handle_unhandled_exceptions
| import logging
import logging.handlers
import signal
import sys
import traceback
def configurelogging():
_LOGGER.setLevel(logging.DEBUG)
streamhandler = logging.StreamHandler(sys.stderr)
streamhandler.setLevel(logging.DEBUG)
formatter = logging.Formatter(
'xscontainer[%(process)d] - %(levelname)s - %(message)s')
streamhandler.setFormatter(formatter)
_LOGGER.addHandler(streamhandler)
handler = logging.handlers.SysLogHandler(
address='/dev/log', facility=logging.handlers.SysLogHandler.LOG_DAEMON)
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
_LOGGER.addHandler(handler)
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def debug(message):
_LOGGER.debug(message)
def info(message):
_LOGGER.info(message)
def warning(message):
_LOGGER.warning(message)
def error(message):
_LOGGER.error(message)
def critical(message):
_LOGGER.critical(message)
def exception(message):
_LOGGER.exception(message)
def handle_unhandled_exceptions(exception_type, exception_value,
exception_traceback):
if not issubclass(exception_type, KeyboardInterrupt):
_LOGGER.error("Nobody caught exception: %s"
% (traceback.format_exception(exception_type,
exception_value,
exception_traceback)))
sys.__excepthook__(exception_type, exception_value, exception_traceback)
_LOGGER = logging.getLogger()
configurelogging()
sys.excepthook = handle_unhandled_exceptions
| bsd-2-clause | Python |
2a6e610b59e93e5d7e6b00a8c4be5625ef071131 | Update coordinator on every run | martinp/jarvis2,mpolden/jarvis2,mpolden/jarvis2,martinp/jarvis2,martinp/jarvis2,mpolden/jarvis2 | app/jobs/sonos.py | app/jobs/sonos.py | #!/usr/bin/env python
from jobs import AbstractJob
from soco import SoCo
class Sonos(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self._device = SoCo(conf['ip'])
@property
def device(self):
# In case of grouped devices the playback information needs to be
# retrieved from the coordinator device
if self._device.group.coordinator.uid != self._device.uid:
self._device = self._device.group.coordinator
return self._device
def get(self):
zone_name = self.device.get_speaker_info()['zone_name']
np = self.device.get_current_track_info()
current_track = np if np['playlist_position'] != '0' else None
queue = self.device.get_queue(int(np['playlist_position']), 1)
next_item = queue.pop() if len(queue) > 0 else None
next_track = {}
if next_item is not None:
next_track = {
'artist': next_item.creator,
'title': next_item.title,
'album': next_item.album
}
state = self.device.get_current_transport_info()[
'current_transport_state']
return {
'room': zone_name,
'state': state,
'current': current_track,
'next': next_track
}
| #!/usr/bin/env python
from jobs import AbstractJob
from soco import SoCo
class Sonos(AbstractJob):
def __init__(self, conf):
self.interval = conf['interval']
self.sonos = SoCo(conf['ip'])
# In case of grouped devices the playback information needs to be
# retrieved from the coordinator device
if self.sonos.group.coordinator.uid != self.sonos.uid:
self.sonos = self.sonos.group.coordinator
def get(self):
zone_name = self.sonos.get_speaker_info()['zone_name']
np = self.sonos.get_current_track_info()
current_track = np if np['playlist_position'] != '0' else None
queue = self.sonos.get_queue(int(np['playlist_position']), 1)
next_item = queue.pop() if len(queue) > 0 else None
next_track = {}
if next_item is not None:
next_track = {
'artist': next_item.creator,
'title': next_item.title,
'album': next_item.album
}
state = self.sonos.get_current_transport_info()[
'current_transport_state']
return {
'room': zone_name,
'state': state,
'current': current_track,
'next': next_track
}
| mit | Python |
b5c2603ec929433ae8f37299bb811a1d7d17647b | Implement get collection functionality | AmosGarner/PyInventory | collectionOps.py | collectionOps.py | from DataObjects.Collection import Collection
import json
def getCollection(fileData):
try:
fileData = json.loads(fileData)
return generateCollectionOnFileData(fileData)
except:
print('Error: Could not load collection data from file.')
def generateCollectionOnFileData(fileData):
collectionType = fileData['collectionType']
collectionName = fileData['collectionName']
username = fileData['username']
itemData = fileData['items']
itemArr = []
for value in itemData:
if fileData['collectionType'] == 'item':
item = ItemFactory.factory(collectionType,[value['id'], value['name'], value['addedOn'], value['lastEdit']])
itemArr.append(item)
elif fileData['collectionType'] == 'album':
item = ItemFactory.factory(collectionType,[value['id'], value['name'], value['addedOn'], value['lastEdit'], value['artist']])
itemArr.append(item)
elif fileData['collectionType'] == 'book':
item = ItemFactory.factory(collectionType,[value['id'], value['name'], value['addedOn'], value['lastEdit'], value['author']])
itemArr.append(item)
elif fileData['collectionType'] == 'movie':
item = ItemFactory.factory(collectionType,[value['id'], value['name'], value['addedOn'], value['lastEdit'], value['director']])
itemArr.append(item)
return Collection(fileData['username'], fileData['collectionName'], fileData['collectionType'], itemArr)
| def getCollection(filePath):
return None
| apache-2.0 | Python |
2716e3e2263c7fc9b26a2cff7783486bb89a59ed | add test case | abner-xin/email_utils | test/message/test_composer.py | test/message/test_composer.py | import os
import unittest
import tempfile
from message import IMSMessageComposer
from message import IMSMessageParser
from email_resource import email_plain
from email_resource import email_html_attachment
class TestIMSMessageComposer(unittest.TestCase):
def test_compose_a_plain_email(self):
c = IMSMessageComposer()
c.set_subject("test email")
c.add_plain_body("test email")
d = IMSMessageParser().message_from_string(str(c))
self.assertTrue(d.is_equal(IMSMessageParser().message_from_file(email_plain)))
def test_compose_a_html_email_with_attachment(self):
c = IMSMessageComposer()
c.set_subject("plain and html body")
c.add_plain_body("hello world")
c.add_html_body("<h1>hello world<h1>")
attach = os.path.join(tempfile.mkdtemp(), "hello.txt")
with open(attach, 'w') as f:
f.write("hello")
c.append_attachment(attach)
os.remove(attach)
d = IMSMessageParser().message_from_string(str(c))
self.assertTrue(d.is_equal(IMSMessageParser().message_from_file(email_html_attachment)))
def test_compose_email_based_on(self):
m = IMSMessageParser(_class=IMSMessageComposer).message_from_file(email_html_attachment)
m.set_subject("hello 123")
d = IMSMessageParser().message_from_string(str(m))
self.assertEqual("hello 123", d.subject)
self.assertEqual([False, True, True], d.is_equal(IMSMessageParser().message_from_file(email_html_attachment)))
if __name__ == '__main__':
unittest.main()
| import os
import unittest
import tempfile
from message import IMSMessageComposer
from message import IMSMessageParser
from email_resource import email_plain
from email_resource import email_html_attachment
class TestIMSMessageComposer(unittest.TestCase):
def test_compose_a_plain_email(self):
c = IMSMessageComposer()
c.set_subject("test email")
c.add_plain_body("test email")
d = IMSMessageParser().message_from_string(str(c))
self.assertTrue(d.is_equal(IMSMessageParser().message_from_file(email_plain)))
def test_compose_a_html_email_with_attachment(self):
c = IMSMessageComposer()
c.set_subject("plain and html body")
c.add_plain_body("hello world")
c.add_html_body("<h1>hello world<h1>")
attach = os.path.join(tempfile.mkdtemp(), "hello.txt")
with open(attach, 'w') as f:
f.write("hello")
c.append_attachment(attach)
os.remove(attach)
d = IMSMessageParser().message_from_string(str(c))
self.assertTrue(d.is_equal(IMSMessageParser().message_from_file(email_html_attachment)))
if __name__ == '__main__':
unittest.main()
| mit | Python |
0163ada6283613962a0ccf6ce9b2cb73e0f6a980 | Update temporal_memory_wrappers.py | numenta/nupic.research,numenta/nupic.research | packages/columns/src/nupic/research/frameworks/columns/temporal_memory_wrappers.py | packages/columns/src/nupic/research/frameworks/columns/temporal_memory_wrappers.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.research.frameworks.columns.apical_tiebreak_temporal_memory import (
ApicalTiebreakPairMemory,
)
class ApicalTiebreakPairMemoryWrapper(ApicalTiebreakPairMemory):
def __init__(
self,
proximal_n,
proximal_w,
basal_n,
basal_w,
apical_n,
apical_w,
cells_per_column,
activation_threshold,
reduced_basal_threshold,
initial_permanence,
connected_permanence,
matching_threshold,
sample_size,
permanence_increment,
permanence_decrement,
seed
):
"""
wrapper class around ApicalTiebreakPairMemory that uses Pythonic
variables instead of camelCase.
FIXME: need to change variable structure in ApicalTiebreakTemporalMemory
"""
super().__init__(
columnCount=proximal_n,
basalInputSize=basal_n,
apicalInputSize=apical_n,
cellsPerColumn=cells_per_column,
activationThreshold=activation_threshold,
reducedBasalThreshold=reduced_basal_threshold,
initialPermanence=initial_permanence,
connectedPermanence=connected_permanence,
minThreshold=matching_threshold,
sampleSize=sample_size,
permanenceIncrement=permanence_increment,
permanenceDecrement=permanence_decrement,
seed=seed
)
self.proximal_n = proximal_n
self.proximal_w = proximal_w
self.basal_n = basal_n
self.basal_w = basal_w
self.apical_n = apical_n
self.apical_w = apical_w
| # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from nupic.research.frameworks.columns.apical_tiebreak_temporal_memory import (
ApicalTiebreakPairMemory,
)
class ApicalTiebreakPairMemoryWrapper(ApicalTiebreakPairMemory):
def __init__(
self,
proximal_n,
proximal_w,
basal_n,
basal_w,
apical_n,
apical_w,
cells_per_column,
activation_threshold,
reduced_basal_threshold,
initial_permanence,
connected_permanence,
matching_threshold,
sample_size,
permanence_increment,
permanence_decrement,
seed
):
super().__init__(
columnCount=proximal_n,
basalInputSize=basal_n,
apicalInputSize=apical_n,
cellsPerColumn=cells_per_column,
activationThreshold=activation_threshold,
reducedBasalThreshold=reduced_basal_threshold,
initialPermanence=initial_permanence,
connectedPermanence=connected_permanence,
minThreshold=matching_threshold,
sampleSize=sample_size,
permanenceIncrement=permanence_increment,
permanenceDecrement=permanence_decrement,
seed=seed
)
self.proximal_n = proximal_n
self.proximal_w = proximal_w
self.basal_n = basal_n
self.basal_w = basal_w
self.apical_n = apical_n
self.apical_w = apical_w
| agpl-3.0 | Python |
4ba313fe0e9bf040a91e42fbf357cbf34fd18ec8 | Update CTCP.py | devzero-xyz/Andromeda,devzero-xyz/Andromeda | handlers/CTCP.py | handlers/CTCP.py | import time
def on_ctcp(irc, conn, event):
nick = event.source.nick
ctcptype = event.arguments[0]
if len(event.arguments) > 1:
args = event.arguments[1]
else:
args = None
if ctcptype != "ACTION":
log.info("Received CTCP {} from {}".format(ctcptype, event.source))
if ctcptype == "VERSION":
irc.ctcp_reply(nick, "VERSION {}".format(irc.version))
elif ctcptype == "PING":
now = int(time.time())
if args is list and len(args.split()) > 1:
irc.ctcp_reply(nick, "PING {} {}".format(now, args.split()[1]))
else:
irc.ctcp_reply(nick, "PING {}".format(now))
| import time
def on_ctcp(irc, conn, event):
nick = event.source.nick
ctcptype = event.arguments[0]
if len(event.arguments) > 1:
args = event.arguments[1]
else:
args = None
if ctcptype != "ACTION":
log.info("Received CTCP {} from {}".format(ctcptype, event.source))
if ctcptype == "VERSION":
irc.ctcp_reply(nick, "VERSION {}".format(irc.version))
elif ctcptype == "PING":
now = int(time.time())
if len(args.split()) > 1:
irc.ctcp_reply(nick, "PING {} {}".format(now, args.split()[1]))
else:
irc.ctcp_reply(nick, "PING {}".format(now))
| mit | Python |
b0ee6bfc8d2f5fbdcd4528233052d231d969ae1f | bump version to 3.1.0 | KoketsoMabuela92/titanium_mobile,smit1625/titanium_mobile,mvitr/titanium_mobile,sriks/titanium_mobile,formalin14/titanium_mobile,perdona/titanium_mobile,ashcoding/titanium_mobile,pec1985/titanium_mobile,openbaoz/titanium_mobile,FokkeZB/titanium_mobile,falkolab/titanium_mobile,rblalock/titanium_mobile,mano-mykingdom/titanium_mobile,collinprice/titanium_mobile,bright-sparks/titanium_mobile,emilyvon/titanium_mobile,collinprice/titanium_mobile,openbaoz/titanium_mobile,mano-mykingdom/titanium_mobile,linearhub/titanium_mobile,sriks/titanium_mobile,openbaoz/titanium_mobile,ashcoding/titanium_mobile,mvitr/titanium_mobile,rblalock/titanium_mobile,bhatfield/titanium_mobile,pec1985/titanium_mobile,sriks/titanium_mobile,shopmium/titanium_mobile,jvkops/titanium_mobile,cheekiatng/titanium_mobile,taoger/titanium_mobile,pec1985/titanium_mobile,mano-mykingdom/titanium_mobile,pinnamur/titanium_mobile,collinprice/titanium_mobile,benbahrenburg/titanium_mobile,benbahrenburg/titanium_mobile,sriks/titanium_mobile,falkolab/titanium_mobile,ashcoding/titanium_mobile,formalin14/titanium_mobile,jvkops/titanium_mobile,csg-coder/titanium_mobile,cheekiatng/titanium_mobile,jvkops/titanium_mobile,csg-coder/titanium_mobile,emilyvon/titanium_mobile,kopiro/titanium_mobile,emilyvon/titanium_mobile,collinprice/titanium_mobile,emilyvon/titanium_mobile,taoger/titanium_mobile,collinprice/titanium_mobile,pec1985/titanium_mobile,shopmium/titanium_mobile,peymanmortazavi/titanium_mobile,kopiro/titanium_mobile,falkolab/titanium_mobile,kopiro/titanium_mobile,prop/titanium_mobile,linearhub/titanium_mobile,KoketsoMabuela92/titanium_mobile,taoger/titanium_mobile,peymanmortazavi/titanium_mobile,taoger/titanium_mobile,cheekiatng/titanium_mobile,benbahrenburg/titanium_mobile,csg-coder/titanium_mobile,openbaoz/titanium_mobile,mvitr/titanium_mobile,AngelkPetkov/titanium_mobile,openbaoz/titanium_mobile,falkolab/titanium_mobile,mvitr/titanium_mobile,KangaCoders/titanium_mobile,kopiro/titanium_mobile,mvitr/titanium_mobile,linearhub/titanium_mobile,linearhub/titanium_mobile,FokkeZB/titanium_mobile,hieupham007/Titanium_Mobile,pinnamur/titanium_mobile,prop/titanium_mobile,perdona/titanium_mobile,bhatfield/titanium_mobile,pinnamur/titanium_mobile,shopmium/titanium_mobile,prop/titanium_mobile,bhatfield/titanium_mobile,emilyvon/titanium_mobile,perdona/titanium_mobile,perdona/titanium_mobile,pec1985/titanium_mobile,indera/titanium_mobile,pinnamur/titanium_mobile,rblalock/titanium_mobile,KangaCoders/titanium_mobile,KangaCoders/titanium_mobile,indera/titanium_mobile,kopiro/titanium_mobile,openbaoz/titanium_mobile,mano-mykingdom/titanium_mobile,ashcoding/titanium_mobile,linearhub/titanium_mobile,emilyvon/titanium_mobile,AngelkPetkov/titanium_mobile,KoketsoMabuela92/titanium_mobile,benbahrenburg/titanium_mobile,benbahrenburg/titanium_mobile,bhatfield/titanium_mobile,prop/titanium_mobile,peymanmortazavi/titanium_mobile,rblalock/titanium_mobile,csg-coder/titanium_mobile,benbahrenburg/titanium_mobile,bhatfield/titanium_mobile,shopmium/titanium_mobile,KangaCoders/titanium_mobile,cheekiatng/titanium_mobile,peymanmortazavi/titanium_mobile,ashcoding/titanium_mobile,smit1625/titanium_mobile,perdona/titanium_mobile,shopmium/titanium_mobile,AngelkPetkov/titanium_mobile,FokkeZB/titanium_mobile,ashcoding/titanium_mobile,openbaoz/titanium_mobile,jhaynie/titanium_mobile,formalin14/titanium_mobile,peymanmortazavi/titanium_mobile,kopiro/titanium_mobile,KoketsoMabuela92/titanium_mobile,sriks/titanium_mobile,mano-mykingdom/titanium_mobile,prop/titanium_mobile,hieupham007/Titanium_Mobile,bright-sparks/titanium_mobile,formalin14/titanium_mobile,KangaCoders/titanium_mobile,mvitr/titanium_mobile,csg-coder/titanium_mobile,jvkops/titanium_mobile,ashcoding/titanium_mobile,prop/titanium_mobile,rblalock/titanium_mobile,indera/titanium_mobile,indera/titanium_mobile,AngelkPetkov/titanium_mobile,formalin14/titanium_mobile,jhaynie/titanium_mobile,collinprice/titanium_mobile,KangaCoders/titanium_mobile,pinnamur/titanium_mobile,perdona/titanium_mobile,csg-coder/titanium_mobile,csg-coder/titanium_mobile,FokkeZB/titanium_mobile,peymanmortazavi/titanium_mobile,AngelkPetkov/titanium_mobile,shopmium/titanium_mobile,taoger/titanium_mobile,emilyvon/titanium_mobile,smit1625/titanium_mobile,linearhub/titanium_mobile,bhatfield/titanium_mobile,smit1625/titanium_mobile,pinnamur/titanium_mobile,cheekiatng/titanium_mobile,taoger/titanium_mobile,hieupham007/Titanium_Mobile,ashcoding/titanium_mobile,formalin14/titanium_mobile,jvkops/titanium_mobile,KoketsoMabuela92/titanium_mobile,pinnamur/titanium_mobile,rblalock/titanium_mobile,mano-mykingdom/titanium_mobile,sriks/titanium_mobile,peymanmortazavi/titanium_mobile,jhaynie/titanium_mobile,KoketsoMabuela92/titanium_mobile,bright-sparks/titanium_mobile,KoketsoMabuela92/titanium_mobile,kopiro/titanium_mobile,indera/titanium_mobile,smit1625/titanium_mobile,jhaynie/titanium_mobile,indera/titanium_mobile,collinprice/titanium_mobile,FokkeZB/titanium_mobile,rblalock/titanium_mobile,shopmium/titanium_mobile,cheekiatng/titanium_mobile,AngelkPetkov/titanium_mobile,hieupham007/Titanium_Mobile,smit1625/titanium_mobile,AngelkPetkov/titanium_mobile,hieupham007/Titanium_Mobile,falkolab/titanium_mobile,indera/titanium_mobile,linearhub/titanium_mobile,falkolab/titanium_mobile,smit1625/titanium_mobile,pec1985/titanium_mobile,cheekiatng/titanium_mobile,KoketsoMabuela92/titanium_mobile,mano-mykingdom/titanium_mobile,AngelkPetkov/titanium_mobile,KangaCoders/titanium_mobile,jhaynie/titanium_mobile,cheekiatng/titanium_mobile,emilyvon/titanium_mobile,rblalock/titanium_mobile,taoger/titanium_mobile,pec1985/titanium_mobile,mvitr/titanium_mobile,sriks/titanium_mobile,jhaynie/titanium_mobile,taoger/titanium_mobile,bright-sparks/titanium_mobile,mvitr/titanium_mobile,jvkops/titanium_mobile,indera/titanium_mobile,pinnamur/titanium_mobile,jhaynie/titanium_mobile,smit1625/titanium_mobile,bright-sparks/titanium_mobile,linearhub/titanium_mobile,hieupham007/Titanium_Mobile,bhatfield/titanium_mobile,prop/titanium_mobile,mano-mykingdom/titanium_mobile,falkolab/titanium_mobile,pinnamur/titanium_mobile,hieupham007/Titanium_Mobile,kopiro/titanium_mobile,collinprice/titanium_mobile,jvkops/titanium_mobile,jvkops/titanium_mobile,csg-coder/titanium_mobile,FokkeZB/titanium_mobile,formalin14/titanium_mobile,perdona/titanium_mobile,KangaCoders/titanium_mobile,openbaoz/titanium_mobile,sriks/titanium_mobile,shopmium/titanium_mobile,benbahrenburg/titanium_mobile,bright-sparks/titanium_mobile,hieupham007/Titanium_Mobile,perdona/titanium_mobile,jhaynie/titanium_mobile,FokkeZB/titanium_mobile,formalin14/titanium_mobile,pec1985/titanium_mobile,peymanmortazavi/titanium_mobile,benbahrenburg/titanium_mobile,prop/titanium_mobile,falkolab/titanium_mobile,bright-sparks/titanium_mobile,FokkeZB/titanium_mobile,pec1985/titanium_mobile,bhatfield/titanium_mobile,bright-sparks/titanium_mobile | build/titanium_version.py | build/titanium_version.py | version = '3.1.0'
module_apiversion = '2'
| version = '3.0.0'
module_apiversion = '2'
| apache-2.0 | Python |
6610938cb195cd349081c57490d1f4ea60d25ea1 | Stop depending on master on rules_closure. | google/j2cl,google/j2cl,google/j2cl,google/j2cl,google/j2cl | build_defs/repository.bzl | build_defs/repository.bzl | """Bazel rule for loading external repository deps for J2CL."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def load_j2cl_repo_deps():
_github_repo(
name = "io_bazel_rules_closure",
repo = "bazelbuild/rules_closure",
tag = "0.10.0",
)
_github_repo(
name = "bazel_skylib",
repo = "bazelbuild/bazel-skylib",
tag = "0.7.0",
sha256 = "bce240a0749dfc52fab20dce400b4d5cf7c28b239d64f8fd1762b3c9470121d8",
)
def _github_repo(name, repo, tag, sha256 = None):
if native.existing_rule(name):
return
_, project_name = repo.split("/")
http_archive(
name = name,
strip_prefix = "%s-%s" % (project_name, tag),
url = "https://github.com/%s/archive/%s.zip" % (repo, tag),
sha256 = sha256,
)
| """Bazel rule for loading external repository deps for J2CL."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def load_j2cl_repo_deps():
_github_repo(
name = "io_bazel_rules_closure",
repo = "bazelbuild/rules_closure",
tag = "master",
)
_github_repo(
name = "bazel_skylib",
repo = "bazelbuild/bazel-skylib",
tag = "0.7.0",
sha256 = "bce240a0749dfc52fab20dce400b4d5cf7c28b239d64f8fd1762b3c9470121d8",
)
def _github_repo(name, repo, tag, sha256 = None):
if native.existing_rule(name):
return
_, project_name = repo.split("/")
http_archive(
name = name,
strip_prefix = "%s-%s" % (project_name, tag),
url = "https://github.com/%s/archive/%s.zip" % (repo, tag),
sha256 = sha256,
)
| apache-2.0 | Python |
f25ddf153477d6d7034d96ca695ceef168394705 | Fix dependency issue | rmyers/trove-dashboard | trove_dashboard/dbaas/tabs.py | trove_dashboard/dbaas/tabs.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from openstack_dashboard import api
class OverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = ("dbaas/_detail_overview.html")
def get_context_data(self, request):
return {"instance": self.tab_group.kwargs['instance']}
class LogTab(tabs.Tab):
name = _("Log")
slug = "log"
template_name = "dbaas/_detail_log.html"
preload = False
def get_context_data(self, request):
instance = self.tab_group.kwargs['instance']
try:
data = api.nova.server_console_output(request,
instance.id,
tail_length=35)
except:
data = _('Unable to get log for instance "%s".') % instance.id
exceptions.handle(request, ignore=True)
return {"instance": instance,
"console_log": data}
class InstanceDetailTabs(tabs.TabGroup):
slug = "instance_details"
tabs = (OverviewTab, LogTab)
sticky = True
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tabs
from trove_dashboard import api
class OverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = ("dbaas/_detail_overview.html")
def get_context_data(self, request):
return {"instance": self.tab_group.kwargs['instance']}
class LogTab(tabs.Tab):
name = _("Log")
slug = "log"
template_name = "dbaas/_detail_log.html"
preload = False
def get_context_data(self, request):
instance = self.tab_group.kwargs['instance']
try:
data = api.nova.server_console_output(request,
instance.id,
tail_length=35)
except:
data = _('Unable to get log for instance "%s".') % instance.id
exceptions.handle(request, ignore=True)
return {"instance": instance,
"console_log": data}
class InstanceDetailTabs(tabs.TabGroup):
slug = "instance_details"
tabs = (OverviewTab, LogTab)
sticky = True
| apache-2.0 | Python |
65a3934afbe5d3dc62d7bab5b77577aa9b423c94 | Add simple Link admin | texas/tx_people,texas/tx_people | tx_people/admin.py | tx_people/admin.py | from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from . import models
class ParentOrganizationFilter(admin.SimpleListFilter):
title = _('Parent Organization')
parameter_name = 'parent'
def lookups(self, request, model_admin):
return list(models.Organization.objects
.annotate(children_count=Count('children'))
.filter(children_count__gt=1)
.values_list('pk', 'name')) + [('none', 'No Parent', ), ]
def queryset(self, request, queryset):
value = self.value()
if value == 'none':
return queryset.filter(parent_id__isnull=True)
elif value:
return queryset.filter(parent__id=value)
return queryset
class ContactDetailAdmin(admin.ModelAdmin):
raw_id_fields = ('sources', )
class IdentifierAdmin(admin.ModelAdmin):
list_display = ('scheme', 'identifier', )
list_display_links = ('identifier', )
list_filter = ('scheme', )
search_fields = ('identifier', 'people__name', )
class LinkAdmin(admin.ModelAdmin):
list_display = ('url', 'note', )
search_fields = ('url', 'note', )
class MembershipAdmin(admin.ModelAdmin):
list_display = ('person', 'organization', 'post', )
list_filter = ('organization', )
raw_id_fields = ('links', 'sources', )
search_fields = ('person__name', 'organization__name', 'post__label', )
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('name', 'parent', )
list_filter = (ParentOrganizationFilter, )
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', )
class PeopleAdmin(admin.ModelAdmin):
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', 'email', )
class PostAdmin(admin.ModelAdmin):
list_display = ('label', 'organization', )
search_fields = ('label', 'organization__name', )
class SourceAdmin(admin.ModelAdmin):
search_fields = ('link', )
admin.site.register(models.ContactDetail, ContactDetailAdmin)
admin.site.register(models.Identifier, IdentifierAdmin)
admin.site.register(models.Link, LinkAdmin)
admin.site.register(models.Membership, MembershipAdmin)
admin.site.register(models.Organization, OrganizationAdmin)
admin.site.register(models.Person, PeopleAdmin)
admin.site.register(models.Post, PostAdmin)
admin.site.register(models.Source, SourceAdmin)
| from django.contrib import admin
from django.db.models import Count
from django.utils.translation import ugettext_lazy as _
from . import models
class ParentOrganizationFilter(admin.SimpleListFilter):
title = _('Parent Organization')
parameter_name = 'parent'
def lookups(self, request, model_admin):
return list(models.Organization.objects
.annotate(children_count=Count('children'))
.filter(children_count__gt=1)
.values_list('pk', 'name')) + [('none', 'No Parent', ), ]
def queryset(self, request, queryset):
value = self.value()
if value == 'none':
return queryset.filter(parent_id__isnull=True)
elif value:
return queryset.filter(parent__id=value)
return queryset
class ContactDetailAdmin(admin.ModelAdmin):
raw_id_fields = ('sources', )
class IdentifierAdmin(admin.ModelAdmin):
list_display = ('scheme', 'identifier', )
list_display_links = ('identifier', )
list_filter = ('scheme', )
search_fields = ('identifier', 'people__name', )
class MembershipAdmin(admin.ModelAdmin):
list_display = ('person', 'organization', 'post', )
list_filter = ('organization', )
raw_id_fields = ('links', 'sources', )
search_fields = ('person__name', 'organization__name', 'post__label', )
class OrganizationAdmin(admin.ModelAdmin):
list_display = ('name', 'parent', )
list_filter = (ParentOrganizationFilter, )
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', )
class PeopleAdmin(admin.ModelAdmin):
raw_id_fields = ('identifiers', 'contact_details', 'links', 'sources', )
search_fields = ('name', 'email', )
class PostAdmin(admin.ModelAdmin):
list_display = ('label', 'organization', )
search_fields = ('label', 'organization__name', )
class SourceAdmin(admin.ModelAdmin):
search_fields = ('link', )
admin.site.register(models.ContactDetail, ContactDetailAdmin)
admin.site.register(models.Identifier, IdentifierAdmin)
admin.site.register(models.Membership, MembershipAdmin)
admin.site.register(models.Organization, OrganizationAdmin)
admin.site.register(models.Person, PeopleAdmin)
admin.site.register(models.Post, PostAdmin)
admin.site.register(models.Source, SourceAdmin)
| apache-2.0 | Python |
e22fe584714f2b025d6de4eda3616d3747f72107 | add pixel_id | istb-mia/miapy | test/test_image/test_image.py | test/test_image/test_image.py | from unittest import TestCase
import SimpleITK as sitk
from miapy.image.image import ImageProperties
class TestImageProperties(TestCase):
def test_is_two_dimensional(self):
x = 10
y = 10
image = sitk.Image([x, y], sitk.sitkUInt8)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), True)
self.assertEqual(dut.is_three_dimensional(), False)
self.assertEqual(dut.is_vector_image(), False)
def test_is_three_dimensional(self):
x = 10
y = 10
z = 3
image = sitk.Image([x, y, z], sitk.sitkUInt8)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), False)
self.assertEqual(dut.is_three_dimensional(), True)
self.assertEqual(dut.is_vector_image(), False)
def test_is_vector_image(self):
x = 10
y = 10
number_of_components_per_pixel = 3
image = sitk.Image([x, y], sitk.sitkVectorUInt8, number_of_components_per_pixel)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), True)
self.assertEqual(dut.is_three_dimensional(), False)
self.assertEqual(dut.is_vector_image(), True)
def test_properties(self):
x = 10
y = 10
z = 3
pixel_id = sitk.sitkUInt8
size = (x, y, z)
direction = (0, 1, 0, 1, 0, 0, 0, 0, 1)
image = sitk.Image([x, y, z], pixel_id)
image.SetOrigin(size)
image.SetSpacing(size)
image.SetDirection(direction)
dut = ImageProperties(image)
self.assertEqual(dut.size, size)
self.assertEqual(dut.origin, size)
self.assertEqual(dut.spacing, size)
self.assertEqual(dut.direction, direction)
self.assertEqual(dut.dimensions, z)
self.assertEqual(dut.number_of_components_per_pixel, 1)
self.assertEqual(dut.pixel_id, pixel_id)
| from unittest import TestCase
import SimpleITK as sitk
from miapy.image.image import ImageProperties
class TestImageProperties(TestCase):
def test_is_two_dimensional(self):
x = 10
y = 10
image = sitk.Image([x, y], sitk.sitkUInt8)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), True)
self.assertEqual(dut.is_three_dimensional(), False)
self.assertEqual(dut.is_vector_image(), False)
def test_is_three_dimensional(self):
x = 10
y = 10
z = 3
image = sitk.Image([x, y, z], sitk.sitkUInt8)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), False)
self.assertEqual(dut.is_three_dimensional(), True)
self.assertEqual(dut.is_vector_image(), False)
def test_is_vector_image(self):
x = 10
y = 10
number_of_components_per_pixel = 3
image = sitk.Image([x, y], sitk.sitkVectorUInt8, number_of_components_per_pixel)
dut = ImageProperties(image)
self.assertEqual(dut.is_two_dimensional(), True)
self.assertEqual(dut.is_three_dimensional(), False)
self.assertEqual(dut.is_vector_image(), True)
def test_properties(self):
x = 10
y = 10
z = 3
size = (x, y, z)
direction = (0, 1, 0, 1, 0, 0, 0, 0, 1)
image = sitk.Image([x, y, z], sitk.sitkUInt8)
image.SetOrigin(size)
image.SetSpacing(size)
image.SetDirection(direction)
dut = ImageProperties(image)
self.assertEqual(dut.size, size)
self.assertEqual(dut.origin, size)
self.assertEqual(dut.spacing, size)
self.assertEqual(dut.direction, direction)
self.assertEqual(dut.dimensions, z)
self.assertEqual(dut.number_of_components_per_pixel, 1)
| apache-2.0 | Python |
2cd5049d2fc495344845ec0fed1e085afd96dfc8 | Use CSV data | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca | ca_on_brantford/people.py | ca_on_brantford/people.py | from utils import CSVScraper
class BrantfordPersonScraper(CSVScraper):
csv_url = 'https://opendata.arcgis.com/datasets/320d27b8b20a467f8283a78835a33003_0.csv'
encoding = 'utf-8-sig'
many_posts_per_area = True
corrections = {
'primary role': {
'Ward 1 Councillor': 'Councillor',
'Ward 2 Councillor': 'Councillor',
'Ward 3 Councillor': 'Councillor',
'Ward 4 Councillor': 'Councillor',
'Ward 5 Councillor': 'Councillor',
},
}
# Not the Represent CSV Schema.
def header_converter(self, s):
return {
'POSITION': 'primary role',
'NAME': 'name',
'WARD': 'district id',
'WARD_NAME': 'district name',
'EMAIL': 'email',
'MOBILE': 'cell',
}.get(s, s)
def is_valid_row(self, row):
return True
| from utils import CanadianScraper, CanadianPerson as Person
import re
from collections import defaultdict
COUNCIL_PAGE = 'http://www.brantford.ca/govt/council/members/Pages/default.aspx'
class BrantfordPersonScraper(CanadianScraper):
def scrape(self):
seat_numbers = defaultdict(int)
page = self.lxmlize(COUNCIL_PAGE)
yield self.scrape_mayor()
councillors = page.xpath('//div[@id="centre_content"]//tr')
assert len(councillors), 'No councillors found'
for councillor in councillors:
if 'Position' in councillor.text_content():
continue
ward = councillor.xpath('./td')[0].text_content().replace('Councillor', '')
seat_numbers[ward] += 1
district = '{} (seat {})'.format(ward, seat_numbers[ward])
name = councillor.xpath('./td')[1].text_content()
url = councillor.xpath('./td/a')[0].attrib['href']
p = Person(primary_org='legislature', name=name, district=district, role='Councillor')
p.add_source(COUNCIL_PAGE)
p.add_source(url)
page = self.lxmlize(url)
content = page.xpath('//div[@id="centre_content"]')[0]
email = self.get_email(content)
p.add_contact('email', email)
p.add_contact('voice', self.get_phone(content, area_codes=[226, 519]), 'legislature')
p.image = page.xpath('string(//div[@id="centre_content"]//img/@src)') # can be empty
if len(page.xpath('//div[@id="centre_content"]//a')) > 2:
p.add_link(page.xpath('//div[@id="centre_content"]//a')[-1].attrib['href'])
yield p
def scrape_mayor(self):
mayor_url = 'http://mayor.brantford.ca/Pages/default.aspx'
page = self.lxmlize(mayor_url)
name = re.findall(r'(?<=Mayor )(.*)(?=\r)', page.xpath('//div[@id="main_content"]/h1/text()')[0])[0]
p = Person(primary_org='legislature', name=name, district='Brantford', role='Mayor')
p.add_source(mayor_url)
contact_url = page.xpath('.//a[contains(text(),"Contact")]/@href')[0]
page = self.lxmlize(contact_url)
p.add_source(contact_url)
address = ' '.join(page.xpath('//div[@id="main_content"]/p/text()'))
address = re.sub(r'\s{2,}', ' ', address).strip()
email = self.get_email(page)
p.add_contact('address', address, 'legislature')
p.add_contact('email', email)
return p
| mit | Python |
faccefc513e8b20d16e9923dd65c5a17fcaef2d3 | Use app route for status | ayushgoel/flock-message-reporter,ayushgoel/flock-message-reporter,ayushgoel/flock-message-reporter,ayushgoel/flock-message-reporter | src/start.py | src/start.py | from flask import Flask
from flask import Blueprint
from flask import request
from flask import abort
from flask import jsonify
from flask import send_from_directory
import events
import config
app = Flask(__name__)
bp = Blueprint('report-message', __name__)
@app.route("/status")
def status():
return "Up!"
@bp.route("/events", methods=['POST'])
def eventsRoute():
if request.method == 'POST':
name = request.json["name"]
if name == "app.install":
events.handle_app_install(request)
if name == "app.uninstall":
events.handle_app_uninstall(request)
if name == "client.messageAction":
events.handle_message_action(request)
return ""
@bp.route("/UID", methods=['POST'])
def UIDRoute():
if request.method == 'POST':
UID = request.json['UID']
details = events.messageDetailsForUID(UID)
if details:
print details
print "Returning details ", details.__class__
return jsonify(details)
abort(404)
@bp.route("/history", methods=['POST'])
def historyRoute():
if request.method == 'POST':
print request.headers
print request.json
month = request.json['month']
UIDs = events.UIDsForMonth(month)
if UIDs:
print UIDs
print "Returning UIDs ", UIDs.__class__
return jsonify(UIDs)
abort(404)
@bp.route("/configure")
def configureRoute():
return "Configuration called! We don't handle this yet."
@bp.route("/")
def baseRoute():
return send_from_directory('static', 'index.html')
@bp.route('/js/<path:path>')
def sendJS(path):
return send_from_directory('static', path)
if __name__ == "__main__":
print "Starting app"
app.register_blueprint(bp, url_prefix='/report-message')
app.run(host=config.app_config["host"], port=config.app_config["port"])
| from flask import Flask
from flask import Blueprint
from flask import request
from flask import abort
from flask import jsonify
from flask import send_from_directory
import events
import config
app = Flask(__name__)
bp = Blueprint('report-message', __name__)
@bp.route("/status")
def status():
return "Up!"
@bp.route("/events", methods=['POST'])
def eventsRoute():
if request.method == 'POST':
name = request.json["name"]
if name == "app.install":
events.handle_app_install(request)
if name == "app.uninstall":
events.handle_app_uninstall(request)
if name == "client.messageAction":
events.handle_message_action(request)
return ""
@bp.route("/UID", methods=['POST'])
def UIDRoute():
if request.method == 'POST':
UID = request.json['UID']
details = events.messageDetailsForUID(UID)
if details:
print details
print "Returning details ", details.__class__
return jsonify(details)
abort(404)
@bp.route("/history", methods=['POST'])
def historyRoute():
if request.method == 'POST':
print request.headers
print request.json
month = request.json['month']
UIDs = events.UIDsForMonth(month)
if UIDs:
print UIDs
print "Returning UIDs ", UIDs.__class__
return jsonify(UIDs)
abort(404)
@bp.route("/configure")
def configureRoute():
return "Configuration called! We don't handle this yet."
@bp.route("/")
def baseRoute():
return send_from_directory('static', 'index.html')
@bp.route('/js/<path:path>')
def sendJS(path):
return send_from_directory('static', path)
if __name__ == "__main__":
print "Starting app"
app.register_blueprint(bp, url_prefix='/report-message')
app.run(host=config.app_config["host"], port=config.app_config["port"])
| mit | Python |
fd103a3c690af6f8191e82576a9a2db41ce755c2 | Declare extensionServer for generated devtools_extension_api.js | primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs,primiano/blink-gitcs | Source/devtools/scripts/generate_devtools_extension_api.py | Source/devtools/scripts/generate_devtools_extension_api.py | #!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
def write_devtools_extension_api(output, input_names):
output.write("""(function() {
""")
for input_name in input_names:
input = open(input_name, 'r')
output.write(input.read())
output.write("""
var tabId;
var extensionInfo = {};
var extensionServer;
platformExtensionAPI(injectedExtensionAPI("remote-" + window.parent.frames.length));
})();""")
def main(argv):
if len(argv) < 3:
print('usage: %s output_js input_files ...' % argv[0])
return 1
output_name = argv[1]
output = open(output_name, 'w')
write_devtools_extension_api(output, argv[2:])
output.close()
if __name__ == '__main__':
sys.exit(main(sys.argv))
| #!/usr/bin/env python
#
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
def write_devtools_extension_api(output, input_names):
output.write("""(function() {
""")
for input_name in input_names:
input = open(input_name, 'r')
output.write(input.read())
output.write("""
var tabId;
var extensionInfo = {};
platformExtensionAPI(injectedExtensionAPI("remote-" + window.parent.frames.length));
})();""")
def main(argv):
if len(argv) < 3:
print('usage: %s output_js input_files ...' % argv[0])
return 1
output_name = argv[1]
output = open(output_name, 'w')
write_devtools_extension_api(output, argv[2:])
output.close()
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | Python |
ba7333ee7551642a9247a5958e1a2881dd7d4c6a | Remove nick from batch end as well | Heufneutje/txircd | txircd/modules/ircv3/batch.py | txircd/modules/ircv3/batch.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implementer
from typing import Any, Callable, Dict, List, Optional, Tuple
import random, string
@implementer(IPlugin, IModuleData)
class Batch(ModuleData):
name = "Batch"
def actions(self) -> List[Tuple[str, int, Callable]]:
return [ ("startbatchsend", 10, self.startBatch),
("outgoingmessagetags", 10, self.addBatchTag),
("endbatchsend", 10, self.endBatch),
("capabilitylist", 10, self.addCapability) ]
def load(self) -> None:
if "unloading-batch" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-batch"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("batch")
def unload(self) -> Optional["Deferred"]:
self.ircd.dataCache["unloading-batch"] = True
def fullUnload(self) -> Optional["Deferred"]:
del self.ircd.dataCache["unloading-batch"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("batch")
def addCapability(self, user: "IRCUser", capList: List[str]) -> None:
capList.append("batch")
def startBatch(self, user: "IRCUser", batchName: str, batchType: str, batchParameters: List[Any]) -> None:
if "capabilities" not in user.cache or "batch" not in user.cache["capabilities"]:
return
uniqueReferenceTagParts = [ random.choice(string.ascii_letters) ]
for i in range(2, 10):
uniqueReferenceTagParts.append(random.choice(string.ascii_letters + string.digits))
uniqueReferenceTag = "".join(uniqueReferenceTagParts)
user.cache["currentBatch"] = uniqueReferenceTag
user.sendMessage("BATCH", "+{}".format(uniqueReferenceTag), batchType, *batchParameters, to=None)
def addBatchTag(self, user: "IRCUser", command: str, to: str, tags: Dict[str, Optional[str]]) -> None:
if "currentBatch" in user.cache:
tags["batch"] = user.cache["currentBatch"]
def endBatch(self, user: "IRCUser", batchName: str, batchType: str, batchParameters: List[Any]) -> None:
if "currentBatch" not in user.cache:
return
uniqueReferenceTag = user.cache["currentBatch"]
del user.cache["currentBatch"]
user.sendMessage("BATCH", "-{}".format(uniqueReferenceTag), to=None)
batch = Batch() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implementer
from typing import Any, Callable, Dict, List, Optional, Tuple
import random, string
@implementer(IPlugin, IModuleData)
class Batch(ModuleData):
name = "Batch"
def actions(self) -> List[Tuple[str, int, Callable]]:
return [ ("startbatchsend", 10, self.startBatch),
("outgoingmessagetags", 10, self.addBatchTag),
("endbatchsend", 10, self.endBatch),
("capabilitylist", 10, self.addCapability) ]
def load(self) -> None:
if "unloading-batch" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-batch"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("batch")
def unload(self) -> Optional["Deferred"]:
self.ircd.dataCache["unloading-batch"] = True
def fullUnload(self) -> Optional["Deferred"]:
del self.ircd.dataCache["unloading-batch"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("batch")
def addCapability(self, user: "IRCUser", capList: List[str]) -> None:
capList.append("batch")
def startBatch(self, user: "IRCUser", batchName: str, batchType: str, batchParameters: List[Any]) -> None:
if "capabilities" not in user.cache or "batch" not in user.cache["capabilities"]:
return
uniqueReferenceTagParts = [ random.choice(string.ascii_letters) ]
for i in range(2, 10):
uniqueReferenceTagParts.append(random.choice(string.ascii_letters + string.digits))
uniqueReferenceTag = "".join(uniqueReferenceTagParts)
user.cache["currentBatch"] = uniqueReferenceTag
user.sendMessage("BATCH", "+{}".format(uniqueReferenceTag), batchType, *batchParameters, to=None)
def addBatchTag(self, user: "IRCUser", command: str, to: str, tags: Dict[str, Optional[str]]) -> None:
if "currentBatch" in user.cache:
tags["batch"] = user.cache["currentBatch"]
def endBatch(self, user: "IRCUser", batchName: str, batchType: str, batchParameters: List[Any]) -> None:
if "currentBatch" not in user.cache:
return
uniqueReferenceTag = user.cache["currentBatch"]
del user.cache["currentBatch"]
user.sendMessage("BATCH", "-{}".format(uniqueReferenceTag))
batch = Batch() | bsd-3-clause | Python |
83099dad7ec753946b63e9bc936fa670067ba39a | Fix parent reference in transform (incorrectly referred to body as parent) | PyCQA/astroid | astroid/brain/brain_attrs.py | astroid/brain/brain_attrs.py | # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Astroid hook for the attrs library
Without this hook pylint reports unsupported-assignment-operation
for atrrs classes
"""
import astroid
from astroid import MANAGER
ATTR_IB = 'attr.ib'
def is_decorated_with_attrs(
node, decorator_names=('attr.s', 'attr.attrs', 'attr.attributes')):
"""Return True if a decorated node has
an attr decorator applied."""
if not node.decorators:
return False
for decorator_attribute in node.decorators.nodes:
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
decorator_attribute = decorator_attribute.func
if decorator_attribute.as_string() in decorator_names:
return True
return False
def attr_attributes_transform(node):
"""Given that the ClassNode has an attr decorator,
rewrite class attributes as instance attributes
"""
# Astroid can't infer this attribute properly
# Prevents https://github.com/PyCQA/pylint/issues/1884
node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
for cdefbodynode in node.body:
if not isinstance(cdefbodynode, astroid.Assign):
continue
if isinstance(cdefbodynode.value, astroid.Call):
if cdefbodynode.value.func.as_string() != ATTR_IB:
continue
for target in cdefbodynode.targets:
rhs_node = astroid.Unknown(
lineno=cdefbodynode.lineno,
col_offset=cdefbodynode.col_offset,
parent=cdefbodynode
)
node.locals[target.name] = [rhs_node]
MANAGER.register_transform(
astroid.Class,
attr_attributes_transform,
is_decorated_with_attrs)
| # Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Astroid hook for the attrs library
Without this hook pylint reports unsupported-assignment-operation
for atrrs classes
"""
import astroid
from astroid import MANAGER
ATTR_IB = 'attr.ib'
def is_decorated_with_attrs(
node, decorator_names=('attr.s', 'attr.attrs', 'attr.attributes')):
"""Return True if a decorated node has
an attr decorator applied."""
if not node.decorators:
return False
for decorator_attribute in node.decorators.nodes:
if isinstance(decorator_attribute, astroid.Call): # decorator with arguments
decorator_attribute = decorator_attribute.func
if decorator_attribute.as_string() in decorator_names:
return True
return False
def attr_attributes_transform(node):
"""Given that the ClassNode has an attr decorator,
rewrite class attributes as instance attributes
"""
# Astroid can't infer this attribute properly
# Prevents https://github.com/PyCQA/pylint/issues/1884
node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node.body)]
for cdefbodynode in node.body:
if not isinstance(cdefbodynode, astroid.Assign):
continue
if isinstance(cdefbodynode.value, astroid.Call):
if cdefbodynode.value.func.as_string() != ATTR_IB:
continue
for target in cdefbodynode.targets:
rhs_node = astroid.Unknown(
lineno=cdefbodynode.lineno,
col_offset=cdefbodynode.col_offset,
parent=cdefbodynode
)
node.locals[target.name] = [rhs_node]
MANAGER.register_transform(
astroid.Class,
attr_attributes_transform,
is_decorated_with_attrs)
| lgpl-2.1 | Python |
b4ae2046eb938dc7283af3faa4945a2c4b8ef57d | Make some moderator columns not nullable | Floens/uchan,Floens/uchan,Floens/uchan,Floens/uchan,Floens/uchan | uchan/lib/models/moderator.py | uchan/lib/models/moderator.py | from sqlalchemy import Column, String, LargeBinary
from sqlalchemy import Integer
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import relationship, deferred
from uchan.lib.database import ModelBase
from uchan.lib.models import MutableList, BoardModerator
def create_board_for_proxy(board):
board_moderator = BoardModerator()
board_moderator.board = board
board_moderator.roles = []
return board_moderator
class Moderator(ModelBase):
__tablename__ = 'moderator'
id = Column(Integer(), primary_key=True)
username = Column(String(), nullable=False, unique=True)
password = deferred(Column(LargeBinary(), nullable=False))
roles = Column(MutableList.as_mutable(ARRAY(String)), nullable=False, index=True)
# Bans given by this moderator
given_bans = relationship('Ban', backref='moderator')
posts = relationship('Post', backref='moderator')
boards = association_proxy('board_moderators', 'board', creator=create_board_for_proxy)
logs = relationship('ModeratorLog', backref='moderator')
| from sqlalchemy import Column, String, LargeBinary
from sqlalchemy import Integer
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import relationship, deferred
from uchan.lib.database import ModelBase
from uchan.lib.models import MutableList, BoardModerator
def create_board_for_proxy(board):
board_moderator = BoardModerator()
board_moderator.board = board
board_moderator.roles = []
return board_moderator
class Moderator(ModelBase):
__tablename__ = 'moderator'
id = Column(Integer(), primary_key=True)
username = Column(String(), unique=True)
password = deferred(Column(LargeBinary()))
roles = Column(MutableList.as_mutable(ARRAY(String)), index=True)
# Bans given by this moderator
given_bans = relationship('Ban', backref='moderator')
posts = relationship('Post', backref='moderator')
boards = association_proxy('board_moderators', 'board', creator=create_board_for_proxy)
| mit | Python |
9351dbe5f0ba1a445dbfc1f8802d4bad6e2fb5e7 | change v1 name around code | kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh | cfgov/v1/wagtail_hooks.py | cfgov/v1/wagtail_hooks.py | from django.http import Http404
from django.conf import settings
from v1.models import CFGOVPage
from wagtail.wagtailcore import hooks
@hooks.register('after_create_page')
@hooks.register('after_edit_page')
def share_the_page(request, page):
parent_page = page.get_ancestors(inclusive=False).reverse()[0].specific
parent_page_perms = parent_page.permissions_for_user(request.user)
is_publishing = bool(request.POST.get('action-publish')) and parent_page_perms.can_publish()
is_sharing = bool(request.POST.get('action-share')) and parent_page_perms.can_publish()
if is_sharing or is_publishing:
if isinstance(page, CFGOVPage):
page.shared = True
else:
page.shared = False
page.save()
revision = page.save_revision()
if is_publishing:
revision.publish()
@hooks.register('before_serve_page')
def check_request_site(page, request, serve_args, serve_kwargs):
if request.site.hostname == settings.STAGING_HOSTNAME:
if isinstance(page, CFGOVPage):
if not page.shared:
raise Http404
| from django.http import Http404
from django.conf import settings
from v1.models import V1Page
from wagtail.wagtailcore import hooks
@hooks.register('after_create_page')
@hooks.register('after_edit_page')
def share_the_page(request, page):
parent_page = page.get_ancestors(inclusive=False).reverse()[0].specific
parent_page_perms = parent_page.permissions_for_user(request.user)
is_publishing = bool(request.POST.get('action-publish')) and parent_page_perms.can_publish()
is_sharing = bool(request.POST.get('action-share')) and parent_page_perms.can_publish()
if is_sharing or is_publishing:
if isinstance(page, V1Page):
page.shared = True
else:
page.shared = False
page.save()
revision = page.save_revision()
if is_publishing:
revision.publish()
@hooks.register('before_serve_page')
def check_request_site(page, request, serve_args, serve_kwargs):
if request.site.hostname == settings.STAGING_HOSTNAME:
if isinstance(page, V1Page):
if not page.shared:
raise Http404
| cc0-1.0 | Python |
cd34d9ff8ae7cc2bf65cdb759f60e21ecc39c104 | Correct problem with missing files (hiprec) | dbeyer/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,IljaZakharov/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,dbeyer/benchexec,dbeyer/benchexec,IljaZakharov/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec | benchexec/tools/hiprec.py | benchexec/tools/hiprec.py | #!/usr/bin/env python
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for HIPrec.
"""
REQUIRED_PATHS = [
"fixcalc",
"hiprec",
"hiprec_run.sh",
"oc",
"prelude.ss",
"z3-4.3.2"
]
def executable(self):
executable = util.find_executable('hiprec')
return executable
def name(self):
return 'HIPrec'
def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
return [executable] + options + tasks + ['--debug']
def determine_result(self, returncode, returnsignal, output, isTimeout):
status = result.RESULT_UNKNOWN
for line in output:
if line.startswith('Verification result:('):
line = line[21:].strip()
if line.startswith('TRUE'):
status = result.RESULT_TRUE_PROP
elif line.startswith('FALSE'):
status = result.RESULT_FALSE_REACH
else:
status = result.RESULT_UNKNOWN
return status
| #!/usr/bin/env python
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for HIPrec.
"""
REQUIRED_PATHS = [
"fixcalc",
"hiprec",
"hiprec_run.sh",
"oc",
"prelude.ss",
"z3"
]
def executable(self):
executable = util.find_executable('hiprec')
return executable
def name(self):
return 'HIPrec'
def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
return [executable] + options + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
status = result.RESULT_UNKNOWN
for line in output:
if line.startswith('Verification result:('):
line = line[21:].strip()
if line.startswith('TRUE'):
status = result.RESULT_TRUE_PROP
elif line.startswith('FALSE'):
status = result.RESULT_FALSE_REACH
else:
status = result.RESULT_UNKNOWN
return status
| apache-2.0 | Python |
477d65ce4c63ae025c1649f034b27cd081f15cef | fix to check if directory savegames exists and if not create the directory. And it is the 300th commit!! :) :) | HRODEV/Frequency | Frequency/Main.py | Frequency/Main.py | import os
import pygame
from sys import exit
from Game import Game
from Helpers.EventHelpers import EventExist
pygame.init()
clock = pygame.time.Clock()
# make necessary directory
if not os.path.exists("./savegames/"):
os.makedirs("./savegames/")
def Main():
pygame.display.init()
# Music
pygame.mixer.init()
pygame.mixer.music.load('Resources/menu.mp3')
pygame.mixer.music.play()
game = Game()
while True:
events = pygame.event.get()
if EventExist(events, pygame.QUIT):
pygame.quit()
exit()
game = game.Update(events)
game.Draw()
clock.tick()
pygame.display.set_caption('Frequency | FPS: %i' % int(clock.get_fps() + 100))
pygame.display.flip()
Main()
| import pygame
from sys import exit
from Game import Game
from Helpers.EventHelpers import EventExist
pygame.init()
clock = pygame.time.Clock()
def Main():
pygame.display.init()
# Music
pygame.mixer.init()
pygame.mixer.music.load('Resources/menu.mp3')
pygame.mixer.music.play()
game = Game()
while True:
events = pygame.event.get()
if EventExist(events, pygame.QUIT):
pygame.quit()
exit()
game = game.Update(events)
game.Draw()
clock.tick()
pygame.display.set_caption('Frequency | FPS: %i' % int(clock.get_fps() + 100))
pygame.display.flip()
Main()
| mit | Python |
20790f6c9e5fde727fcb0e9c76061cdc10c0f5c6 | remove container if param available | pbelmann/command-line-interface,pbelmann/command-line-interface,bioboxes/command-line-interface,bioboxes/command-line-interface,michaelbarton/command-line-interface,michaelbarton/command-line-interface | biobox_cli/command/run.py | biobox_cli/command/run.py | """
biobox run - Run a biobox Docker image with input parameters
Usage:
biobox run <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
Available Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import biobox_cli.util as util
def run(argv):
opts = util.parse_docopt(__doc__, argv, True)
module = util.select_module("biobox_type", opts["<biobox_type>"])
ctnr = module.run(argv)
if not '--no-rm-container' in argv:
module.remove(ctnr) | """
biobox run - Run a biobox Docker image with input parameters
Usage:
biobox run <biobox_type> <image> [<args>...]
Options:
-h, --help Show this screen.
Available Biobox types:
short_read_assembler Assemble short reads into contigs
"""
import biobox_cli.util as util
def run(argv):
opts = util.parse_docopt(__doc__, argv, True)
util.select_module("biobox_type", opts["<biobox_type>"]).run(argv)
| mit | Python |
e359f1486236b420b12e41c00bb09d95ee1afa79 | Patch for example "test" | zapion/combo-runner,zapion/combo-runner,Mozilla-TWQA/combo-runner,Mozilla-TWQA/combo-runner | examples/test/test_action_runner.py | examples/test/test_action_runner.py | from comborunner import action_decorator
from comborunner.base_action_runner import BaseActionRunner
class TestActionRunner(BaseActionRunner):
action = action_decorator.action
@action
def do_test_pre(self, action=False):
if action:
self.pre_commands.append('rm -rf .env; virtualenv .env; source .env/bin/activate; pip install mozdownload')
self.pre_commands.append('mozdownload -h')
self.pre_commands.append('export TEAM=MOZTWQA')
self.pre_commands.append('./test_pre.sh')
return self
# TODO: for testing
@action
def do_test(self, action=False):
if action:
self.commands.append('./test.sh')
return self
# TODO: for testing
@action
def do_test_post(self, action=False):
if action:
self.post_commands.append('mozdownload -h')
self.post_commands.append('./test_post.sh')
return self
| import action_decorator
from base_action_runner import BaseActionRunner
class TestActionRunner(BaseActionRunner):
action = action_decorator.action
@action
def do_test_pre(self, action=False):
if action:
self.pre_commands.append('rm -rf .env; virtualenv .env; source .env/bin/activate; pip install mozdownload')
self.pre_commands.append('mozdownload -h')
self.pre_commands.append('export TEAM=MOZTWQA')
self.pre_commands.append('./test_pre.sh')
return self
# TODO: for testing
@action
def do_test(self, action=False):
if action:
self.commands.append('./test.sh')
return self
# TODO: for testing
@action
def do_test_post(self, action=False):
if action:
self.post_commands.append('mozdownload -h')
self.post_commands.append('./test_post.sh')
return self
| mpl-2.0 | Python |
12b1ca0f976be91ff48ba8e51f0679314df7212b | add a missing piece of single-command | veloutin/papas,veloutin/papas | lib6ko/protocols/console.py | lib6ko/protocols/console.py | import re
import logging
import pexpect
from cStringIO import StringIO
from lib6ko import parameters as _P
from lib6ko.protocol import Protocol
from lib6ko.architecture import Architecture
_LOG = logging.getLogger("protocols.console")
class ConsoleProtocol(Protocol):
""" Console Protocol """
def __init__(self, parameters):
super(ConsoleProtocol, self).__init__(parameters)
self.arch = Architecture()
self.child = None
self.EXIT_CMD = self.require_param(
_P.CONSOLE_EXIT,
default="exit",
)
self.priv_password = None
@property
def allow_output(self):
return self.arch.console.allow_output
@allow_output.setter
def allow_output(self, value):
self.arch.console.allow_output = value
@property
def child(self):
return self.arch.console.child
@child.setter
def child(self, value):
self.arch.console.child = value
@child.deleter
def child(self):
del self.arch.console.child
@property
def connected(self):
return self.child is not None
def disconnect(self):
if not self.connected:
_LOG.warn("Already Disconnected")
return
_LOG.info("Disconnecting")
self.arch.console.prompt()
#Do not use execute_command as it will raise EOF
self.child.sendline(self.EXIT_CMD)
index = self.child.expect([
self.arch.console.CLOSED,
pexpect.EOF,
pexpect.TIMEOUT,
], timeout = 15 )
self.child.close()
self.child = None
def prompt(self):
self.arch.console.prompt(consume=True)
def execute_command(self, text, expect_noecho=False):
self.arch.console.execute_command(text, expect_noecho)
return self.arch.console.consume_output()
def execute_text(self, text, expect_noecho=False):
return "".join((self.execute_command(line) for line in text.splitlines()))
def get_full_output(self):
return self.arch.console.output
def send_if_no_echo(self, text):
self.arch.console.send_password(text)
self.arch.console.prompt(consume=False)
| import re
import logging
import pexpect
from cStringIO import StringIO
from lib6ko import parameters as _P
from lib6ko.protocol import Protocol
from lib6ko.architecture import Architecture
_LOG = logging.getLogger("protocols.console")
class ConsoleProtocol(Protocol):
""" Console Protocol """
def __init__(self, parameters):
super(ConsoleProtocol, self).__init__(parameters)
self.arch = Architecture()
self.child = None
self.EXIT_CMD = self.require_param(
_P.CONSOLE_EXIT,
default="exit",
)
self.priv_password = None
@property
def allow_output(self):
return self.arch.console.allow_output
@allow_output.setter
def allow_output(self, value):
self.arch.console.allow_output = value
@property
def child(self):
return self.arch.console.child
@child.setter
def child(self, value):
self.arch.console.child = value
@child.deleter
def child(self):
del self.arch.console.child
@property
def connected(self):
return self.child is not None
def disconnect(self):
if not self.connected:
_LOG.warn("Already Disconnected")
return
_LOG.info("Disconnecting")
self.arch.console.prompt()
#Do not use execute_command as it will raise EOF
self.child.sendline(self.EXIT_CMD)
index = self.child.expect([
self.arch.console.CLOSED,
pexpect.EOF,
pexpect.TIMEOUT,
], timeout = 15 )
self.child.close()
self.child = None
def prompt(self):
self.arch.console.prompt(consume=True)
def execute_text(self, text, expect_noecho=False):
for line in text.splitlines():
self.arch.console.execute_command(line, expect_noecho)
return self.arch.console.consume_output()
def get_full_output(self):
return self.arch.console.output
def send_if_no_echo(self, text):
self.arch.console.send_password(text)
self.arch.console.prompt(consume=False)
| agpl-3.0 | Python |
8ddf791b0f7960da089c61b63f996c375ea80ac0 | Fix 0-byte dupe file bug | eldarion/django-chunked-uploads,IRI-Research/django-chunked-uploads,IRI-Research/django-chunked-uploads,eldarion/django-chunked-uploads,IRI-Research/django-chunked-uploads | chunked_uploads/models.py | chunked_uploads/models.py | import datetime
import os
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from django.contrib.auth.models import User
from uuidfield import UUIDField
STORAGE_CLASS = getattr(settings, "CHUNKED_UPLOADS_STORAGE_CLASS", None)
if STORAGE_CLASS:
storage = STORAGE_CLASS()
else:
storage = None
def storage_path(obj, filename):
if isinstance(obj, Upload):
return os.path.join(obj.path_prefix(), filename).replace("/", "-")
# @@@ this replacement is a hack to work around bug in django-storages cloud files backend
# @@@ is this still necessary with cumulus?
return os.path.join(obj.upload.path_prefix(), "chunk")
class Upload(models.Model):
STATE_UPLOADING = 1
STATE_COMPLETE = 2
STATE_CHOICES = [
(STATE_UPLOADING, "Uploading"),
(STATE_COMPLETE, "Complete")
]
user = models.ForeignKey(User, related_name="uploads")
uuid = UUIDField(auto=True, unique=True)
filename = models.CharField(max_length=250)
filesize = models.IntegerField()
upload = models.FileField(storage=storage, upload_to=storage_path)
state = models.IntegerField(choices=STATE_CHOICES, default=STATE_UPLOADING)
created_at = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.upload
def path_prefix(self):
s = str(self.uuid)
return os.path.join(s[:2], s[2:4], s[4:6], s)
def stitch_chunks(self):
fname = os.path.join(settings.MEDIA_ROOT, "tmp-" + storage_path(self, self.filename))
f = open(fname, "wb")
for chunk in self.chunks.all().order_by("pk"):
f.write(chunk.chunk.read())
f.close()
f = ContentFile(open(f.name, "rb").read())
self.upload.save(self.filename, f)
self.state = Upload.STATE_COMPLETE
self.save()
f.close()
os.remove(fname)
def uploaded_size(self):
return self.chunks.all().aggregate(models.Sum("chunk_size")).get("chunk_size__sum")
class Chunk(models.Model):
upload = models.ForeignKey(Upload, related_name="chunks")
chunk = models.FileField(upload_to=storage_path)
chunk_size = models.IntegerField()
created_at = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.chunk
| import datetime
import os
from django.conf import settings
from django.core.files.uploadedfile import UploadedFile
from django.db import models
from django.contrib.auth.models import User
from uuidfield import UUIDField
STORAGE_CLASS = getattr(settings, "CHUNKED_UPLOADS_STORAGE_CLASS", None)
if STORAGE_CLASS:
storage = STORAGE_CLASS()
else:
storage = None
def storage_path(obj, filename):
if isinstance(obj, Upload):
return os.path.join(obj.path_prefix(), filename).replace("/", "-")
# @@@ this replacement is a hack to work around bug in django-storages cloud files backend
# @@@ is this still necessary with cumulus?
return os.path.join(obj.upload.path_prefix(), "chunk")
class Upload(models.Model):
STATE_UPLOADING = 1
STATE_COMPLETE = 2
STATE_CHOICES = [
(STATE_UPLOADING, "Uploading"),
(STATE_COMPLETE, "Complete")
]
user = models.ForeignKey(User, related_name="uploads")
uuid = UUIDField(auto=True, unique=True)
filename = models.CharField(max_length=250)
filesize = models.IntegerField()
upload = models.FileField(storage=storage, upload_to=storage_path)
state = models.IntegerField(choices=STATE_CHOICES, default=STATE_UPLOADING)
created_at = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.upload
def path_prefix(self):
s = str(self.uuid)
return os.path.join(s[:2], s[2:4], s[4:6], s)
def stitch_chunks(self):
f = open(os.path.join(settings.MEDIA_ROOT, storage_path(self, self.filename)), "wb")
for chunk in self.chunks.all().order_by("pk"):
f.write(chunk.chunk.read())
f.close()
f = UploadedFile(open(f.name, "rb"))
self.upload.save(self.filename, f)
self.state = Upload.STATE_COMPLETE
self.save()
f.close()
def uploaded_size(self):
return self.chunks.all().aggregate(models.Sum("chunk_size")).get("chunk_size__sum")
class Chunk(models.Model):
upload = models.ForeignKey(Upload, related_name="chunks")
chunk = models.FileField(upload_to=storage_path)
chunk_size = models.IntegerField()
created_at = models.DateTimeField(default=datetime.datetime.now)
def __unicode__(self):
return self.chunk
| bsd-3-clause | Python |
36888cbc7916d09370f057e03338c81bd640a536 | Add sumcheck verification for train, valid and test subsets | dmitriy-serdyuk/fuel,mjwillson/fuel,bouthilx/fuel,chrishokamp/fuel,lamblin/fuel,markusnagel/fuel,bouthilx/fuel,dwf/fuel,udibr/fuel,dhruvparamhans/fuel,rizar/fuel,orhanf/fuel,chrishokamp/fuel,aalmah/fuel,rodrigob/fuel,aalmah/fuel,rodrigob/fuel,dmitriy-serdyuk/fuel,ejls/fuel,codeaudit/fuel,laurent-dinh/fuel,rizar/fuel,dribnet/fuel,mila-udem/fuel,janchorowski/fuel,markusnagel/fuel,glewis17/fuel,harmdevries89/fuel,hantek/fuel,janchorowski/fuel,EderSantana/fuel,EderSantana/fuel,vdumoulin/fuel,dribnet/fuel,hantek/fuel,dwf/fuel,jbornschein/fuel,laurent-dinh/fuel,glewis17/fuel,dhruvparamhans/fuel,orhanf/fuel,mjwillson/fuel,lamblin/fuel,vdumoulin/fuel,codeaudit/fuel,capybaralet/fuel,mila-udem/fuel,udibr/fuel,harmdevries89/fuel,capybaralet/fuel,ejls/fuel,jbornschein/fuel | tests/test_binarized_mnist.py | tests/test_binarized_mnist.py | import hashlib
import os
from numpy.testing import assert_raises
from fuel import config
from fuel.datasets import BinarizedMNIST
from tests import skip_if_not_available
def test_binarized_mnist_train():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('train', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert hashlib.md5(data).hexdigest() == '0922fefc9a9d097e3b086b89107fafce'
assert dataset.num_examples == 50000
dataset.close(handle)
def test_binarized_mnist_valid():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('valid', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert hashlib.md5(data).hexdigest() == '65e8099613162b3110a7618037011617'
assert dataset.num_examples == 10000
dataset.close(handle)
def test_binarized_mnist_test():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('test', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert hashlib.md5(data).hexdigest() == '0fa539ed8cb008880a61be77f744f06a'
assert dataset.num_examples == 10000
dataset.close(handle)
def test_binarized_mnist_invalid_split():
assert_raises(ValueError, BinarizedMNIST, 'dummy')
def test_binarized_mnist_data_path():
assert BinarizedMNIST('train').data_path == os.path.join(
config.data_path, 'binarized_mnist.hdf5')
| import os
from numpy.testing import assert_raises
from fuel import config
from fuel.datasets import BinarizedMNIST
from tests import skip_if_not_available
def test_binarized_mnist_train():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('train', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert dataset.num_examples == 50000
dataset.close(handle)
def test_binarized_mnist_valid():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('valid', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert dataset.num_examples == 10000
dataset.close(handle)
def test_binarized_mnist_test():
skip_if_not_available(datasets=['binarized_mnist.hdf5'])
dataset = BinarizedMNIST('test', load_in_memory=False)
handle = dataset.open()
data, = dataset.get_data(handle, slice(0, 10))
assert data.dtype == 'uint8'
assert data.shape == (10, 1, 28, 28)
assert dataset.num_examples == 10000
dataset.close(handle)
def test_binarized_mnist_invalid_split():
assert_raises(ValueError, BinarizedMNIST, 'dummy')
def test_binarized_mnist_data_path():
assert BinarizedMNIST('train').data_path == os.path.join(
config.data_path, 'binarized_mnist.hdf5')
| mit | Python |
22f63a8fa80eb83982ddc46944ca49599646ed20 | Bring test coverage of RollingCounter to 100% | ryansb/disq,ryansb/disq | tests/test_rolling_counter.py | tests/test_rolling_counter.py | # Copyright 2015 Ryan Brown <sb@ryansb.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from disq.rolling_counter import RollingCounter
class TestRollingCounter(object):
def test_rank(self):
rc = RollingCounter()
for _ in range(100):
rc.add('foo')
for _ in range(10):
rc.add('bar')
for _ in range(40):
rc.add('baz')
for _ in range(60):
rc.add('quux')
assert rc.max() == 'foo'
assert rc.min() == 'bar'
assert [x[0] for x in rc.ranked()] == ['bar', 'baz', 'quux', 'foo']
def test_expiration(self):
rc = RollingCounter(ttl_secs=0.5)
for _ in range(10):
rc.add('foo')
for _ in range(5):
rc.add('bar')
assert len(rc.keys()) == 2
assert rc.count('foo') == 10
time.sleep(1)
assert len(rc.keys()) == 0
assert rc.max() is None
assert rc.min() is None
assert not rc.ranked()
assert rc.count('foo') == 0
| # Copyright 2015 Ryan Brown <sb@ryansb.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import time
from disq.rolling_counter import RollingCounter
class TestRollingCounter(object):
def test_rank(self):
rc = RollingCounter()
for _ in range(100):
rc.add('foo')
for _ in range(10):
rc.add('bar')
for _ in range(40):
rc.add('baz')
for _ in range(60):
rc.add('quux')
assert rc.max() == 'foo'
assert rc.min() == 'bar'
def test_expiration(self):
rc = RollingCounter(ttl_secs=0.5)
for _ in range(10):
rc.add('foo')
for _ in range(5):
rc.add('bar')
assert len(rc.keys()) == 2
time.sleep(1)
assert len(rc.keys()) == 0
| apache-2.0 | Python |
d5549b384e10839d1112de48a9a016ee1da79a2f | Fix self versioning test. | bhodorog/pytest-vts | tests/test_self_versioning.py | tests/test_self_versioning.py | import shlex
import subprocess
import pytest
import pkg_resources
import pytest_vts
@pytest.fixture
def git_describe():
cmd = shlex.split("git describe --tags --long --match='v*'")
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
described = out.decode().strip("\n")
reversed_described = described[::-1]
rev_gitref, rev_no_of_commits, rev_tag = reversed_described.split("-", 2)
tag = rev_tag[::-1]
no_of_commits = rev_no_of_commits[::-1]
return tag, no_of_commits
def test_version_needs_bumping(git_describe):
last_prod_tag, no_of_commits = git_describe
pv = pkg_resources.parse_version
err_msg_prefix = "Bump the version following PEP440 rules"
err_msg = ("{}: bondi.__version__({}) should be > than last prod tag "
"({})").format(err_msg_prefix,
pytest_vts.__version__, last_prod_tag)
version_greater_than_tag = pv(pytest_vts.__version__) > pv(last_prod_tag)
no_newer_commits_over_tag = no_of_commits == "0"
assert (version_greater_than_tag or no_newer_commits_over_tag), "{}: {}".format(err_msg, "")
| import shlex
import subprocess
import pytest
import pkg_resources
import pytest_vts
@pytest.fixture
def git_describe():
cmd = shlex.split("git describe --tags --long --match='v*'")
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
described = out.decode().strip("\n")
reversed_described = described[::-1]
rev_gitref, rev_no_of_commits, rev_tag = reversed_described.split("-", 2)
tag = rev_tag[::-1]
no_of_commits = rev_no_of_commits[::-1]
return tag, no_of_commits
def test_version_needs_bumping(git_describe):
last_prod_tag, no_of_commits = git_describe
pv = pkg_resources.parse_version
err_msg_prefix = "Bump the version following PEP440 rules"
err_msg = ("{}: bondi.__version__({}) should be > than last prod tag "
"({})").format(err_msg_prefix,
pytest_vts.__version__, last_prod_tag)
version_greater_than_tag = pv(pytest_vts.__version__) > pv(last_prod_tag)
no_newer_commits_over_tag = no_of_commits == "0"
assert (version_greater_than_tag or no_newer_commits_over_tag,
"{}: {}".format(err_msg, ""))
| mit | Python |
eab5cf884aeb09fff0799f5dfa70f6995be30627 | Reorganize rstate so lint can infer the return value easier. | mwhoffman/mwhutils | mwhutils/random/random.py | mwhutils/random/random.py | """
Sample from low-discrepancy sequences.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# local imports
from ._sobol import i4_sobol_generate
# global imports
import numpy as np
# exported symbols
__all__ = ['rstate', 'uniform', 'latin', 'sobol']
def rstate(rng=None):
"""
Return a numpy RandomState object. If an integer value is given then a new
RandomState will be returned with this seed. If None is given then the
global numpy state will be returned. If an already instantiated state is
given this will be passed back.
"""
if rng is None:
return np.random.mtrand._rand
elif isinstance(rng, int):
return np.random.RandomState(rng)
raise ValueError('unknown seed given to rstate')
def uniform(bounds, n, rng=None):
"""
Sample n points uniformly at random from the specified region, given by
a list of [(lo,hi), ..] bounds in each dimension.
"""
# if given a seed or an instantiated RandomState make sure that we use
# it here, but also within the sample_spectrum code.
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random values.
d = len(bounds)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * rng.rand(n, d)
return X
def latin(bounds, n, rng=None):
"""
Sample n points from a latin hypercube within the specified region, given
by a list of [(lo,hi), ..] bounds in each dimension.
"""
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random samples.
d = len(bounds)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * (np.arange(n)[:, None] + rng.rand(n, d)) / n
# shuffle each dimension.
for i in xrange(d):
X[:, i] = rng.permutation(X[:, i])
return X
def sobol(bounds, n, rng=None):
"""
Sample n points from a sobol sequence within the specified region, given by
a list of [(lo,hi), ..] bounds in each dimension.
"""
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random samples.
d = len(bounds)
skip = rng.randint(100, 200)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * i4_sobol_generate(d, n, skip).T
return X
| """
Sample from low-discrepancy sequences.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# local imports
from ._sobol import i4_sobol_generate
# global imports
import numpy as np
# exported symbols
__all__ = ['rstate', 'uniform', 'latin', 'sobol']
def rstate(rng=None):
"""
Return a numpy RandomState object. If an integer value is given then a new
RandomState will be returned with this seed. If None is given then the
global numpy state will be returned. If an already instantiated state is
given this will be passed back.
"""
if rng is None:
rng = np.random.mtrand._rand
elif isinstance(rng, int):
rng = np.random.RandomState(rng)
elif not isinstance(rng, np.random.RandomState):
raise ValueError('unknown seed given to rstate')
return rng
def uniform(bounds, n, rng=None):
"""
Sample n points uniformly at random from the specified region, given by
a list of [(lo,hi), ..] bounds in each dimension.
"""
# if given a seed or an instantiated RandomState make sure that we use
# it here, but also within the sample_spectrum code.
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random values.
d = len(bounds)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * rng.rand(n, d)
return X
def latin(bounds, n, rng=None):
"""
Sample n points from a latin hypercube within the specified region, given
by a list of [(lo,hi), ..] bounds in each dimension.
"""
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random samples.
d = len(bounds)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * (np.arange(n)[:, None] + rng.rand(n, d)) / n
# shuffle each dimension.
for i in xrange(d):
X[:, i] = rng.permutation(X[:, i])
return X
def sobol(bounds, n, rng=None):
"""
Sample n points from a sobol sequence within the specified region, given by
a list of [(lo,hi), ..] bounds in each dimension.
"""
rng = rstate(rng)
bounds = np.array(bounds, ndmin=2, copy=False)
# generate the random samples.
d = len(bounds)
skip = rng.randint(100, 200)
w = bounds[:, 1] - bounds[:, 0]
X = bounds[:, 0] + w * i4_sobol_generate(d, n, skip).T
return X
| bsd-2-clause | Python |
d8fd39a9dc4cc48e73a9f1d63972327431b3f05d | Add restore messages to gerrit auto-expire script | dhiana/puppet-gerrit,open-switch/infra_puppet-gerrit,dhiana/puppet-gerrit,open-switch/infra_puppet-gerrit,open-switch/infra_puppet-gerrit,dhiana/puppet-gerrit | files/scripts/expire_old_reviews.py | files/scripts/expire_old_reviews.py | #!/usr/bin/env python
# Copyright (c) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script is designed to expire old code reviews that have not been touched
# using the following rules:
# 1. if open and no activity in 2 weeks, expire
# 2. if negative comment and no activity in 1 week, expire
import os
import paramiko
import json
import logging
GERRIT_USER = os.environ.get('GERRIT_USER', 'launchpadsync')
GERRIT_SSH_KEY = os.environ.get('GERRIT_SSH_KEY',
'/home/gerrit2/.ssh/launchpadsync_rsa')
logging.basicConfig(format='%(asctime)-6s: %(name)s - %(levelname)s - %(message)s', filename='/var/log/gerrit/expire_reviews.log')
logger= logging.getLogger('expire_reviews')
logger.setLevel(logging.INFO)
logger.info('Starting expire reviews')
logger.info('Connecting to Gerrit')
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('localhost', username=GERRIT_USER, key_filename=GERRIT_SSH_KEY, port=29418)
def expire_patch_set(patch_id, patch_subject, has_negative):
if has_negative:
message= 'code review expired after 1 week of no activity after a negative review, it can be restored using the \`Restore Change\` button above'
else:
message= 'code review expired after 2 weeks of no activity, it can be restored using the \`Restore Change\` button above'
command='gerrit review --abandon --message="{0}" {1}'.format(message, patch_id)
logger.info('Expiring: %s - %s: %s', patch_id, patch_subject, message)
stdin, stdout, stderr = ssh.exec_command(command)
if stdout.channel.recv_exit_status() != 0:
logger.error(stderr.read())
# Query all open with no activity for 2 weeks
logger.info('Searching no activity for 2 weeks')
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --format JSON status:open age:2w')
for line in stdout:
row= json.loads(line)
if not row.has_key('rowCount'):
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], False)
# Query all reviewed with no activity for 1 week
logger.info('Searching no activity on negative review for 1 week')
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --all-approvals --format JSON status:reviewed age:1w')
for line in stdout:
row= json.loads(line)
if not row.has_key('rowCount'):
# Search for negative approvals
for approval in row['currentPatchSet']['approvals']:
if approval['value'] == '-1':
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], True)
break
logger.info('End expire review')
| #!/usr/bin/env python
# Copyright (c) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script is designed to expire old code reviews that have not been touched
# using the following rules:
# 1. if open and no activity in 2 weeks, expire
# 2. if negative comment and no activity in 1 week, expire
import os
import paramiko
import json
import logging
GERRIT_USER = os.environ.get('GERRIT_USER', 'launchpadsync')
GERRIT_SSH_KEY = os.environ.get('GERRIT_SSH_KEY',
'/home/gerrit2/.ssh/launchpadsync_rsa')
logging.basicConfig(format='%(asctime)-6s: %(name)s - %(levelname)s - %(message)s', filename='/var/log/gerrit/expire_reviews.log')
logger= logging.getLogger('expire_reviews')
logger.setLevel(logging.INFO)
logger.info('Starting expire reviews')
logger.info('Connecting to Gerrit')
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect('localhost', username=GERRIT_USER, key_filename=GERRIT_SSH_KEY, port=29418)
def expire_patch_set(patch_id, patch_subject, has_negative):
if has_negative:
message= 'code review expired after 1 week of no activity after a negative review'
else:
message= 'code review expired after 2 weeks of no activity'
command='gerrit review --abandon --message="{0}" {1}'.format(message, patch_id)
logger.info('Expiring: %s - %s: %s', patch_id, patch_subject, message)
stdin, stdout, stderr = ssh.exec_command(command)
if stdout.channel.recv_exit_status() != 0:
logger.error(stderr.read())
# Query all open with no activity for 2 weeks
logger.info('Searching no activity for 2 weeks')
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --format JSON status:open age:2w')
for line in stdout:
row= json.loads(line)
if not row.has_key('rowCount'):
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], False)
# Query all reviewed with no activity for 1 week
logger.info('Searching no activity on negative review for 1 week')
stdin, stdout, stderr = ssh.exec_command('gerrit query --current-patch-set --all-approvals --format JSON status:reviewed age:1w')
for line in stdout:
row= json.loads(line)
if not row.has_key('rowCount'):
# Search for negative approvals
for approval in row['currentPatchSet']['approvals']:
if approval['value'] == '-1':
expire_patch_set(row['currentPatchSet']['revision'], row['subject'], True)
break
logger.info('End expire review')
| apache-2.0 | Python |
d99f1b4a10d4c2c918a939a4671583411a3df466 | Remove unnecessary logging from migration 019 | saeki-masaki/glance,paramite/glance,SUSE-Cloud/glance,redhat-openstack/glance,scripnichenko/glance,klmitch/glance,darren-wang/gl,JioCloud/glance,sigmavirus24/glance,rickerc/glance_audit,ntt-sic/glance,sigmavirus24/glance,citrix-openstack-build/glance,openstack/glance,SUSE-Cloud/glance,openstack/glance,wkoathp/glance,jumpstarter-io/glance,jumpstarter-io/glance,redhat-openstack/glance,klmitch/glance,takeshineshiro/glance,paramite/glance,cloudbau/glance,rickerc/glance_audit,rajalokan/glance,dims/glance,cloudbau/glance,tanglei528/glance,stevelle/glance,ozamiatin/glance,kfwang/Glance-OVA-OVF,akash1808/glance,rajalokan/glance,citrix-openstack-build/glance,openstack/glance,JioCloud/glance,vuntz/glance,wkoathp/glance,scripnichenko/glance,kfwang/Glance-OVA-OVF,ozamiatin/glance,saeki-masaki/glance,ntt-sic/glance,tanglei528/glance,dims/glance,takeshineshiro/glance,vuntz/glance,akash1808/glance,darren-wang/gl,stevelle/glance | glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py | glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
def get_images_table(meta):
return sqlalchemy.Table('images', meta, autoload=True)
def get_image_locations_table(meta):
return sqlalchemy.Table('image_locations', meta, autoload=True)
def upgrade(migrate_engine):
meta = sqlalchemy.schema.MetaData(migrate_engine)
images_table = get_images_table(meta)
image_locations_table = get_image_locations_table(meta)
image_records = images_table.select().execute().fetchall()
for image in image_records:
if image.location is not None:
values = {
'image_id': image.id,
'value': image.location,
'created_at': image.created_at,
'updated_at': image.updated_at,
'deleted': image.deleted,
'deleted_at': image.deleted_at,
}
image_locations_table.insert(values=values).execute()
def downgrade(migrate_engine):
meta = sqlalchemy.schema.MetaData(migrate_engine)
images_table = get_images_table(meta)
image_locations_table = get_image_locations_table(meta)
image_records = image_locations_table.select().execute().fetchall()
for image_location in image_records:
images_table.update(values={'location': image_location.value})\
.where(images_table.c.id == image_location.image_id)\
.execute()
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
import logging as base_logging
import glance.openstack.common.log as logging
LOG = logging.getLogger(__name__)
sa_logger = base_logging.getLogger('sqlalchemy.engine')
sa_logger.setLevel(base_logging.DEBUG)
def get_images_table(meta):
return sqlalchemy.Table('images', meta, autoload=True)
def get_image_locations_table(meta):
return sqlalchemy.Table('image_locations', meta, autoload=True)
def upgrade(migrate_engine):
meta = sqlalchemy.schema.MetaData(migrate_engine)
images_table = get_images_table(meta)
image_locations_table = get_image_locations_table(meta)
image_records = images_table.select().execute().fetchall()
for image in image_records:
if image.location is not None:
values = {
'image_id': image.id,
'value': image.location,
'created_at': image.created_at,
'updated_at': image.updated_at,
'deleted': image.deleted,
'deleted_at': image.deleted_at,
}
image_locations_table.insert(values=values).execute()
def downgrade(migrate_engine):
meta = sqlalchemy.schema.MetaData(migrate_engine)
images_table = get_images_table(meta)
image_locations_table = get_image_locations_table(meta)
image_records = image_locations_table.select().execute().fetchall()
for image_location in image_records:
images_table.update(values={'location': image_location.value})\
.where(images_table.c.id == image_location.image_id)\
.execute()
| apache-2.0 | Python |
84ecdcb9408f10a8acc78a00d31aec9684a9bc34 | Fix assets URL | koorukuroo/findaconf,cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf,cuducos/findaconf | findaconf/tests/test_file_routes.py | findaconf/tests/test_file_routes.py | # coding: utf-8
import unittest
from random import randrange
from findaconf import app
class TestFileRoutes(unittest.TestCase):
def setUp(self):
# init
app.testing = True
self.app = app.test_client()
def tearDown(self):
pass
# test routes from blueprint/file_routes.py
def test_poster(self):
resp = self.app.get('/poster.png', data={'rand': randrange(1000, 9999)})
assert resp.status_code == 200
assert resp.mimetype == 'image/png'
def test_favicon(self):
types = ['image/vnd.microsoft.icon', 'image/x-icon']
resp = self.app.get('/favicon.ico')
assert resp.status_code == 200
assert resp.mimetype in types
def test_robots(self):
resp = self.app.get('/robots.txt')
assert resp.status_code == 200
assert resp.mimetype == 'text/plain'
def test_foundation_icons(self):
base_url = '/assets/'
extensions = ['eot', 'svg', 'ttf', 'woff', 'py']
types = ['application/vnd.ms-fontobject',
'application/octet-stream',
'application/x-font-woff',
'image/svg+xml']
for ext in extensions:
path = '{}foundation-icons.{}'.format(base_url, ext)
resp = self.app.get(path)
if ext != 'py':
assert resp.status_code == 200
assert resp.mimetype in types
else:
assert resp.status_code == 404
| # coding: utf-8
import unittest
from random import randrange
from findaconf import app
class TestFileRoutes(unittest.TestCase):
def setUp(self):
# init
app.testing = True
self.app = app.test_client()
def tearDown(self):
pass
# test routes from blueprint/file_routes.py
def test_poster(self):
resp = self.app.get('/poster.png', data={'rand': randrange(1000, 9999)})
assert resp.status_code == 200
assert resp.mimetype == 'image/png'
def test_favicon(self):
types = ['image/vnd.microsoft.icon', 'image/x-icon']
resp = self.app.get('/favicon.ico')
assert resp.status_code == 200
assert resp.mimetype in types
def test_robots(self):
resp = self.app.get('/robots.txt')
assert resp.status_code == 200
assert resp.mimetype == 'text/plain'
def test_foundation_icons(self):
base_url = '/assets/css/'
extensions = ['eot', 'svg', 'ttf', 'woff', 'py']
types = ['application/vnd.ms-fontobject',
'application/octet-stream',
'application/x-font-woff',
'image/svg+xml']
for ext in extensions:
path = '{}foundation-icons.{}'.format(base_url, ext)
resp = self.app.get(path)
if ext != 'py':
assert resp.status_code == 200
assert resp.mimetype in types
else:
assert resp.status_code == 404
| mit | Python |
9efe63d87c9fcbdf36e0a47e006779bb64014f36 | add poor sketch of operation store/load to factory module | genome/flow-workflow,genome/flow-workflow,genome/flow-workflow | flow_workflow/operations/factory.py | flow_workflow/operations/factory.py | import pkg_resources
import re
MODULE = None
_NEXT_OPERATION_ID = 0
def adapter(operation_type, *args, **kwargs):
global _NEXT_OPERATION_ID
for ep in pkg_resources.iter_entry_points('flow_workflow.adapters',
sanitize_operation_type(operation_type)):
cls = ep.load()
obj = cls(operation_id=_NEXT_OPERATION_ID, *args, **kwargs)
_NEXT_OPERATION_ID += 1
return obj
else:
raise RuntimeError('Could not find adapter for operation type: %s (%s)'
% (operation_type, sanitize_operation_type(operation_type)))
def adapter_from_xml(xml, *args, **kwargs):
return adapter(get_operation_type(xml), *args, xml=xml, **kwargs)
def get_operation_type(xml):
operation_type_node = xml.find('operationtype')
return operation_type_node.attrib['typeClass']
def sanitize_operation_type(operation_type_string):
return re.sub(' ', '_',
re.sub('^Workflow::OperationType::', '', operation_type_string))
# XXX use pkg_resources
def load_operation(net, operation_id):
if operation_id is None:
# XXX Is this the behavior we want?
return NullOperation()
operation_dict = net.variables[operation_variable_name(operation_id)]
cls = getattr(MODULE, operation_dict.pop('_class'))
return cls(**operation_dict)
def store_operation(net, operation):
net.variables[operation.variable_name] = operation.as_dict
def operation_variable_name(operation_id):
return '_wf_op_%operation_dict' % operation_id
| import pkg_resources
import re
_NEXT_OPERATION_ID = 0
def adapter(operation_type, *args, **kwargs):
global _NEXT_OPERATION_ID
for ep in pkg_resources.iter_entry_points('flow_workflow.adapters',
sanitize_operation_type(operation_type)):
cls = ep.load()
obj = cls(operation_id=_NEXT_OPERATION_ID, *args, **kwargs)
_NEXT_OPERATION_ID += 1
return obj
else:
raise RuntimeError('Could not find adapter for operation type: %s (%s)'
% (operation_type, sanitize_operation_type(operation_type)))
def adapter_from_xml(xml, *args, **kwargs):
return adapter(get_operation_type(xml), *args, xml=xml, **kwargs)
def get_operation_type(xml):
operation_type_node = xml.find('operationtype')
return operation_type_node.attrib['typeClass']
def sanitize_operation_type(operation_type_string):
return re.sub(' ', '_',
re.sub('^Workflow::OperationType::', '', operation_type_string))
| agpl-3.0 | Python |
cdba51a7b0013c9a6eea2a761c733bce3218ea4c | fix error with version | airtonix/tasty-social-pie | tasty_social_pie/__init__.py | tasty_social_pie/__init__.py | __version__ = "0.0.1" | version = "0.0.1" | mit | Python |
b78a34bc1152b6da18068393b7e6470a220084f9 | set true bit | AppGeo/ckanext-agsview,AppGeo/ckanext-agsview,AppGeo/ckanext-agsview,AppGeo/ckanext-agsview | ckanext/agsview/plugin.py | ckanext/agsview/plugin.py | # encoding: utf-8
import logging
import ckan.plugins as p
log = logging.getLogger(__name__)
ignore_empty = p.toolkit.get_validator('ignore_empty')
DEFAULT_AGS_FORMATS = ['ags']
class AGSView(p.SingletonPlugin):
'''This plugin makes views of arcgis online resources'''
p.implements(p.IConfigurer, inherit=True)
p.implements(p.IResourceView, inherit=True)
def update_config(self, config):
p.toolkit.add_public_directory(config, 'public')
p.toolkit.add_template_directory(config, 'templates')
p.toolkit.add_resource('public', 'ckanext-agsview')
def info(self):
return {'name': 'ags_view',
'title': p.toolkit._('ArcGIS Server'),
'icon': 'compass',
'schema': {
'ags_url': [ignore_empty, unicode]
},
'iframed': true,
'default_title': p.toolkit._('ArcGIS Server'),
}
def can_view(self, data_dict):
return (data_dict['resource'].get('format', '').lower()
in DEFAULT_AGS_FORMATS)
def view_template(self, context, data_dict):
return 'ags_view.html'
def form_template(self, context, data_dict):
return 'ags_form.html'
| # encoding: utf-8
import logging
import ckan.plugins as p
log = logging.getLogger(__name__)
ignore_empty = p.toolkit.get_validator('ignore_empty')
DEFAULT_AGS_FORMATS = ['ags']
class AGSView(p.SingletonPlugin):
'''This plugin makes views of arcgis online resources'''
p.implements(p.IConfigurer, inherit=True)
p.implements(p.IResourceView, inherit=True)
def update_config(self, config):
p.toolkit.add_public_directory(config, 'public')
p.toolkit.add_template_directory(config, 'templates')
p.toolkit.add_resource('public', 'ckanext-agsview')
def info(self):
return {'name': 'ags_view',
'title': p.toolkit._('ArcGIS Server'),
'icon': 'compass',
'schema': {
'ags_url': [ignore_empty, unicode]
},
'iframed': True,
'default_title': p.toolkit._('ArcGIS Server'),
}
def can_view(self, data_dict):
return (data_dict['resource'].get('format', '').lower()
in DEFAULT_AGS_FORMATS)
def view_template(self, context, data_dict):
return 'ags_view.html'
def form_template(self, context, data_dict):
return 'ags_form.html'
| mit | Python |
ec9e5866b65a6dffd8a529491460da69185b64cf | Add os and arch prediction code | trackmon/trackmon-server,trackmon/trackmon-server | manager/trackmon_manager.py | manager/trackmon_manager.py | import sys
import os
from subprocess import call
import urllib.request
import json
from pprint import pprint
# User needs to install postgres first
trackmon_server_api_info = "https://api.github.com/repos/atom/atom/releases/latest"
current_os = ""
current_arch = ""
if sys.platform.startswith('linux'):
current_os = "linux"
elif sys.platform.startswith('win32'):
current_os = "windows"
elif sys.platform.startswith('darwin'):
current_os = "darwin"
else:
print("Your system is not supported by this installer.")
sys.exit(0)
def is_os_64bit():
return platform.machine().endswith('64')
if is_os_64bit == True:
current_arch = 64
class color:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def split(string, splitters): #MAY RESOLVE ALL PROBLEMS WITH CSV
final = [string]
for x in splitters:
for i,s in enumerate(final):
if x in s and x != s:
left, right = s.split(x, 1)
final[i] = left
final.insert(i + 1, x)
final.insert(i + 2, right)
return final
def download(url, path):
with urllib.request.urlopen(url) as response, open(path, 'wb') as output:
shutil.copyfileobj(response, output)
def get_dl_from_gh_api(url):
response = urllib.request.urlopen(url)
data = response.read()
jsonresp = json.loads(data.decode('utf-8'))
#pprint(jsonresp)
#print(jsonresp["assets"])
for asset in jsonresp["assets"]:
assetname = str(asset["name"])
splitted_assetname = split(assetname, "_")
sys_and_arch = split(splitted_assetname[2], ["-", ".") # Dot for windows versions
if sys_and_arch[0] == current_os and sys_and_arch[2] == current_arch:
print("Downloading server...", end='')
download(str(asset["browser_download_url"]), assetname)
print("done.")
return
print("Didn't find any fitting version, you might have to download it manually")
def main():
if "-install" in sys.argv:
print("Installing everything")
# TODO: Verify that postgres exist
# TODO: Download trackmon server
get_dl_from_gh_api(trackmon_server_api_info)
elif "-installapi" in sys.argv:
print("Installing API backend only")
# TODO: Download trackmon server
elif "-installdb" in sys.argv:
print("Installing database only")
# TODO: Verify that postgres exist
elif "-installfrontend" in sys.argv:
print("Installing frontend only")
# TODO: Later...
elif "-update" in sys.argv:
print("Updating components")
if __name__ == "__main__":
main()
| import sys
import os
from subprocess import call
import urllib.request
import json
#from pprint import pprint
# User needs to install postgres first
trackmon_server_api_info = "https://api.github.com/repos/paulkramme/roverpi/releases/latest"
def download(url, path):
with urllib.request.urlopen(url) as response, open(path, 'wb') as output:
shutil.copyfileobj(response, output)
def get_dl_from_gh_api(url):
response = urllib.request.urlopen(url)
data = response.read()
jsonresp = json.loads(data.decode('utf-8'))
#pprint(json)
for asset in jsonresp["assets"]:
print(str(asset["name"])) # BUG: Nothing prints here...
print("Done.")
def main():
if "-install" in sys.argv:
print("Installing everything")
# TODO: Verify that postgres exist
# TODO: Download trackmon server
get_dl_from_gh_api(trackmon_server_api_info)
elif "-installapi" in sys.argv:
print("Installing API backend only")
# TODO: Download trackmon server
elif "-installdb" in sys.argv:
print("Installing database only")
# TODO: Verify that postgres exist
elif "-installfrontend" in sys.argv:
print("Installing frontend only")
# TODO: Later...
elif "-update" in sys.argv:
print("Updating components")
if __name__ == "__main__":
main()
| bsd-2-clause | Python |
e5a21a40f73978359d3e6a26fcbbe9b74269ac57 | Fix alembic migration history | alfredhq/alfred-db | alfred_db/migrations/versions/29a56dc34a2b_add_permissions.py | alfred_db/migrations/versions/29a56dc34a2b_add_permissions.py | """Add permissions
Revision ID: 29a56dc34a2b
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:06:24.088307
"""
# revision identifiers, used by Alembic.
revision = '29a56dc34a2b'
down_revision = '30c0aec2ca06'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('permissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('admin', sa.Boolean(), nullable=False),
sa.Column('push', sa.Boolean(), nullable=False),
sa.Column('pull', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(
['repository_id'], ['repositories.id'], ondelete='CASCADE',
),
sa.ForeignKeyConstraint(
['user_id'], ['users.id'], ondelete='CASCADE',
),
sa.PrimaryKeyConstraint('id')
)
def downgrade():
op.drop_table('permissions')
| """Add permissions
Revision ID: 29a56dc34a2b
Revises: 4fdf1059c4ba
Create Date: 2012-09-02 14:06:24.088307
"""
# revision identifiers, used by Alembic.
revision = '29a56dc34a2b'
down_revision = '5245d0b46f8'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('permissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('admin', sa.Boolean(), nullable=False),
sa.Column('push', sa.Boolean(), nullable=False),
sa.Column('pull', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(
['repository_id'], ['repositories.id'], ondelete='CASCADE',
),
sa.ForeignKeyConstraint(
['user_id'], ['users.id'], ondelete='CASCADE',
),
sa.PrimaryKeyConstraint('id')
)
def downgrade():
op.drop_table('permissions')
| isc | Python |
bb85acf74a01a093246f9aab105dee66bfd57d10 | fix apache.wsgi | hoehnp/sirius,hoehnp/sirius,claritylab/sirius,hoehnp/sirius,claritylab/sirius,claritylab/sirius,claritylab/sirius,hoehnp/sirius,claritylab/sirius,claritylab/sirius,claritylab/sirius,hoehnp/sirius,hoehnp/sirius,hoehnp/sirius | lucida/commandcenter/apache/apache.wsgi | lucida/commandcenter/apache/apache.wsgi | import sys
import os
import logging
logging.basicConfig(stream=sys.stderr)
current_dir = os.path.abspath(os.path.dirname(__file__))
parent_dir = os.path.abspath(current_dir + "/../")
sys.path.insert(0, parent_dir)
with open(current_dir + "/envs.txt") as f:
for line in f:
os.environ[line.split("=")[0]] = line.split("=")[1][:-1]
from app import app as application
| import sys
import os
current_dir = os.path.abspath(os.path.dirname(__file__))
parent_dir = os.path.abspath(current_dir + "/../")
sys.path.insert(0, parent_dir)
from app import app as application
| bsd-3-clause | Python |
a7c3f819dafe34a765cb930a7ffc5eaac85bfaa4 | add short command options | rtluckie/seria | seria/cli.py | seria/cli.py | # -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', '-x', 'out_fmt', flag_value='xml')
@click.option('--yaml', '--yml', '-y', 'out_fmt', flag_value='yaml')
@click.option('--yml', 'out_fmt', flag_value='yaml')
@click.option('--json', '-j', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | # -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | mit | Python |
8dd6223485fb2d59d1adde236db061c8c4fd6f0f | Bump version | thombashi/sqlitebiter,thombashi/sqlitebiter | sqlitebiter/__version__.py | sqlitebiter/__version__.py | # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.28.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.28.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
75c0ad4147bd3e0a56a06a340d9a4a812c1fe6b1 | Add [blank,null]=True to Image model to prevent errors in bulk creation of image | lo-windigo/fragdev,lo-windigo/fragdev | images/models.py | images/models.py | # This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
# This file is part of FragDev.
#
# FragDev is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FragDev is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FragDev. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.utils.text import slugify
from imghdr import what
class Image(models.Model):
'''
Represents a single uploaded image
'''
title = models.CharField(max_length=250)
desc = models.TextField()
date = models.DateTimeField(auto_now_add=True)
imgFile = models.FileField(upload_to='img/')
slug = models.SlugField()
content_type = models.CharField(max_length=30,
blank=True,
null=True)
def save(self, *args, **kwargs):
# Create a slug for this image
if not self.id and self.slug is '':
self.slug = slugify(self.title)
# Generate different versions
# TODO
# Save the content type (required for headers later)
self.content_type = what(self.imgFile)
super(Image, self).save(*args, **kwargs)
def get_absolute_url(self):
return self.imgFile.url
def __str__(self):
return self.title
| # This file is part of the FragDev Website.
#
# the FragDev Website is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# the FragDev Website is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the FragDev Website. If not, see <http://www.gnu.org/licenses/>.
# This file is part of FragDev.
#
# FragDev is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FragDev is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FragDev. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.utils.text import slugify
from imghdr import what
class Image(models.Model):
'''
Represents a single uploaded image
'''
title = models.CharField(max_length=250)
desc = models.TextField()
date = models.DateTimeField(auto_now_add=True)
imgFile = models.FileField(upload_to='img/')
slug = models.SlugField()
content_type = models.CharField(max_length=30)
def save(self, *args, **kwargs):
# Create a slug for this image
if not self.id and self.slug is '':
self.slug = slugify(self.title)
# Generate different versions
# TODO
# Save the content type (required for headers later)
self.content_type = what(self.imgFile)
super(Image, self).save(*args, **kwargs)
def get_absolute_url(self):
return self.imgFile.url
def __str__(self):
return self.title
| agpl-3.0 | Python |
f7dcb7fc3ecdb35711e5a7488599ee4b0a501053 | Add protocol detection basic logic | rjschwei/WALinuxAgent,hglkrijger/WALinuxAgent,rjschwei/WALinuxAgent,andyliuliming/WALinuxAgent,andyliuliming/WALinuxAgent,nathanleclaire/WALinuxAgent,nathanleclaire/WALinuxAgent,Azure/WALinuxAgent,hglkrijger/WALinuxAgent,Azure/WALinuxAgent | azure/linuxagent/protocol.py | azure/linuxagent/protocol.py | #!/usr/bin/env python
#
# Windows Azure Linux Agent
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+ and Openssl 1.0+
#
from azure.linuxagent.util import *
from azure.linuxagent.logger import *
__ProtocolV1FilePath = os.path.join(LibDir, 'protocolv1')
__ProtocolV2FilePath = os.path.join(LibDir, 'protocolv2')
__SleepDurations = [0, 10, 30, 60, 60]
def DetectEndpoint():
detected = False
for duration in __SleepDurations:
Log("Detect endpoint...")
OpenPortForDhcp()
if(_DetectEndpoint()):
detected = True
break
sleep(duration)
RestartNetwork()
if not detected:
raise Exception("Detect endpoint failed.")
def _DetectEndpoint():
metadataServer = DetectMetadataServer()
if metadataServer:
SetFileContent(__ProtocolV2FilePath, '')
return True
else:
os.remove(__ProtocolV2FilePath)
wireServer = DetectWireServer()
if wireServer:
SetFileContent(__ProtocolV1FilePath, wireServer)
return True
else:
os.remove(__ProtocolV1FilePath)
return False
__MeatadataServerAddr=''
def DetectMetadataServer():
pass
def DetectWireServer():
pass
def GetProtocol():
if os.path.isfile(__ProtocolV2FilePath):
return ProtocolV2()
elif os.path.isfile(__ProtocolV1FilePath):
wireServer = GetFileContent(__ProtocolV1FilePath)
return ProtocolV1(wireServer)
else:
raise Exeption("Endpoint not detected")
class ProtocolV1(Protocol):
def __init__(self, endpoint):
self.endpoint = endpoint
def getVmInfo(self):
pass
def getCerts(self):
pass
def getExtensions(self):
pass
def getOvf(self):
pass
def reportProvisionStatus(self):
pass
def reportAgentStatus(self):
pass
def reportExtensionStatus(self):
pass
def reportEvent(self):
pass
class ProtocolV2(protocol):
def getVmInfo(self):
pass
def getCerts(self):
pass
def getExtensions(self):
pass
def getOvf(self):
pass
def reportProvisionStatus(self):
pass
def reportAgentStatus(self):
pass
def reportExtensionStatus(self):
pass
def reportEvent(self):
pass
| #!/usr/bin/env python
#
# Windows Azure Linux Agent
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.4+ and Openssl 1.0+
#
ProtocolV1File = os.path.join(LibDir, 'protocolv1')
ProtocolV2File = os.path.join(LibDir, 'protocolv2')
def DetectEndpoint():
pass
def GetProtocol():
if os.path.isfile(ProtocolV2File):
return ProtocolV2()
elif os.path.isfile(ProtocolV1File):
return ProtocolV1()
else:
raise Exeption("Endpoint not detected")
class Protocol():
def getVmInfo(self):
pass
def getCerts(self):
pass
def getExtensions(self):
pass
def getOvf(self):
pass
def reportProvisionStatus(self):
pass
def reportAgentStatus(self):
pass
def reportExtensionStatus(self):
pass
def reportEvent(self):
pass
class ProtocolV1(Protocol):
pass
class ProtocolV2(protocol):
pass
| apache-2.0 | Python |
9373accc4381a2582838638f95301076b5684563 | set --follow-imports silent for mypy linter | bosondata/badwolf,bosondata/badwolf,bosondata/badwolf | badwolf/lint/linters/mypy.py | badwolf/lint/linters/mypy.py | # -*- coding: utf-8 -*-
import logging
from badwolf.utils import run_command
from badwolf.lint import Problem
from badwolf.lint.linters import PythonLinter
from badwolf.lint.utils import in_path
logger = logging.getLogger(__name__)
class MypyLinter(PythonLinter):
name = 'mypy'
default_pattern = '*.py *.pyi'
def is_usable(self):
if not in_path('mypy'):
return False
# mypy only avaiable in Python 3
python_version = self.python_name[6:]
major, *_ = python_version.split('.', 1)
if int(major) < 3:
return False
return True
def lint_files(self, files):
command = [
self.python_name,
'-m',
'mypy',
'--follow-imports',
'silent',
]
command += files
_, output = run_command(command, split=True, include_errors=True, cwd=self.working_dir)
if not output:
raise StopIteration()
for line in output:
filename, line, level, message = self._parse_line(line)
if level == 'note':
continue
is_error = level == 'error'
yield Problem(filename, line, message, self.name, is_error=is_error)
def _parse_line(self, line):
"""mypy only generates results as stdout.
Parse the output for real data."""
parts = line.split(':', 3)
return parts[0], int(parts[1]), parts[2].strip(), parts[3].strip()
| # -*- coding: utf-8 -*-
import logging
from badwolf.utils import run_command
from badwolf.lint import Problem
from badwolf.lint.linters import PythonLinter
from badwolf.lint.utils import in_path
logger = logging.getLogger(__name__)
class MypyLinter(PythonLinter):
name = 'mypy'
default_pattern = '*.py *.pyi'
def is_usable(self):
if not in_path('mypy'):
return False
# mypy only avaiable in Python 3
python_version = self.python_name[6:]
major, *_ = python_version.split('.', 1)
if int(major) < 3:
return False
return True
def lint_files(self, files):
command = [
self.python_name,
'-m',
'mypy',
]
command += files
_, output = run_command(command, split=True, include_errors=True, cwd=self.working_dir)
if not output:
raise StopIteration()
for line in output:
filename, line, level, message = self._parse_line(line)
is_error = level == 'error'
yield Problem(filename, line, message, self.name, is_error=is_error)
def _parse_line(self, line):
"""mypy only generates results as stdout.
Parse the output for real data."""
parts = line.split(':', 3)
return parts[0], int(parts[1]), parts[2].strip(), parts[3].strip()
| mit | Python |
736b92ee87fef7cf11c0d741aed7a15ec3e0c37e | Update timetable generator. | alfredo/microdash,alfredo/microdash | microdash/core/timetable.py | microdash/core/timetable.py | import requests
from datetime import datetime
from BeautifulSoup import BeautifulSoup
USER_AGENT = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:31.0) '
'Gecko/20100101 Firefox/31.0')
HEADERS = {
'User-Agent': USER_AGENT,
}
URL_PREFIX = ('http://www.abelliogreateranglia.co.uk/travel-information'
'/journey-planning/live-departures/station/')
# 7-8 minutes to commute to station
STATION_COMMUTE = 60 * 8
def parse_time(time_str):
"""Generates today datetime from a HH:MM formatted time."""
now = datetime.now()
hour_str, minutes_str = time_str.split(':')
return datetime(
now.year, now.month, now.day, int(hour_str), int(minutes_str))
def parse_response(content, valid_stations):
"""Parse the content response."""
columns = ['destination', 'time', 'status', 'origin', 'operator']
soup = BeautifulSoup(content)
timetable = []
for row in soup.findAll('tr'):
# Ignore header row:
if row.find('th'):
continue
data = dict([(k, v.text) for k, v in zip(columns, row.findAll('td'))])
# Ignore non related stations:
if data['destination'].lower() in valid_stations:
timetable.append(data)
return timetable
def get_train_datetime(item):
"""Parse the train time of arrival."""
time_str = item['status'] if ':' in item['status'] else item['time']
return parse_time(time_str)
def update_schedule(item):
"""Add more detail to the schedule.."""
item['destination'] = item['destination'].upper()
train_datetime = get_train_datetime(item)
time_diff = train_datetime - datetime.now()
home_departure = time_diff.total_seconds() - STATION_COMMUTE
if home_departure > 0:
depart_in = 'Depart in %s mins.' % int((home_departure / 60))
else:
minutes = int(time_diff.total_seconds() / 60)
depart_in = ('Unlikely. Train in %s mins.' % minutes)
item['depart_in'] = depart_in
return item
def get_timetable(shortcode, valid_stations):
"""Prepares the timetable to be rendered."""
url = '%s%s' % (URL_PREFIX, shortcode)
response = requests.get(url, headers=HEADERS)
if not response.status_code == 200:
response = requests.get(url, headers=HEADERS)
raw_timetable = parse_response(response.content, valid_stations)
return map(update_schedule, raw_timetable)
| import requests
from datetime import datetime
from BeautifulSoup import BeautifulSoup
USER_AGENT = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:31.0) '
'Gecko/20100101 Firefox/31.0')
HEADERS = {
'User-Agent': USER_AGENT,
}
URL_PREFIX = ('http://www.abelliogreateranglia.co.uk/travel-information'
'/journey-planning/live-departures/station/')
# 7-8 minutes to commute to station
STATION_COMMUTE = 60 * 8
def get_datetime(now, time_str):
hour_str, minutes_str = time_str.split(':')
return datetime(
now.year, now.month, now.day, int(hour_str), int(minutes_str))
def parse_response(content, valid_stations):
columns = ['destination', 'time', 'status', 'origin', 'operator']
soup = BeautifulSoup(content)
timetable = []
for row in soup.findAll('tr'):
# Ignore header row:
if row.find('th'):
continue
data = dict([(k, v.text) for k, v in zip(columns, row.findAll('td'))])
# Ignore non related stations:
if data['destination'].lower() in valid_stations:
timetable.append(data)
return timetable
def get_schedule(raw_timetable):
now = datetime.now()
for item in raw_timetable:
item['destination'] = item['destination'].upper()
train_datetime = get_datetime(now, item['time'])
diff = train_datetime - now
item['datetime'] = train_datetime
home_departure = diff.total_seconds() - STATION_COMMUTE
item['home_departure'] = home_departure
if home_departure > 0:
depart_in = 'Depart in %s mins.' % int((home_departure / 60))
else:
depart_in = ('Unlikely. Train in %s mins.'
% int(diff.total_seconds() / 60))
item['depart_in'] = depart_in
return raw_timetable
def get_timetable(shortcode, valid_stations):
"""Prepares the timetable."""
url = '%s%s' % (URL_PREFIX, shortcode)
response = requests.get(url, headers=HEADERS)
if not response.status_code == 200:
response = requests.get(url, headers=HEADERS)
raw_timetable = parse_response(response.content, valid_stations)
return get_schedule(raw_timetable)
| bsd-3-clause | Python |
1bf3695213623926219297d2b441297bd0afb2e1 | Fix response | ben174/bart-crime,ben174/bart-crime | reports/views.py | reports/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from crime import settings
from reports.models import Report, Incident, Comment
from reports import scraper
from django.http import HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
def report_webhook(request):
if request.GET.get('trigger') != settings.get_secret('TRIGGER_KEY'):
return HttpResponse('go away')
report = Report.objects.create(body=request.body)
report.create_incidents()
return HttpResponse('incident created')
def do_scrape(request):
if request.GET.get('trigger') != settings.get_secret('TRIGGER_KEY'):
return HttpResponse('go away')
scraper.scrape()
return HttpResponse('done scraping')
def home(request):
date = datetime.datetime.now()
return listing(request, date)
def about(request):
return render(request, 'about.html')
def date(request, year, month, day):
date = datetime.date(int(year), int(month), int(day))
return listing(request, date)
def listing(request, date):
tomorrow = datetime.datetime.now() + datetime.timedelta(days=1)
curr_date = Incident.objects.filter(
incident_date__lte=date,
).latest('incident_dt').incident_date
try:
next_date = Incident.objects.filter(
incident_date__gt=curr_date,
incident_date__lt=tomorrow,
).earliest('incident_dt').incident_date
except Incident.DoesNotExist:
next_date = None
try:
prev_date = Incident.objects.filter(
incident_date__lt=curr_date,
).latest('incident_dt').incident_date
except Incident.DoesNotExist:
prev_date = None
incidents = Incident.objects.filter(
incident_dt__isnull=False,
incident_date=curr_date,
).order_by('-incident_dt')
return render(request, 'home.html', {
'curr_date': curr_date,
'incidents': incidents,
'prev_date': prev_date,
'next_date': next_date,
})
def incident(request, incident_id):
incident = get_object_or_404(Incident, pk=incident_id)
if request.method == 'POST':
Comment.objects.create(incident=incident, text=request.POST.get('comment'))
return redirect('incident', incident_id=incident_id)
return render(request, 'incident.html', {'incident': incident})
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from crime import settings
from reports.models import Report, Incident, Comment
from reports import scraper
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt
def report_webhook(request):
if request.GET.get('trigger') != settings.get_secret('TRIGGER_KEY'):
return None
report = Report.objects.create(body=request.body)
report.create_incidents()
def do_scrape(request):
if request.GET.get('trigger') != settings.get_secret('TRIGGER_KEY'):
return None
scraper.scrape()
def home(request):
date = datetime.datetime.now()
return listing(request, date)
def about(request):
return render(request, 'about.html')
def date(request, year, month, day):
date = datetime.date(int(year), int(month), int(day))
return listing(request, date)
def listing(request, date):
tomorrow = datetime.datetime.now() + datetime.timedelta(days=1)
curr_date = Incident.objects.filter(
incident_date__lte=date,
).latest('incident_dt').incident_date
try:
next_date = Incident.objects.filter(
incident_date__gt=curr_date,
incident_date__lt=tomorrow,
).earliest('incident_dt').incident_date
except Incident.DoesNotExist:
next_date = None
try:
prev_date = Incident.objects.filter(
incident_date__lt=curr_date,
).latest('incident_dt').incident_date
except Incident.DoesNotExist:
prev_date = None
incidents = Incident.objects.filter(
incident_dt__isnull=False,
incident_date=curr_date,
).order_by('-incident_dt')
return render(request, 'home.html', {
'curr_date': curr_date,
'incidents': incidents,
'prev_date': prev_date,
'next_date': next_date,
})
def incident(request, incident_id):
incident = get_object_or_404(Incident, pk=incident_id)
if request.method == 'POST':
Comment.objects.create(incident=incident, text=request.POST.get('comment'))
return redirect('incident', incident_id=incident_id)
return render(request, 'incident.html', {'incident': incident})
| mit | Python |
cdb6b46db15fc5d5a4c517682a609dfff9530173 | Complete intelmq_psql_initdb script | certtools/intelmq,aaronkaplan/intelmq,pkug/intelmq,certtools/intelmq,robcza/intelmq,pkug/intelmq,certtools/intelmq,robcza/intelmq,aaronkaplan/intelmq,pkug/intelmq,robcza/intelmq,robcza/intelmq,aaronkaplan/intelmq,pkug/intelmq | intelmq/bin/intelmq_psql_initdb.py | intelmq/bin/intelmq_psql_initdb.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Generates a SQL command file with commands to create the events table.
Reads the Data-Harmonization.md document from
`/opt/intelmq/docs/Data-Harmonization.md` and generates an SQL command from it.
The SQL file is saved in `/tmp/initdb.sql`.
"""
from __future__ import print_function, unicode_literals
import json
import sys
from intelmq import HARMONIZATION_CONF_FILE
def main():
OUTPUTFILE = "/tmp/initdb.sql"
FIELDS = dict()
try:
print("INFO - Reading %s file" % HARMONIZATION_CONF_FILE)
with open(HARMONIZATION_CONF_FILE, 'r') as fp:
DATA = json.load(fp)['event']
except IOError:
print("ERROR - Could not find %s" % HARMONIZATION_CONF_FILE)
print("ERROR - Make sure that you have intelmq installed.")
sys.exit(-1)
for field in DATA.keys():
value = DATA[field]
if value['type'] in ('String', 'Base64', 'URL', 'FQDN', 'JSON',
'MalwareName', 'ClassificationType'):
dbtype = 'varchar({})'.format(value.get('length', 2000))
elif value['type'] in ('IPAddress', 'IPNetwork'):
dbtype = 'inet'
elif value['type'] == 'DateTime':
dbtype = 'timestamp with time zone'
elif value['type'] == 'Boolean':
dbtype = 'boolean'
elif value['type'] == 'Integer':
dbtype = 'integer'
elif value['type'] in ('Float', 'Accuracy'):
dbtype = 'real'
elif value['type'] == 'UUID':
dbtype = 'UUID'
else:
print('Unknow type {!r}, assuming varchar(2000) by default'
''.format(value['type']))
dbtype = 'varchar(2000)'
FIELDS[field] = dbtype
initdb = """CREATE table events (
"id" BIGSERIAL UNIQUE PRIMARY KEY,"""
for field, field_type in sorted(FIELDS.items()):
initdb += '\n "{name}" {type},'.format(name=field, type=field_type)
initdb = initdb[:-1] # remove last ','
initdb += "\n);"
with open(OUTPUTFILE, 'w') as fp:
print("INFO - Writing %s file" % OUTPUTFILE)
fp.write(initdb)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Generates a SQL command file with commands to create the events table.
Reads the Data-Harmonization.md document from
`/opt/intelmq/docs/Data-Harmonization.md` and generates an SQL command from it.
The SQL file is saved in `/tmp/initdb.sql`.
"""
from __future__ import print_function, unicode_literals
import json
import sys
from intelmq import HARMONIZATION_CONF_FILE
def main():
OUTPUTFILE = "/tmp/initdb.sql"
FIELDS = dict()
try:
print("INFO - Reading %s file" % HARMONIZATION_CONF_FILE)
with open(HARMONIZATION_CONF_FILE, 'r') as fp:
DATA = json.load(fp)['event']
except IOError:
print("ERROR - Could not find %s" % HARMONIZATION_CONF_FILE)
print("ERROR - Make sure that you have intelmq installed.")
sys.exit(-1)
for field in DATA.keys():
value = DATA[field]
if value['type'] in ('String', 'Base64', 'URL', 'FQDN'):
dbtype = 'varchar({})'.format(value.get('length', 2000))
elif value['type'] in ('IPAddress', 'IPNetwork'):
dbtype = 'inet'
elif value['type'] == 'DateTime':
dbtype = 'timestamp with time zone'
elif value['type'] == 'Boolean':
dbtype = 'boolean'
elif value['type'] == 'Integer':
dbtype = 'integer'
elif value['type'] == 'Float':
dbtype = 'real'
elif value['type'] == 'UUID':
dbtype = 'UUID'
else:
print('Unknow type {!r}, assuming varchar(2000) by default'
''.format(value['type']))
dbtype = 'varchar(2000)'
FIELDS[field] = dbtype
# TODO: ClassificationType
# TODO: MalwareName
initdb = """CREATE table events (
"id" BIGSERIAL UNIQUE PRIMARY KEY,"""
for field, field_type in sorted(FIELDS.items()):
initdb += '\n "{name}" {type},'.format(name=field, type=field_type)
print(initdb[-1])
initdb = initdb[:-1]
initdb += "\n);"
with open(OUTPUTFILE, 'w') as fp:
print("INFO - Writing %s file" % OUTPUTFILE)
fp.write(initdb)
if __name__ == '__main__':
main()
| agpl-3.0 | Python |
0107a8919d264b522faa825c36f0be0644681fb3 | create incremental table if it doesn't exist | nave91/dbt,nave91/dbt,analyst-collective/dbt,analyst-collective/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt,fishtown-analytics/dbt | dbt/templates.py | dbt/templates.py |
class BaseCreateTemplate(object):
template = """
create {materialization} "{schema}"."{identifier}" {dist_qualifier} {sort_qualifier} as (
{query}
);"""
incremental_template = """
create temporary table "{identifier}__dbt_incremental_tmp" as (
SELECT * FROM (
{query}
) as tmp LIMIT 0
);
create table if not exists "{schema}"."{identifier}" (like "{identifier}__dbt_incremental_tmp");
insert into "{schema}"."{identifier}" (
with dbt_inc_sbq as (
select max("{incremental_field}") as dbt_max from "{schema}"."{identifier}"
), dbt_raw_sbq as (
{query}
)
select dbt_raw_sbq.* from dbt_raw_sbq
join dbt_inc_sbq on dbt_raw_sbq."{incremental_field}" > dbt_inc_sbq.dbt_max or dbt_inc_sbq.dbt_max is null
order by dbt_raw_sbq."{incremental_field}"
);
"""
label = "build"
@classmethod
def model_name(cls, base_name):
return base_name
def wrap(self, opts):
if opts['materialization'] in ('table', 'view'):
return self.template.format(**opts)
elif opts['materialization'] == 'incremental':
return self.incremental_template.format(**opts)
else:
raise RuntimeError("Invalid materialization parameter ({})".format(opts['materialization']))
class TestCreateTemplate(object):
template = """
create view "{schema}"."{identifier}" {dist_qualifier} {sort_qualifier} as (
SELECT * FROM (
{query}
) as tmp LIMIT 0
);"""
label = "test"
@classmethod
def model_name(cls, base_name):
return 'test_{}'.format(base_name)
def wrap(self, opts):
return self.template.format(**opts)
|
class BaseCreateTemplate(object):
template = """
create {materialization} "{schema}"."{identifier}" {dist_qualifier} {sort_qualifier} as (
{query}
);"""
incremental_template = """
insert into "{schema}"."{identifier}" (
with dbt_inc_sbq as (
select max("{incremental_field}") as dbt_max from "{schema}"."{identifier}"
), dbt_raw_sbq as (
{query}
)
select dbt_raw_sbq.* from dbt_raw_sbq
join dbt_inc_sbq on dbt_raw_sbq."{incremental_field}" > dbt_inc_sbq.dbt_max or dbt_inc_sbq.dbt_max is null
order by dbt_raw_sbq."{incremental_field}"
);
"""
label = "build"
@classmethod
def model_name(cls, base_name):
return base_name
def wrap(self, opts):
if opts['materialization'] in ('table', 'view'):
return self.template.format(**opts)
elif opts['materialization'] == 'incremental':
return self.incremental_template.format(**opts)
else:
raise RuntimeError("Invalid materialization parameter ({})".format(opts['materialization']))
class TestCreateTemplate(object):
template = """
create view "{schema}"."{identifier}" {dist_qualifier} {sort_qualifier} as (
SELECT * FROM (
{query}
) as tmp LIMIT 0
);"""
label = "test"
@classmethod
def model_name(cls, base_name):
return 'test_{}'.format(base_name)
def wrap(self, opts):
return self.template.format(**opts)
| apache-2.0 | Python |
57cbc821e8278c45c7fa48d05661c6e3d73a0a67 | Update twitch.py | TingPing/plugins,TingPing/plugins | HexChat/twitch.py | HexChat/twitch.py | import hexchat
__module_name__ = 'Twitch'
__module_author__ = 'TingPing'
__module_version__ = '2'
__module_description__ = 'Better integration with Twitch.tv'
# Very much a work in progress...
# Commands from http://help.twitch.tv/customer/portal/articles/659095-chat-moderation-commands
# /ban may conflict with other scripts nothing we can do about that
# /clear is an existing command, just override it
commands = ('timeout', 'slow', 'slowoff', 'subscribers', 'subscribersoff',
'mod', 'unmod', 'mods', 'clear', 'ban', 'unban', 'commercial')
aliases = {'op':'mod', 'deop':'unmod'}
def twitchOnly(func):
def is_twitch(*args, **kwargs):
server = hexchat.get_info('server')
if 'twitch.tv' in server or 'justin.tv' in server:
return func(*args, **kwargs)
else:
return hexchat.EAT_NONE
return is_twitch
# Twitch returns a lot of 'unknown command' errors, ignore them.
@twitchOnly
def servererr_cb(word, word_eol, userdata):
return hexchat.EAT_ALL
# Print jtv messages in server tab.
@twitchOnly
def privmsg_cb(word, word_eol, userdata):
if word[0][1:4] == 'jtv':
hexchat.find_context(channel=hexchat.get_info('network')).set()
hexchat.emit_print('Server Text', word_eol[3][1:])
return hexchat.EAT_ALL
# Eat any message starting with a '.', twitch eats all of them too.
@twitchOnly
def yourmsg_cb(word, word_eol, userdata):
if word[1][0] == '.':
return hexchat.EAT_ALL
# Just prefix with a '.'.
@twitchOnly
def command_cb(word, word_eol, alias):
if alias:
if len(word_eol) > 1:
hexchat.command('say .{} {}'.format(alias, word_eol[1]))
else:
hexchat.command('say .{}'.format(alias))
else:
hexchat.command('say .{}'.format(word_eol[0]))
return hexchat.EAT_ALL
for command in commands:
hexchat.hook_command(command, command_cb)
for command, alias in aliases.items():
hexchat.hook_command(command, command_cb, alias)
hexchat.hook_print('Your Message', yourmsg_cb)
hexchat.hook_server('421', servererr_cb)
hexchat.hook_server('PRIVMSG', privmsg_cb)
| import hexchat
__module_name__ = 'Twitch'
__module_author__ = 'TingPing'
__module_version__ = '1'
__module_description__ = 'Better integration with Twitch.tv'
# Very much a work in progress...
# Commands from http://help.twitch.tv/customer/portal/articles/659095-chat-moderation-commands
# /ban may conflict with other scripts nothing we can do about that
# /clear is an existing command, just override it
commands = ('timeout', 'slow', 'slowoff', 'subscribers', 'subscribersoff',
'mod', 'unmod', 'mods', 'clear', 'ban', 'unban', 'commercial')
aliases = {'op':'mod', 'deop':'unmod'}
def is_twitch():
server = hexchat.get_info('server')
if 'twitch.tv' in server or 'justin.tv' in server:
return True
else: return False
# Twitch returns a lot of 'unknown command' errors, ignore them.
def servererr_cb(word, word_eol, userdata):
if is_twitch():
return hexchat.EAT_ALL
# Print jtv messages in server tab.
def privmsg_cb(word, word_eol, userdata):
if is_twitch():
if word[0][1:4] == 'jtv':
hexchat.find_context(channel=hexchat.get_info('network')).set()
hexchat.emit_print('Server Text', word_eol[3][1:])
return hexchat.EAT_ALL
# Eat any message starting with a '.', twitch eats all of them too.
def yourmsg_cb(word, word_eol, userdata):
if is_twitch() and word[1][0] == '.':
return hexchat.EAT_ALL
# Just prefix with a '.'.
def command_cb(word, word_eol, alias):
if is_twitch():
if alias:
if len(word_eol) > 1:
hexchat.command('say .{} {}'.format(alias, word_eol[1]))
else:
hexchat.command('say .{}'.format(alias))
else:
hexchat.command('say .{}'.format(word_eol[0]))
return hexchat.EAT_ALL
for command in commands:
hexchat.hook_command(command, command_cb)
for command, alias in aliases.items():
hexchat.hook_command(command, command_cb, alias)
hexchat.hook_print('Your Message', yourmsg_cb)
hexchat.hook_server('421', servererr_cb)
hexchat.hook_server('PRIVMSG', privmsg_cb)
| mit | Python |
509c3fe88e1d0d096ad18bb10750466d61bfd7a6 | Update wordhl.py | TingPing/plugins,TingPing/plugins | HexChat/wordhl.py | HexChat/wordhl.py | import hexchat
__module_name__ = 'wordhl'
__module_author__ = 'TingPing'
__module_version__ = '1'
__module_description__ = 'Highlights some words of importance'
# When you want to notice something, but not really get 'highlighted'
hlwords = ('hexchat', )
edited = False
def print_cb(word, word_eol, userdata, attr):
global edited
if edited or attr.time: # Ignore our own events or bouncer playback
return
if any(_word in word[1] for _word in hlwords):
msg = word[1]
for _word in hlwords:
msg = msg.replace(_word, '\00319' + _word + '\00399').strip() # Color green
edited = True
hexchat.emit_print('Channel Message', word[0], msg)
edited = False
hexchat.command('gui color 3')
return hexchat.EAT_ALL
hexchat.hook_print_attrs('Channel Message', print_cb)
| import hexchat
__module_name__ = 'wordhl'
__module_author__ = 'TingPing'
__module_version__ = '1'
__module_description__ = 'Highlights some words of importance'
# When you want to notice something, but not really get 'highlighted'
hlwords = ('hexchat', )
edited = False
def print_cb(word, word_eol, userdata, attr):
global edited
if edited or attr.time: # Ignore our own events or bouncer playback
return
if any(_word in word[1] for _word in hlwords):
for _word in hlwords:
msg = word[1].replace(_word, '\00319' + _word + '\00399').strip() # Color green
edited = True
hexchat.emit_print('Channel Message', word[0], msg)
edited = False
hexchat.command('gui color 3')
return hexchat.EAT_ALL
hexchat.hook_print_attrs('Channel Message', print_cb)
| mit | Python |
8521837cc3f57e11278fc41bfd0e5d106fc140fe | Simplify database query when looking up an alias | jbittel/django-deflect | deflect/views.py | deflect/views.py | from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import ShortURLAlias
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = ShortURLAlias.objects.get(alias=key.lower())
key_id = alias.redirect_id
except ShortURLAlias.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect: %s" % e)
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
| from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import ShortURLAlias
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = ShortURLAlias.objects.select_related().get(alias=key.lower())
key_id = alias.redirect.id
except ShortURLAlias.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect: %s" % e)
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
| bsd-3-clause | Python |
91d53c4e00c3ca7a2f520b1133b15088589c25d9 | Fix logging | wengole/nasman,wengole/nasman,wengole/nasman,wengole/nasman,wengole/nasman | nasman/snapshots/tasks.py | nasman/snapshots/tasks.py | from datetime import datetime
import logging
from celery import shared_task
from nasman.snapshots.models import File
from .utils.zfs import ZFSUtil
logger = logging.getLogger(__name__)
def build_file_list(path):
"""
Build a list a list of files (and directories) by iterating recursively
over the given path
:param path: The path to iterate over
:type path: pathlib.Path
:return: A tuple of directories and files
:rtype: tuple(list, list)
"""
dirs = []
files = []
for x in path.iterdir():
try:
if x.is_symlink():
continue
elif x.is_dir():
dirs.append(x)
new_dirs, new_files = build_file_list(x)
dirs.extend(new_dirs)
files.extend(new_files)
elif x.is_file():
files.append(x)
except PermissionError:
continue
return dirs, files
def collect_files(path):
"""
Recursively add all files and directories of the given path to the
database
:param path: The path to iterate over recursively
:type path: pathlib.Path
"""
logger.info('Building file list...')
start_time = datetime.now()
dirs, files = build_file_list(path)
seconds = (datetime.now() - start_time).total_seconds()
logger.info(
'Found %d files and directories in %.3fs',
(len(dirs) + len(files)),
seconds
)
return dirs, files
@shared_task
def index_snapshot(snap_name):
snap = ZFSUtil.get_snapshot(snap_name)
if not snap.is_mounted:
snap.mount()
dirs, files = collect_files(snap.mountpoint)
logger.info('Saving files to database')
for x in dirs + files:
obj = File(
full_path=x,
snapshot_name=snap_name
)
obj.save()
| from datetime import datetime
import logging
from celery import shared_task
from nasman.snapshots.models import File
from .utils.zfs import ZFSUtil
logger = logging.getLogger(__name__)
def build_file_list(path):
"""
Build a list a list of files (and directories) by iterating recursively
over the given path
:param path: The path to iterate over
:type path: pathlib.Path
:return: A tuple of directories and files
:rtype: tuple(list, list)
"""
dirs = []
files = []
for x in path.iterdir():
try:
if x.is_symlink():
continue
elif x.is_dir():
dirs.append(x)
new_dirs, new_files = build_file_list(x)
dirs.extend(new_dirs)
files.extend(new_files)
elif x.is_file():
files.append(x)
except PermissionError:
continue
return dirs, files
def collect_files(path):
"""
Recursively add all files and directories of the given path to the
database
:param path: The path to iterate over recursively
:type path: pathlib.Path
"""
logger.info('Building file list...')
start_time = datetime.now()
dirs, files = build_file_list(path)
seconds = (datetime.now() - start_time).total_seconds()
logger.info(
'Found {0} files and directories in {1:.3}s'.format(
len(dirs) + len(files),
seconds
)
)
return dirs, files
@shared_task
def index_snapshot(snap_name):
snap = ZFSUtil.get_snapshot(snap_name)
if not snap.is_mounted:
snap.mount()
dirs, files = collect_files(snap.mountpoint)
logger.info('Saving files to database')
for x in dirs + files:
obj = File(
full_path=x,
snapshot_name=snap_name
)
obj.save()
| bsd-3-clause | Python |
52962f30f1215b705bee0beb70b40819ddf0164e | Reorder subplots | ofgulban/scikit-image,bennlich/scikit-image,paalge/scikit-image,almarklein/scikit-image,chintak/scikit-image,newville/scikit-image,pratapvardhan/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,emon10005/scikit-image,paalge/scikit-image,almarklein/scikit-image,chintak/scikit-image,paalge/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image,chintak/scikit-image,Britefury/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,chriscrosscutler/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,youprofit/scikit-image,oew1v07/scikit-image,keflavich/scikit-image,SamHames/scikit-image,juliusbierk/scikit-image,keflavich/scikit-image,Midafi/scikit-image,rjeli/scikit-image,dpshelio/scikit-image,vighneshbirodkar/scikit-image,WarrenWeckesser/scikits-image,robintw/scikit-image,michaelaye/scikit-image,warmspringwinds/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,GaZ3ll3/scikit-image,michaelpacer/scikit-image,ajaybhat/scikit-image,youprofit/scikit-image,blink1073/scikit-image,ClinicalGraphics/scikit-image,Midafi/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,chintak/scikit-image,jwiggins/scikit-image,jwiggins/scikit-image,ajaybhat/scikit-image,SamHames/scikit-image,rjeli/scikit-image,newville/scikit-image,Britefury/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,GaZ3ll3/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,bsipocz/scikit-image,vighneshbirodkar/scikit-image | doc/examples/plot_denoise.py | doc/examples/plot_denoise.py | """
=============================
Denoising the picture of Lena
=============================
In this example, we denoise a noisy version of the picture of Lena using the
total variation and bilateral denoising filter.
These algorithms typically produce "posterized" images with flat domains
separated by sharp edges. It is possible to change the degree of posterization
by controlling the tradeoff between denoising and faithfulness to the original
image.
Total variation filter
----------------------
The result of this filter is an image that has a minimal total variation norm,
while being as close to the initial image as possible. The total variation is
the L1 norm of the gradient of the image, and minimizing the total variation.
Bilateral filter
----------------
A bilateral filter is an edge-preserving and noise reducing denoising filter.
It averages pixel based on their spatial closeness and radiometric similarity.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, color, img_as_float
from skimage.filter import tv_denoise, denoise_bilateral
lena = img_as_float(data.lena())
lena = lena[220:300, 220:320]
noisy = lena + 0.5 * lena.std() * np.random.random(lena.shape)
noisy = np.clip(noisy, 0, 1)
fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5))
ax[0, 0].imshow(noisy)
ax[0, 0].axis('off')
ax[0, 0].set_title('noisy')
ax[0, 1].imshow(tv_denoise(noisy, weight=0.1))
ax[0, 1].axis('off')
ax[0, 1].set_title('TV')
ax[0, 2].imshow(denoise_bilateral(noisy, sigma_color=0.03, sigma_range=15))
ax[0, 2].axis('off')
ax[0, 2].set_title('Bilateral')
ax[1, 0].imshow(tv_denoise(noisy, weight=0.2))
ax[1, 0].axis('off')
ax[1, 0].set_title('(more) TV')
ax[1, 1].imshow(denoise_bilateral(noisy, sigma_color=0.06, sigma_range=15))
ax[1, 1].axis('off')
ax[1, 1].set_title('(more) Bilateral')
ax[1, 2].imshow(lena)
ax[1, 2].axis('off')
ax[1, 2].set_title('original')
fig.subplots_adjust(wspace=0.02, hspace=0.2,
top=0.9, bottom=0.05, left=0, right=1)
plt.show()
| """
=============================
Denoising the picture of Lena
=============================
In this example, we denoise a noisy version of the picture of Lena using the
total variation and bilateral denoising filter.
These algorithms typically produce "posterized" images with flat domains
separated by sharp edges. It is possible to change the degree of posterization
by controlling the tradeoff between denoising and faithfulness to the original
image.
Total variation filter
----------------------
The result of this filter is an image that has a minimal total variation norm,
while being as close to the initial image as possible. The total variation is
the L1 norm of the gradient of the image, and minimizing the total variation.
Bilateral filter
----------------
A bilateral filter is an edge-preserving and noise reducing denoising filter.
It averages pixel based on their spatial closeness and radiometric similarity.
"""
import numpy as np
import matplotlib.pyplot as plt
from skimage import data, color, img_as_float
from skimage.filter import tv_denoise, denoise_bilateral
lena = img_as_float(data.lena())
lena = lena[220:300, 220:320]
noisy = lena + 0.5 * lena.std() * np.random.random(lena.shape)
noisy = np.clip(noisy, 0, 1)
fig, ax = plt.subplots(nrows=2, ncols=3, figsize=(8, 5))
ax[0, 0].imshow(lena)
ax[0, 0].axis('off')
ax[0, 0].set_title('original')
ax[0, 1].imshow(tv_denoise(noisy, weight=0.02))
ax[0, 1].axis('off')
ax[0, 1].set_title('TV')
ax[0, 2].imshow(tv_denoise(noisy, weight=0.05))
ax[0, 2].axis('off')
ax[0, 2].set_title('(more) TV')
ax[1, 0].imshow(noisy)
ax[1, 0].axis('off')
ax[1, 0].set_title('original')
ax[1, 1].imshow(denoise_bilateral(noisy, sigma_color=0.02, sigma_range=15))
ax[1, 1].axis('off')
ax[1, 1].set_title('Bilateral')
ax[1, 2].imshow(denoise_bilateral(noisy, sigma_color=0.05, sigma_range=15))
ax[1, 2].axis('off')
ax[1, 2].set_title('(more) Bilateral')
fig.subplots_adjust(wspace=0.02, hspace=0.2,
top=0.9, bottom=0.05, left=0, right=1)
plt.show()
| bsd-3-clause | Python |
1848f7a9a8a4cba76324cf6b6032ea027a389c39 | revert to old version of jquery | philchristensen/modu,philchristensen/modu,philchristensen/modu | src/modu/assets/__init__.py | src/modu/assets/__init__.py | # modu
# Copyright (c) 2006-2010 Phil Christensen
# http://modu.bubblehouse.org
#
#
# See LICENSE for details
from modu.util import tags
# DEFAULT_JQUERY_VERSION = '1.11.0'
# DEFAULT_JQUERY_UI_VERSION = '1.9.2'
DEFAULT_JQUERY_VERSION = '1.4.2'
DEFAULT_JQUERY_UI_VERSION = '1.7.1'
def activate_jquery(req):
req.content.report('header', tags.script(type="text/javascript", src="//code.jquery.com/jquery-%s.min.js" % DEFAULT_JQUERY_VERSION)[''])
def activate_jquery_ui(req):
req.content.report('header', tags.script(type="text/javascript", src="//code.jquery.com/ui/%s/jquery-ui.min.js" % DEFAULT_JQUERY_UI_VERSION)[''])
| # modu
# Copyright (c) 2006-2010 Phil Christensen
# http://modu.bubblehouse.org
#
#
# See LICENSE for details
from modu.util import tags
DEFAULT_JQUERY_VERSION = '1.11.0'
DEFAULT_JQUERY_UI_VERSION = '1.9.2'
def activate_jquery(req):
req.content.report('header', tags.script(type="text/javascript", src="//ajax.googleapis.com/ajax/libs/jquery/%s/jquery.min.js" % DEFAULT_JQUERY_VERSION)[''])
def activate_jquery_ui(req):
req.content.report('header', tags.script(type="text/javascript", src="//ajax.googleapis.com/ajax/libs/jqueryui/%s/jquery-ui.min.js" % DEFAULT_JQUERY_UI_VERSION)[''])
| mit | Python |
c322e4f2202f3b004a4f41bd4c2786f88292cf37 | Validate the presence of CONTENT_STORE. | ktbartholomew/preparer-sphinx,ktbartholomew/preparer-sphinx,deconst/preparer-sphinx,deconst/preparer-sphinx | deconstrst/deconstrst.py | deconstrst/deconstrst.py | # -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import sys
import os
from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS
def build(argv):
"""
Invoke Sphinx with locked arguments to generate JSON content.
"""
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--submit",
help="Submit results to the content store.",
action="store_true")
args = parser.parse_args(argv[1:])
content_store_url = os.getenv("CONTENT_STORE")
if args.submit and not content_store_url:
print("Please set CONTENT_STORE if submitting results.",
file=sys.stderr)
sys.exit(1)
# I am a terrible person
BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder
# Lock source and destination to the same paths as the Makefile.
srcdir, destdir = '.', '_build/deconst'
doctreedir = os.path.join(destdir, '.doctrees')
app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
doctreedir=doctreedir, buildername="deconst",
confoverrides={}, status=sys.stdout, warning=sys.stderr,
freshenv=True, warningiserror=False, tags=[], verbosity=0,
parallel=1)
app.build(True, [])
if app.statuscode != 0 or not args.submit:
return app.statuscode
print("submit active")
return 0
| # -*- coding: utf-8 -*-
import argparse
import sys
from os import path
from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS
def build(argv):
"""
Invoke Sphinx with locked arguments to generate JSON content.
"""
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--submit",
help="Submit results to the content store.",
action="store_true")
args = parser.parse_args(argv[1:])
# I am a terrible person
BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder
# Lock source and destination to the same paths as the Makefile.
srcdir, destdir = '.', '_build/deconst'
doctreedir = path.join(destdir, '.doctrees')
app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
doctreedir=doctreedir, buildername="deconst",
confoverrides={}, status=sys.stdout, warning=sys.stderr,
freshenv=True, warningiserror=False, tags=[], verbosity=0,
parallel=1)
app.build(True, [])
if app.statuscode != 0 or not args.submit:
return app.statuscode
print("submit active")
return 0
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.