commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
3e280e64874d1a68b6bc5fc91a8b6b28968b74e3 | Store project and module componentes separately | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin | meinberlin/apps/dashboard2/contents.py | meinberlin/apps/dashboard2/contents.py | class DashboardContents:
_registry = {'project': {}, 'module': {}}
def __getitem__(self, identifier):
component = self._registry['project'].get(identifier, None)
if not component:
component = self._registry['module'].get(identifier)
return component
def __contains__(self, identifier):
return (identifier in self._registry['project'] or
identifier in self._registry['module'])
def register_project(self, component):
self._registry['project'][component.identifier] = component
def register_module(self, component):
self._registry['module'][component.identifier] = component
def get_project_components(self):
return self._registry['project'].items()
def get_module_components(self):
return self._registry['module'].items()
content = DashboardContents()
| class DashboardContents:
_registry = {}
content = DashboardContents()
| agpl-3.0 | Python |
4af13edc87fee4083491fcc14197040300b4575f | Remove hardcoded url | almeidapaulopt/frappe,frappe/frappe,StrellaGroup/frappe,mhbu50/frappe,saurabh6790/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,yashodhank/frappe,adityahase/frappe,adityahase/frappe,almeidapaulopt/frappe,mhbu50/frappe,adityahase/frappe,mhbu50/frappe,mhbu50/frappe,adityahase/frappe,frappe/frappe,almeidapaulopt/frappe,frappe/frappe,saurabh6790/frappe,saurabh6790/frappe,yashodhank/frappe,saurabh6790/frappe,yashodhank/frappe,StrellaGroup/frappe | frappe/integrations/frappe_providers/__init__.py | frappe/integrations/frappe_providers/__init__.py | # imports - module imports
from frappe.integrations.frappe_providers.frappecloud import frappecloud_migrator
def migrate_to(local_site, frappe_provider):
if frappe_provider in ("frappe.cloud", "frappecloud.com"):
return frappecloud_migrator(local_site, frappe_provider)
else:
print("{} is not supported yet".format(frappe_provider))
sys.exit(1)
| # imports - module imports
from frappe.integrations.frappe_providers.frappecloud import frappecloud_migrator
def migrate_to(local_site, frappe_provider):
if frappe_provider in ("frappe.cloud", "frappecloud.com"):
frappe_provider = "staging.frappe.cloud"
return frappecloud_migrator(local_site, frappe_provider)
else:
print("{} is not supported yet".format(frappe_provider))
sys.exit(1)
| mit | Python |
46ee9dad4030c8628d951abb84a667c7398dd834 | Fix error when multiple objects were returned for coordinators in admin | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | src/coordinators/models.py | src/coordinators/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
# superusers and managers see everything
if (user.is_superuser
or hasattr(user, 'coordinator') and user.coordinator.is_manager):
return qs
# don't show anything to unconfigured users
if not hasattr(user, 'coordinator') or not user.coordinator.district:
return qs.none()
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs).distinct()
| from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
# superusers and managers see everything
if (user.is_superuser
or hasattr(user, 'coordinator') and user.coordinator.is_manager):
return qs
# don't show anything to unconfigured users
if not hasattr(user, 'coordinator') or not user.coordinator.district:
return qs.none()
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| mit | Python |
199d0cdc681675d5e20b2167424becaef8391391 | Fix typo | vuolter/pyload,vuolter/pyload,vuolter/pyload | module/plugins/hoster/ZippyshareCom.py | module/plugins/hoster/ZippyshareCom.py | # -*- coding: utf-8 -*-
import re
from os import path
from urllib import unquote
from urlparse import urljoin
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
__version__ = "0.59"
__pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
__description__ = """Zippyshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
NAME_PATTERN = r'var linkz =.*/(?P<N>.+)";'
SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'>File does not exist on this server<'
COOKIES = [(".zippyshare.com", "ziplocale", "en")]
def setup(self):
self.multiDL = True
self.chunkLimit = -1
self.resumeDownload = True
def handleFree(self):
url = self.get_link()
self.download(url)
def getFileInfo(self):
info = super(ZippyshareCom, self).getFileInfo()
self.pyfile.name = info['name'] = unquote(info['name'])
return info
def get_checksum(self):
try:
a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups())
c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups())
b = (a1 % a2) * (c1 % c2)
except:
self.error(_("Unable to calculate checksum"))
else:
return b + 18
def get_link(self):
checksum = self.get_checksum()
p_url = path.join("d", self.info['KEY'], str(checksum), self.pyfile.name)
dl_link = urljoin(self.info['HOST'], p_url)
return dl_link
getInfo = create_getInfo(ZippyshareCom)
| # -*- coding: utf-8 -*-
import re
from os import path
from urllib import unquote
from urlparse import urljoin
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class ZippyshareCom(SimpleHoster):
__name__ = "ZippyshareCom"
__type__ = "hoster"
__version__ = "0.58"
__pattern__ = r'(?P<HOST>http://www\d{0,2}\.zippyshare\.com)/v(?:/|iew\.jsp.*key=)(?P<KEY>\d+)'
__description__ = """Zippyshare.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
NAME_PATTERN = r'var linkz =.*/(?P<N>.+)";'
SIZE_PATTERN = r'>Size:.+?">(?P<S>[\d.,]+) (?P<U>[\w^_]+)'
OFFLINE_PATTERN = r'>File does not exist on this server<'
COOKIES = [(".zippyshare.com", "ziplocale", "en")]
def setup(self):
self.multiDL = True
self.chunkLimit = -1
self.resumeDownload = True
def handleFree(self):
url = self.get_link()
self.download(url)
def getFileInfo(self):
info = super(ZippyshareCom, self).getFileInfo()
self.pyfile.name = info['name'] = unquote(info['name'])
return info
def get_checksum(self):
try:
a1, a2 = map(int, re.search(r'\(\'downloadB\'\).omg = (\d+)%(\d+)', self.html).groups)
c1, c2 = map(int, re.search(r'\(\'downloadB\'\).omg\) \* \((\d+)%(\d+)', self.html).groups)
b = (a1 % a2) * (c1 % c2)
except:
self.error(_("Unable to calculate checksum"))
else:
return b + 18
def get_link(self):
checksum = self.get_checksum()
p_url = path.join("d", self.info['KEY'], str(checksum), self.pyfile.name)
dl_link = urljoin(self.info['HOST'], p_url)
return dl_link
getInfo = create_getInfo(ZippyshareCom)
| agpl-3.0 | Python |
21b53578b90896c358f43339fccdce6df722682d | Remove depractated login view | chirilo/remo,tsmrachel/remo,johngian/remo,chirilo/remo,Mte90/remo,johngian/remo,akatsoulas/remo,flamingspaz/remo,abdullah2891/remo,tsmrachel/remo,abdullah2891/remo,johngian/remo,chirilo/remo,tsmrachel/remo,Mte90/remo,flamingspaz/remo,akatsoulas/remo,chirilo/remo,tsmrachel/remo,Mte90/remo,Mte90/remo,akatsoulas/remo,mozilla/remo,mozilla/remo,akatsoulas/remo,abdullah2891/remo,abdullah2891/remo,mozilla/remo,mozilla/remo,johngian/remo,flamingspaz/remo,flamingspaz/remo | remo/profiles/views.py | remo/profiles/views.py | from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.contrib.auth.views import login as django_login
from django.views.generic.simple import direct_to_template
from session_csrf import anonymous_csrf
from django.contrib.auth.models import User
@anonymous_csrf
def main(request):
return direct_to_template(request, template="main.html")
@login_required
def edit(request, display_name=None):
return direct_to_template(request, template="profiles_edit.html")
def list_profiles(request):
return render_to_response("profiles_people.html",
{'people' : User.objects.filter(is_active=True)}
)
def view_profile(request, display_name):
return direct_to_template(request, template="profiles_view.html")
def invite(request):
return direct_to_template(request, template="profiles_invite.html")
@login_required
def view_my_profile(request):
return view_profile(request,
display_name=request.user.userprofile.display_name)
@anonymous_csrf
def plainlogin(request, template_name):
return django_login(request, template_name=template_name)
| from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.contrib.auth.views import login as django_login
from django.views.generic.simple import direct_to_template
from session_csrf import anonymous_csrf
from django.contrib.auth.models import User
@anonymous_csrf
def main(request):
return direct_to_template(request, template="main.html")
@login_required
def edit(request, display_name=None):
return direct_to_template(request, template="profiles_edit.html")
def list_profiles(request):
return render_to_response("profiles_people.html",
{'people' : User.objects.filter(is_active=True)}
)
def view_profile(request, display_name):
return direct_to_template(request, template="profiles_view.html")
def invite(request):
return direct_to_template(request, template="profiles_invite.html")
@login_required
def view_my_profile(request):
return view_profile(request,
display_name=request.user.userprofile.display_name)
@anonymous_csrf
def plainlogin(request, template_name):
return django_login(request, template_name=template_name)
@anonymous_csrf
def login(request):
return direct_to_template(request, template='login.html')
| bsd-3-clause | Python |
d2bff2f612c6d9b32a6a08f3c76f808e3d70d122 | Fix a bug in MultipleDatabaseModelDocument | SlideAtlas/SlideAtlas-Server,SlideAtlas/SlideAtlas-Server,SlideAtlas/SlideAtlas-Server,SlideAtlas/SlideAtlas-Server | slideatlas/models/common/multiple_database_model_document.py | slideatlas/models/common/multiple_database_model_document.py | # coding=utf-8
from mongoengine.connection import get_db
from .model_document import ModelDocument, ModelQuerySet
################################################################################
__all__ = ('MultipleDatabaseModelDocument',)
################################################################################
class MultipleDatabaseModelQuerySet(ModelQuerySet):
def __init__(self, document, collection):
# Make a local copy of the Document class for this QuerySet, to prevent database
# info from being changed once a context manager exists
new_document = self.copy_class(document)
# '_db_alias' is possibly inherited from a base class of 'document',
# but we need to ensure that its set as a direct property of 'new_document'
new_document._db_alias = document._db_alias
new_document._collection = document._collection
super(MultipleDatabaseModelQuerySet, self).__init__(new_document, collection)
@staticmethod
def copy_class(cls):
# TODO: move this to a common utils
new_cls_dict = dict(cls.__dict__)
new_cls_dict['meta'] = new_cls_dict.pop('_meta')
return type(cls.__name__, cls.__bases__, new_cls_dict)
class MultipleDatabaseModelDocument(ModelDocument):
"""
An abstract class for documents that may reside in one of multiple databases.
"""
# TODO: prevent this class from being instantiated directly
meta = {
'abstract': True,
'allow_inheritance': False,
'db_alias': None, # do not override this in any subclasses
'queryset_class': MultipleDatabaseModelQuerySet,
}
# TODO: reattach _db_alias to the g object, to make changing it thread-safe
_db_alias = None
@property
def database(self):
# the import is required here to prevent circular imports
# TODO: remove this import statement
from .. import database
return database.Database.objects.with_id(self._db_alias)
@classmethod
def _get_db(cls):
# this function is only called on class instances
# instantiated objects have this method patched by self.switch_db
db_alias = cls._db_alias
if db_alias is None:
raise NotImplemented('A "%s" must be used inside a "Database" context (\'with\' statement).' % cls.__name__)
return get_db(db_alias)
def __init__(self, *args, **kwargs):
super(MultipleDatabaseModelDocument, self).__init__(*args, **kwargs)
# make the new database persistent to this instance
cls_db_alias = type(self)._db_alias
if cls_db_alias is None:
raise NotImplemented('"%s._db_alias" should be set before a new "%s" is instantiated.' % (type(self).__name___, type(self).__name___))
self._db_alias = cls_db_alias # copy the value from the class to the instance
self.switch_db(self._db_alias)
| # coding=utf-8
from mongoengine.connection import get_db
from .model_document import ModelDocument, ModelQuerySet
################################################################################
__all__ = ('MultipleDatabaseModelDocument',)
################################################################################
class MultipleDatabaseModelQuerySet(ModelQuerySet):
def __init__(self, document, collection):
# Make a local copy of the Document class for this QuerySet, to prevent database
# info from being changed once a context manager exists
new_document = self.copy_class(document)
# '_db_alias' is possibly inherited from a base class of 'document',
# but we need to ensure that its set as a direct property of 'new_document'
new_document._db_alias = document._db_alias
new_document._collection = document._collection
super(MultipleDatabaseModelQuerySet, self).__init__(new_document, collection)
@staticmethod
def copy_class(cls):
# TODO: move this to a common utils
return type(cls.__name__, cls.__bases__, dict(cls.__dict__))
class MultipleDatabaseModelDocument(ModelDocument):
"""
An abstract class for documents that may reside in one of multiple databases.
"""
# TODO: prevent this class from being instantiated directly
meta = {
'abstract': True,
'allow_inheritance': False,
'db_alias': None, # do not override this in any subclasses
'queryset_class': MultipleDatabaseModelQuerySet,
}
# TODO: reattach _db_alias to the g object, to make changing it thread-safe
_db_alias = None
@property
def database(self):
# the import is required here to prevent circular imports
# TODO: remove this import statement
from .. import database
return database.Database.objects.with_id(self._db_alias)
@classmethod
def _get_db(cls):
# this function is only called on class instances
# instantiated objects have this method patched by self.switch_db
db_alias = cls._db_alias
if db_alias is None:
raise NotImplemented('A "%s" must be used inside a "Database" context (\'with\' statement).' % cls.__name__)
return get_db(db_alias)
def __init__(self, *args, **kwargs):
super(MultipleDatabaseModelDocument, self).__init__(*args, **kwargs)
# make the new database persistent to this instance
cls_db_alias = type(self)._db_alias
if cls_db_alias is None:
raise NotImplemented('"%s._db_alias" should be set before a new "%s" is instantiated.' % (type(self).__name___, type(self).__name___))
self._db_alias = cls_db_alias # copy the value from the class to the instance
self.switch_db(self._db_alias)
| apache-2.0 | Python |
63c1a1553638aec8da380d41313d0d94b3244163 | update import | csdev/datacheck,csdev/datacheck | datacheck/__init__.py | datacheck/__init__.py | from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from datacheck.core import (validate, Validator, Type, List,
Required, Optional, Dict)
__all__ = [
'validate',
'Validator',
'Type',
'List',
'Required',
'Optional',
'Dict',
]
| from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from datacheck.core import validate, Type, List, Required, Optional, Dict
__all__ = [
'validate',
'Type',
'List',
'Required',
'Optional',
'Dict',
]
| mit | Python |
5a05c2fc0e5560463ade239492d5252db3f701be | set maturity to Beta | OCA/account-fiscal-rule,OCA/account-fiscal-rule | account_avatax/__manifest__.py | account_avatax/__manifest__.py | {
"name": "Taxes using Avalara Avatax API",
"version": "13.0.1.0.0",
"author": "Open Source Integrators, Fabrice Henrion, Odoo SA,"
" Odoo Community Association (OCA)",
"summary": "Automatic Tax application using the Avalara Avatax Service",
"license": "AGPL-3",
"category": "Accounting",
"depends": ["account", "sale_stock", "base_geolocalize"],
"data": [
"security/avalara_salestax_security.xml",
"security/ir.model.access.csv",
"data/avalara_salestax_data.xml",
"data/avalara_salestax_exemptions.xml",
"wizard/avalara_salestax_ping_view.xml",
"wizard/avalara_salestax_address_validate_view.xml",
"views/avalara_salestax_view.xml",
"views/partner_view.xml",
"views/product_view.xml",
"views/account_move_action.xml",
"views/account_move_view.xml",
"views/account_tax_view.xml",
"views/account_fiscal_position_view.xml",
],
"installable": True,
"application": True,
"external_dependencies": {"python": ["Avalara"]},
"development_status": "Beta",
}
| {
"name": "Taxes using Avalara Avatax API",
"version": "13.0.1.0.0",
"author": "Open Source Integrators, Fabrice Henrion, Odoo SA,"
" Odoo Community Association (OCA)",
"summary": "Automatic Tax application using the Avalara Avatax Service",
"license": "AGPL-3",
"category": "Accounting",
"depends": ["account", "sale_stock", "base_geolocalize"],
"data": [
"security/avalara_salestax_security.xml",
"security/ir.model.access.csv",
"data/avalara_salestax_data.xml",
"data/avalara_salestax_exemptions.xml",
"wizard/avalara_salestax_ping_view.xml",
"wizard/avalara_salestax_address_validate_view.xml",
"views/avalara_salestax_view.xml",
"views/partner_view.xml",
"views/product_view.xml",
"views/account_move_action.xml",
"views/account_move_view.xml",
"views/account_tax_view.xml",
"views/account_fiscal_position_view.xml",
],
"installable": True,
"application": True,
"external_dependencies": {"python": ["Avalara"]},
}
| agpl-3.0 | Python |
19183dd5dd50b29ed0ee63f506904d6f693cad21 | Allow the website to be accessed by any host on local server | tm-kn/farmers-api | farmers_api/config/settings/local.py | farmers_api/config/settings/local.py | from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'local'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
| from .base import *
DEBUG = True
SECRET_KEY = 'local'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
| bsd-2-clause | Python |
d5cfb00c7d70853fd6ba3c2f5091defce13afaa2 | Fix ps_calcs hedu dependency | DataViva/dataviva-scripts,DataViva/dataviva-scripts | scripts/hedu/_calc_rca.py | scripts/hedu/_calc_rca.py | import sys, os
import pandas as pd
import numpy as np
file_path = os.path.dirname(os.path.realpath(__file__))
ps_calcs_lib_path = os.path.abspath(os.path.join(file_path, "../../", "lib/ps_calcs"))
sys.path.insert(0, ps_calcs_lib_path)
import ps_calcs
def calc_rca(ybuc, year):
ybc = ybuc.groupby(level=["year", "bra_id", "course_hedu_id"]).sum()
ybc = ybc[["enrolled"]]
ybc = ybc.reset_index()
ybc = ybc.drop("year", axis=1)
rcas = ybc.pivot(index="bra_id", columns="course_hedu_id", values="enrolled")
rcas = ps_calcs.rca(rcas)
rcas = pd.DataFrame(rcas.stack(), columns=["enrolled_rca"])
rcas = rcas.replace(0, np.nan)
rcas = rcas.dropna(how="all")
rcas["year"] = int(year)
rcas = rcas.set_index("year", append=True)
rcas = rcas.swaplevel("year", "course_hedu_id")
rcas = rcas.swaplevel("year", "bra_id")
return rcas | import sys, os
import pandas as pd
import numpy as np
ps_calcs_lib_path = os.path.abspath(os.path.join(file_path, "../../", "lib/ps_calcs"))
sys.path.insert(0, ps_calcs_lib_path)
import ps_calcs
def calc_rca(ybuc, year):
ybc = ybuc.groupby(level=["year", "bra_id", "course_hedu_id"]).sum()
ybc = ybc[["enrolled"]]
ybc = ybc.reset_index()
ybc = ybc.drop("year", axis=1)
rcas = ybc.pivot(index="bra_id", columns="course_hedu_id", values="enrolled")
rcas = ps_calcs.rca(rcas)
rcas = pd.DataFrame(rcas.stack(), columns=["enrolled_rca"])
rcas = rcas.replace(0, np.nan)
rcas = rcas.dropna(how="all")
rcas["year"] = int(year)
rcas = rcas.set_index("year", append=True)
rcas = rcas.swaplevel("year", "course_hedu_id")
rcas = rcas.swaplevel("year", "bra_id")
return rcas | mit | Python |
0282c7ffc43c5330da27515a42a3392dfa20c57e | Add Universe Test | duggym122/conference-room-manager | room_calendar/tests.py | room_calendar/tests.py | from django.test import TestCase
# Create your tests here.
def test_answer():
assert 1 == 1
| from django.test import TestCase
# Create your tests here.
| agpl-3.0 | Python |
75b2a861f6585580f7244c4eb8d112a4548e32a5 | Add force_tuple argument | kashif/chainer,hvy/chainer,wkentaro/chainer,okuta/chainer,rezoo/chainer,keisuke-umezawa/chainer,niboshi/chainer,ronekko/chainer,okuta/chainer,keisuke-umezawa/chainer,ktnyt/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,jnishi/chainer,aonotas/chainer,kiyukuta/chainer,ktnyt/chainer,anaruse/chainer,ktnyt/chainer,chainer/chainer,niboshi/chainer,cupy/cupy,hvy/chainer,chainer/chainer,ktnyt/chainer,hvy/chainer,hvy/chainer,ysekky/chainer,wkentaro/chainer,keisuke-umezawa/chainer,tkerola/chainer,wkentaro/chainer,chainer/chainer,cupy/cupy,pfnet/chainer,cupy/cupy,delta2323/chainer,niboshi/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,niboshi/chainer,cupy/cupy,okuta/chainer,benob/chainer,kikusu/chainer,kikusu/chainer,benob/chainer,chainer/chainer | chainer/functions/array/transpose_sequence.py | chainer/functions/array/transpose_sequence.py | import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _transpose(xs):
xp = cuda.get_array_module(*xs)
lengths = numpy.zeros(len(xs[0]), dtype='i')
for i, x in enumerate(xs):
lengths[0:len(x)] = i + 1
dtype = xs[0].dtype
unit = xs[0].shape[1:]
outs = tuple(xp.empty((l,) + unit, dtype=dtype) for l in lengths)
for i, x in enumerate(xs):
for p, xi in enumerate(x):
outs[p][i] = xi
return outs
class TransposeSequence(function.Function):
def check_type_forward(self, xs_type):
type_check.expect(xs_type.size() >= 1)
for p, n in zip(xs_type, xs_type[1:]):
type_check.expect(
p.shape[0] >= n.shape[0],
p.shape[1:] == n.shape[1:],
)
def forward(self, xs):
return _transpose(xs)
def backward(self, xs, gs):
return _transpose(gs)
def transpose_sequence(xs, force_tuple=True):
ys = TransposeSequence()(*xs)
if force_tuple and not isinstance(ys, tuple):
ys = (ys,)
return ys
| import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
def _transpose(xs):
xp = cuda.get_array_module(*xs)
lengths = numpy.zeros(len(xs[0]), dtype='i')
for i, x in enumerate(xs):
lengths[0:len(x)] = i + 1
dtype = xs[0].dtype
unit = xs[0].shape[1:]
outs = tuple(xp.empty((l,) + unit, dtype=dtype) for l in lengths)
for i, x in enumerate(xs):
for p, xi in enumerate(x):
outs[p][i] = xi
return outs
class TransposeSequence(function.Function):
def check_type_forward(self, xs_type):
type_check.expect(xs_type.size() >= 1)
for p, n in zip(xs_type, xs_type[1:]):
type_check.expect(
p.shape[0] >= n.shape[0],
p.shape[1:] == n.shape[1:],
)
def forward(self, xs):
return _transpose(xs)
def backward(self, xs, gs):
return _transpose(gs)
def transpose_sequence(*xs):
return TransposeSequence()(*xs)
| mit | Python |
51134c8136bf799a5d6745a53f6f4b97d72d3e57 | Update API_VERSION = '48.0' | django-salesforce/django-salesforce,hynekcer/django-salesforce,hynekcer/django-salesforce,django-salesforce/django-salesforce,hynekcer/django-salesforce,django-salesforce/django-salesforce | salesforce/__init__.py | salesforce/__init__.py | # django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
A database backend for the Django ORM.
Allows access to all Salesforce objects accessible via the SOQL API.
"""
import logging
from salesforce.dbapi.exceptions import ( # NOQA pylint:disable=unused-import,useless-import-alias
IntegrityError as IntegrityError, DatabaseError as DatabaseError, SalesforceError as SalesforceError,
)
__version__ = "0.9"
log = logging.getLogger(__name__)
# Default version of Force.com API.
# It can be set by setattr() to any lower or higher supported value.
# (The highest version can be set by "salesforce.utils.set_highest_api_version()".
# It is useful for development, a constant version is for production.)
# Example for settings.py:
# >>> import salesforce
# >>> setattr(salesforce, 'API_VERSION', '37.0')
API_VERSION = '48.0' # Spring '20
| # django-salesforce
#
# by Phil Christensen
# (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org)
# See LICENSE.md for details
#
"""
A database backend for the Django ORM.
Allows access to all Salesforce objects accessible via the SOQL API.
"""
import logging
from salesforce.dbapi.exceptions import ( # NOQA pylint:disable=unused-import,useless-import-alias
IntegrityError as IntegrityError, DatabaseError as DatabaseError, SalesforceError as SalesforceError,
)
__version__ = "0.9"
log = logging.getLogger(__name__)
# Default version of Force.com API.
# It can be set by setattr() to any lower or higher supported value.
# (The highest version can be set by "salesforce.utils.set_highest_api_version()".
# It is useful for development, a constant version is for production.)
# Example for settings.py:
# >>> import salesforce
# >>> setattr(salesforce, 'API_VERSION', '37.0')
API_VERSION = '47.0' # Winter '20
| mit | Python |
932854849299b19264eb6ba47ac65e87a604a5eb | add import of missing AuthError | fredsod/NIPAP,SpriteLink/NIPAP,SpriteLink/NIPAP,bbaja42/NIPAP,SoundGoof/NIPAP,garberg/NIPAP,garberg/NIPAP,garberg/NIPAP,fredsod/NIPAP,bbaja42/NIPAP,SoundGoof/NIPAP,SoundGoof/NIPAP,bbaja42/NIPAP,fredsod/NIPAP,SpriteLink/NIPAP,bbaja42/NIPAP,ettrig/NIPAP,SoundGoof/NIPAP,SpriteLink/NIPAP,SpriteLink/NIPAP,fredsod/NIPAP,SoundGoof/NIPAP,garberg/NIPAP,fredsod/NIPAP,ettrig/NIPAP,ettrig/NIPAP,SoundGoof/NIPAP,ettrig/NIPAP,ettrig/NIPAP,bbaja42/NIPAP,garberg/NIPAP,garberg/NIPAP,fredsod/NIPAP,bbaja42/NIPAP,ettrig/NIPAP,SpriteLink/NIPAP | nipap-www/nipapwww/controllers/auth.py | nipap-www/nipapwww/controllers/auth.py | import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from nipapwww.lib.base import BaseController, render
from nipap.authlib import AuthFactory, AuthError
from nipap.nipapconfig import NipapConfig
from ConfigParser import NoOptionError
log = logging.getLogger(__name__)
class AuthController(BaseController):
""" Deals with authentication.
"""
requires_auth = False
def login(self):
""" Show login form.
"""
if request.method != 'POST':
cfg = NipapConfig()
try:
c.welcome_message = cfg.get('www', 'welcome_message')
except NoOptionError:
pass
return render('login.html')
# Verify username and password.
auth_fact = AuthFactory()
auth = auth_fact.get_auth(request.params.get('username'), request.params.get('password'), 'nipap')
try:
if not auth.authenticate():
c.error = 'Invalid username or password'
return render('login.html')
except AuthError as exc:
c.error = 'Authentication error'
return render('login.html')
# Mark user as logged in
session['user'] = auth.username
session['full_name'] = auth.full_name
session['readonly'] = auth.readonly
session['current_vrfs'] = {}
session.save()
# Send user back to the page he originally wanted to get to
if session.get('path_before_login'):
redirect(session['path_before_login'])
else:
# if previous target is unknown just send the user to a welcome page
redirect(url(controller='prefix', action='list'))
def logout(self):
""" Log out the user and display a confirmation message.
"""
# remove session
session.delete()
return render('login.html')
| import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from nipapwww.lib.base import BaseController, render
from nipap.authlib import AuthFactory
from nipap.nipapconfig import NipapConfig
from ConfigParser import NoOptionError
log = logging.getLogger(__name__)
class AuthController(BaseController):
""" Deals with authentication.
"""
requires_auth = False
def login(self):
""" Show login form.
"""
if request.method != 'POST':
cfg = NipapConfig()
try:
c.welcome_message = cfg.get('www', 'welcome_message')
except NoOptionError:
pass
return render('login.html')
# Verify username and password.
auth_fact = AuthFactory()
auth = auth_fact.get_auth(request.params.get('username'), request.params.get('password'), 'nipap')
try:
if not auth.authenticate():
c.error = 'Invalid username or password'
return render('login.html')
except AuthError as exc:
c.error = 'Authentication error'
return render('login.html')
# Mark user as logged in
session['user'] = auth.username
session['full_name'] = auth.full_name
session['readonly'] = auth.readonly
session['current_vrfs'] = {}
session.save()
# Send user back to the page he originally wanted to get to
if session.get('path_before_login'):
redirect(session['path_before_login'])
else:
# if previous target is unknown just send the user to a welcome page
redirect(url(controller='prefix', action='list'))
def logout(self):
""" Log out the user and display a confirmation message.
"""
# remove session
session.delete()
return render('login.html')
| mit | Python |
346582df1eda50a7f120871c9b010971997480fe | Add full version number in nxdrive package | arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,IsaacYangSLA/nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,ssdi-drive/nuxeo-drive,ssdi-drive/nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,rsoumyassdi/nuxeo-drive,arameshkumar/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/base-nuxeo-drive,DirkHoffmann/nuxeo-drive,IsaacYangSLA/nuxeo-drive,rsoumyassdi/nuxeo-drive,loopingz/nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/base-nuxeo-drive,loopingz/nuxeo-drive,IsaacYangSLA/nuxeo-drive,IsaacYangSLA/nuxeo-drive,ssdi-drive/nuxeo-drive,arameshkumar/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,DirkHoffmann/nuxeo-drive,DirkHoffmann/nuxeo-drive | nuxeo-drive-client/nxdrive/__init__.py | nuxeo-drive-client/nxdrive/__init__.py | __version__ = '1-dev'
FULL_VERSION = '1.2.0-' + __version__
| __version__ = '1-dev'
| lgpl-2.1 | Python |
b04975af2e1f3fe1d21c18fac239da338cb85027 | Improve __init__ | Diaoul/subliminal,h3llrais3r/subliminal,ravselj/subliminal,t4lwh/subliminal,kbkailashbagaria/subliminal,hpsbranco/subliminal,juanmhidalgo/subliminal,ofir123/subliminal,goll/subliminal,fernandog/subliminal,nvbn/subliminal,Elettronik/subliminal,bogdal/subliminal,neo1691/subliminal,ratoaq2/subliminal,getzze/subliminal,oxan/subliminal,pums974/subliminal,SickRage/subliminal | subliminal/__init__.py | subliminal/__init__.py | # -*- coding: utf-8 -*-
#
# Subliminal - Subtitles, faster than your thoughts
# Copyright (c) 2011 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of Subliminal.
#
# Subliminal is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ['Subliminal']
from core import Subliminal
| # -*- coding: utf-8 -*-
#
# Subliminal - Subtitles, faster than your thoughts
# Copyright (c) 2011 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of Subliminal.
#
# Subliminal is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from infos import *
from languages import *
from utils import *
from exceptions import *
from videos import *
from tasks import *
from subtitles import *
from core import *
from plugins import *
| mit | Python |
b3d9c38828f4929f30c7d71122910b8eca65aa6a | Add save roi to MultiKymographBuilder.py | hadim/fiji_tools,hadim/fiji_scripts,hadim/fiji_tools,hadim/fiji_scripts,hadim/fiji_scripts | plugins/Scripts/Plugins/MultiKymographBuilder.py | plugins/Scripts/Plugins/MultiKymographBuilder.py | # @Context context
# @Dataset dataset
# @ImageJ ij
# @LogService log
# @DatasetIOService io
import os
from ij.plugin.frame import RoiManager
import fiji.plugin.kymographbuilder.KymographFactory as KFactory
from java.io import File
rm = RoiManager.getInstance()
counter = 0
parent_folder = File(dataset.getSource()).getParent()
"""
for roi in rm.getRoisAsArray():
if roi.isLine():
counter += 1
title = "Kymograph" + "_" + str(counter).zfill(3) + "_" + roi.getName()
kfactory = KFactory(context, dataset, roi)
kfactory.build()
kymo = kfactory.getKymograph()
ij.ui().show(title, kymo)
io.save(kymo, os.path.join(parent_folder, title) + ".tif")"""
roi_path = os.path.join(parent_folder, os.path.splitext(dataset.getName())[0] + ".zip")
rm.runCommand("Save", roi_path)
log.info("MultiKymographBuilder Finished. " + str(counter) + " ROIs processed") | # @Context context
# @Dataset dataset
# @ImageJ ij
# @LogService log
# @DatasetIOService io
import os
from ij.plugin.frame import RoiManager
import fiji.plugin.kymographbuilder.KymographFactory as KFactory
from java.io import File
rm = RoiManager.getInstance()
counter = 0
parent_folder = File(dataset.getSource()).getParent()
for roi in rm.getRoisAsArray():
if roi.isLine():
counter += 1
title = "Kymograph" + "_" + str(counter).zfill(3) + "_" + roi.getName()
kfactory = KFactory(context, dataset, roi)
kfactory.build()
kymo = kfactory.getKymograph()
ij.ui().show(title, kymo)
io.save(kymo, os.path.join(parent_folder, title) + ".tif")
log.info("MultiKymographBuilder Finished. " + str(counter) + " ROIs processed") | bsd-3-clause | Python |
71e68394322d34ab777007317f97d4663a7ee40f | Prepare for v0.3.1 release | harikvpy/django-popupcrud,harikvpy/django-popupcrud,harikvpy/django-popupcrud | popupcrud/__init__.py | popupcrud/__init__.py | __version__ = "0.3.1"
| __version__ = "0.3.0"
| bsd-3-clause | Python |
27265c5c290a3ddb5148e71c292ba71a0deea461 | Complete naive sol by nested loops | bowen0701/algorithms_data_structures | lc0121_best_time_to_buy_and_sell_stock.py | lc0121_best_time_to_buy_and_sell_stock.py | """Leetcode 121. Best Time to Buy and Sell Stock
Easy
URL: https://leetcode.com/problems/best-time-to-buy-and-sell-stock/
Say you have an array for which the ith element is the price of a given stock on day i.
If you were only permitted to complete at most one transaction
(i.e., buy one and sell one share of the stock),
design an algorithm to find the maximum profit.
Note that you cannot sell a stock before you buy one.
Example 1:
Input: [7,1,5,3,6,4]
Output: 5
Explanation: Buy on day 2 (price = 1) and sell on day 5 (price = 6), profit = 6-1 = 5.
Not 7-1 = 6, as selling price needs to be larger than buying price.
Example 2:
Input: [7,6,4,3,1]
Output: 0
Explanation: In this case, no transaction is done, i.e. max profit = 0.
"""
class SolutionNaive(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
Time complexity: O(n^2), where n is the number of prices.
Space complexity: O(1).
"""
max_profit = 0
n = len(prices)
for i in range(n - 1):
for j in range(i + 1, n):
if prices[j] - prices[i] > max_profit:
max_profit = prices[j] - prices[i]
return max_profit
def main():
# Ans: 5
prices = [7,1,5,3,6,4]
print SolutionNaive().maxProfit(prices)
# Ans: 0
prices = [7,6,4,3,1]
print SolutionNaive().maxProfit(prices)
if __name__ == '__main__':
main()
| """Leetcode 121. Best Time to Buy and Sell Stock
Easy
URL: https://leetcode.com/problems/best-time-to-buy-and-sell-stock/
Say you have an array for which the ith element is the price of a given stock on day i.
If you were only permitted to complete at most one transaction
(i.e., buy one and sell one share of the stock),
design an algorithm to find the maximum profit.
Note that you cannot sell a stock before you buy one.
Example 1:
Input: [7,1,5,3,6,4]
Output: 5
Explanation: Buy on day 2 (price = 1) and sell on day 5 (price = 6), profit = 6-1 = 5.
Not 7-1 = 6, as selling price needs to be larger than buying price.
Example 2:
Input: [7,6,4,3,1]
Output: 0
Explanation: In this case, no transaction is done, i.e. max profit = 0.
"""
class SolutionNaive(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
9ddb83788322facb4aa41de2f8997f416359581e | Refactor for improved clarity | Jitsusama/lets-do-dns | lets_do_dns/acme_dns_auth/authenticate.py | lets_do_dns/acme_dns_auth/authenticate.py | """letsencrypt's certbot Authentication Logic."""
from lets_do_dns.acme_dns_auth.record import Record
from lets_do_dns.acme_dns_auth.command import run
from lets_do_dns.acme_dns_auth.time_delay import sleep
class Authenticate(object):
"""Handle letsencrypt DNS certificate identity authentication."""
def __init__(self, environment):
self._env = environment
self._record = self._init_record()
def perform(self):
"""Execute the authentication logic."""
if self._in_authentication_hook_stage:
self._create_record()
self._print_record_id()
self._delay_finish()
elif self._in_cleanup_hook_stage:
self._delete_record()
self._run_post_cmd()
return 0
@property
def _in_authentication_hook_stage(self):
return self._env.record_id is None
@property
def _in_cleanup_hook_stage(self):
return self._env.record_id is not None
def _delete_record(self):
self._record.id = self._env.record_id
self._record.delete()
def _run_post_cmd(self):
if self._env.post_cmd:
run(self._env.post_cmd)
def _create_record(self):
self._record.create(self._env.validation_key)
def _print_record_id(self):
self._record.printer()
def _init_record(self):
hostname = self._parse_hostname()
record = Record(self._env.api_key, self._env.domain, hostname)
return record
def _parse_hostname(self):
domain_suffix = '.' + self._env.domain
domain_start = self._env.fqdn.rfind(domain_suffix)
cert_hostname = self._env.fqdn[0:domain_start]
auth_hostname = '_acme-challenge.%s' % cert_hostname
return auth_hostname
@staticmethod
def _delay_finish():
sleep(2)
| """letsencrypt's certbot Authentication Logic."""
from lets_do_dns.acme_dns_auth.record import Record
from lets_do_dns.acme_dns_auth.command import run
from lets_do_dns.acme_dns_auth.time_delay import sleep
class Authenticate(object):
"""Handle letsencrypt DNS certificate identity authentication."""
def __init__(self, environment):
self._env = environment
self._record = self._init_record()
def perform(self):
"""Execute the authentication logic."""
if self._in_post_hook_phase:
self._delete_record()
self._run_post_cmd()
else:
self._create_record()
self._print_record_id()
self._delay_finish()
return 0
@property
def _in_post_hook_phase(self):
return self._env.record_id is not None
def _delete_record(self):
self._record.id = self._env.record_id
self._record.delete()
def _run_post_cmd(self):
if self._env.post_cmd:
run(self._env.post_cmd)
def _create_record(self):
self._record.create(self._env.validation_key)
def _print_record_id(self):
self._record.printer()
def _init_record(self):
hostname = self._parse_hostname()
record = Record(self._env.api_key, self._env.domain, hostname)
return record
def _parse_hostname(self):
domain_suffix = '.' + self._env.domain
domain_start = self._env.fqdn.rfind(domain_suffix)
cert_hostname = self._env.fqdn[0:domain_start]
auth_hostname = '_acme-challenge.%s' % cert_hostname
return auth_hostname
@staticmethod
def _delay_finish():
sleep(2)
| apache-2.0 | Python |
72796a97a24c512cf43fd9559d6e6b47d2f72e72 | Allow address to be null | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot | preferences/models.py | preferences/models.py | import uuid
from django.db import models
from django.contrib.auth.models import User
from opencivicdata.models.people_orgs import Person
class Preferences(models.Model):
user = models.OneToOneField(User, related_name='preferences')
address = models.CharField(max_length=100, blank=True, null=True)
lat = models.FloatField(null=True, blank=True)
lon = models.FloatField(null=True, blank=True)
apikey = models.UUIDField(default=uuid.uuid4)
class PersonFollow(models.Model):
user = models.ForeignKey(User, related_name='person_follows')
person = models.ForeignKey(Person, related_name='follows')
class TopicFollow(models.Model):
user = models.ForeignKey(User, related_name='topic_follows')
topic = models.CharField(max_length=100)
class LocationFollow(models.Model):
user = models.ForeignKey(User, related_name='location_follows')
location = models.CharField(max_length=100)
| import uuid
from django.db import models
from django.contrib.auth.models import User
from opencivicdata.models.people_orgs import Person
from django.contrib.auth.models import User
class Preferences(models.Model):
user = models.OneToOneField(User, related_name='preferences')
address = models.CharField(max_length=100, blank=True)
lat = models.FloatField(null=True, blank=True)
lon = models.FloatField(null=True, blank=True)
apikey = models.UUIDField(default=uuid.uuid4)
class PersonFollow(models.Model):
user = models.ForeignKey(User, related_name='person_follows')
person = models.ForeignKey(Person, related_name='follows')
class TopicFollow(models.Model):
user = models.ForeignKey(User, related_name='topic_follows')
topic = models.CharField(max_length=100)
class LocationFollow(models.Model):
user = models.ForeignKey(User, related_name='location_follows')
location = models.CharField(max_length=100)
| mit | Python |
58e838d2fed690a8e5929fc8f82a492fbb908030 | Remove .stats command | Uname-a/knife_scraper,Uname-a/knife_scraper,Uname-a/knife_scraper | willie/modules/info.py | willie/modules/info.py | """
info.py - Willie Information Module
Copyright 2008, Sean B. Palmer, inamidst.com
Licensed under the Eiffel Forum License 2.
http://willie.dftba.net
"""
def doc(willie, trigger):
"""Shows a command's documentation, and possibly an example."""
name = trigger.group(2)
name = name.lower()
if willie.doc.has_key(name):
willie.reply(willie.doc[name][0])
if willie.doc[name][1]:
willie.say('e.g. ' + willie.doc[name][1])
doc.rule = ('$nick', '(?i)(help|doc) +([A-Za-z]+)(?:\?+)?$')
doc.example = '$nickname: doc tell?'
doc.priority = 'low'
def help(willie, trigger):
"""Get help for a command."""
if not input.group(2):
willie.reply('Say .help <command> (for example .help c) to get help for a command, or .commands for a list of commands.')
else:
doc(willie, trigger)
help.commands = ['help']
help.example = '.help c'
def commands(willie, trigger):
"""Return a list of Willie's commands"""
names = ', '.join(sorted(willie.doc.iterkeys()))
willie.reply("I am sending you a private message of all my commands!")
willie.msg(trigger.nick, 'Commands I recognise: ' + names + '.')
willie.msg(trigger.nick, ("For help, do '%s: help example?' where example is the " +
"name of the command you want help for.") % willie.nick)
commands.commands = ['commands']
commands.priority = 'low'
def help(willie, trigger):
response = (
'Hi, I\'m a bot. Say ".commands" to me in private for a list ' +
'of my commands, or see http://willie.dftba.net for more ' +
'general details. My owner is %s.'
) % willie.config.owner
willie.reply(response)
help.rule = ('$nick', r'(?i)help(?:[?!]+)?$')
help.priority = 'low'
if __name__ == '__main__':
print __doc__.strip()
| """
info.py - Willie Information Module
Copyright 2008, Sean B. Palmer, inamidst.com
Licensed under the Eiffel Forum License 2.
http://willie.dftba.net
"""
def doc(willie, trigger):
"""Shows a command's documentation, and possibly an example."""
name = trigger.group(2)
name = name.lower()
if willie.doc.has_key(name):
willie.reply(willie.doc[name][0])
if willie.doc[name][1]:
willie.say('e.g. ' + willie.doc[name][1])
doc.rule = ('$nick', '(?i)(help|doc) +([A-Za-z]+)(?:\?+)?$')
doc.example = '$nickname: doc tell?'
doc.priority = 'low'
def help(willie, trigger):
"""Get help for a command."""
if not input.group(2):
willie.reply('Say .help <command> (for example .help c) to get help for a command, or .commands for a list of commands.')
else:
doc(willie, trigger)
help.commands = ['help']
help.example = '.help c'
def commands(willie, trigger):
"""Return a list of Willie's commands"""
names = ', '.join(sorted(willie.doc.iterkeys()))
willie.reply("I am sending you a private message of all my commands!")
willie.msg(trigger.nick, 'Commands I recognise: ' + names + '.')
willie.msg(trigger.nick, ("For help, do '%s: help example?' where example is the " +
"name of the command you want help for.") % willie.nick)
commands.commands = ['commands']
commands.priority = 'low'
def help(willie, trigger):
response = (
'Hi, I\'m a bot. Say ".commands" to me in private for a list ' +
'of my commands, or see http://willie.dftba.net for more ' +
'general details. My owner is %s.'
) % willie.config.owner
willie.reply(response)
help.rule = ('$nick', r'(?i)help(?:[?!]+)?$')
help.priority = 'low'
def stats(willie, trigger):
"""Show information on command usage patterns."""
commands = {}
users = {}
channels = {}
ignore = set(['f_note', 'startup', 'message', 'noteuri'])
for (name, user), count in willie.stats.iteritems():
if name in ignore: continue
if not user: continue
if not user.startswith('#'):
try: users[user] += count
except KeyError: users[user] = count
else:
try: commands[name] += count
except KeyError: commands[name] = count
try: channels[user] += count
except KeyError: channels[user] = count
comrank = sorted([(b, a) for (a, b) in commands.iteritems()], reverse=True)
userank = sorted([(b, a) for (a, b) in users.iteritems()], reverse=True)
charank = sorted([(b, a) for (a, b) in channels.iteritems()], reverse=True)
# most heavily used commands
creply = 'most used commands: '
for count, command in comrank[:10]:
creply += '%s (%s), ' % (command, count)
willie.say(creply.rstrip(', '))
# most heavy users
reply = 'power users: '
for count, user in userank[:10]:
reply += '%s (%s), ' % (user, count)
willie.say(reply.rstrip(', '))
# most heavy channels
chreply = 'power channels: '
for count, channel in charank[:3]:
chreply += '%s (%s), ' % (channel, count)
willie.say(chreply.rstrip(', '))
stats.commands = ['stats']
stats.priority = 'low'
if __name__ == '__main__':
print __doc__.strip()
| mit | Python |
5952c27619c340dd212cbf4023919441c5827765 | Make HBox and VBox helper functions | ipython/ipython,ipython/ipython | IPython/html/widgets/widget_container.py | IPython/html/widgets/widget_container.py | """Container class.
Represents a container that can be used to group other widgets.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from .widget import DOMWidget
from IPython.utils.traitlets import Unicode, Tuple, TraitError, Int, CaselessStrEnum
from IPython.utils.warn import DeprecatedClass
class Container(DOMWidget):
_view_name = Unicode('ContainerView', sync=True)
# Child widgets in the container.
# Using a tuple here to force reassignment to update the list.
# When a proper notifying-list trait exists, that is what should be used here.
children = Tuple(sync=True, allow_none=False)
def __init__(self, children = (), **kwargs):
kwargs['children'] = children
super(Container, self).__init__(**kwargs)
self.on_displayed(Container._fire_children_displayed)
def _fire_children_displayed(self):
for child in self.children:
child._handle_displayed()
class Popup(Container):
_view_name = Unicode('PopupView', sync=True)
description = Unicode(sync=True)
button_text = Unicode(sync=True)
class FlexContainer(Container):
_view_name = Unicode('FlexContainerView', sync=True)
orientation = CaselessStrEnum(values=['vertical', 'horizontal'], default_value='vertical', sync=True)
flex = Int(0, sync=True, help="""Specify the flexible-ness of the model.""")
def _flex_changed(self, name, old, new):
new = min(max(0, new), 2)
if self.flex != new:
self.flex = new
_locations = ['start', 'center', 'end', 'baseline', 'stretch']
pack = CaselessStrEnum(
values=_locations,
default_value='start', allow_none=False, sync=True)
align = CaselessStrEnum(
values=_locations,
default_value='start', allow_none=False, sync=True)
def VBox(*pargs, **kwargs):
kwargs['orientation'] = 'vertical'
return FlexContainer(*pargs, **kwargs)
def HBox(*pargs, **kwargs):
kwargs['orientation'] = 'horizontal'
return FlexContainer(*pargs, **kwargs)
ContainerWidget = DeprecatedClass(Container, 'ContainerWidget')
PopupWidget = DeprecatedClass(Popup, 'PopupWidget')
| """Container class.
Represents a container that can be used to group other widgets.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from .widget import DOMWidget
from IPython.utils.traitlets import Unicode, Tuple, TraitError, Int, CaselessStrEnum
from IPython.utils.warn import DeprecatedClass
class Container(DOMWidget):
_view_name = Unicode('ContainerView', sync=True)
# Child widgets in the container.
# Using a tuple here to force reassignment to update the list.
# When a proper notifying-list trait exists, that is what should be used here.
children = Tuple(sync=True, allow_none=False)
def __init__(self, children = (), **kwargs):
kwargs['children'] = children
super(Container, self).__init__(**kwargs)
self.on_displayed(Container._fire_children_displayed)
def _fire_children_displayed(self):
for child in self.children:
child._handle_displayed()
class Popup(Container):
_view_name = Unicode('PopupView', sync=True)
description = Unicode(sync=True)
button_text = Unicode(sync=True)
class FlexContainer(Container):
_view_name = Unicode('FlexContainerView', sync=True)
orientation = Unicode('vertical', sync=True)
flex = Int(0, sync=True, help="""Specify the flexible-ness of the model.""")
def _flex_changed(self, name, old, new):
new = min(max(0, new), 2)
if self.flex != new:
self.flex = new
_locations = ['start', 'center', 'end', 'baseline', 'stretch']
pack = CaselessStrEnum(
values=_locations,
default_value='start', allow_none=False, sync=True)
align = CaselessStrEnum(
values=_locations,
default_value='start', allow_none=False, sync=True)
class VBox(FlexContainer):
_view_name = Unicode('VBoxContainerView', sync=True)
class HBox(FlexContainer):
_view_name = Unicode('HBoxContainerView', sync=True)
ContainerWidget = DeprecatedClass(Container, 'ContainerWidget')
PopupWidget = DeprecatedClass(Popup, 'PopupWidget')
| bsd-3-clause | Python |
269bf7eec9a8e5ea721bce08c29793f0a2999d15 | Add unauthorized handler to flask | selahssea/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core | src/ggrc/login/__init__.py | src/ggrc/login/__init__.py | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""ggrc.login
Provides basic login and session management using Flask-Login with various
backends
"""
import json
import re
import flask_login
from flask_login import login_url
from flask import request
from flask import redirect
from ggrc.extensions import get_extension_module_for
def get_login_module():
return get_extension_module_for('LOGIN_MANAGER', False)
def user_loader(user_id):
from .common import find_user_by_id
return find_user_by_id(user_id)
def init_app(app):
"""Initialize Flask_Login LoginManager with our app"""
login_module = get_login_module()
if not login_module:
return
login_manager = flask_login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
login_manager.login_message = None
# login_manager.session_protection = 'strong'
# pylint: disable=unused-variable
@app.login_manager.unauthorized_handler
def unauthorized():
"""Called when the user tries to access an endpoint guarded with
login_required but they are not authorized.
Endpoints like /dashboard, /program/1, etc. redirect the user to the
/login page.
Endpoints like /api /query, /import, etc. resolve with 401 UNAUTHORIZED
and a simple json error object.
"""
if (re.match(r'^(\/api|\/query|\/search)', request.path) or
request.headers.get('X-Requested-By') == 'gGRC'):
return json.dumps({'error': 'unauthorized'}), 401
return redirect(login_url('/login', request.url))
app.route('/login')(login_module.login)
app.route('/logout')(login_module.logout)
app.login_manager.user_loader(user_loader)
if hasattr(login_module, 'before_request'):
app.before_request(login_module.before_request)
# app.context_processor(login_module.session_context)
def get_current_user():
if get_login_module():
return flask_login.current_user
else:
return None
def get_current_user_id():
user = get_current_user()
if user is not None and not user.is_anonymous():
return user.id
else:
return None
def login_required(func):
if get_login_module():
return flask_login.login_required(func)
else:
return func
def is_creator():
"""Check if the current user has global role Creator."""
current_user = get_current_user()
return (hasattr(current_user, 'system_wide_role') and
current_user.system_wide_role == "Creator")
| # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""ggrc.login
Provides basic login and session management using Flask-Login with various
backends
"""
import flask_login
from ggrc.extensions import get_extension_module_for
def get_login_module():
return get_extension_module_for('LOGIN_MANAGER', False)
def user_loader(user_id):
from .common import find_user_by_id
return find_user_by_id(user_id)
def init_app(app):
login_module = get_login_module()
if not login_module:
return
login_manager = flask_login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
login_manager.login_message = None
# login_manager.session_protection = 'strong'
app.route('/login')(login_module.login)
app.route('/logout')(login_module.logout)
app.login_manager.user_loader(user_loader)
if hasattr(login_module, 'before_request'):
app.before_request(login_module.before_request)
# app.context_processor(login_module.session_context)
def get_current_user():
if get_login_module():
return flask_login.current_user
else:
return None
def get_current_user_id():
user = get_current_user()
if user is not None and not user.is_anonymous():
return user.id
else:
return None
def login_required(func):
if get_login_module():
return flask_login.login_required(func)
else:
return func
def is_creator():
"""Check if the current user has global role Creator."""
current_user = get_current_user()
return (hasattr(current_user, 'system_wide_role') and
current_user.system_wide_role == "Creator")
| apache-2.0 | Python |
2f55210be8651a80f0c054e4f92cc3fcefaea12c | Add check for gaps in roll tables | whonut/Random-Table-Roller,whonut/Random-Table-Roller,whonut/Random-Table-Roller | table_loader.py | table_loader.py | import csv
from itertools import chain
def load_table(filepath, headers=False):
'''Return a dict representing a roll table loaded from filepath.
Loads a roll table from the CSV file at filepath into a dict whose keys
are ranges containing the range of rolls (min, max) associated with the
event specified in that key's value (a string describing the event).
If headers is True, then it is assumed that the first row of the file
contains some descriptive headers and the row is ignored. It defaults
to False.
The first column of the CSV should be the numbers or ranges of numbers
to roll in order to 'bring about' the associated event in the same row
of the second column. Ranges should be specified with dashes e.g.
a roll of 1 to 10 inclusive would be written as '1-10'. None of the
intervals should overlap. If there is a gap in the table i.e. a roll
within the bounds of the table which is not associated with an event, an
IOError is raised.'''
table = {}
with open(filepath, newline='') as table_file:
table_reader = csv.reader(table_file)
for row in table_reader:
if headers and table_reader.line_num == 1:
# Ignore the first line if headers is True
continue
roll = row[0]
event = row[1]
if row[0].find("-") != -1:
# A range of rolls has been specified for this table item.
min_roll = int(roll[:roll.find("-")])
max_roll = int(roll[roll.find("-")+1:])
table[range(min_roll, max_roll+1)] = event
else:
# A single roll has been specified for this table item.
roll_num = int(roll)
table[range(roll_num, roll_num+1)] = event
# Check if there is a gap in the table by comparing its keys to a range.
rolls_in_table = chain(table.keys())
max_in_table = max(rolls_in_table)
min_in_table = min(rolls_in_table)
gap = list(rolls_in_table) != list(range(min_in_table, max_in_table+1))
if gap:
raise IOError('There is a gap in the table "{}"'.format(filepath))
return table
| import csv
def load_table(filepath, headers=False):
'''Return a dict representing a roll table loaded from filepath.
Loads a roll table from the CSV file at filepath into a dict whose keys
are ranges containing the range of rolls (min, max) associated with the
event specified in that key's value (a string describing the event).
If headers is True, then it is assumed that the first row of the file
contains some descriptive headers and the row is ignored. It defaults
to False.
The first column of the CSV should be the numbers or ranges of numbers
to roll in order to 'bring about' the associated event in the same row
of the second column. Ranges should be specified with dashes e.g.
a roll of 1 to 10 inclusive would be written as '1-10'. None of the
intervals should overlap.'''
table = {}
with open(filepath, newline='') as table_file:
table_reader = csv.reader(table_file)
for row in table_reader:
if headers and table_reader.line_num == 1:
# Ignore the first line if headers is True
continue
roll = row[0]
event = row[1]
if row[0].find("-") != -1:
# A range of rolls has been specified for this table item.
min_roll = int(roll[:roll.find("-")])
max_roll = int(roll[roll.find("-")+1:])
table[range(min_roll, max_roll+1)] = event
else:
# A single roll has been specified for this table item.
roll_num = int(roll)
table[range(roll_num, roll_num+1)] = event
return table
| mit | Python |
c8fcda66041f15278e09a77ad2f4674f876a2755 | Update version file. | rubasov/opensub-utils,rubasov/opensub-utils | src/lib/opensub/version.py | src/lib/opensub/version.py | """
Version of the opensub-utils distribution.
* Keep _one_ version number for the whole distribution.
Don't start versioning the scripts, modules, etc separately.
* Use Semantic Versioning v2 as a guide: http://semver.org/
* Don't repeat the version in the commit message.
* Tag the commit changing this file.
"""
# DO NOT FORGET TO TAG:
# python lib/opensub/version.py | xargs -r -I {} git tag -a {} -m {}
__version_info__ = ("0", "9", "5")
__version__ = ".".join(__version_info__)
if __name__ == "__main__":
print(__version__)
| """
Version of the opensub-utils distribution.
* Keep _one_ version number for the whole distribution.
Don't start versioning the scripts, modules, etc separately.
* Use Semantic Versioning v2 as a guide: http://semver.org/
* Don't repeat the version in the commit message.
* Tag the commit changing this file.
"""
# DO NOT FORGET TO TAG:
# python lib/opensub/version.py | xargs -r -I {} git tag -a {} -m {}
__version_info__ = ("0", "9", "4")
__version__ = ".".join(__version_info__)
if __name__ == "__main__":
print(__version__)
| bsd-2-clause | Python |
f873c1b54257d4d19ea05bc776de7147c8b3914d | Update django_cache_url.py | ghickman/django-cache-url | django_cache_url.py | django_cache_url.py | # -*- coding: utf-8 -*-
import os

# ``urlparse`` is a top-level module on Python 2 and lives in
# ``urllib.parse`` on Python 3; alias it so the rest of the module
# works unchanged on either interpreter.
try:
    import urlparse
except ImportError:
    from urllib import parse as urlparse

# Register cache schemes in URLs so urlparse() splits the part after
# ``//`` into a network location (host:port).  Every key of CACHE_TYPES
# must be listed here; the original registered 'memcache', which is not
# a CACHE_TYPES key, so 'memcached://' / 'pymemcached://' URLs did not
# get their netloc parsed on Python 2.
for _scheme in ('db', 'dummy', 'file', 'locmem', 'memcached', 'pymemcached'):
    urlparse.uses_netloc.append(_scheme)

# Environment variable consulted for the cache URL.
DEFAULT_ENV = 'CACHE_URL'

# Maps URL schemes to Django cache backend dotted paths.
CACHE_TYPES = {
    'db': 'django.core.cache.backends.db.DatabaseCache',
    'dummy': 'django.core.cache.backends.dummy.DummyCache',
    'file': 'django.core.cache.backends.filebased.FileBasedCache',
    'locmem': 'django.core.cache.backends.locmem.LocMemCache',
    'memcached': 'django.core.cache.backends.memcached.PyLibMCCache',
    'pymemcached': 'django.core.cache.backends.memcached.MemcachedCache'
}


def config(env=DEFAULT_ENV, default='locmem://'):
    """Return a configured CACHES dictionary read from the environment.

    :param env: name of the environment variable holding the cache URL.
    :param default: URL used when the variable is unset.
    :return: a Django cache configuration dict; empty when the variable
        is set to an empty string.
    """
    s = os.environ.get(env, default)
    if s:
        return parse(s)
    return {}


def parse(url):
    """Parse a cache URL into a Django cache configuration dict.

    ``file`` URLs use the path as LOCATION; every other scheme uses the
    netloc (host:port) as LOCATION and the path without its leading
    slash as PREFIX.  Unknown schemes raise KeyError (propagated to the
    caller deliberately).
    """
    config = {}
    url = urlparse.urlparse(url)
    config['BACKEND'] = CACHE_TYPES[url.scheme]
    if url.scheme == 'file':
        config['LOCATION'] = url.path
        return config
    config['LOCATION'] = url.netloc
    config['PREFIX'] = url.path[1:]
    return config
| # -*- coding: utf-8 -*-
import os
import urlparse
# Register cache schemes in URLs.
urlparse.uses_netloc.append('db')
urlparse.uses_netloc.append('dummy')
urlparse.uses_netloc.append('file')
urlparse.uses_netloc.append('locmem')
urlparse.uses_netloc.append('memcache')
DEFAULT_ENV = 'CACHE_URL'
CACHE_TYPES = {
'db': 'django.core.cache.backends.db.DatabaseCache',
'dummy': 'django.core.cache.backends.dummy.DummyCache',
'file': 'django.core.cache.backends.filebased.FileBasedCache',
'locmem': 'django.core.cache.backends.locmem.LocMemCache',
'memcached': 'django.core.cache.backends.memcached.PyLibMCCache'
}
def config(env=DEFAULT_ENV, default='locmem://'):
"""Returns configured CACHES dictionary from CACHE_URL"""
config = {}
s = os.environ.get(env, default)
if s:
config = parse(s)
return config
def parse(url):
"""Parses a cache URL."""
config = {}
url = urlparse.urlparse(url)
# Update with environment configuration.
config['BACKEND'] = CACHE_TYPES[url.scheme]
if url.scheme == 'file':
config['LOCATION'] = url.path
return config
config['LOCATION'] = url.netloc
config['PREFIX'] = url.path[1:]
return config
| mit | Python |
f2e0ffd246ef2f86407bdb9ed8dcc2a7182f532a | Update script to patch disassembly for CFU instructions. | google/CFU-Playground,google/CFU-Playground,google/CFU-Playground,google/CFU-Playground | scripts/fix_cfu_dis.py | scripts/fix_cfu_dis.py | #!/usr/bin/env python3
# Copyright 2021 The CFU-Playground Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import re
regnames = ['x0', 'ra', 'sp', 'gp',
'tp', 't0', 't1', 't2',
's0', 's1', 'a0', 'a1',
'a2', 'a3', 'a4', 'a5',
'a6', 'a7', 's2', 's3',
's4', 's5', 's6', 's7',
's8', 's9', 's10', 's11',
't3', 't4', 't5', 't6']
lines = sys.stdin.readlines()
for i in range(len(lines)):
lin = lines[i]
lin = lin.replace('\r','')
lin = lin.replace('\n','')
#
# Replace the last word with 'cfu[funct7,funct3] rd, rs1, rs2'
#
# 40000148: 0094280b 0x94280b
#
m = re.match(r"^( ?\w+:\s+\w+\s+)(0x[0-9A-Fa-f]+)$", lin)
if m:
bits = int(m.group(2), 0)
funct7 = (bits >> 25) & 127
rs2 = (bits >> 20) & 31
rs1 = (bits >> 15) & 31
funct3 = (bits >> 12) & 7
rd = (bits >> 7) & 31
op = f'cfu[{funct7},{funct3}] {regnames[rd]}, {regnames[rs1]}, {regnames[rs2]}'
print(m.group(1) + op)
else:
n = re.match(r"^( ?\w+:\s+\w+\s+)(\w+)(\s+)(\S+.*)$", lin)
if n:
# Add two spaces so regular instruction lines line up with cfu[] lines
print(n.group(1) + n.group(2) + n.group(3) + " " + n.group(4))
else:
print(lin)
| #!/usr/bin/env python3
# Copyright 2021 The CFU-Playground Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import re
regnames = ['x0', 'ra', 'sp', 'gp',
'tp', 't0', 't1', 't2',
's0', 's1', 'a0', 'a1',
'a2', 'a3', 'a4', 'a5',
'a6', 'a7', 's2', 's3',
's4', 's5', 's6', 's7',
's8', 's9', 's10', 's11',
't3', 't4', 't5', 't6']
lines = sys.stdin.readlines()
for i in range(len(lines)):
lin = lines[i]
lin = lin.replace('\r','')
lin = lin.replace('\n','')
#
# Replace the last word with 'cfu[funct7,funct3] rd, rs1, rs2'
#
# 40000148: 0094280b 0x94280b
#
m = re.match(r"^(\w+:\s+\w+\s+)(0x[0-9A-Fa-f]+)$", lin)
if m:
bits = int(m.group(2), 0)
funct7 = (bits >> 25) & 127
rs2 = (bits >> 20) & 31
rs1 = (bits >> 15) & 31
funct3 = (bits >> 12) & 7
rd = (bits >> 7) & 31
op = f'cfu[{funct7},{funct3}] {regnames[rd]}, {regnames[rs1]}, {regnames[rs2]}'
print(m.group(1) + op)
else:
n = re.match(r"^(\w+:\s+\w+\s+)(\w+)(\s+)(\S+.*)$", lin)
if n:
# Add two spaces so regular instruction lines line up with cfu[] lines
print(n.group(1) + n.group(2) + n.group(3) + " " + n.group(4))
else:
print(lin)
| apache-2.0 | Python |
394a760d6b507edd008459af6062683e684614ef | Update image.py | mapclient-plugins/hearttransform | mapclientplugins/hearttransformstep/utils/image.py | mapclientplugins/hearttransformstep/utils/image.py | '''
Created on May 21, 2015
@author: hsorby
'''
import pydicom
import os
import numpy as np
def extractImageCorners(directory, filename):
    '''
    Extract the 3D patient-space corner positions of a DICOM image.

    :param directory: directory containing the image file.
    :param filename: name of the DICOM file to read.
    :return: corner coordinates as a list of four [x, y, z] lists,
        ordered [bottom-left, bottom-right, top-left, top-right].
    '''
    ds = pydicom.read_file(os.path.join(directory, filename))
    # In-plane pixel size in mm; PixelSpacing is [row spacing, column spacing].
    pixel_spacing = ds.PixelSpacing
    delta_i = float(pixel_spacing[0])
    delta_j = float(pixel_spacing[1])
    # Direction cosines of the image rows (first three) and columns (last three).
    orient = [float(iop) for iop in ds.ImageOrientationPatient]
    # Patient-space position of the first transmitted voxel.
    pos = [float(ipp) for ipp in ds.ImagePositionPatient]
    rows = ds.Rows
    columns = ds.Columns
    orient_1 = np.array(orient[:3])
    orient_2 = np.array(orient[3:])
    # Shift the origin by half a pixel, from the voxel centre to its corner.
    # NOTE(review): delta_i scales both direction vectors here; for
    # non-square pixels this presumably should use delta_j for the second
    # direction -- confirm against the DICOM image-plane module spec.
    pos = np.array(pos) - delta_i * (0.5 * orient_1 + 0.5 * orient_2)
    # Homogeneous (4x4) transform from pixel indices (i, j) to patient space.
    A = np.array([orient[0] * delta_i, orient[3] * delta_j, 0, pos[0],
                  orient[1] * delta_i, orient[4] * delta_j, 0, pos[1],
                  orient[2] * delta_i, orient[5] * delta_j, 0, pos[2],
                  0, 0, 0, 1]).reshape(4, 4)
    # Corner pixel indices in homogeneous form.
    # NOTE(review): Rows is paired with the first index and Columns with the
    # second -- verify this matches the intended row/column convention.
    b_tl = np.array([0, 0, 0, 1])
    b_tr = np.array([rows, 0, 0, 1])
    b_bl = np.array([0, columns, 0, 1])
    b_br = np.array([rows, columns, 0, 1])
    tl = np.dot(A, b_tl)
    tr = np.dot(A, b_tr)
    bl = np.dot(A, b_bl)
    br = np.dot(A, b_br)
    # Drop the homogeneous coordinate and return plain Python lists.
    return [bl[:3].tolist(), br[:3].tolist(), tl[:3].tolist(), tr[:3].tolist()]
| '''
Created on May 21, 2015
@author: hsorby
'''
# import dicom
import pydicom
import os
import numpy as np
def extractImageCorners(directory, filename):
'''
Extract the image corners from an image that is assumed to be
a DICOM image.
Corners are returned as:
[bl, br, tl, tr]
'''
# ds = dicom.read_file(os.path.join(directory, filename))
ds = pydicom.read_file(os.path.join(directory, filename))
pixel_spacing = ds.PixelSpacing
delta_i = float(pixel_spacing[0])
delta_j = float(pixel_spacing[1])
orient = [float(iop) for iop in ds.ImageOrientationPatient]
pos = [float(ipp) for ipp in ds.ImagePositionPatient]
rows = ds.Rows
columns = ds.Columns
orient_1 = np.array(orient[:3])
orient_2 = np.array(orient[3:])
pos = np.array(pos) - delta_i * (0.5 * orient_1 + 0.5 * orient_2)
A = np.array([orient[0] * delta_i, orient[3] * delta_j, 0, pos[0],
orient[1] * delta_i, orient[4] * delta_j, 0, pos[1],
orient[2] * delta_i, orient[5] * delta_j, 0, pos[2],
0, 0, 0, 1]).reshape(4, 4)
b_tl = np.array([0, 0, 0, 1])
b_tr = np.array([rows, 0, 0, 1])
b_bl = np.array([0, columns, 0, 1])
b_br = np.array([rows, columns, 0, 1])
tl = np.dot(A, b_tl)
tr = np.dot(A, b_tr)
bl = np.dot(A, b_bl)
br = np.dot(A, b_br)
return [bl[:3].tolist(), br[:3].tolist(), tl[:3].tolist(), tr[:3].tolist()]
| apache-2.0 | Python |
382fff28c025858a8b273898afebc5ce4ae60a1e | convert services.yaml to bytes before dumping to yaml (#924) | somic/paasta,somic/paasta,Yelp/paasta,Yelp/paasta | paasta_tools/generate_services_yaml.py | paasta_tools/generate_services_yaml.py | #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
import yaml
from paasta_tools.marathon_tools import get_all_namespaces
from paasta_tools.utils import atomic_file_write
from paasta_tools.utils import paasta_print
YOCALHOST = b'169.254.255.254'
def generate_configuration():
    """Build the services mapping from all registered namespaces.

    Returns a dict keyed by the UTF-8 encoded service name, each value
    holding the yocalhost address and the service's proxy port.
    Namespaces without a proxy port are skipped.
    """
    entries = {}
    for name, data in get_all_namespaces():
        port = data.get('proxy_port')
        if port is not None:
            entries[name.encode('utf-8')] = {
                b'host': YOCALHOST,
                b'port': int(port),
            }
    return entries
def main():
    """Entry point: write the services yaml to the path given in argv.

    Exits with status 1 and prints a usage message when not called with
    exactly one argument (the output path).
    """
    if len(sys.argv) != 2:
        # Bug fix: the original passed the bare format string, so the
        # message printed a literal "%s"; substitute the program name.
        paasta_print("Usage: %s <output_path>" % sys.argv[0], file=sys.stderr)
        sys.exit(1)
    output_path = sys.argv[1]
    configuration = generate_configuration()
    # Atomic write so readers never observe a partially written file.
    with atomic_file_write(output_path) as fp:
        yaml.dump(
            configuration,
            fp,
            indent=2,
            explicit_start=True,
            default_flow_style=False,
            allow_unicode=False,
        )
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
import yaml
from paasta_tools.marathon_tools import get_all_namespaces
from paasta_tools.utils import atomic_file_write
from paasta_tools.utils import paasta_print
YOCALHOST = '169.254.255.254'
def generate_configuration():
service_data = get_all_namespaces()
config = {}
for (name, data) in service_data:
proxy_port = data.get('proxy_port')
if proxy_port is None:
continue
config[name] = {
'host': YOCALHOST,
'port': int(proxy_port),
}
return config
def main():
if len(sys.argv) != 2:
paasta_print("Usage: %s <output_path>", file=sys.stderr)
sys.exit(1)
output_path = sys.argv[1]
configuration = generate_configuration()
with atomic_file_write(output_path) as fp:
yaml.dump(configuration,
fp,
indent=2,
explicit_start=True,
default_flow_style=False)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
7346665052eac9dbf7578728b85e4cbd16727249 | Add docstrings to core.mixins module (#23, #27) | a5kin/hecate,a5kin/hecate | xentica/core/mixins.py | xentica/core/mixins.py | """
The collection of mixins to be used in core classes.
Would be interesting only if you are planning to hack into Xentica
core functionality.
"""
import inspect
import xentica.core.base
from xentica.core.exceptions import XenticaException
class BscaDetectorMixin:
    """
    Add functionality to detect the BSCA instance holding the current class.

    Detection works by walking live interpreter frames, so all members
    are for private (framework-internal) use only.
    """
    @property
    def _bsca(self):
        """
        Get the BSCA instance holding the current class.

        The call stack is scanned from the current frame upwards and the
        first BSCA instance found among a frame's locals is returned.

        :raises XenticaException: when no BSCA instance exists in any
            frame of the current call stack.
        """
        frame = inspect.currentframe()
        while frame is not None:
            for l in frame.f_locals.values():
                # Skip descriptor objects: probing them with isinstance
                # could trigger their __get__ side effects.
                if hasattr(l, "__get__"):
                    continue
                if isinstance(l, xentica.core.base.BSCA):
                    return l
            frame = frame.f_back
        raise XenticaException("BSCA not detected")
    @property
    def _holder_frame(self):
        """
        Get the frame of the class instance holding the current class.

        Scanning starts three frames above this property and walks
        upwards past every frame whose ``self`` is an instance of
        ``self.base_class``.

        NOTE(review): relies on the consuming class defining a
        ``base_class`` attribute; it is not declared in this mixin --
        confirm at integration points.
        """
        # TODO: detect base class by scanning inheritance tree:
        # inspect.getclasstree(inspect.getmro(type(self)))
        frame = inspect.currentframe().f_back.f_back.f_back
        while isinstance(frame.f_locals.get('self', ''), self.base_class):
            frame = frame.f_back
        return frame
    @property
    def _holder(self):
        """Get the instance owning the frame found by :meth:`_holder_frame`."""
        return self._holder_frame.f_locals['self']
| import inspect
import xentica.core.base
from xentica.core.exceptions import XenticaException
class BscaDetectorMixin:
@property
def _bsca(self):
frame = inspect.currentframe()
while frame is not None:
for l in frame.f_locals.values():
if hasattr(l, "__get__"):
continue
if isinstance(l, xentica.core.base.BSCA):
return l
frame = frame.f_back
raise XenticaException("BSCA not detected")
@property
def _holder_frame(self):
# TODO: detect base class by scanning inheritance tree:
# inspect.getclasstree(inspect.getmro(type(self)))
frame = inspect.currentframe().f_back.f_back.f_back
while isinstance(frame.f_locals.get('self', ''), self.base_class):
frame = frame.f_back
return frame
@property
def _holder(self):
return self._holder_frame.f_locals['self']
| mit | Python |
03764d38c381efdda2566589953c5430e2a4962d | Implement definition for dev env | beeedy/selfupdate | selfupdate/__init__.py | selfupdate/__init__.py | #!/usr/bin/env python3
import inspect
import git
import os
__version__ = "0.1.0"
__author__ = "Broderick Carlin (beeedy)"
__email__= "broderick.carlin@gmail.com"
__license__= "MIT"
def __get_calling_file():
    '''
    Walk the python call stack to find the script that originally
    called into this file.

    Returns a tuple ``(folder, filename)`` for the first frame whose
    source file differs from this module's own file.  Raises
    LookupError when every frame belongs to this file, i.e. the module
    was not called by an external script.
    '''
    stack = inspect.stack()
    this_file = stack[0][1]
    for frame_info in stack[1:]:
        caller = frame_info[1]
        if caller != this_file:
            # Bug fix: the original concatenated os.getcwd() with the
            # frame's filename, which corrupts already-absolute frame
            # paths; abspath() joins the cwd only for relative paths.
            return os.path.split(os.path.abspath(caller))
    raise LookupError("Module was not called by an external script.")
def __find_repo():
    '''
    Figure out whether the calling python script is inside a git repo.

    Walks up the directory tree from the calling script's folder and
    returns the normalized path of the first directory that is a valid
    git repository.  Raises LookupError when the filesystem root is
    reached without finding one.

    NOTE(review): comparing against "/" assumes a POSIX filesystem
    root; behaviour on Windows drive roots should be verified.
    '''
    file_path, file_name = __get_calling_file()
    # walk up the file tree looking for a valid git repo, stop when we hit the base
    while True:
        if os.path.samefile(os.path.normpath(file_path), os.path.normpath("/")):
            raise LookupError("Calling script is not in a valid git repo")
        try:
            # git.Repo() raises for paths that are not inside a repo;
            # success means file_path is (in) a repository.
            git.Repo(file_path)
            return os.path.normpath(file_path)
        except git.exc.InvalidGitRepositoryError:
            file_path = os.path.normpath(file_path + "/..")
def is_dev_env(directory, suppress_errors=False):
    '''
    Report whether *directory* is a selfupdate development environment.

    A development environment is marked by an empty file named
    '.devenv' in the root directory of the repo.  The marker must also
    be listed in the repo's '.gitignore'; otherwise an EnvironmentError
    is raised, unless suppress_errors is True, in which case False is
    returned instead.  Suppressing the error is discouraged: an
    un-ignored '.devenv' would propagate to remote repositories and
    silently turn them into development environments, which can cause
    repos relying on selfupdate to stop updating without direct user
    input.  You have been warned!
    '''
    directory = os.path.normpath(directory)
    marker_path = directory + "/.devenv"
    gitignore_path = directory + "/.gitignore"
    if not os.path.isfile(marker_path):
        return False
    # The marker exists -- confirm that .gitignore mentions it.
    if os.path.isfile(gitignore_path):
        with open(gitignore_path, 'r') as gitignore:
            if any(".devenv" in line for line in gitignore):
                return True
    if not suppress_errors:
        raise EnvironmentError("'.devenv' found but not included in '.gitignore'.")
    return False
def test():
    # Smoke check: locate the enclosing repo and validate its dev-env marker.
    repo_path = __find_repo()
    is_dev_env(repo_path)
| #!/usr/bin/env python3
import inspect
import git
import os
__version__ = "0.1.0"
__author__ = "Broderick Carlin (beeedy)"
__email__= "broderick.carlin@gmail.com"
__license__= "MIT"
def __get_calling_file():
'''
This function will go through the python call stack and find
the script that originally called into this file. Returns a
tuple where the first element is a string that is the folder
containing the calling script, and the second element is the
name of the file name of the calling script. If a file can not
be found for some reason a LookupError is raised to indicate
that an external script could not be found.
'''
stack = inspect.stack()
this_file = stack[0][1]
for i in range(1, len(stack)):
if stack[i][1] != this_file:
complete_path = os.path.normpath(os.getcwd() + "/" + stack[i][1])
return os.path.split(complete_path)
raise LookupError("Module was not called by an external script.")
def __find_repo():
'''
This function will go figure out if the calling python script
is inside a git repo, and if so, return a string that is the
location of the base of the git repo. If the script is not, a
LookupError is raised to indicate it could not find the repo
'''
file_path, file_name = __get_calling_file()
while True:
if os.path.samefile(os.path.normpath(file_path), os.path.normpath("/")):
raise LookupError("Calling script is not in a valid git repo")
try:
git.Repo(file_path)
return file_path
except git.exc.InvalidGitRepositoryError:
file_path = os.path.normpath(file_path + "/..")
def test():
print(__find_repo())
| mit | Python |
ed83bb7f5423aef20b7e8201c117bced12583365 | Refactor zerver.views.upload. | isht3/zulip,aakash-cr7/zulip,AZtheAsian/zulip,andersk/zulip,dawran6/zulip,krtkmj/zulip,hackerkid/zulip,tommyip/zulip,tommyip/zulip,showell/zulip,samatdav/zulip,PhilSk/zulip,amanharitsh123/zulip,timabbott/zulip,SmartPeople/zulip,souravbadami/zulip,zulip/zulip,samatdav/zulip,samatdav/zulip,ryanbackman/zulip,arpith/zulip,brainwane/zulip,TigorC/zulip,rishig/zulip,umkay/zulip,tommyip/zulip,TigorC/zulip,Juanvulcano/zulip,showell/zulip,peguin40/zulip,hackerkid/zulip,rishig/zulip,eeshangarg/zulip,christi3k/zulip,paxapy/zulip,JPJPJPOPOP/zulip,zacps/zulip,grave-w-grave/zulip,showell/zulip,Jianchun1/zulip,SmartPeople/zulip,umkay/zulip,umkay/zulip,zacps/zulip,shubhamdhama/zulip,aakash-cr7/zulip,sup95/zulip,reyha/zulip,rht/zulip,reyha/zulip,jrowan/zulip,jphilipsen05/zulip,mohsenSy/zulip,AZtheAsian/zulip,zulip/zulip,brockwhittaker/zulip,j831/zulip,mahim97/zulip,jrowan/zulip,niftynei/zulip,sharmaeklavya2/zulip,kou/zulip,PhilSk/zulip,paxapy/zulip,paxapy/zulip,Jianchun1/zulip,umkay/zulip,brainwane/zulip,synicalsyntax/zulip,dhcrzf/zulip,christi3k/zulip,dawran6/zulip,punchagan/zulip,j831/zulip,punchagan/zulip,aakash-cr7/zulip,christi3k/zulip,shubhamdhama/zulip,eeshangarg/zulip,amyliu345/zulip,ryanbackman/zulip,tommyip/zulip,niftynei/zulip,blaze225/zulip,dhcrzf/zulip,ryanbackman/zulip,Diptanshu8/zulip,dhcrzf/zulip,jphilipsen05/zulip,rht/zulip,synicalsyntax/zulip,hackerkid/zulip,KingxBanana/zulip,punchagan/zulip,showell/zulip,rht/zulip,timabbott/zulip,blaze225/zulip,rishig/zulip,PhilSk/zulip,KingxBanana/zulip,showell/zulip,joyhchen/zulip,cosmicAsymmetry/zulip,jrowan/zulip,brainwane/zulip,sonali0901/zulip,Galexrt/zulip,verma-varsha/zulip,sup95/zulip,sup95/zulip,showell/zulip,paxapy/zulip,amanharitsh123/zulip,souravbadami/zulip,dattatreya303/zulip,mahim97/zulip,zulip/zulip,mohsenSy/zulip,mahim97/zulip,kou/zulip,ahmadassaf/zulip,souravbadami/zulip,vabs22/zulip,vaidap/zulip,cosmicAsymmetry/zulip,rishig/zulip,ahmadass
af/zulip,aakash-cr7/zulip,vabs22/zulip,Galexrt/zulip,joyhchen/zulip,showell/zulip,Diptanshu8/zulip,zacps/zulip,jackrzhang/zulip,mohsenSy/zulip,peguin40/zulip,PhilSk/zulip,vikas-parashar/zulip,hackerkid/zulip,ahmadassaf/zulip,shubhamdhama/zulip,grave-w-grave/zulip,calvinleenyc/zulip,Juanvulcano/zulip,krtkmj/zulip,isht3/zulip,jackrzhang/zulip,TigorC/zulip,jrowan/zulip,dhcrzf/zulip,blaze225/zulip,verma-varsha/zulip,dawran6/zulip,souravbadami/zulip,eeshangarg/zulip,dawran6/zulip,shubhamdhama/zulip,jackrzhang/zulip,amanharitsh123/zulip,mahim97/zulip,JPJPJPOPOP/zulip,verma-varsha/zulip,ryanbackman/zulip,vabs22/zulip,Juanvulcano/zulip,reyha/zulip,tommyip/zulip,brockwhittaker/zulip,eeshangarg/zulip,Galexrt/zulip,brockwhittaker/zulip,reyha/zulip,sup95/zulip,AZtheAsian/zulip,ahmadassaf/zulip,christi3k/zulip,AZtheAsian/zulip,jphilipsen05/zulip,Galexrt/zulip,souravbadami/zulip,vabs22/zulip,andersk/zulip,kou/zulip,krtkmj/zulip,timabbott/zulip,sharmaeklavya2/zulip,christi3k/zulip,punchagan/zulip,niftynei/zulip,umkay/zulip,PhilSk/zulip,susansls/zulip,dattatreya303/zulip,brockwhittaker/zulip,andersk/zulip,souravbadami/zulip,AZtheAsian/zulip,eeshangarg/zulip,sonali0901/zulip,krtkmj/zulip,jphilipsen05/zulip,susansls/zulip,Diptanshu8/zulip,niftynei/zulip,synicalsyntax/zulip,dawran6/zulip,JPJPJPOPOP/zulip,sonali0901/zulip,ryanbackman/zulip,rishig/zulip,hackerkid/zulip,sonali0901/zulip,vikas-parashar/zulip,Diptanshu8/zulip,j831/zulip,dattatreya303/zulip,peguin40/zulip,punchagan/zulip,amanharitsh123/zulip,blaze225/zulip,jackrzhang/zulip,arpith/zulip,arpith/zulip,KingxBanana/zulip,zulip/zulip,andersk/zulip,sharmaeklavya2/zulip,zulip/zulip,jainayush975/zulip,Juanvulcano/zulip,krtkmj/zulip,vaidap/zulip,Galexrt/zulip,SmartPeople/zulip,KingxBanana/zulip,vabs22/zulip,j831/zulip,andersk/zulip,SmartPeople/zulip,JPJPJPOPOP/zulip,brainwane/zulip,synicalsyntax/zulip,Jianchun1/zulip,punchagan/zulip,rht/zulip,TigorC/zulip,zacps/zulip,vabs22/zulip,dattatreya303/zulip,rht/zulip,jrowan/zulip,vikas-paras
har/zulip,brainwane/zulip,zulip/zulip,peguin40/zulip,vaidap/zulip,isht3/zulip,mohsenSy/zulip,synicalsyntax/zulip,AZtheAsian/zulip,niftynei/zulip,amanharitsh123/zulip,sonali0901/zulip,calvinleenyc/zulip,calvinleenyc/zulip,punchagan/zulip,sonali0901/zulip,kou/zulip,amyliu345/zulip,amyliu345/zulip,eeshangarg/zulip,zulip/zulip,jainayush975/zulip,jrowan/zulip,amyliu345/zulip,JPJPJPOPOP/zulip,arpith/zulip,calvinleenyc/zulip,brainwane/zulip,brockwhittaker/zulip,sharmaeklavya2/zulip,j831/zulip,isht3/zulip,sharmaeklavya2/zulip,aakash-cr7/zulip,timabbott/zulip,vaidap/zulip,ahmadassaf/zulip,blaze225/zulip,jainayush975/zulip,shubhamdhama/zulip,jainayush975/zulip,vikas-parashar/zulip,zacps/zulip,jackrzhang/zulip,verma-varsha/zulip,PhilSk/zulip,arpith/zulip,cosmicAsymmetry/zulip,SmartPeople/zulip,niftynei/zulip,brockwhittaker/zulip,synicalsyntax/zulip,jainayush975/zulip,cosmicAsymmetry/zulip,calvinleenyc/zulip,shubhamdhama/zulip,samatdav/zulip,Jianchun1/zulip,zacps/zulip,andersk/zulip,blaze225/zulip,rishig/zulip,susansls/zulip,verma-varsha/zulip,brainwane/zulip,mahim97/zulip,peguin40/zulip,timabbott/zulip,shubhamdhama/zulip,tommyip/zulip,grave-w-grave/zulip,jainayush975/zulip,andersk/zulip,paxapy/zulip,amanharitsh123/zulip,dattatreya303/zulip,kou/zulip,krtkmj/zulip,ahmadassaf/zulip,KingxBanana/zulip,vikas-parashar/zulip,Jianchun1/zulip,dawran6/zulip,amyliu345/zulip,vaidap/zulip,Juanvulcano/zulip,SmartPeople/zulip,amyliu345/zulip,hackerkid/zulip,mohsenSy/zulip,kou/zulip,grave-w-grave/zulip,peguin40/zulip,sup95/zulip,j831/zulip,cosmicAsymmetry/zulip,TigorC/zulip,tommyip/zulip,rht/zulip,joyhchen/zulip,Galexrt/zulip,dhcrzf/zulip,grave-w-grave/zulip,samatdav/zulip,synicalsyntax/zulip,isht3/zulip,susansls/zulip,jackrzhang/zulip,vaidap/zulip,isht3/zulip,sharmaeklavya2/zulip,umkay/zulip,TigorC/zulip,sup95/zulip,rishig/zulip,jphilipsen05/zulip,Galexrt/zulip,ryanbackman/zulip,cosmicAsymmetry/zulip,dhcrzf/zulip,calvinleenyc/zulip,joyhchen/zulip,timabbott/zulip,eeshangarg/zulip,mahim97/zulip
,kou/zulip,vikas-parashar/zulip,verma-varsha/zulip,reyha/zulip,krtkmj/zulip,joyhchen/zulip,mohsenSy/zulip,Juanvulcano/zulip,dhcrzf/zulip,umkay/zulip,timabbott/zulip,Jianchun1/zulip,christi3k/zulip,aakash-cr7/zulip,rht/zulip,dattatreya303/zulip,samatdav/zulip,Diptanshu8/zulip,paxapy/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,jphilipsen05/zulip,joyhchen/zulip,reyha/zulip,susansls/zulip,jackrzhang/zulip,KingxBanana/zulip,grave-w-grave/zulip,arpith/zulip,hackerkid/zulip,susansls/zulip,Diptanshu8/zulip | zerver/views/upload.py | zerver/views/upload.py | from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse, HttpResponseForbidden
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from zerver.decorator import authenticated_json_post_view, zulip_login_required
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.upload import upload_message_image_through_web_client, \
get_signed_upload_url, get_realm_for_filename
from zerver.lib.validator import check_bool
from zerver.models import UserProfile
from django.conf import settings
def upload_file_backend(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    """Handle an upload of exactly one file and return its URI as JSON."""
    num_files = len(request.FILES)
    if num_files == 0:
        return json_error(_("You must specify a file to upload"))
    if num_files != 1:
        return json_error(_("You may only upload one file at a time"))
    (user_file,) = request.FILES.values()
    max_size = settings.MAX_FILE_UPLOAD_SIZE * 1024 * 1024
    if user_file._get_size() > max_size:
        return json_error(_("File Upload is larger than allowed limit"))
    uri = upload_message_image_through_web_client(request, user_file, user_profile)
    return json_success({'uri': uri})
def serve_s3(request, user_profile, realm_id_str, filename, redir):
    # type: (HttpRequest, UserProfile, str, str, bool) -> HttpResponse
    """Authorize access to an S3 upload and serve a signed URL for it.

    Redirects to the signed URL when ``redir`` is true, otherwise
    returns the URL in a JSON body.  Responds 404 when the file does
    not exist and 403 when the requesting user's realm may not access
    it.
    """
    url_path = "%s/%s" % (realm_id_str, filename)
    if realm_id_str == "unk":
        # Legacy links don't embed the realm id; recover it from storage.
        realm_id = get_realm_for_filename(url_path)
        if realm_id is None:
            # File does not exist
            return json_error(_("That file does not exist."), status=404)
    else:
        # Bug fix: realm_id was previously unbound on this path, raising
        # NameError for URLs that carry an explicit realm id.
        realm_id = realm_id_str
    # Internal users can access all uploads so we can receive attachments in cross-realm messages
    if user_profile.realm.id == int(realm_id) or user_profile.realm.domain == 'zulip.com':
        uri = get_signed_upload_url(url_path)
        if redir:
            return redirect(uri)
        else:
            return json_success({'uri': uri})
    else:
        return HttpResponseForbidden()
def serve_file_backend(request, user_profile, realm_id_str, filename, redir):
    # type: (HttpRequest, UserProfile, str, str, bool) -> HttpResponse
    """Serve an uploaded file, delegating to S3 when uploads are remote.

    When LOCAL_UPLOADS_DIR is configured, local uploads are served
    directly by nginx and this view should never be reached, so any
    request that lands here is forbidden.
    """
    if settings.LOCAL_UPLOADS_DIR is None:
        return serve_s3(request, user_profile, realm_id_str, filename, redir)
    return HttpResponseForbidden()  # Should have been served by nginx
@authenticated_json_post_view
@has_request_variables
def json_upload_file(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    """Legacy JSON POST endpoint: thin wrapper around upload_file_backend."""
    return upload_file_backend(request, user_profile)
@zulip_login_required
@has_request_variables
def get_uploaded_file(request, realm_id_str, filename,
                      redir=REQ(validator=check_bool, default=True)):
    # type: (HttpRequest, str, str, bool) -> HttpResponse
    """Browser-facing download endpoint; requires a logged-in user.

    ``redir`` (query parameter, default True) selects between a
    redirect to the file and a JSON body containing its URI.
    """
    user_profile = request.user
    return serve_file_backend(request, user_profile, realm_id_str, filename, redir)
| from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse, HttpResponseForbidden
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from zerver.decorator import authenticated_json_post_view, zulip_login_required
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.upload import upload_message_image_through_web_client, \
get_signed_upload_url, get_realm_for_filename
from zerver.lib.validator import check_bool
from zerver.models import UserProfile
from django.conf import settings
@authenticated_json_post_view
@has_request_variables
def json_upload_file(request, user_profile):
# type: (HttpRequest, UserProfile) -> HttpResponse
if len(request.FILES) == 0:
return json_error(_("You must specify a file to upload"))
if len(request.FILES) != 1:
return json_error(_("You may only upload one file at a time"))
user_file = list(request.FILES.values())[0]
if ((settings.MAX_FILE_UPLOAD_SIZE * 1024 * 1024) < user_file._get_size()):
return json_error(_("File Upload is larger than allowed limit"))
uri = upload_message_image_through_web_client(request, user_file, user_profile)
return json_success({'uri': uri})
@zulip_login_required
@has_request_variables
def get_uploaded_file(request, realm_id_str, filename,
redir=REQ(validator=check_bool, default=True)):
# type: (HttpRequest, str, str, bool) -> HttpResponse
if settings.LOCAL_UPLOADS_DIR is not None:
return HttpResponseForbidden() # Should have been served by nginx
user_profile = request.user
url_path = "%s/%s" % (realm_id_str, filename)
if realm_id_str == "unk":
realm_id = get_realm_for_filename(url_path)
if realm_id is None:
# File does not exist
return json_error(_("That file does not exist."), status=404)
# Internal users can access all uploads so we can receive attachments in cross-realm messages
if user_profile.realm.id == int(realm_id) or user_profile.realm.domain == 'zulip.com':
uri = get_signed_upload_url(url_path)
if redir:
return redirect(uri)
else:
return json_success({'uri': uri})
else:
return HttpResponseForbidden()
| apache-2.0 | Python |
d7c41853277c1df53192b2f879f47f75f3c62fd5 | Add redirect for / to collections | MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager | server/covmanager/urls.py | server/covmanager/urls.py | from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^$', views.index, name='index'),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
)
| from django.conf.urls import patterns, include, url
from rest_framework import routers
from covmanager import views
router = routers.DefaultRouter()
router.register(r'collections', views.CollectionViewSet, base_name='collections')
router.register(r'repositories', views.RepositoryViewSet, base_name='repositories')
urlpatterns = patterns('',
url(r'^rest/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^repositories/', views.repositories, name="repositories"),
url(r'^collections/$', views.collections, name="collections"),
url(r'^collections/api/$', views.CollectionViewSet.as_view({'get': 'list'}), name="collections_api"),
url(r'^collections/(?P<collectionid>\d+)/browse/$', views.collections_browse, name="collections_browse"),
url(r'^collections/(?P<collectionid>\d+)/browse/api/(?P<path>.*)', views.collections_browse_api, name="collections_browse_api"),
url(r'^rest/', include(router.urls)),
) | mpl-2.0 | Python |
91cbb2b79c565484389f659d436dc3a0813107a4 | bump version | kaneawk/shadowsocksr,kaneawk/shadowsocksr | shadowsocks/version.py | shadowsocks/version.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 breakwa11
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def version():
return 'SSRR 3.2.2 2018-05-22'
| #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 breakwa11
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def version():
return 'SSRR 3.2.1 2017-10-15'
| apache-2.0 | Python |
ab442c822242ea34a752a154873f69c55801c121 | Fix warnings during tests | TriOptima/tri.table,TriOptima/tri.table,TriOptima/tri.tables,TriOptima/tri.tables | tests/models.py | tests/models.py | from __future__ import unicode_literals
from django.db import models
from django.db.models import CASCADE
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Foo(models.Model):
a = models.IntegerField()
b = models.CharField(max_length=255)
def __str__(self):
return 'Foo(%s, %s)' % (self.a, self.b)
class Meta:
ordering = ('pk',)
class Bar(models.Model):
foo = models.ForeignKey(Foo, on_delete=CASCADE)
c = models.BooleanField()
class Meta:
ordering = ('pk',)
class Baz(models.Model):
foo = models.ManyToManyField(Foo)
class Meta:
ordering = ('pk',)
| from __future__ import unicode_literals
from django.db import models
from django.db.models import CASCADE
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Foo(models.Model):
a = models.IntegerField()
b = models.CharField(max_length=255)
def __str__(self):
return 'Foo(%s, %s)' % (self.a, self.b)
class Bar(models.Model):
foo = models.ForeignKey(Foo, on_delete=CASCADE)
c = models.BooleanField()
class Baz(models.Model):
foo = models.ManyToManyField(Foo)
| bsd-3-clause | Python |
376357185a7cff4c2dea663e87d334bd5e7daa27 | Rename function name. | simphony/simphony-common | simphony/cuds/utils.py | simphony/cuds/utils.py | import warnings
import importlib
from functools import wraps
from .meta import api
_CUBA_CUDS_MAP = None
def deprecated(func):
@wraps(func)
def _deprecated(*args, **kwargs):
warnings.warn("Deprecation warning: {}".format(func.__name__))
return func(*args, **kwargs)
return _deprecated
def map_cuba_key_to_cuds_class(cuba_key):
"""Return the equivalent CUDS class for the given CUBA key.
Parameters
----------
cuba_key: CUBA
The key to find its equivalent CUDS class.
Raises
------
ValueError:
If no CUDS exists for the given CUBA key.
Returns
-------
object: type
"""
global _CUBA_CUDS_MAP
if not _CUBA_CUDS_MAP:
_fill_cuba_cuds_map()
if cuba_key not in _CUBA_CUDS_MAP:
raise ValueError('No CUDS class exist for {cuba_key}'
.format(cuba_key=cuba_key))
return _CUBA_CUDS_MAP[cuba_key]
def _fill_cuba_cuds_map():
"""Fill the cuba-cuds map."""
api_mod = importlib.import_module('simphony.cuds.meta.api')
global _CUBA_CUDS_MAP
_CUBA_CUDS_MAP = \
dict([(cls.cuba_key, cls) for name, cls \
in api_mod.__dict__.items() \
if isinstance(cls, type) and issubclass(cls, api.CUDSItem)])
| import warnings
import importlib
from functools import wraps
_CUBA_CUDS_MAP = None
def deprecated(func):
@wraps(func)
def _deprecated(*args, **kwargs):
warnings.warn("Deprecation warning: {}".format(func.__name__))
return func(*args, **kwargs)
return _deprecated
def turn_cuba_into_cuds(cuba_key):
"""Return the equivalent CUDS class for the given CUBA key.
Parameters
----------
cuba_key: CUBA
The key to find its equivalent CUDS class.
Raises
------
ValueError:
If no CUDS exists for the given CUBA key.
Returns
-------
object: type
"""
global _CUBA_CUDS_MAP
if not _CUBA_CUDS_MAP:
_fill_cuba_cuds_map()
if cuba_key not in _CUBA_CUDS_MAP:
raise ValueError('No CUDS class exist for {cuba_key}'
.format(cuba_key=cuba_key))
return _CUBA_CUDS_MAP[cuba_key]
def _fill_cuba_cuds_map():
"""Fill the cuba-cuds map."""
api_mod = importlib.import_module('simphony.cuds.meta.api')
global _CUBA_CUDS_MAP
_CUBA_CUDS_MAP = \
dict([(cls.cuba_key, cls) for name, cls \
in api_mod.__dict__.items() if isinstance(cls, type)]) | bsd-2-clause | Python |
7c2904615e7c18e2a93900d6315b3489ad1296c4 | Resolve Django 4.0 warning in urls config | sunscrapers/djoser,sunscrapers/djoser,sunscrapers/djoser | djoser/social/urls.py | djoser/social/urls.py | from django.urls import re_path
from djoser.social import views
urlpatterns = [
re_path(
r"^o/(?P<provider>\S+)/$",
views.ProviderAuthView.as_view(),
name="provider-auth",
)
]
| from django.conf.urls import url
from djoser.social import views
urlpatterns = [
url(
r"^o/(?P<provider>\S+)/$",
views.ProviderAuthView.as_view(),
name="provider-auth",
)
]
| mit | Python |
18f0f07a5cacd82e930c022d877bf30de70181af | simplify with pipe | Javran/misc,Javran/misc,Javran/misc,Javran/misc,Javran/misc | py-subprocess/test.py | py-subprocess/test.py | #!/usr/bin/env python3
import subprocess
import io
import tempfile
def main():
proc = subprocess.Popen(
['./out.sh'],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
for line in proc.stdout:
print(f'subproc stdout:{line}')
for line in proc.stderr:
print(f'subproc stderr:{line}')
proc.wait()
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
import subprocess
import io
import tempfile
def main():
with tempfile.NamedTemporaryFile(mode='w', buffering=1) as f_out, \
tempfile.NamedTemporaryFile(mode='w', buffering=1) as f_err:
proc = subprocess.Popen(
['./out.sh'],
shell=True,
stdin=subprocess.PIPE,
stdout=f_out,
stderr=f_err,
)
proc.wait()
if __name__ == '__main__':
main()
| mit | Python |
9940a61cd7dbe9b66dcd4c7e07f967e53d2951d4 | Change signature to match other resources auth functions | geotagx/pybossa,jean/pybossa,stefanhahmann/pybossa,harihpr/tweetclickers,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,Scifabric/pybossa,geotagx/pybossa,jean/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,OpenNewsLabs/pybossa,inteligencia-coletiva-lsd/pybossa,PyBossa/pybossa | pybossa/auth/token.py | pybossa/auth/token.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
def create(token=None):
return False
def read(token=None):
return not current_user.is_anonymous()
def update(token):
return False
def delete(token):
return False
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
def create(token=None):
return False
def read(token=None):
return not current_user.is_anonymous()
def update(token=None):
return False
def delete(token=None):
return False
| agpl-3.0 | Python |
9d1268fe44f4eab78cdb76f70a914fa3851db00d | Add names.name_part template node to simplify porting BibTeX name formatting patterns. | live-clones/pybtex | pybtex/style/names.py | pybtex/style/names.py | # Copyright (C) 2006, 2007, 2008 Andrey Golovizin
#
# This file is part of pybtex.
#
# pybtex is free software; you can redistribute it and/or modify
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# pybtex is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pybtex; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
"""name formatting styles
"""
from pybtex.richtext import Symbol, Text, nbsp
from pybtex.style.template import join, together, node, _format_list
from pybtex.bibtex.name import tie_or_space
@node
def name_part(children, data, before='', tie=False):
parts = together [children].format_data(data)
if not parts:
return Text()
if tie:
return Text(before, parts, tie_or_space(parts, nbsp, ' '))
else:
return Text(before, parts)
def plain(person, abbr=False):
r"""
>>> from pybtex.core import Person
>>> name = Person(string=r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin")
>>> print plain(name).format().plaintext()
de<nbsp>la Vall{\'e}e<nbsp>Poussin, Charles Louis Xavier<nbsp>Joseph
>>> print plain(name, abbr=True).format().plaintext()
de<nbsp>la Vall{\'e}e<nbsp>Poussin, C.<nbsp>L. X.<nbsp>J.
>>> name = Person(first='First', last='Last', middle='Middle')
>>> print plain(name).format().plaintext()
Last, First<nbsp>Middle
>>> print plain(name, abbr=True).format().plaintext()
Last, F.<nbsp>M.
"""
return join [
name_part(tie=True) [person.prelast()],
name_part [person.last()],
name_part(before=', ') [person.lineage()],
name_part(before=', ') [person.first(abbr) + person.middle(abbr)],
]
| # Copyright (C) 2006, 2007, 2008 Andrey Golovizin
#
# This file is part of pybtex.
#
# pybtex is free software; you can redistribute it and/or modify
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# pybtex is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pybtex; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
"""name formatting styles
"""
from pybtex.richtext import Symbol
from pybtex.style.template import join, together
def plain(person, abbr=False):
r"""
>>> from pybtex.core import Person
>>> name = Person(string=r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin")
>>> print plain(name).format().plaintext()
de<nbsp>la Vall{\'e}e<nbsp>Poussin, Charles Louis Xavier<nbsp>Joseph
>>> print plain(name, abbr=True).format().plaintext()
de<nbsp>la Vall{\'e}e<nbsp>Poussin, C.<nbsp>L. X.<nbsp>J.
>>> name = Person(first='First', last='Last', middle='Middle')
>>> print plain(name).format().plaintext()
Last, First<nbsp>Middle
>>> print plain(name, abbr=True).format().plaintext()
Last, F.<nbsp>M.
"""
nbsp = Symbol('nbsp')
return together(last_tie=False) [
together [person.prelast()],
join(sep=', ') [
together [person.last()],
together [person.lineage()],
together [person.first(abbr) + person.middle(abbr)],
],
]
| mit | Python |
bfdf2b45baf6a57bf186ef6e710e081d1abb104f | Add error reports to admin. | magcius/sweettooth,GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,GNOME/extensions-web | sweettooth/extensions/admin.py | sweettooth/extensions/admin.py |
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from extensions.models import Extension, ExtensionVersion, ErrorReport
from review.models import CodeReview
class CodeReviewAdmin(admin.TabularInline):
model = CodeReview
fields = 'reviewer', 'comments',
class ExtensionVersionAdmin(admin.ModelAdmin):
list_display = 'title', 'status',
list_display_links = 'title',
def title(self, ver):
return "%s (%d)" % (ver.extension.uuid, ver.version)
title.short_description = "Extension (version)"
inlines = [CodeReviewAdmin]
admin.site.register(ExtensionVersion, ExtensionVersionAdmin)
class ExtensionVersionInline(admin.TabularInline):
model = ExtensionVersion
fields = 'version', 'status',
extra = 0
class ExtensionAdmin(admin.ModelAdmin):
list_display = 'name', 'num_versions', 'creator',
list_display_links = 'name',
def num_versions(self, ext):
return ext.versions.count()
num_versions.short_description = "#V"
inlines = [ExtensionVersionInline]
admin.site.register(Extension, ExtensionAdmin)
class ErrorReportAdmin(admin.ModelAdmin):
list_display = 'user_or_email', 'extension', 'version_num'
list_display_links = list_display
def user_or_email(self, report):
if report.user:
return report.user
else:
return report.email
def version_num(self, report):
return report.version.version
def extension(self, report):
return report.version.extension
admin.site.register(ErrorReport, ErrorReportAdmin)
|
from django.contrib import admin
from sorl.thumbnail.admin import AdminImageMixin
from extensions.models import Extension, ExtensionVersion
from review.models import CodeReview
class CodeReviewAdmin(admin.TabularInline):
model = CodeReview
fields = 'reviewer', 'comments',
class ExtensionVersionAdmin(admin.ModelAdmin):
list_display = 'title', 'status',
list_display_links = 'title',
def title(self, ver):
return "%s (%d)" % (ver.extension.uuid, ver.version)
title.short_description = "Extension (version)"
inlines = [CodeReviewAdmin]
admin.site.register(ExtensionVersion, ExtensionVersionAdmin)
class ExtensionVersionInline(admin.TabularInline):
model = ExtensionVersion
fields = 'version', 'status',
extra = 0
class ExtensionAdmin(admin.ModelAdmin):
list_display = 'name', 'num_versions', 'creator',
list_display_links = 'name',
def num_versions(self, ext):
return ext.versions.count()
num_versions.short_description = "#V"
inlines = [ExtensionVersionInline]
admin.site.register(Extension, ExtensionAdmin)
| agpl-3.0 | Python |
50b6a5a1aec88c2e3b12af8b7f673d7f3daf4dea | remove unnecessary vispy depencdency | QULab/sound_field_analysis-py | AE2_SampledPlaneWave.py | AE2_SampledPlaneWave.py | # SOFiA example 2: Sampled unity plane wave simulation for different kr
# Generate a full audio spectrum plane wave using S/W/G
# Additionally requires vispy, see http://vispy.org
import numpy as np
from sofia import gen, process, plot
pi = np.pi
r = 0.1 # Array radius
ac = 0 # Rigid Sphere
FS = 48000 # Sampling Frequency
NFFT = 128 # FFT-Bin
AZ = 0 # Azimuth angle
EL = pi / 2 # Elevation angle
Nsft = 5 # Transform order
Nrf = Nsft # Radial filter order
Nviz = Nsft # Visualization order
krIDX = [15, 23, 29, 39] # kr-bin for subfigures
quadrature_grid, _ = gen.lebedev(110)
fftData, kr = gen.swg(r=r, gridData=quadrature_grid, ac=ac, FS=FS, NFFT=NFFT, AZ=AZ, EL=EL)
# Spatial Fourier Transform
Pnm = process.stc(Nsft, fftData, quadrature_grid)
# Make radial filters
dn, _ = gen.mf(Nrf, kr, ac)
# Generate data to visualize
mtxDataLOW = plot.makeMTX(Pnm, dn, Nviz, krIDX[0])
mtxDataMID = plot.makeMTX(Pnm, dn, Nviz, krIDX[1])
mtxDataHIGH = plot.makeMTX(Pnm, dn, Nviz, krIDX[2])
mtxDataVHIGH = plot.makeMTX(Pnm, dn, Nviz, krIDX[3])
vizMtx = [np.abs(mtxDataLOW),
np.abs(mtxDataMID),
np.abs(mtxDataHIGH),
np.abs(mtxDataVHIGH)]
plot.plotGrid(2, 2, vizMtx, style='shape', bgcolor='white', colorize=False, normalize=True)
input("3D visualization opened in new window.\nUse mouse to look around, scroll to zoom and shift + drag do move around.\nPress any key in the console to exit.")
# To export to png:
# >> from vispy import io
# >> img = canvas.render()
# >> io.write_png("img/AE2_grid.png", img)
| # SOFiA example 2: Sampled unity plane wave simulation for different kr
# Generate a full audio spectrum plane wave using S/W/G
# Additionally requires vispy, see http://vispy.org
import numpy as np
from sofia import gen, process, plot
from vispy import scene
pi = np.pi
r = 0.1 # Array radius
ac = 0 # Rigid Sphere
FS = 48000 # Sampling Frequency
NFFT = 128 # FFT-Bin
AZ = 0 # Azimuth angle
EL = pi / 2 # Elevation angle
Nsft = 5 # Transform order
Nrf = Nsft # Radial filter order
Nviz = Nsft # Visualization order
krIDX = [15, 23, 29, 39] # kr-bin for subfigures
quadrature_grid, _ = gen.lebedev(110)
fftData, kr = gen.swg(r=r, gridData=quadrature_grid, ac=ac, FS=FS, NFFT=NFFT, AZ=AZ, EL=EL)
# Spatial Fourier Transform
Pnm = process.stc(Nsft, fftData, quadrature_grid)
# Make radial filters
dn, _ = gen.mf(Nrf, kr, ac)
# Generate data to visualize
mtxDataLOW = plot.makeMTX(Pnm, dn, Nviz, krIDX[0])
mtxDataMID = plot.makeMTX(Pnm, dn, Nviz, krIDX[1])
mtxDataHIGH = plot.makeMTX(Pnm, dn, Nviz, krIDX[2])
mtxDataVHIGH = plot.makeMTX(Pnm, dn, Nviz, krIDX[3])
vizMtx = [np.abs(mtxDataLOW),
np.abs(mtxDataMID),
np.abs(mtxDataHIGH),
np.abs(mtxDataVHIGH)]
plot.plotGrid(2, 2, vizMtx, style='shape', bgcolor='white', colorize=False, normalize=True)
input("3D visualization opened in new window.\nUse mouse to look around, scroll to zoom and shift + drag do move around.\nPress any key in the console to exit.")
# To export to png:
# >> from vispy import io
# >> img = canvas.render()
# >> io.write_png("img/AE2_grid.png", img)
| mit | Python |
498b3e6fc1f2d0cb45b44904f0b4cf5fde78d31d | Add comments for the directory monitor. | phac-nml/irida-miseq-uploader,phac-nml/irida-miseq-uploader | API/directorymonitor.py | API/directorymonitor.py | import os
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileCreatedEvent
from API.pubsub import send_message
from API.directoryscanner import find_runs_in_directory
class DirectoryMonitorTopics(object):
"""Topics for monitoring directories for new runs."""
new_run_observed = "new_run_observed"
finished_discovering_run = "finished_discovering_run"
class CompletedJobInfoEventHandler(FileSystemEventHandler):
"""A subclass of watchdog.events.FileSystemEventHandler that will run
a directory scan on the monitored directory. This will filter explicitly on
a file creation event for a file with the name `CompletedJobInfo.xml`."""
def on_created(self, event):
"""Overrides `on_created` in `FileSystemEventHandler` to filter on
file creation events for `CompletedJobInfo.xml`."""
if isinstance(event, FileCreatedEvent) and event.src_path.endswith('CompletedJobInfo.xml'):
logging.info("Observed new run in {}, telling the UI to start uploading it.".format(event.src_path))
directory = os.path.dirname(event.src_path)
# tell the UI to clean itself up before observing new runs
send_message(DirectoryMonitorTopics.new_run_observed)
# this will send a bunch of events that the UI is listening for, but
# unlike the UI (which runs this in a separate thread), we're going to do this
# in our own thread and block on it so we can tell the UI to start
# uploading once we've finished discovering the run
find_runs_in_directory(directory)
# now tell the UI to start
send_message(DirectoryMonitorTopics.finished_discovering_run)
def monitor_directory(directory):
"""Starts monitoring the specified directory in a background thread. File events
will be passed to the `CompletedJobInfoEventHandler`.
Arguments:
directory: the directory to monitor.
"""
logging.info("Getting ready to monitor directory {}".format(directory))
event_handler = CompletedJobInfoEventHandler()
observer = Observer()
observer.schedule(event_handler, directory, recursive=True)
observer.start()
| import os
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, FileCreatedEvent
from API.pubsub import send_message
from API.directoryscanner import find_runs_in_directory
class DirectoryMonitorTopics(object):
new_run_observed = "new_run_observed"
finished_discovering_run = "finished_discovering_run"
class CompletedJobInfoEventHandler(FileSystemEventHandler):
def on_created(self, event):
if isinstance(event, FileCreatedEvent) and event.src_path.endswith('CompletedJobInfo.xml'):
logging.info("Observed new run in {}, telling the UI to start uploading it.".format(event.src_path))
directory = os.path.dirname(event.src_path)
# tell the UI to clean itself up before observing new runs
send_message(DirectoryMonitorTopics.new_run_observed)
# this will send a bunch of events that the UI is listening for, but
# unlike the UI (which runs this in a separate thread), we're going to do this
# in our own thread and block on it so we can tell the UI to start
# uploading once we've finished discovering the run
find_runs_in_directory(directory)
# now tell the UI to start
send_message(DirectoryMonitorTopics.finished_discovering_run)
def monitor_directory(directory):
logging.info("Getting ready to monitor directory {}".format(directory))
event_handler = CompletedJobInfoEventHandler()
observer = Observer()
observer.schedule(event_handler, directory, recursive=True)
observer.start()
| apache-2.0 | Python |
4ffaa730bde8fae1bc46b692ee7b5d1c6e922a0e | bump version 2.5 | phfaist/pylatexenc | pylatexenc/version.py | pylatexenc/version.py | #
# The MIT License (MIT)
#
# Copyright (c) 2019 Philippe Faist
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# Self-note: Don't forget to
#
# 1) bump version number here and update change log (doc/changes.rst)
#
# 2) git commit any remaining changes
#
# 3) " git tag vX.X -am '<message>' "
#
# 4) " git push --tags "
#
# 5) on github.com, fill in release details with a summary of changes etc.
#
# 6) upload package to PyPI (twine upload dist/pylatexenc-X.X.tar.gz -r realpypi)
#
version_str = "2.5"
| #
# The MIT License (MIT)
#
# Copyright (c) 2019 Philippe Faist
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# Self-note: Don't forget to
#
# 1) bump version number here and update change log (doc/changes.rst)
#
# 2) git commit any remaining changes
#
# 3) " git tag vX.X -am '<message>' "
#
# 4) " git push --tags "
#
# 5) on github.com, fill in release details with a summary of changes etc.
#
# 6) upload package to PyPI (twine upload dist/pylatexenc-X.X.tar.gz -r realpypi)
#
version_str = "2.4"
| mit | Python |
d3f9cfa4f59710dede0844f3f49ce0a1cc2cf1c3 | Fix #2401: Github integration error | CMLL/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,taigaio/taiga-back,Rademade/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,dycodedev/taiga-back,WALR/taiga-back,CMLL/taiga-back,Tigerwhit4/taiga-back,coopsource/taiga-back,rajiteh/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,gauravjns/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,xdevelsistemas/taiga-back-community,joshisa/taiga-back,bdang2012/taiga-back-casting,forging2012/taiga-back,forging2012/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,seanchen/taiga-back,joshisa/taiga-back,astagi/taiga-back,Tigerwhit4/taiga-back,dycodedev/taiga-back,19kestier/taiga-back,forging2012/taiga-back,joshisa/taiga-back,dycodedev/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,gam-phon/taiga-back,gam-phon/taiga-back,astagi/taiga-back,Rademade/taiga-back,coopsource/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,coopsource/taiga-back,astronaut1712/taiga-back,CMLL/taiga-back,jeffdwyatt/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,WALR/taiga-back,astronaut1712/taiga-back,obimod/taiga-back,gam-phon/taiga-back,seanchen/taiga-back,taigaio/taiga-back,jeffdwyatt/taiga-back,astronaut1712/taiga-back,coopsource/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,dayatz/taiga-back,astronaut1712/taiga-back,dayatz/taiga-back,bdang2012/taiga-back-casting,rajiteh/taiga-back,xdevelsistemas/taiga-back-community,Rademade/taiga-back,frt-arch/taiga-back,seanchen/taiga-back,CoolCloud/taiga-back,forging2012/taiga-back,bdang2012/taiga-back-casting,frt-arch/taiga-back,crr0004/taiga-back,dycodedev/taiga-back,gam-phon/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,WALR/taiga-back,gauravjns/taiga-back,CoolCloud/taiga-back,dayatz/taiga-back,obimod/taiga-back,taigaio/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,astagi/taiga-back,obimod/taiga-back,19kestier/taiga-back,CoolCloud/taiga-b
ack,gauravjns/taiga-back,CMLL/taiga-back,obimod/taiga-back,Tigerwhit4/taiga-back,astagi/taiga-back | taiga/hooks/github/services.py | taiga/hooks/github/services.py | # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from django.core.urlresolvers import reverse
from taiga.users.models import User
from taiga.users.models import AuthData
from taiga.base.utils.urls import get_absolute_url
def get_or_generate_config(project):
config = project.modules_config.config
if config and "github" in config:
g_config = project.modules_config.config["github"]
else:
g_config = {"secret": uuid.uuid4().hex}
url = reverse("github-hook-list")
url = get_absolute_url(url)
url = "%s?project=%s" % (url, project.id)
g_config["webhooks_url"] = url
return g_config
def get_github_user(github_id):
user = None
if github_id:
try:
user = AuthData.objects.get(key="github", value=github_id).user
except AuthData.DoesNotExist:
pass
if user is None:
user = User.objects.get(is_system=True, username__startswith="github")
return user
| # Copyright (C) 2014 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2014 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from django.core.urlresolvers import reverse
from taiga.users.models import User
from taiga.base.utils.urls import get_absolute_url
def get_or_generate_config(project):
config = project.modules_config.config
if config and "github" in config:
g_config = project.modules_config.config["github"]
else:
g_config = {"secret": uuid.uuid4().hex}
url = reverse("github-hook-list")
url = get_absolute_url(url)
url = "%s?project=%s" % (url, project.id)
g_config["webhooks_url"] = url
return g_config
def get_github_user(user_id):
user = None
if user_id:
try:
user = User.objects.get(github_id=user_id)
except User.DoesNotExist:
pass
if user is None:
user = User.objects.get(is_system=True, username__startswith="github")
return user
| agpl-3.0 | Python |
6fe48fc7499327d27f69204b7f8ec927fc975177 | Implement python lexer ZMQ service. | orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,abramhindle/UnnaturalCodeFork,orezpraw/estimate-charm,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork,naturalness/unnaturalcode,abramhindle/UnnaturalCodeFork | python/lexPythonMQ.py | python/lexPythonMQ.py | #!/usr/bin/python
import re, sys, tokenize, zmq;
from StringIO import StringIO
def err(msg):
    """Write *msg* to standard error, followed by a newline."""
    # Bug fix: the stream is ``sys.stderr``; ``sys.err`` does not exist and
    # raised AttributeError whenever an error was reported.
    sys.stderr.write(str(msg) + '\n')
class LexPyMQ(object):
    """ZMQ REP service that tokenizes Python source received as JSON."""

    def __init__(self):
        self.zctx = zmq.Context()
        self.socket = self.zctx.socket(zmq.REP)

    def run(self):
        """Bind the socket and answer tokenize requests forever."""
        self.socket.bind("tcp://lo:32132")

        while True:
            msg = self.socket.recv_json(0)
            # there are definitely new lines in the code
            if not msg.get('python'):
                err('received non-python code')
            code = msg.get('body', '')
            # Bug fixes: generate_tokens() takes a *readline callable*, not a
            # file object, and it returns a lazy generator which is not JSON
            # serializable -- materialize it before replying.
            tokens = list(tokenize.generate_tokens(StringIO(code).readline))
            self.socket.send_json(tokens)

if __name__ == '__main__':
    LexPyMQ().run()
| #!/usr/bin/python
import tokenize;
import zmq;
context = zmq.Context()
socket = context.socket(zmq.REP)
socket.bind("tcp://lo:32132")
while True:
# Wait for next request from client
message = socket.recv() | agpl-3.0 | Python |
45a48126246ee7d82d0a881923d1f78a4d807004 | fix #168 | roxma/nvim-completion-manager | pythonx/cm_default.py | pythonx/cm_default.py |
# sane default for programming languages
# Word-matching regexes keyed by scope/filetype; '*' is the generic fallback.
_patterns = {}
_patterns['*'] = r'(-?\d*\.\d\w*)|([^\`\~\!\@\#\$\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)'
# CSS identifiers may contain "-", so it is excluded from the break set.
_patterns['css'] = r'(-?\d*\.\d[\w-]*)|([^\`\~\!\@\#\$\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)'
_patterns['php'] = r'(-?\d*\.\d\w*)|([^\-\`\~\!\@\#\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)'
_patterns['vim'] = r'(-?\d*\.\d\w*)|([^\-\`\~\!\@\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\'\"\,\.\<\>\/\?\s]+)'
def word_pattern(ctx):
    """Return the word regex for *ctx*'s scope (or filetype), falling back
    to the generic '*' pattern when no specific one is registered.

    Fix: dropped the redundant self-assignment
    ``_patterns['css'] = _patterns['css']`` that did nothing.
    """
    scope = ctx.get('scope', ctx.get('filetype', '')).lower()
    return _patterns.get(scope, None) or _patterns['*']
|
# sane default for programming languages
# Word regexes per scope; '*' is the catch-all default.
_patterns = {
    '*': r'(-?\d*\.\d\w*)|([^\`\~\!\@\#\$\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)',
    'css': r'(-?\d*\.\d[\w-]*)|([^\`\~\!\@\#\$\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)',
    'php': r'(-?\d*\.\d\w*)|([^\-\`\~\!\@\#\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)',
    'vim': r'(-?\d*\.\d\w*)|([^\-\`\~\!\@\%\^\&\*\(\)\=\+\[\{\]\}\\\|\;\'\"\,\.\<\>\/\?\s]+)',
}
def word_pattern(ctx):
    """Pick the word regex registered for the context's scope (or
    filetype), defaulting to the generic '*' pattern."""
    key = ctx.get('scope', ctx.get('filetype', '')).lower()
    return _patterns.get(key, None) or _patterns['*']
| mit | Python |
659d2262dcbe64d2415dfe9dbdd0e04ebeac79f0 | Update utils.py | ParallelDots/WordEmbeddingAutoencoder | tyrion/utils.py | tyrion/utils.py | import heapq,random
from compatibility import range, pickle
import numpy as np
from scipy.spatial.distance import cosine
def gen_embedding(word):
    '''
    Look up the trained embedding vector for ``word`` from the pickled model.
    Returns None (after printing a hint) when the model file is missing.
    '''
    try:
        with open('./embeddings.pickle', 'rb') as model_file:
            vocab_to_vec = pickle.load(model_file)
        return vocab_to_vec[word]
    except Exception:
        print ("Exception: Model file not found, please train the model first by runing train")
def closest_words(word, topn=10):
    '''
    Return the ``topn`` words whose embeddings are most similar (by cosine
    similarity) to the embedding of ``word``, as ``(similarity, word)``
    tuples, best match first.

    Returns ``None`` (after printing a hint) when the model file is missing
    or ``word`` is not in the vocabulary.
    '''
    try:
        # Fixes: open via a context manager (the old bare ``open`` leaked the
        # handle) and in binary mode, consistent with gen_embedding().
        with open('./embeddings.pickle', 'rb') as f:
            embeddings = pickle.load(f)
        # Embedding of word provided by user
        vec = embeddings[word]
        closest = []
        # Iterate under a fresh name -- the old loop shadowed the ``word``
        # parameter.
        for candidate in embeddings:
            heapq.heappush(closest, (1 - cosine(vec, embeddings[candidate]), candidate))
        return heapq.nlargest(topn, closest)
    except Exception:
        print ("Exception: Model file not found, please train the model by running train function")
| import heapq,random
from compatibility import range, pickle
import numpy as np
from scipy.spatial.distance import cosine
def gen_embedding(word):
'''
Generates embedding of the word from the model trained.
'''
try:
f = open('./embeddings.pickle')
embeddings = pickle.load(f)
return embeddings[word]
except Exception as e:
print ("Exception: Model file not found, please train the model first by runing train")
def closest_words(word,topn=10):
'''
Returns top 10 closest words to word provided by user.
'''
try:
embeddings = pickle.load(open("./embeddings.pickle"))
words = embeddings.keys()
closest = []
# Embedding of word provided by user
vec = embeddings[word]
for word in words:
heapq.heappush(closest, (1 - cosine(vec, embeddings[word]), word))
closest_words = heapq.nlargest(topn, closest)
return closest_words
except Exception as e:
print ("Exception: Model file not found, please train the model by running train function")
| mit | Python |
778cb1a9f9fbb7e260d9a17f07d412d4fa12930a | Add "all" to the queryset in DepartmentForm | snahor/django-ubigeo | ubigeo/forms.py | ubigeo/forms.py | from django import forms
from .models import Department, Province, District
class DepartmentForm(forms.Form):
    # Top level of the ubigeo hierarchy; every department is always
    # selectable, hence the unrestricted queryset.
    department = forms.ModelChoiceField(
        queryset=Department.objects.all()
    )
class ProvinceForm(DepartmentForm):
    """Adds a province choice whose options depend on the bound department."""

    # Empty by default; narrowed in __init__ once the form is bound.
    province = forms.ModelChoiceField(
        queryset=Province.objects.none()
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.is_bound:
            # Restrict the province choices to the submitted department.
            department = self._get_field_value('department')
            if department:
                self.fields['province'].queryset = Province.objects.filter(
                    parent=department
                )

    def _get_field_value(self, name):
        """Clean and return the submitted value for field *name*,
        or ``None`` when the value is missing or invalid."""
        field = self.fields[name]
        value = field.widget.value_from_datadict(
            self.data,
            self.files,
            self.add_prefix(name)
        )
        try:
            return field.clean(value)
        except Exception:
            # Fix: the old bare ``except:`` also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the "invalid -> None" intent.
            return None
class DistrictForm(ProvinceForm):
    # Third level: district options are narrowed to the submitted province.
    district = forms.ModelChoiceField(
        queryset=District.objects.none()
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.is_bound:
            # _get_field_value is inherited from ProvinceForm.
            province = self._get_field_value('province')
            if province:
                self.fields['district'].queryset = District.objects.filter(
                    parent=province
                )
UbigeoForm = DistrictForm
| from django import forms
from .models import Department, Province, District
class DepartmentForm(forms.Form):
department = forms.ModelChoiceField(
queryset=Department.objects
)
class ProvinceForm(DepartmentForm):
province = forms.ModelChoiceField(
queryset=Province.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
department = self._get_field_value('department')
if department:
self.fields['province'].queryset = Province.objects.filter(
parent=department
)
def _get_field_value(self, name):
field = self.fields[name]
value = field.widget.value_from_datadict(
self.data,
self.files,
self.add_prefix(name)
)
try:
return field.clean(value)
except:
return None
class DistrictForm(ProvinceForm):
district = forms.ModelChoiceField(
queryset=District.objects.none()
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.is_bound:
province = self._get_field_value('province')
if province:
self.fields['district'].queryset = District.objects.filter(
parent=province
)
UbigeoForm = DistrictForm
| mit | Python |
3adc7f40553459aa7af1a444782d08c243a9d736 | Tweak Marathon object __repr__ | mattrobenolt/marathon-python,Carles-Figuerola/marathon-python,mattrobenolt/marathon-python,Carles-Figuerola/marathon-python,burakbostancioglu/marathon-python,drewrobb/marathon-python,Yelp/marathon-python,elyast/marathon-python,mesosphere/marathon-python,elyast/marathon-python,fengyehong/marathon-python,Yelp/marathon-python,Rob-Johnson/marathon-python,thefactory/marathon-python,fengyehong/marathon-python,burakbostancioglu/marathon-python,drewrobb/marathon-python,thefactory/marathon-python,Rob-Johnson/marathon-python,vitan/marathon-python,mesosphere/marathon-python | marathon/models/base.py | marathon/models/base.py | import json
from marathon.util import to_camel_case, to_snake_case, MarathonJsonEncoder
class MarathonObject(object):
    """Base Marathon object.

    Provides symmetric JSON (de)serialization helpers: attributes are
    snake_case on the Python side and camelCase on the wire.
    """

    def __repr__(self):
        return "{clazz}::{obj}".format(clazz=self.__class__.__name__, obj=self.to_json())

    def json_repr(self):
        """Construct a JSON-friendly representation of the object.

        :rtype: dict
        """
        return {to_camel_case(k):v for k,v in vars(self).iteritems()}

    @classmethod
    def from_json(cls, attributes):
        """Construct an object from a parsed response.

        :param dict attributes: object attributes from parsed response
        """
        return cls(**{to_snake_case(k): v for k,v in attributes.iteritems()})

    def to_json(self):
        """Encode an object as a JSON string.

        :rtype: str
        """
        return json.dumps(self.json_repr(), cls=MarathonJsonEncoder, sort_keys=True)
class MarathonResource(MarathonObject):
    """Base Marathon resource.

    Resources with an ``id`` render as ``Class::id``; otherwise the full
    JSON repr from MarathonObject is used.
    """

    def __repr__(self):
        if 'id' in vars(self).keys():
            return "{clazz}::{id}".format(clazz=self.__class__.__name__, id=self.id)
        else:
            # Fix: ``super(MarathonResource)`` creates an *unbound* super
            # object, so ``__repr__`` was invoked on the super proxy itself
            # instead of delegating to MarathonObject.__repr__(self).
            return super(MarathonResource, self).__repr__()
| import json
from marathon.util import to_camel_case, to_snake_case, MarathonJsonEncoder
class MarathonObject(object):
"""Base Marathon object."""
def json_repr(self):
"""Construct a JSON-friendly representation of the object.
:rtype: dict
"""
return {to_camel_case(k):v for k,v in vars(self).iteritems()}
@classmethod
def from_json(cls, attributes):
"""Construct an object from a parsed response.
:param dict attributes: object attributes from parsed response
"""
return cls(**{to_snake_case(k): v for k,v in attributes.iteritems()})
class MarathonResource(MarathonObject):
"""Base Marathon resource."""
def __repr__(self):
if 'id' in vars(self).keys():
return "{clazz}::{id}".format(clazz=self.__class__.__name__, id=self.id)
else:
return "{clazz}::{obj}".format(clazz=self.__class__.__name__, obj=self.to_json())
def to_json(self):
"""Encode an object as a JSON string.
:rtype: str
"""
return json.dumps(self.json_repr(), cls=MarathonJsonEncoder, sort_keys=True)
| mit | Python |
5a5f703c51fd46ca8a22acebcac78925d6d52984 | Comment out some print statements that were making life hard to work with | sunil07t/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,joshzarrabi/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server | CFC_WebApp/clients/commontrips/commontrips.py | CFC_WebApp/clients/commontrips/commontrips.py | from dao.user import User
import json
import sys, os, random
# This is in here so the pygmaps associated functions can be imported
# from the webapp
# sys.path.append("%s/../../CFC_WebApp/" % os.getcwd())
from uuid import UUID
def getUserTour(user_uuid):
    """
    Gets a users "tour": the list of canonical trip clusters for *user_uuid*.
    """
    # This is in here so the getCanonicalTrips function can be imported
    # from the recommender pipeline
    sys.path.append("%s/../CFC_DataCollector/" % os.getcwd())
    from recommender.filter_modules import getCanonicalTrips
    import recommender.tripiterator as ti
    from recommender.trip import Canonical_E_Mission_Trip
    # Materialize the iterator so the clusters can be logged and returned.
    canonical_trips = list(ti.TripIterator(user_uuid,["recommender", "get_improve"], Canonical_E_Mission_Trip))
    for cluster in canonical_trips:
        # print(cluster.start_point_distr)
        # print(cluster.end_point_distr)
        # print(cluster.start_time_distr)
        # print(cluster.end_time_distr)
        print cluster._id
    # representative_trips = map(lambda x: x.trip_id, canonical_trips)
    # Undo the temporary sys.path tweak before returning.
    sys.path.remove("%s/../CFC_DataCollector/" % os.getcwd())
    # print "Number of canonical_trips: %i" % len(canonical_trips)
    return canonical_trips
def generate_tour_map(user_uuid):
    """
    Generates tour map and saves it to result_template.html
    """
    # Temporary path tweak so the webapp's map-drawing helpers import.
    sys.path.append("%s/../../CFC_WebApp/" % os.getcwd())
    import main.pygmaps_modified as pygmaps
    from main.gmap_display import drawSection
    from get_database import get_section_db, get_routeCluster_db
    # NOTE(review): map is centered on fixed coordinates (Berkeley area,
    # presumably) at zoom 14 -- confirm this is intended for all users.
    gmap = pygmaps.maps(37.8717, -122.2728, 14)
    # Random hex color per trip so routes are visually distinguishable.
    r = lambda: random.randint(0,255)
    canonical_trips = getUserTour(user_uuid)
    for section in canonical_trips:
        color = '#%02X%02X%02X' % (r(),r(),r())
        # print section._id
        section_json = get_section_db().find_one({'_id': section._id})
        drawSection(section_json, 'path', gmap, color)
    # Remove any stale output first; a missing file is not an error.
    try:
        os.remove('clients/commontrips/result_template.html')
    except OSError, e:
        print "Result of removing the result template is %s" % e
    gmap.draw('clients/commontrips/result_template.html')
    sys.path.remove("%s/../../CFC_WebApp/" % os.getcwd())
def getResult(user_uuid):
    """Build the common-trips map for *user_uuid* and return the rendered page."""
    # This is in here, as opposed to the top level as recommended by the PEP
    # because then we don't have to worry about loading bottle in the unit tests
    from bottle import template
    print "common trips getResult UUID: %s" % user_uuid
    generate_tour_map(user_uuid)
    renderedTemplate = template("clients/commontrips/result_template.html")
    return renderedTemplate
| from dao.user import User
import json
import sys, os, random
# This is in here so the pygmaps associated functions can be imported
# from the webapp
# sys.path.append("%s/../../CFC_WebApp/" % os.getcwd())
from uuid import UUID
def getUserTour(user_uuid):
"""
Gets a users "tour"
"""
# This is in here so the getCanonicalTrips function can be imported
# from the recommender pipeline
sys.path.append("%s/../CFC_DataCollector/" % os.getcwd())
from recommender.filter_modules import getCanonicalTrips
import recommender.tripiterator as ti
from recommender.trip import Canonical_E_Mission_Trip
canonical_trips = list(ti.TripIterator(user_uuid,["recommender", "get_improve"], Canonical_E_Mission_Trip))
for cluster in canonical_trips:
print(cluster.start_point_distr)
# print(cluster.end_point_distr)
# print(cluster.start_time_distr)
# print(cluster.end_time_distr)
print cluster._id
# representative_trips = map(lambda x: x.trip_id, canonical_trips)
sys.path.remove("%s/../CFC_DataCollector/" % os.getcwd())
print "Number of canonical_trips: %i" % len(canonical_trips)
return canonical_trips
def generate_tour_map(user_uuid):
"""
Generates tour map and saves it to result_template.html
"""
sys.path.append("%s/../../CFC_WebApp/" % os.getcwd())
import main.pygmaps_modified as pygmaps
from main.gmap_display import drawSection
from get_database import get_section_db, get_routeCluster_db
gmap = pygmaps.maps(37.8717, -122.2728, 14)
r = lambda: random.randint(0,255)
canonical_trips = getUserTour(user_uuid)
for section in canonical_trips:
color = '#%02X%02X%02X' % (r(),r(),r())
print section._id
section_json = get_section_db().find_one({'_id': section._id})
drawSection(section_json, 'path', gmap, color)
try:
os.remove('clients/commontrips/result_template.html')
except OSError, e:
print "Result of removing the result template is %s" % e
gmap.draw('clients/commontrips/result_template.html')
sys.path.remove("%s/../../CFC_WebApp/" % os.getcwd())
def getResult(user_uuid):
# This is in here, as opposed to the top level as recommended by the PEP
# because then we don't have to worry about loading bottle in the unit tests
from bottle import template
print "common trips getResult UUID: %s" % user_uuid
generate_tour_map(user_uuid)
renderedTemplate = template("clients/commontrips/result_template.html")
return renderedTemplate
| bsd-3-clause | Python |
22e6519f04c4d912dcec669b90009fbb6392145d | remove redundant parameter in migrate-episodes.py | gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo | mygpo/core/management/commands/migrate-episodes.py | mygpo/core/management/commands/migrate-episodes.py | from optparse import make_option
from django.core.management.base import BaseCommand
from mygpo import migrate
from mygpo.utils import iterate_together, progress
from mygpo.api import models as oldmodels
from mygpo.core import models as newmodels
class Command(BaseCommand):
    """Migrate episodes from the relational DB to CouchDB documents.

    Walks old (SQL) and new (CouchDB) episodes in lockstep, ordered by the
    old numeric id, creating / updating / deleting new documents so the
    two stores agree over the requested id range.
    """

    option_list = BaseCommand.option_list + (
        make_option('--min-id', action='store', type="int", dest='min_id', default=0, help="Id from which the migration should start."),
        make_option('--max-id', action='store', type="int", dest='max_id', help="Id at which the migration should end."),
    )

    def handle(self, *args, **options):
        min_id = options.get('min_id', 0)
        # Default upper bound: the highest existing old episode id.
        max_id = options.get('max_id', oldmodels.Episode.objects.order_by('-id')[0].id)
        updated, deleted, created = 0, 0, 0
        oldepisodes = oldmodels.Episode.objects.filter(id__gte=min_id, id__lte=max_id)
        newepisodes = newmodels.Episode.view('core/episodes_by_oldid', startkey=min_id, endkey=max_id, include_docs=True).iterator()
        total = oldepisodes.count()
        # Both streams are ordered by the old id, so compare on it.
        compare = lambda o, n: cmp(long(o.id), long(n.oldid))
        for n, (olde, newe) in enumerate(iterate_together(oldepisodes, newepisodes, compare)):
            if (olde != None) and (newe != None):
                # Present on both sides: refresh the CouchDB document.
                podcast = newmodels.Podcast.get(newe.podcast)
                updated += migrate.update_episode(olde, newe)
            elif olde == None:
                # Only in CouchDB: the relational row is gone, delete.
                deleted += 1
                newe.delete()
            elif newe == None:
                # Only in SQL: create the missing CouchDB document.
                newe = migrate.create_episode(olde)
                created += 1
            status_str = '%d new, %d upd, %d del' % (created, updated, deleted)
            progress(n, total, status_str)
| from optparse import make_option
from django.core.management.base import BaseCommand
from mygpo import migrate
from mygpo.utils import iterate_together, progress
from mygpo.api import models as oldmodels
from mygpo.core import models as newmodels
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--min-id', action='store', type="int", dest='min_id', default=0, help="Id from which the migration should start."),
make_option('--max-id', action='store', type="int", dest='max_id', help="Id at which the migration should end."),
)
def handle(self, *args, **options):
min_id = options.get('min_id', 0)
max_id = options.get('max_id', oldmodels.Episode.objects.order_by('-id')[0].id)
updated, deleted, created = 0, 0, 0
oldepisodes = oldmodels.Episode.objects.filter(id__gte=min_id, id__lte=max_id)
newepisodes = newmodels.Episode.view('core/episodes_by_oldid', startkey=min_id, endkey=max_id, include_docs=True).iterator()
total = oldepisodes.count()
compare = lambda o, n: cmp(long(o.id), long(n.oldid))
for n, (olde, newe) in enumerate(iterate_together(oldepisodes, newepisodes, compare)):
if (olde != None) and (newe != None):
podcast = newmodels.Podcast.get(newe.podcast)
updated += migrate.update_episode(olde, newe, podcast)
elif olde == None:
deleted += 1
newe.delete()
elif newe == None:
newe = migrate.create_episode(olde)
created += 1
status_str = '%d new, %d upd, %d del' % (created, updated, deleted)
progress(n, total, status_str)
| agpl-3.0 | Python |
15de2fe886c52f0900deeb519f944d22bb5c6db4 | Use the @view decorator to ensure that the project page gets user data. | onceuponatimeforever/oh-mainline,vipul-sharma20/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,waseem18/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,SnappleCap/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,sudheesh001/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,sudheesh001/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,campbe13/openhatch,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,heeraj123/oh-mainline,heeraj123/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,moijes12/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,Changaco/oh-mainline,moijes12/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,onceuponatimeforever/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,eeshangarg/oh-mainline,ojengwa/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,sudheesh001/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,eeshangarg/oh-mainline,op
enhatch/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline | mysite/project/views.py | mysite/project/views.py | from mysite.search.models import Project
import django.template
import mysite.base.decorators
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404
@mysite.base.decorators.view
def project(request, project__name = None):
    """Render the project page for *project__name*.

    The @view decorator turns the (request, template, context) tuple
    returned here into a rendered response and injects common user data.
    """
    p = Project.objects.get(name=project__name)
    return (request,
            'project/project.html',
            {
                'project': p,
                'contributors': p.get_contributors()
            },
            )
| from mysite.search.models import Project
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404
def project(request, project__name = None):
p = Project.objects.get(name=project__name)
return render_to_response('project/project.html',
{
'the_user': request.user,
'project': p,
'contributors': p.get_contributors()
}
)
| agpl-3.0 | Python |
dee8c17f78989c2f508eca5a2771a1d63a60d9c6 | Bump version number to 0.1 | Xion/recursely | recursely/__init__.py | recursely/__init__.py | """
recursely
"""
__version__ = "0.1"
__description__ = "Recursive importer for Python submodules"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import sys
from recursely._compat import IS_PY3
from recursely.importer import RecursiveImporter
from recursely.utils import SentinelList
__all__ = ['install']
def install(retroactive=True):
    """Install the recursive import hook in ``sys.meta_path``,
    enabling the use of ``__recursive__`` directive.

    :param retroactive: Whether the hook should be retroactively applied
                        to module's that have been imported before
                        it was installed.
    """
    if RecursiveImporter.is_installed():
        return
    importer = RecursiveImporter()
    # because the hook is a catch-all one, we ensure that it's always
    # at the very end of ``sys.meta_path``, so that it's tried only if
    # no other (more specific) hook has been chosen by Python
    if IS_PY3:
        # Walk backwards past the interpreter's builtin finders (those
        # from '_frozen_importlib'); ``i`` stops at the last non-builtin
        # entry counted from the end.
        for i in reversed(range(len(sys.meta_path))):
            ih_module = getattr(sys.meta_path[i], '__module__', '')
            is_builtin = ih_module == '_frozen_importlib'
            if not is_builtin:
                break
        # NOTE(review): the importer is spliced in just *before* index i,
        # i.e. ahead of the last non-builtin finder -- confirm against
        # SentinelList semantics that this yields the intended ordering.
        sys.meta_path = SentinelList(
            sys.meta_path[:i],
            sentinels=[importer] + sys.meta_path[i:])
    else:
        # Python 2 has no builtin meta_path entries; just pin ours last.
        sys.meta_path = SentinelList(sys.meta_path, sentinel=importer)
    # look through already imported packages and recursively import
    # their submodules, if they contain the ``__recursive__`` directive
    if retroactive:
        for module in list(sys.modules.values()):
            importer.recurse(module)
| """
recursely
"""
__version__ = "0.0.3"
__description__ = "Recursive importer for Python submodules"
__author__ = "Karol Kuczmarski"
__license__ = "Simplified BSD"
import sys
from recursely._compat import IS_PY3
from recursely.importer import RecursiveImporter
from recursely.utils import SentinelList
__all__ = ['install']
def install(retroactive=True):
"""Install the recursive import hook in ``sys.meta_path``,
enabling the use of ``__recursive__`` directive.
:param retroactive: Whether the hook should be retroactively applied
to module's that have been imported before
it was installed.
"""
if RecursiveImporter.is_installed():
return
importer = RecursiveImporter()
# because the hook is a catch-all one, we ensure that it's always
# at the very end of ``sys.meta_path``, so that it's tried only if
# no other (more specific) hook has been chosen by Python
if IS_PY3:
for i in reversed(range(len(sys.meta_path))):
ih_module = getattr(sys.meta_path[i], '__module__', '')
is_builtin = ih_module == '_frozen_importlib'
if not is_builtin:
break
sys.meta_path = SentinelList(
sys.meta_path[:i],
sentinels=[importer] + sys.meta_path[i:])
else:
sys.meta_path = SentinelList(sys.meta_path, sentinel=importer)
# look through already imported packages and recursively import
# their submodules, if they contain the ``__recursive__`` directive
if retroactive:
for module in list(sys.modules.values()):
importer.recurse(module)
| bsd-2-clause | Python |
d2a040618a1e816b97f60aa66f5b4c9ab4a3e6b9 | Add method to handle files args from cli | jrsmith3/refmanage | refmanage/fs_utils.py | refmanage/fs_utils.py | # -*- coding: utf-8 -*-
import os
import glob
import pathlib2 as pathlib
def handle_files_args(paths_args):
    """
    Handle files arguments from command line

    This method takes a list of strings representing paths passed to the
    cli. It expands the path arguments and creates a list of pathlib.Path
    objects which unambiguously point to the files indicated by the
    command line arguments.

    :param list paths_args: Paths to files.
    :return: list of ``pathlib.Path`` objects for every matched file.
    """
    # Fix: the old code rebound ``paths`` on every iteration, so only the
    # LAST argument's matches were returned, and an empty argument list
    # raised NameError at the return statement.
    paths = []
    for paths_arg in paths_args:
        # Handle paths implicitly rooted at user home dir
        paths_arg = os.path.expanduser(paths_arg)

        # Expand wildcards; a pattern with no matches contributes nothing.
        for path_arg in glob.glob(paths_arg):
            paths.append(pathlib.Path(path_arg))

    return paths
| # -*- coding: utf-8 -*-
| mit | Python |
85f471a63238815b3506f464f9261d2b96751aae | Use cog check for duelyst cog | Harmon758/Harmonbot,Harmon758/Harmonbot | Discord/cogs/duelyst.py | Discord/cogs/duelyst.py |
from discord.ext import commands
from utilities import checks
def setup(bot):
	# Standard discord.py extension entry point: register the cog.
	bot.add_cog(Duelyst(bot))
class Duelyst(commands.Cog):
	'''Commands for the Duelyst card game (data from duelyststats.info).'''

	def __init__(self, bot):
		self.bot = bot

	def cog_check(self, ctx):
		# Cog-wide permission gate; applies to every command in this cog.
		return checks.not_forbidden_predicate(ctx)

	@commands.group(invoke_without_command = True, case_insensitive = True)
	async def duelyst(self, ctx):
		'''Duelyst'''
		await ctx.send_help(ctx.command)

	@duelyst.group(case_insensitive = True)
	async def card(self, ctx, *, name : str):
		'''Details of a specific card'''
		url = "https://duelyststats.info/scripts/carddata/get.php"
		async with ctx.bot.aiohttp_session.get(url, params = {"cardName": name}) as resp:
			data = await resp.text()
		await ctx.embed_reply(data)

	@card.command()
	async def card_random(self, ctx):
		'''Details of a random card'''
		url = "https://duelyststats.info/scripts/carddata/get.php"
		async with ctx.bot.aiohttp_session.get(url, params = {"random": 1}) as resp:
			data = await resp.text()
		await ctx.embed_reply(data)
|
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Duelyst(bot))
class Duelyst(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.group(invoke_without_command = True, case_insensitive = True)
@checks.not_forbidden()
async def duelyst(self, ctx):
'''Duelyst'''
await ctx.send_help(ctx.command)
@duelyst.group(case_insensitive = True)
@checks.not_forbidden()
async def card(self, ctx, *, name : str):
'''Details of a specific card'''
url = "https://duelyststats.info/scripts/carddata/get.php"
async with ctx.bot.aiohttp_session.get(url, params = {"cardName": name}) as resp:
data = await resp.text()
await ctx.embed_reply(data)
@card.command()
@checks.not_forbidden()
async def card_random(self, ctx):
'''Details of a random card'''
url = "https://duelyststats.info/scripts/carddata/get.php"
async with ctx.bot.aiohttp_session.get(url, params = {"random": 1}) as resp:
data = await resp.text()
await ctx.embed_reply(data)
| mit | Python |
abdfdfb6a28e6ce4700b50ebeed12f8b241b5123 | Update example push command | ivoire/ReactOBus,ivoire/ReactOBus | share/examples/push.py | share/examples/push.py | import json
import sys
import uuid
import zmq
from zmq.utils.strtypes import b
def main():
    """Connect a PUSH socket to *url* and send *num_messages* test events.

    Usage: push.py url topic num_messages
    Each message is [topic, random uuid, JSON body carrying a sequence id].
    """
    # Get the arguments
    if len(sys.argv) != 4:
        print("Usage: push.py url topic num_messages")
        sys.exit(1)
    url = sys.argv[1]
    topic = sys.argv[2]
    num_messages = int(sys.argv[3])
    # Create the socket
    context = zmq.Context()
    sock = context.socket(zmq.PUSH)
    sock.connect(url)
    for i in range(0, num_messages):
        # b() encodes each frame to bytes for zmq on both Python 2 and 3.
        sock.send_multipart([b(topic),
                             b(str(uuid.uuid1())),
                             b(json.dumps({'id': i}))])
if __name__ == "__main__":
    main()
| import sys
import zmq
from zmq.utils.strtypes import b
def main():
# Get the arguments
if len(sys.argv) != 4:
print("Usage: push.py url topic num_messages")
sys.exit(1)
url = sys.argv[1]
topic = sys.argv[2]
num_messages = int(sys.argv[3])
# Create the socket
context = zmq.Context()
sock = context.socket(zmq.PUSH)
sock.connect(url)
for i in range(0, num_messages):
sock.send_multipart([b(topic), b("id"), b(str(i))])
if __name__ == "__main__":
main()
| agpl-3.0 | Python |
77dc0b3b7dcd7c03a9c23c0f2c95d8f7f7ae26c6 | update init file | danforthcenter/plantcv,stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv | plantcv/plantcv/morphology/__init__.py | plantcv/plantcv/morphology/__init__.py | from plantcv.plantcv.morphology.find_branch_pts import find_branch_pts
from plantcv.plantcv.morphology.find_tips import find_tips
from plantcv.plantcv.morphology._iterative_prune import _iterative_prune
# Fix: segment_skeleton was imported twice; the duplicate line is removed.
from plantcv.plantcv.morphology.segment_skeleton import segment_skeleton
from plantcv.plantcv.morphology.segment_sort import segment_sort
from plantcv.plantcv.morphology.prune import prune
from plantcv.plantcv.morphology.skeletonize import skeletonize
from plantcv.plantcv.morphology.check_cycles import check_cycles
from plantcv.plantcv.morphology.segment_angle import segment_angle
from plantcv.plantcv.morphology.segment_path_length import segment_path_length
from plantcv.plantcv.morphology.segment_euclidean_length import segment_euclidean_length
from plantcv.plantcv.morphology.segment_curvature import segment_curvature
from plantcv.plantcv.morphology.segment_tangent_angle import segment_tangent_angle
from plantcv.plantcv.morphology.segment_id import segment_id
from plantcv.plantcv.morphology.segment_insertion_angle import segment_insertion_angle
from plantcv.plantcv.morphology.segment_combine import segment_combine

# Public API of the morphology subpackage.
__all__ = ["find_branch_pts", "find_tips", "prune", "skeletonize", "check_cycles", "segment_skeleton", "segment_angle",
           "segment_path_length", "segment_euclidean_length", "segment_curvature", "segment_sort", "segment_id",
           "segment_tangent_angle", "segment_insertion_angle", "segment_combine", "_iterative_prune"]
from plantcv.plantcv.morphology.segment_skeleton import segment_skeleton
from plantcv.plantcv.morphology.find_tips import find_tips
from plantcv.plantcv.morphology.segment_sort import segment_sort
from plantcv.plantcv.morphology.prune import prune
from plantcv.plantcv.morphology.skeletonize import skeletonize
from plantcv.plantcv.morphology.check_cycles import check_cycles
from plantcv.plantcv.morphology.segment_skeleton import segment_skeleton
from plantcv.plantcv.morphology._iterative_prune import _iterative_prune
from plantcv.plantcv.morphology.segment_angle import segment_angle
from plantcv.plantcv.morphology.segment_path_length import segment_path_length
from plantcv.plantcv.morphology.segment_euclidean_length import segment_euclidean_length
from plantcv.plantcv.morphology.segment_curvature import segment_curvature
from plantcv.plantcv.morphology.segment_tangent_angle import segment_tangent_angle
from plantcv.plantcv.morphology.segment_id import segment_id
from plantcv.plantcv.morphology.segment_insertion_angle import segment_insertion_angle
from plantcv.plantcv.morphology.segment_combine import segment_combine
__all__ = ["find_branch_pts", "find_tips", "prune", "skeletonize", "check_cycles", "segment_skeleton", "segment_angle",
"segment_path_length", "segment_euclidean_length", "segment_curvature", "segment_sort", "segment_id",
"segment_tangent_angle", "segment_insertion_angle", "segment_combine", "_iterative_prune"]
| mit | Python |
b6cae9dd657f79b84a9ac622b9a888d372ca9077 | Build mono with -O2. | BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild | packages/mono-master.py | packages/mono-master.py | import os
class MonoMasterPackage(Package):
    """Build definition for mono master, fetched from git at a pinned revision."""

    def __init__(self):
        Package.__init__(self, 'mono', '3.0.7',
            sources = ['git://github.com/mono/mono'],
            # Lets CI pin the exact commit to build.
            revision = os.getenv('MONO_BUILD_REVISION'),
            configure_flags = [
                '--enable-nls=no',
                '--prefix=' + Package.profile.prefix,
                '--with-ikvm=yes',
                '--with-moonlight=no'
            ]
        )
        if Package.profile.name == 'darwin':
            self.configure_flags.extend([
                # fix build on lion, it uses 64-bit host even with -m32
                '--build=i386-apple-darwin11.2.0',
                '--enable-loadedllvm'
            ])
            self.sources.extend ([
                # Fixes up pkg-config usage on the Mac
                'patches/mcs-pkgconfig.patch'
            ])
        # Build the runtime with -O2 via autogen's environment.
        self.configure = 'CFLAGS=-O2 ./autogen.sh'
    def prep (self):
        Package.prep (self)
        if Package.profile.name == 'darwin':
            # sources[0] is the git checkout; everything after it is a patch.
            for p in range (1, len (self.sources)):
                self.sh ('patch -p1 < "%{sources[' + str (p) + ']}"')
MonoMasterPackage()
| import os
class MonoMasterPackage(Package):
def __init__(self):
Package.__init__(self, 'mono', '3.0.7',
sources = ['git://github.com/mono/mono'],
revision = os.getenv('MONO_BUILD_REVISION'),
configure_flags = [
'--enable-nls=no',
'--prefix=' + Package.profile.prefix,
'--with-ikvm=yes',
'--with-moonlight=no'
]
)
if Package.profile.name == 'darwin':
self.configure_flags.extend([
# fix build on lion, it uses 64-bit host even with -m32
'--build=i386-apple-darwin11.2.0',
'--enable-loadedllvm'
])
self.sources.extend ([
# Fixes up pkg-config usage on the Mac
'patches/mcs-pkgconfig.patch'
])
self.configure = './autogen.sh'
def prep (self):
Package.prep (self)
if Package.profile.name == 'darwin':
for p in range (1, len (self.sources)):
self.sh ('patch -p1 < "%{sources[' + str (p) + ']}"')
MonoMasterPackage()
| mit | Python |
ba0ceb3ffb2cd4b73b461afd57c197aefbb48949 | Remove custom logging from dev | dbinetti/barberscore-django,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api | project/settings/dev.py | project/settings/dev.py | from .base import *
ALLOWED_HOSTS = [
'localhost',
]
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
# Aliasing Django Defaults
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
STATICFILES_STORAGE = STATIC_STORAGE
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--nologcapture',
]
INSTALLED_APPS += (
'debug_toolbar',
'django_nose',
)
| from .base import *
ALLOWED_HOSTS = [
'localhost',
]
# Static Server Config
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
# Media (aka File Upload) Server Config
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_URL = '/media/'
# Aliasing Django Defaults
DEFAULT_FILE_STORAGE = MEDIA_STORAGE
STATICFILES_STORAGE = STATIC_STORAGE
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--nologcapture',
]
LOGGING = {
'version': 1,
"disable_existing_loggers": True,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'formatters': {
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'loggers': {
'apps.api': {
'level': 'DEBUG',
'handlers': ['console'],
},
'apps.website': {
'level': 'DEBUG',
'handlers': ['console'],
},
# 'noncense': {
# 'level': 'DEBUG',
# 'handlers': ['console'],
# },
'utils': {
'level': 'DEBUG',
'handlers': ['console'],
},
},
}
INSTALLED_APPS += (
'debug_toolbar',
'django_nose',
)
| bsd-2-clause | Python |
a028c7234a4fc74b725780014e3f840078e6fedb | Fix typo | deffi/protoplot | protoplot/model/axis.py | protoplot/model/axis.py | from protoplot.engine import Item
class Axis(Item):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.options.register("log", False, False)
self.options.register("logBase", False, 10)
self.options.register("min", False, None)
self.options.register("max", False, None)
self.options.register("format", False, None)
self.options.register("majorInterval", False, None)
self.options.register("minorInterval", False, None)
self.options.register("majorTicks", False, True)
self.options.register("minorTicks", False, False)
self.options.register("majorGridVisible", False, True)
self.options.register("minorGridVisible", False, False)
self.options.register("majorGridColor", False, None)
self.options.register("minorGridColor", False, None)
self.options.register("majorGridLineStyle", False, "solid")
self.options.register("minorGridLineStyle", False, "solid")
| from protoplot.engine import Item
class Axis(Item):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.options.register("log", False, False)
self.options.register("logBase", False, 10)
self.options.register("min", False, None)
self.options.register("max", False, None)
self.options.register("format", False, None)
self.option.register("majorInterval", False, None)
self.option.register("minorInterval", False, None)
self.options.register("majorTicks", False, True)
self.options.register("minorTicks", False, False)
self.options.register("majorGridVisible", False, True)
self.options.register("minorGridVisible", False, False)
self.options.register("majorGridColor", False, None)
self.options.register("minorGridColor", False, None)
self.options.register("majorGridLineStyle", False, "solid")
self.options.register("minorGridLineStyle", False, "solid")
| agpl-3.0 | Python |
2bd443655529527a64e46872cd1dbf8142be8dc3 | change session name to identifier | opencivicdata/pupa,opencivicdata/pupa,influence-usa/pupa,datamade/pupa,influence-usa/pupa,mileswwatkins/pupa,datamade/pupa,mileswwatkins/pupa,rshorey/pupa,rshorey/pupa | pupa/importers/votes.py | pupa/importers/votes.py | from .base import BaseImporter
from opencivicdata.models import VoteEvent, LegislativeSession
class VoteImporter(BaseImporter):
_type = 'vote'
model_class = VoteEvent
related_models = {'counts': {}, 'votes': {}, 'sources': {}}
def __init__(self, jurisdiction_id,
person_importer, org_importer, bill_importer):
super(VoteImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.bill_importer = bill_importer
self.org_importer = org_importer
def get_object(self, vote):
spec = {
'identifier': vote['identifier'],
'legislative_session__identifier': vote['legislative_session'],
'legislative_session__jurisdiction_id': self.jurisdiction_id,
}
# TODO: use bill, session, etc.
return self.model_class.objects.get(**spec)
def prepare_for_db(self, data):
data['legislative_session'] = LegislativeSession.objects.get(
identifier=data.pop('legislative_session'), jurisdiction_id=self.jurisdiction_id)
data['organization_id'] = self.org_importer.resolve_json_id(data.pop('organization'))
data['bill_id'] = self.bill_importer.resolve_json_id(data.pop('bill'))
return data
| from .base import BaseImporter
from opencivicdata.models import VoteEvent, LegislativeSession
class VoteImporter(BaseImporter):
_type = 'vote'
model_class = VoteEvent
related_models = {'counts': {}, 'votes': {}, 'sources': {}}
def __init__(self, jurisdiction_id,
person_importer, org_importer, bill_importer):
super(VoteImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.bill_importer = bill_importer
self.org_importer = org_importer
def get_object(self, vote):
spec = {
'identifier': vote['identifier'],
'legislative_session__identifier': vote['legislative_session'],
'legislative_session__jurisdiction_id': self.jurisdiction_id,
}
# TODO: use bill, session, etc.
return self.model_class.objects.get(**spec)
def prepare_for_db(self, data):
data['legislative_session'] = LegislativeSession.objects.get(
name=data.pop('legislative_session'), jurisdiction_id=self.jurisdiction_id)
data['organization_id'] = self.org_importer.resolve_json_id(data.pop('organization'))
data['bill_id'] = self.bill_importer.resolve_json_id(data.pop('bill'))
return data
| bsd-3-clause | Python |
46338d8ea16cbc801df1791b8467ec9268ad8d2e | convert do_spec to klk | tek/amino | unit/do_spec.py | unit/do_spec.py | from typing import Generator, Any
from amino.test.spec_spec import Spec
from kallikrein import kf, Expectation, k
from kallikrein.matchers.maybe import be_just, be_nothing
from amino import Just, Nothing, Maybe, do, Eval
from amino.state import EvalState, StateT
class DoSpec(Spec):
'''do notation
yield all `Just`s $just
yield a `Nothing` $nothing
`EvalState` $eval_state
'''
def just(self) -> Expectation:
@do
def run(i: int) -> Generator[Maybe[int], Any, None]:
a = yield Just(i)
b = yield Just(a + 5)
c = yield Just(b + 7)
d = yield Just(c * 3)
yield Just(d)
return kf(run, 3).must(be_just(45))
def nothing(self) -> None:
@do
def run(i: int) -> Generator[Maybe[int], Any, None]:
yield Just(i)
b = yield Nothing
c = yield Just(b + 7)
yield Just(c)
return kf(run, 3).must(be_nothing)
def eval_state(self) -> None:
@do
def run() -> Generator[StateT[Eval, str, Any], Any, None]:
a = yield EvalState.pure(1)
yield EvalState.set('state')
yield EvalState.inspect(lambda s: f'{s}: {a}')
return k(run().run_a('init').value) == 'state: 1'
__all__ = ('DoSpec',)
| from typing import Generator
from amino.test.spec_spec import Spec
from amino import Just, Nothing, Maybe, do
from amino.state import EvalState, StateT
class DoSpec(Spec):
def just(self) -> None:
@do
def run(i: int) -> Generator[Maybe[int], int, Maybe[int]]:
a = yield Just(i)
b = yield Just(a + 5)
c = yield Just(b + 7)
d = yield Just(c * 3)
yield Just(d)
run(3).should.equal(Just(45))
def nothing(self) -> None:
@do
def run(i: int) -> Generator[Maybe[int], int, Maybe[int]]:
yield Just(i)
b = yield Nothing
c = yield Just(b + 7)
yield Just(c)
run(3).should.equal(Nothing)
def eval_state(self) -> None:
@do
def run() -> None:
a = yield EvalState.pure(1)
yield EvalState.set('state')
yield EvalState.inspect(lambda s: f'{s}: {a}')
run().run_a('init').value.should.equal('state: 1')
__all__ = ('DoSpec',)
| mit | Python |
e79949dbb639a4818fc7b509b03bc6f94db458e4 | create test to load all passbands listed in pbzptmag file | gnarayan/source_synphot | source_synphot/main.py | source_synphot/main.py | # -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import numpy as np
from . import io
from . import passband
def main(inargs=None):
pbzptfile = os.path.join('passbands','pbzptmag.txt')
pbzptfile = io.get_pkgfile(pbzptfile)
pbzpt = np.recfromtxt(pbzptfile, names=True)
pbnames = pbzpt.obsmode
pbnames = [x.decode('latin-1') for x in pbnames]
pbs = passband.get_pbs(pbnames, 0.)
print(pbs)
| # -*- coding: UTF-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from . import io
from . import passband
import pysynphot as S
def main(inargs=None):
args = io.get_options(args=inargs)
sourcepb, sourcepbzp = io.get_passband(args.sourcepb)
if sourcepbzp is None:
sourcepbzp = np.nan
new_sourcepbzp = passband.get_pb_zpt(sourcepb, model_mag=0.)
if sourcepbzp != new_sourcepbzp:
delta_zp = sourcepbzp - new_sourcepbzp
print(delta_zp, sourcepbzp, new_sourcepbzp)
sourcepbzp = new_sourcepbzp
source_spec = S.FlatSpectrum(3631, fluxunits='jy')
source_spec.convert('flam')
ob = S.Observation(source_spec, sourcepb)
print(ob.effstim('abmag'))
out = passband.synphot(source_spec, sourcepb, sourcepbzp)
print(out)
| mit | Python |
8714d136cea7fe54f3ae8cbf97da14a142b88db8 | Handle IntegrityError when creating transactions | CorbanU/corban-shopify,CorbanU/corban-shopify | shopify/product/models.py | shopify/product/models.py | from __future__ import unicode_literals
from decimal import Decimal
from django.db import IntegrityError
from django.db import models
from django.db.models import Sum
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
@python_2_unicode_compatible
class Product(models.Model):
# Unique Shopify product ID number
product_id = models.IntegerField(unique=True)
# Type for this product (fee, deposit, etc.)
product_type = models.CharField(max_length=64, blank=True)
# Informative description, used for display purposes
description = models.CharField(max_length=255)
# Internal account number for this product
account_number = models.BigIntegerField(null=True, blank=True)
def __str__(self):
return self.description
class TransactionManager(models.Manager):
def add_transaction(self, product_id, price, quantity, credit=True, **kwargs):
try:
product = Product.objects.get(product_id=product_id)
except Product.DoesNotExist:
pass
else:
amount = Decimal(price) * Decimal(quantity)
try:
self.create(product=product, amount=amount, is_credit=credit,
created_at=now(), **kwargs)
except IntegrityError:
pass
def get_amounts(self):
"""
Return aggregated transaction amounts of all transactions
that have not already been exported and have a product
account number. All returned transactions are marked as
exported.
"""
transactions = self.filter(exported_at__isnull=True).exclude(product__account_number__isnull=True)
# Force queryset evaluation so we can call update on the queryset
amounts = list(transactions.values('product__account_number', 'order_name', 'is_credit').order_by('created_at').annotate(amount=Sum('amount')))
transactions.update(exported_at=now())
return amounts
class Transaction(models.Model):
# Product for which this transaction occurred
product = models.ForeignKey(Product)
# Amount (price * quantity) for this transaction
amount = models.DecimalField(decimal_places=2, max_digits=6)
# Specify if transaction type is credit or debit
is_credit = models.BooleanField(default=True)
# Shopify order ID for the transaction
order_id = models.IntegerField(null=True, blank=True)
# Shopify order name for the transaction
order_name = models.CharField(max_length=16, blank=True)
# Shopify item ID for the transaction
item_id = models.IntegerField(null=True, blank=True)
# When the transaction occurred
created_at = models.DateTimeField()
# When the transaction was exported
exported_at = models.DateTimeField(editable=False, null=True)
objects = TransactionManager()
| from __future__ import unicode_literals
from decimal import Decimal
from django.db import models
from django.db.models import Sum
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
@python_2_unicode_compatible
class Product(models.Model):
# Unique Shopify product ID number
product_id = models.IntegerField(unique=True)
# Type for this product (fee, deposit, etc.)
product_type = models.CharField(max_length=64, blank=True)
# Informative description, used for display purposes
description = models.CharField(max_length=255)
# Internal account number for this product
account_number = models.BigIntegerField(null=True, blank=True)
def __str__(self):
return self.description
class TransactionManager(models.Manager):
def add_transaction(self, product_id, price, quantity, credit=True, **kwargs):
try:
product = Product.objects.get(product_id=product_id)
except Product.DoesNotExist:
pass
else:
amount = Decimal(price) * Decimal(quantity)
self.create(product=product, amount=amount, is_credit=credit,
created_at=now(), **kwargs)
def get_amounts(self):
"""
Return aggregated transaction amounts of all transactions
that have not already been exported and have a product
account number. All returned transactions are marked as
exported.
"""
transactions = self.filter(exported_at__isnull=True).exclude(product__account_number__isnull=True)
# Force queryset evaluation so we can call update on the queryset
amounts = list(transactions.values('product__account_number', 'order_name', 'is_credit').order_by('created_at').annotate(amount=Sum('amount')))
transactions.update(exported_at=now())
return amounts
class Transaction(models.Model):
# Product for which this transaction occurred
product = models.ForeignKey(Product)
# Amount (price * quantity) for this transaction
amount = models.DecimalField(decimal_places=2, max_digits=6)
# Specify if transaction type is credit or debit
is_credit = models.BooleanField(default=True)
# Shopify order ID for the transaction
order_id = models.IntegerField(null=True, blank=True)
# Shopify order name for the transaction
order_name = models.CharField(max_length=16, blank=True)
# Shopify item ID for the transaction
item_id = models.IntegerField(null=True, blank=True)
# When the transaction occurred
created_at = models.DateTimeField()
# When the transaction was exported
exported_at = models.DateTimeField(editable=False, null=True)
objects = TransactionManager()
| bsd-3-clause | Python |
b57499cb00012ffb364ca0cb3095ec572ce85d4d | Use HTML5 output for Markdown. | xouillet/sigal,t-animal/sigal,jdn06/sigal,muggenhor/sigal,jasuarez/sigal,saimn/sigal,Ferada/sigal,kontza/sigal,saimn/sigal,jasuarez/sigal,Ferada/sigal,cbosdo/sigal,muggenhor/sigal,elaOnMars/sigal,Ferada/sigal,kontza/sigal,t-animal/sigal,franek/sigal,jasuarez/sigal,t-animal/sigal,elaOnMars/sigal,saimn/sigal,xouillet/sigal,xouillet/sigal,franek/sigal,jdn06/sigal,cbosdo/sigal,jdn06/sigal,kontza/sigal,cbosdo/sigal | sigal/utils.py | sigal/utils.py | # -*- coding: utf-8 -*-
# Copyright (c) 2011-2014 - Simon Conseil
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import codecs
import os
import shutil
from markdown import Markdown
from subprocess import Popen, PIPE
from . import compat
def copy(src, dst, symlink=False):
"""Copy or symlink the file."""
func = os.symlink if symlink else shutil.copy2
if symlink and os.path.lexists(dst):
os.remove(dst)
func(src, dst)
def check_or_create_dir(path):
"Create the directory if it does not exist"
if not os.path.isdir(path):
os.makedirs(path)
def url_from_path(path):
"""Transform path to url, converting backslashes to slashes if needed."""
if os.sep == '/':
return path
else:
return '/'.join(path.split(os.sep))
def read_markdown(filename):
# Use utf-8-sig codec to remove BOM if it is present
with codecs.open(filename, 'r', 'utf-8-sig') as f:
text = f.read()
md = Markdown(extensions=['meta'], output_format='html5')
html = md.convert(text)
return {
'title': md.Meta.get('title', [''])[0],
'description': html,
'meta': md.Meta.copy()
}
def call_subprocess(cmd):
"""Wrapper to call ``subprocess.Popen`` and return stdout & stderr."""
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not compat.PY2:
stderr = stderr.decode('utf8')
stdout = stdout.decode('utf8')
return p.returncode, stdout, stderr
| # -*- coding: utf-8 -*-
# Copyright (c) 2011-2014 - Simon Conseil
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import codecs
import os
import shutil
from markdown import Markdown
from subprocess import Popen, PIPE
from . import compat
def copy(src, dst, symlink=False):
"""Copy or symlink the file."""
func = os.symlink if symlink else shutil.copy2
if symlink and os.path.lexists(dst):
os.remove(dst)
func(src, dst)
def check_or_create_dir(path):
"Create the directory if it does not exist"
if not os.path.isdir(path):
os.makedirs(path)
def url_from_path(path):
"""Transform path to url, converting backslashes to slashes if needed."""
if os.sep == '/':
return path
else:
return '/'.join(path.split(os.sep))
def read_markdown(filename):
# Use utf-8-sig codec to remove BOM if it is present
with codecs.open(filename, 'r', 'utf-8-sig') as f:
text = f.read()
md = Markdown(extensions=['meta'])
html = md.convert(text)
return {
'title': md.Meta.get('title', [''])[0],
'description': html,
'meta': md.Meta.copy()
}
def call_subprocess(cmd):
"""Wrapper to call ``subprocess.Popen`` and return stdout & stderr."""
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
if not compat.PY2:
stderr = stderr.decode('utf8')
stdout = stdout.decode('utf8')
return p.returncode, stdout, stderr
| mit | Python |
8eaef068757b23361c4c71b5057ec13aa7e916d5 | make generate_md5_signature work as standalone | byteweaver/django-skrill | skrill/tests/factories.py | skrill/tests/factories.py | from decimal import Decimal
import hashlib
import random
from django.contrib.auth.models import User
import factory
from skrill.settings import get_secret_word_as_md5
from skrill.models import PaymentRequest, StatusReport
from skrill.settings import *
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
username = factory.Sequence(lambda n: "Test User %s" % n)
class PaymentRequestFactory(factory.DjangoModelFactory):
FACTORY_FOR = PaymentRequest
user = UserFactory()
amount = Decimal(random.randrange(10000))/100
currency = random.choice(ISO4217)[0]
class StatusReportFactory(factory.DjangoModelFactory):
FACTORY_FOR = StatusReport
pay_to_email = PAY_TO_EMAIL
pay_from_email = "someone@example.com"
merchant_id = 12345
payment_request = PaymentRequestFactory()
mb_transaction_id = 12345
mb_amount = payment_request.amount
mb_currency = payment_request.currency
status = 0
md5sig = ''
amount = payment_request.amount
currency = payment_request.currency
def generate_md5_signature(status_report):
m = hashlib.md5()
m.update(str(status_report.merchant_id))
m.update(str(status_report.transaction_id))
m.update(get_secret_word_as_md5())
m.update(str(status_report.mb_amount))
m.update(status_report.mb_currency)
m.update(str(status_report.status))
return m.hexdigest().upper()
| from decimal import Decimal
import hashlib
import random
from django.contrib.auth.models import User
import factory
from skrill.settings import get_secret_word_as_md5
from skrill.models import PaymentRequest, StatusReport
from skrill.settings import *
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
username = factory.Sequence(lambda n: "Test User %s" % n)
class PaymentRequestFactory(factory.DjangoModelFactory):
FACTORY_FOR = PaymentRequest
user = UserFactory()
amount = Decimal(random.randrange(10000))/100
currency = random.choice(ISO4217)[0]
class StatusReportFactory(factory.DjangoModelFactory):
FACTORY_FOR = StatusReport
pay_to_email = PAY_TO_EMAIL
pay_from_email = "someone@example.com"
merchant_id = 12345
payment_request = PaymentRequestFactory()
mb_transaction_id = 12345
mb_amount = payment_request.amount
mb_currency = payment_request.currency
status = 0
md5sig = ''
amount = payment_request.amount
currency = payment_request.currency
def _generate_md5_signature(self):
m = hashlib.md5()
m.update(str(self.merchant_id))
m.update(str(self.transaction_id))
m.update(get_secret_word_as_md5())
m.update(str(self.mb_amount))
m.update(self.mb_currency)
m.update(str(self.status))
self.md5sig = m.hexdigest().upper()
| bsd-3-clause | Python |
7406975f2a0484c76bb2998c7f6ae064434ad4a3 | Build function working! | timcolonel/wow,timcolonel/wow | wow/__main__.py | wow/__main__.py | """
Wow
Usage:
wow
wow install <application>...
wow uninstall <application>...
wow unpack <file>...
wow build [<config_file>]
wow push
wow compile
wow (-h | --help)
wow --version
Action
Options:
-h --help Show this screen.
--version Show version.
"""
from lib.wow import Wow
from docopt import docopt
from lib.exception import WowException
import logging
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
if __name__ == '__main__':
arguments = docopt(__doc__, version='1.0.0')
engine = Wow()
try:
engine.run(arguments)
except WowException as e:
logging.error(str(e))
| """
Wow
Usage:
wow
wow install <application>...
wow uninstall <application>...
wow build
wow push
wow compile
wow (-h | --help)
wow --version
Action
Options:
-h --help Show this screen.
--version Show version.
"""
from lib.wow import Wow
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='1.0.0')
engine = Wow()
engine.run(arguments)
| mit | Python |
bb2926591dcab344c330c931286ab65d31336736 | fix sklearn tests with the correct model_dir (#98) | kubeflow/kfserving-lts,kubeflow/kfserving-lts,kubeflow/kfserving-lts,kubeflow/kfserving-lts,kubeflow/kfserving-lts,kubeflow/kfserving-lts | python/sklearnserver/sklearnserver/test_model.py | python/sklearnserver/sklearnserver/test_model.py | from sklearn import svm
from sklearn import datasets
from sklearnserver import SKLearnModel
import joblib
import os
model_dir = "../../docs/samples/sklearn"
JOBLIB_FILE = "model.joblib"
def test_model():
iris = datasets.load_iris()
X, y = iris.data, iris.target
sklearn_model = svm.SVC(gamma='scale')
sklearn_model.fit(X, y)
model_file = os.path.join((model_dir),JOBLIB_FILE)
joblib.dump(value=sklearn_model, filename=model_file)
server = SKLearnModel("sklearnmodel", model_dir)
server.load()
request = X[0:1].tolist()
response = server.predict(request)
assert response == [0]
| from sklearn import svm
from sklearn import datasets
from sklearnserver import SKLearnModel
import joblib
import os
model_dir = "/path/to/kfserving/docs/samples/sklearn"
JOBLIB_FILE = "model.joblib"
def test_model():
iris = datasets.load_iris()
X, y = iris.data, iris.target
sklearn_model = svm.SVC(gamma='scale')
sklearn_model.fit(X, y)
model_file = os.path.join((model_dir),JOBLIB_FILE)
joblib.dump(value=sklearn_model, filename=model_file)
server = SKLearnModel("sklearnmodel", model_dir)
server.load()
request = X[0:1].tolist()
response = server.predict(request)
assert response == [0] | apache-2.0 | Python |
9c5bd69391ad1dad03b58371ee24fcea00b58220 | use assemble_output in in-process kernel test | ipython/ipython,ipython/ipython | IPython/kernel/inprocess/tests/test_kernel.py | IPython/kernel/inprocess/tests/test_kernel.py | # Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import sys
import unittest
from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
from IPython.kernel.inprocess.manager import InProcessKernelManager
from IPython.kernel.inprocess.ipkernel import InProcessKernel
from IPython.kernel.tests.utils import assemble_output
from IPython.testing.decorators import skipif_not_matplotlib
from IPython.utils.io import capture_output
from IPython.utils import py3compat
if py3compat.PY3:
from io import StringIO
else:
from StringIO import StringIO
class InProcessKernelTestCase(unittest.TestCase):
def setUp(self):
self.km = InProcessKernelManager()
self.km.start_kernel()
self.kc = BlockingInProcessKernelClient(kernel=self.km.kernel)
self.kc.start_channels()
self.kc.wait_for_ready()
@skipif_not_matplotlib
def test_pylab(self):
"""Does %pylab work in the in-process kernel?"""
kc = self.kc
kc.execute('%pylab')
out, err = assemble_output(kc.iopub_channel)
self.assertIn('matplotlib', out)
def test_raw_input(self):
""" Does the in-process kernel handle raw_input correctly?
"""
io = StringIO('foobar\n')
sys_stdin = sys.stdin
sys.stdin = io
try:
if py3compat.PY3:
self.kc.execute('x = input()')
else:
self.kc.execute('x = raw_input()')
finally:
sys.stdin = sys_stdin
self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')
def test_stdout(self):
""" Does the in-process kernel correctly capture IO?
"""
kernel = InProcessKernel()
with capture_output() as io:
kernel.shell.run_cell('print("foo")')
self.assertEqual(io.stdout, 'foo\n')
kc = BlockingInProcessKernelClient(kernel=kernel)
kernel.frontends.append(kc)
kc.execute('print("bar")')
out, err = assemble_output(kc.iopub_channel)
self.assertEqual(out, 'bar\n')
| # Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import sys
import unittest
from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
from IPython.kernel.inprocess.manager import InProcessKernelManager
from IPython.kernel.inprocess.ipkernel import InProcessKernel
from IPython.testing.decorators import skipif_not_matplotlib
from IPython.utils.io import capture_output
from IPython.utils import py3compat
if py3compat.PY3:
from io import StringIO
else:
from StringIO import StringIO
class InProcessKernelTestCase(unittest.TestCase):
def setUp(self):
self.km = InProcessKernelManager()
self.km.start_kernel()
self.kc = BlockingInProcessKernelClient(kernel=self.km.kernel)
self.kc.start_channels()
self.kc.wait_for_ready()
@skipif_not_matplotlib
def test_pylab(self):
"""Does %pylab work in the in-process kernel?"""
kc = self.kc
kc.execute('%pylab')
msg = get_stream_message(kc)
self.assertIn('matplotlib', msg['content']['text'])
def test_raw_input(self):
""" Does the in-process kernel handle raw_input correctly?
"""
io = StringIO('foobar\n')
sys_stdin = sys.stdin
sys.stdin = io
try:
if py3compat.PY3:
self.kc.execute('x = input()')
else:
self.kc.execute('x = raw_input()')
finally:
sys.stdin = sys_stdin
self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')
def test_stdout(self):
""" Does the in-process kernel correctly capture IO?
"""
kernel = InProcessKernel()
with capture_output() as io:
kernel.shell.run_cell('print("foo")')
self.assertEqual(io.stdout, 'foo\n')
kc = BlockingInProcessKernelClient(kernel=kernel)
kernel.frontends.append(kc)
kc.execute('print("bar")')
msg = get_stream_message(kc)
self.assertEqual(msg['content']['text'], 'bar\n')
#-----------------------------------------------------------------------------
# Utility functions
#-----------------------------------------------------------------------------
def get_stream_message(kernel_client, timeout=5):
""" Gets a single stream message synchronously from the sub channel.
"""
while True:
msg = kernel_client.get_iopub_msg(timeout=timeout)
if msg['header']['msg_type'] == 'stream':
return msg
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
0eeaf0f6b49dcc39139e596a2237a31646b0b5ee | Add missing import, remove star import | spz-signup/spz-signup | src/spz/test/test_views.py | src/spz/test/test_views.py | # -*- coding: utf-8 -*-
"""Tests the application views.
"""
from . import login, logout, get_text
from test.fixtures import client, user, superuser
from spz import app
from spz.models import Course, Origin, Degree, Graduation
def test_startpage(client):
response = client.get('/')
response_text = get_text(response)
assert 'Anmeldung' in response_text
assert 'Kurswahl' in response_text
assert 'Persönliche Angaben' in response_text
assert 'Absenden' in response_text
def test_login(client, user, superuser):
response = login(client, user)
response_text = get_text(response)
logout(client)
assert 'Angemeldet als {} ()'.format(user[0]) in response_text
response = login(client, superuser)
response_text = get_text(response)
logout(client)
assert 'Angemeldet als {} (SUPERUSER)'.format(superuser[0]) in response_text
response = login(client, (user[0], 'definately-wrong-password'))
response_text = get_text(response)
logout(client)
assert 'Du kommst hier net rein!' in response_text
response = login(client, ('definately-wrong-username', user[1]))
response_text = get_text(response)
logout(client)
assert 'Du kommst hier net rein!' in response_text
def test_signup(client, superuser):
with app.app_context(): # lazy load (as used in course.full_name()) will fail otherwise
course = Course.query.first()
origin = Origin.query.first()
degree = Degree.query.first()
graduation = Graduation.query.first()
course_name = course.full_name()
name = ('Mika', 'Müller')
tag = '123456'
phone = '01521 1234567'
mail = 'mika.mueller@beispiel.de'
semester = 1
data = dict(
course=course.id,
first_name=name[0],
last_name=name[1],
phone=phone,
mail=mail,
confirm_mail=mail,
origin=origin.id)
if origin.validate_registration:
data = dict(
data,
tag=tag,
degree=degree.id,
semester=semester,
graduation=graduation.id)
login(client, superuser) # login to override time-delta restrictions
response = client.post('/', data=data)
response_text = get_text(response)
logout(client)
assert '{} {} – Sie haben sich für den Kurs {} beworben.'.format(name[0], name[1], course_name) in response_text
| # -*- coding: utf-8 -*-
"""Tests the application views.
"""
from test.fixtures import * # noqa
from . import login, logout, get_text
from spz.models import Course, Origin, Degree, Graduation
def test_startpage(client):
response = client.get('/')
response_text = get_text(response)
assert 'Anmeldung' in response_text
assert 'Kurswahl' in response_text
assert 'Persönliche Angaben' in response_text
assert 'Absenden' in response_text
def test_login(client, user, superuser):
response = login(client, user)
response_text = get_text(response)
logout(client)
assert 'Angemeldet als {} ()'.format(user[0]) in response_text
response = login(client, superuser)
response_text = get_text(response)
logout(client)
assert 'Angemeldet als {} (SUPERUSER)'.format(superuser[0]) in response_text
response = login(client, (user[0], 'definately-wrong-password'))
response_text = get_text(response)
logout(client)
assert 'Du kommst hier net rein!' in response_text
response = login(client, ('definately-wrong-username', user[1]))
response_text = get_text(response)
logout(client)
assert 'Du kommst hier net rein!' in response_text
def test_signup(client, superuser):
with app.app_context(): # lazy load (as used in course.full_name()) will fail otherwise
course = Course.query.first()
origin = Origin.query.first()
degree = Degree.query.first()
graduation = Graduation.query.first()
course_name = course.full_name()
name = ('Mika', 'Müller')
tag = '123456'
phone = '01521 1234567'
mail = 'mika.mueller@beispiel.de'
semester = 1
data = dict(
course=course.id,
first_name=name[0],
last_name=name[1],
phone=phone,
mail=mail,
confirm_mail=mail,
origin=origin.id)
if origin.validate_registration:
data = dict(
data,
tag=tag,
degree=degree.id,
semester=semester,
graduation=graduation.id)
login(client, superuser) # login to override time-delta restrictions
response = client.post('/', data=data)
response_text = get_text(response)
logout(client)
assert '{} {} – Sie haben sich für den Kurs {} beworben.'.format(name[0], name[1], course_name) in response_text
| mit | Python |
75138535e2975abfdf7ce39f9063a69ca6db51ad | change ellipsis to pass | PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild,PolyJIT/benchbuild | benchbuild/environments/service_layer/ensure.py | benchbuild/environments/service_layer/ensure.py | import sys
from . import unit_of_work
if sys.version_info <= (3, 8):
from typing_extensions import Protocol
else:
from typing import Protocol
class ImageNotFound(Exception):
pass
class NamedCommand(Protocol):
@property
def name(self) -> str:
...
def image_exists(
cmd: NamedCommand, uow: unit_of_work.AbstractUnitOfWork
) -> None:
image = uow.registry.get_image(cmd.name)
if not image:
raise ImageNotFound(cmd)
| import sys
from . import unit_of_work
if sys.version_info <= (3, 8):
from typing_extensions import Protocol
else:
from typing import Protocol
class ImageNotFound(Exception):
...
class NamedCommand(Protocol):
@property
def name(self) -> str:
...
def image_exists(
cmd: NamedCommand, uow: unit_of_work.AbstractUnitOfWork
) -> None:
image = uow.registry.get_image(cmd.name)
if not image:
raise ImageNotFound(cmd)
| mit | Python |
c3605b7d7c5076478764c536992d3dbcf26ac07d | Add @api.one warning | topecz/Odoo_Samples,Yenthe666/Odoo_Samples,topecz/Odoo_Samples,Yenthe666/Odoo_Samples,topecz/Odoo_Samples,Yenthe666/Odoo_Samples | button_action_demo/models/button_action_demo.py | button_action_demo/models/button_action_demo.py | # -*- coding: utf-8 -*-
from openerp import models, fields, api
#Non-odoo library
import random
from random import randint
import string
class button_action_demo(models.Model):
_name = 'button.demo'
name = fields.Char(required=True,default='Click on generate name!')
password = fields.Char()
# WARNING! @api.one is deprecated in Odoo 9, use @api.multi with ensure_one instead!
@api.one
def generate_record_name(self):
#Generates a random name between 9 and 15 characters long and writes it to the record.
self.write({'name': ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(randint(9,15)))})
# WARNING! @api.one is deprecated in Odoo 9, use @api.multi with ensure_one instead!
@api.one
def generate_record_password(self):
#Generates a random password between 12 and 15 characters long and writes it to the record.
self.write({'password': ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(randint(12,15)))})
# WARNING! @api.one is deprecated in Odoo 9, use @api.multi with ensure_one instead!
@api.one
def clear_record_data(self):
self.write({
'name': '',
'password': ''
})
| # -*- coding: utf-8 -*-
from openerp import models, fields, api
#Non-odoo library
import random
from random import randint
import string
class button_action_demo(models.Model):
_name = 'button.demo'
name = fields.Char(required=True,default='Click on generate name!')
password = fields.Char()
@api.one
def generate_record_name(self):
#Generates a random name between 9 and 15 characters long and writes it to the record.
self.write({'name': ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(randint(9,15)))})
@api.one
def generate_record_password(self):
#Generates a random password between 12 and 15 characters long and writes it to the record.
self.write({'password': ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(randint(12,15)))})
@api.one
def clear_record_data(self):
self.write({
'name': '',
'password': ''
})
| agpl-3.0 | Python |
a68f10eeff40591487ede5ac27c9dca488b1e494 | Implement length parameter | AmosGarner/PyInventory | main.py | main.py | from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--type', dest='collectionType', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
parser.add_argument('--length', dest='length', required=True)
return parser.parse_args()
def generateCollection(collectionType, collectionName, username, length):
items = []
now = datetime.datetime.now()
if collectionType.lower() == 'item':
for i in range(0,length):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
items.append(item)
return Collection(collectionName, username, items)
def main():
arguments = generateArgumentsFromParser()
createCollection(arguments.username, arguments.collectionName)
itemCollection = generateCollection('item', arguments.collectionName, arguments.username, int(arguments.length))
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+arguments.collectionName+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
| from createCollection import createCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
import datetime, json, os.path, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--user', dest='username', required=True)
parser.add_argument('--type', dest='collectionType', required=True)
parser.add_argument('--name', dest='collectionName', required=True)
return parser.parse_args()
def generateCollection(collectionType, collectionName, username, length):
items = []
now = datetime.datetime.now()
if collectionType.lower() == 'item':
for i in range(0,length):
item = ItemFactory.factory('item', [i, 'item' + str(i), now, now])
items.append(item)
return Collection(collectionName, username, items)
def main():
arguments = generateArgumentsFromParser()
createCollection(arguments.username, arguments.collectionName)
itemCollection = generateCollection('item', arguments.collectionName, arguments.username, 10)
collectionsFilePath = CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+CONST_COLLECTIONS_NAME+'/'+arguments.username+'_'+arguments.collectionName+'_'+'collection.dat'
if os.path.isfile(collectionsFilePath):
collectionFile = open(collectionsFilePath, 'w')
collectionFile.write(itemCollection.toJSON())
collectionFile.close()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
91294ee9601a0aae89a2d45123138e1db69ad289 | remove the magic xpos numbers from shooter setup, and simplify | edunham/engr421 | main.py | main.py | #! /usr/bin/env python
import cv2
import sys
from shooters import Shooter
from arduino import Arduino, FakeArduino
from camera import Camera
def choose_center(centers):
if centers == []:
return centers
nearesty = sorted(centers, key = lambda pair: pair[0])
# game tactics logic goes here
return nearesty[0]
def tactical_shoot(shooters, centers):
# BB-conservation logic goes here
target = choose_center(centers)
if target:
for s in shooters:
if s.can_hit(target):
s.shoot(target)
else:
s.aim(target)
def setup_shooters(board, offset_in = 3, field = [22.3125, 45], dpi = 17):
offset = offset_in * dpi
wpx = field[0] * dpi
sixth = int(wpx / 6)
# left shooter centered in first third of board
left_shooter = Shooter(offset, sixth, dpi, board, "left")
# center shooter centered
center_shooter = Shooter(offset, int(wpx / 2), dpi, board, "center")
# right shooter centered in rightmost third of board
right_shooter = Shooter(offset, (wpx - sixth), dpi, board, "right")
shooterlist = [left_shooter, center_shooter, right_shooter]
return shooterlist
def main(args):
if "fake" in args:
board = FakeArduino()
else:
board = Arduino()
cam = Camera()
cam.calibrate()
#cam.adj_thresh(2, 50)
shooterlist = setup_shooters(board, cam.board_size, cam.dpi)
while True:
targets = cam.get_targets()
tactical_shoot(shooterlist, targets)
aims = [s.get_aim_line() for s in shooterlist]
cam.display(aims)
if (cv2.waitKey(2) >= 0):
break
cam.cleanup()
if __name__ == "__main__":
main(sys.argv)
| #! /usr/bin/env python
import cv2
import sys
from shooters import Shooter
from arduino import Arduino, FakeArduino
from camera import Camera
def choose_center(centers):
if centers == []:
return centers
nearesty = sorted(centers, key = lambda pair: pair[0])
# game tactics logic goes here
return nearesty[0]
def tactical_shoot(shooters, centers):
# BB-conservation logic goes here
target = choose_center(centers)
if target:
for s in shooters:
if s.can_hit(target):
s.shoot(target)
else:
s.aim(target)
def main(args):
if "fake" in args:
board = FakeArduino()
else:
board = Arduino()
left_shooter = Shooter(3, 8, 17, board, "left")
center_shooter = Shooter(3, 12, 17, board, "center")
right_shooter = Shooter(3, 16, 17, board, "right")
cam = Camera()
shooterlist = [left_shooter, center_shooter, right_shooter]
cam.calibrate()
#cam.adj_thresh(2, 50)
while True:
targets = cam.get_targets()
tactical_shoot(shooterlist, targets)
aims = [s.get_aim_line() for s in shooterlist]
cam.display(aims)
if (cv2.waitKey(2) >= 0):
break
cam.cleanup()
if __name__ == "__main__":
main(sys.argv)
| mit | Python |
2e39d0c864680793b42234d2c686cae44ed7727c | Add some missing imports | datamade/yournextmp-popit,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative | elections/mixins.py | elections/mixins.py | from django.conf import settings
from django.http import Http404
from django.utils.translation import ugettext as _
class ElectionMixin(object):
'''A mixin to add election data from the URL to the context'''
def dispatch(self, request, *args, **kwargs):
self.election = election = self.kwargs['election']
if election not in settings.ELECTIONS:
raise Http404(_("Unknown election: '{election}'").format(election=election))
self.election_data = settings.ELECTIONS[election]
return super(ElectionMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ElectionMixin, self).get_context_data(**kwargs)
context['election'] = self.election
context['election_data'] = self.election_data
return context
| from django.conf import settings
class ElectionMixin(object):
'''A mixin to add election data from the URL to the context'''
def dispatch(self, request, *args, **kwargs):
self.election = election = self.kwargs['election']
if election not in settings.ELECTIONS:
raise Http404(_("Unknown election: '{election}'").format(election=election))
self.election_data = settings.ELECTIONS[election]
return super(ElectionMixin, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(ElectionMixin, self).get_context_data(**kwargs)
context['election'] = self.election
context['election_data'] = self.election_data
return context
| agpl-3.0 | Python |
8124c8d9582e743cb52707a5e097198794f00cd0 | fix license notice | cs-shadow/phabricator-tools,aevri/phabricator-tools,kjedruczyk/phabricator-tools,bloomberg/phabricator-tools,cs-shadow/phabricator-tools,aevri/phabricator-tools,kjedruczyk/phabricator-tools,kjedruczyk/phabricator-tools,aevri/phabricator-tools,cs-shadow/phabricator-tools,kjedruczyk/phabricator-tools,cs-shadow/phabricator-tools,bloomberg/phabricator-tools,cs-shadow/phabricator-tools,bloomberg/phabricator-tools,bloomberg/phabricator-tools,kjedruczyk/phabricator-tools,aevri/phabricator-tools,aevri/phabricator-tools,bloomberg/phabricator-tools | testbed/threading/thread-subprocess-test.py | testbed/threading/thread-subprocess-test.py | """Test that the 'subprocess' releases the GIL, allowing threading.
If the GIL was not released for the duration of the calls to 'sleep' then we'd
expect the running time to be over 9 seconds. In practice it's closer to the
ideal of 3 in the single-processor Lubuntu 13.04 VM tested on.
With this result we can see that there's a benefit to using threading in
conjuction with subprocess, if we're spending a lot of time in subprocess.
"""
import datetime
import subprocess
import sys
import threading
def sleep_work(lock):
with lock:
print("{}: starting sleep".format(threading.current_thread().name))
subprocess.check_call(["sleep", "3"])
with lock:
print("{}: finished sleep".format(threading.current_thread().name))
def main():
start = datetime.datetime.now()
lock = threading.Lock()
threads = []
for i in xrange(0, 3):
t = threading.Thread(
args=[lock],
target=sleep_work,
name=str(i))
threads.append(t)
t.start()
for t in threads:
t.join()
end = datetime.datetime.now()
duration = end - start
print("took {} secs".format(duration))
if __name__ == "__main__":
sys.exit(main())
# -----------------------------------------------------------------------------
# Copyright (C) 2014 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
| """Test that the 'subprocess' releases the GIL, allowing threading.
If the GIL was not released for the duration of the calls to 'sleep' then we'd
expect the running time to be over 9 seconds. In practice it's closer to the
ideal of 3 in the single-processor Lubuntu 13.04 VM tested on.
With this result we can see that there's a benefit to using threading in
conjuction with subprocess, if we're spending a lot of time in subprocess.
"""
import datetime
import subprocess
import sys
import threading
def sleep_work(lock):
with lock:
print("{}: starting sleep".format(threading.current_thread().name))
subprocess.check_call(["sleep", "3"])
with lock:
print("{}: finished sleep".format(threading.current_thread().name))
def main():
start = datetime.datetime.now()
lock = threading.Lock()
threads = []
for i in xrange(0, 3):
t = threading.Thread(
args=[lock],
target=sleep_work,
name=str(i))
threads.append(t)
t.start()
for t in threads:
t.join()
end = datetime.datetime.now()
duration = end - start
print("took {} secs".format(duration))
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | Python |
fa14c040e6483087f5b2c78bc1a7aeee9ad2274a | Add time formatter for competitions | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org | Instanssi/kompomaatti/misc/time_formatting.py | Instanssi/kompomaatti/misc/time_formatting.py | # -*- coding: utf-8 -*-
import awesometime
def compo_times_formatter(compo):
compo.compo_time = awesometime.format_single(compo.compo_start)
compo.adding_time = awesometime.format_single(compo.adding_end)
compo.editing_time = awesometime.format_single(compo.editing_end)
compo.voting_time = awesometime.format_between(compo.voting_start, compo.voting_end)
return compo
def competition_times_formatter(competition):
competition.start_time = awesometime.format_single(competition.start)
competition.participation_end_time = awesometime.format_single(competition.participation_end)
return competition
| # -*- coding: utf-8 -*-
import awesometime
def compo_times_formatter(compo):
compo.compo_time = awesometime.format_single(compo.compo_start)
compo.adding_time = awesometime.format_single(compo.adding_end)
compo.editing_time = awesometime.format_single(compo.editing_end)
compo.voting_time = awesometime.format_between(compo.voting_start, compo.voting_end)
return compo
| mit | Python |
607c48b426c934dd56724e1966497d0742eb0a05 | Update BinomialProbability_Calculator.py | StevenPeutz/myDataProjects | PYTHON/BinomialProbability_Calculator.py | PYTHON/BinomialProbability_Calculator.py |
#This Python script calculates the probability for n choose k in a binomial probability distribution.
#Functions are described in the docstrings.
#!/usr/bin/env python3
print('-------------------')
print("This will calculate the probability for n choose k in a binomial probability distribution.")
p = float(input('Choose the probability for success (1 success for 1 trial, on a 0 to 1 scale): '))
k = float(input('Choose the number of successes: '))
n = int(input('Choose the number of trials: '))
print("---")
print("This creates '{} choose {}' with p of {}.".format(n,k,p))
print("I will now return the combined probability using the 'n choose k' binomial coefficient formula (n!/((n-k)!*k!)) together with the base probability (p**k * (1-p)**(n-k)):")
def __factorial__(n):
"""
The factorial function takes input n and return the factorial.
Takes n (int) as input and returns the factorial of n. This function is built in using a for loop to circumvent requiring the math library
Examples:
__factorial__(3) returns 6, because 3*2*1 is 6
__factorial__(5) returns 120, because 5*4*3*2*1 is 120
"""
fact = 1
for i in range(1, int(n)+1):
fact *= i
return fact
fact = __factorial__(n)
def __calculate_prob__(p,k,n):
prob_total = p**k * (1-p)**(n-k)
return prob_total
print("base probablity: ", __calculate_prob__(p,k,n))
prob_total = __calculate_prob__(p,k,n)
# using math library for factorial instead:
#def __calculate_binom_coeff__(n,k):
# binom_coeff = math.factorial(n) / ( math.factorial((n-k)) * math.factorial(k) )
# return binom_coeff
#print("binomial coefficent: ", __calculate_binom_coeff__(n,k))
#binom_coeff = __calculate_binom_coeff__(n,k)
def __calculate_binom_coeff__(n,k):
binom_coeff = fact / (__factorial__((n-k)) * __factorial__(k) )
return binom_coeff
print("binomial coefficent: ", __calculate_binom_coeff__(n,k))
binom_coeff = __calculate_binom_coeff__(n,k)
def calculate_answer(prob_total, binom_coeff):
"""
The calculate_answer() function takes the two inputs prob_total and binom_coeff
1) The prob_total input takes the returned value from the __calculate_prob__(p,k,n)function. This is the
probability calculated as p**k * (1-p)**(n-k)
2) The binom_coeff input takes the returned value from the __binom_coeff__(n,k)function. This uses a combinatrics formula to
calculate the binomial coefficient (n Choose k). The formula is n!/(k!(n-k)!).
Multiplying these two inputs together results in the binomial probability.
"""
answer = prob_total * binom_coeff
return answer
answer = calculate_answer(prob_total, binom_coeff)
answer2 = calculate_answer(prob_total, binom_coeff) * 100
print("The probability is: {:.2f} (or {:.0f}%)".format(answer, answer2))
print("-----------")
|
#This Python script calculates the probability for n choose k in a binomial probability distribution.
#Functions are described in the docstrings.
print('-------------------')
print("This will calculate the probability for n choose k in a binomial probability distribution.")
p = float(input('Choose the probability for success (1 success for 1 trial, on a 0 to 1 scale): '))
k = float(input('Choose the number of successes: '))
n = int(input('Choose the number of trials: '))
print("---")
print("This creates '{} choose {}' with p of {}.".format(n,k,p))
print("I will now return the combined probability using the 'n choose k' binomial coefficient formula (n!/((n-k)!*k!)) together with the base probability (p**k * (1-p)**(n-k)):")
def __factorial__(n):
"""
The factorial function takes input n and return the factorial.
Takes n (int) as input and returns the factorial of n. This function is built in using a for loop to circumvent requiring the math library
Examples:
__factorial__(3) returns 6, because 3*2*1 is 6
__factorial__(5) returns 120, because 5*4*3*2*1 is 120
"""
fact = 1
for i in range(1, int(n)+1):
fact *= i
return fact
fact = __factorial__(n)
def __calculate_prob__(p,k,n):
prob_total = p**k * (1-p)**(n-k)
return prob_total
print("base probablity: ", __calculate_prob__(p,k,n))
prob_total = __calculate_prob__(p,k,n)
# using math library for factorial instead:
#def __calculate_binom_coeff__(n,k):
# binom_coeff = math.factorial(n) / ( math.factorial((n-k)) * math.factorial(k) )
# return binom_coeff
#print("binomial coefficent: ", __calculate_binom_coeff__(n,k))
#binom_coeff = __calculate_binom_coeff__(n,k)
def __calculate_binom_coeff__(n,k):
binom_coeff = fact / (__factorial__((n-k)) * __factorial__(k) )
return binom_coeff
print("binomial coefficent: ", __calculate_binom_coeff__(n,k))
binom_coeff = __calculate_binom_coeff__(n,k)
def calculate_answer(prob_total, binom_coeff):
"""
The calculate_answer() function takes the two inputs prob_total and binom_coeff
1) The prob_total input takes the returned value from the __calculate_prob__(p,k,n)function. This is the
probability calculated as p**k * (1-p)**(n-k)
2) The binom_coeff input takes the returned value from the __binom_coeff__(n,k)function. This uses a combinatrics formula to
calculate the binomial coefficient (n Choose k). The formula is n!/(k!(n-k)!).
Multiplying these two inputs together results in the binomial probability.
"""
answer = prob_total * binom_coeff
return answer
answer = calculate_answer(prob_total, binom_coeff)
answer2 = calculate_answer(prob_total, binom_coeff) * 100
print("The probability is: {:.2f} (or {:.0f}%)".format(answer, answer2))
print("-----------")
| cc0-1.0 | Python |
98f2b72768f005ca83af2d8cea02a542c60cbfcb | Bump version -> v0.0.3 | alphagov/estools | estools/__init__.py | estools/__init__.py | __version__ = '0.0.3'
| __version__ = '0.0.2'
| mit | Python |
5b0c092ab39087724d11960c76e9d2fb62888e1d | append str, not Path, to sys.path | Debian/debsources,Debian/debsources,Debian/debsources,Debian/debsources,Debian/debsources | etc/debsources.wsgi | etc/debsources.wsgi | # WSGI Python file to bridge apache2 / Flask application
import sys
from pathlib import Path
DEBSOURCES_LIB = Path(__file__).resolve().parent.parent / "lib"
sys.path.append(str(DEBSOURCES_LIB))
from debsources.app import app_wrapper
app_wrapper.go()
application = app_wrapper.app
| # WSGI Python file to bridge apache2 / Flask application
import sys
from pathlib import Path
DEBSOURCES_LIB = Path(__file__).resolve().parent.parent / "lib"
sys.path.append(DEBSOURCES_LIB)
from debsources.app import app_wrapper
app_wrapper.go()
application = app_wrapper.app
| agpl-3.0 | Python |
a171bbe0cd36f23d994870453a6e35d9cf56a6fe | Fix to Hyperprecossor Test | keras-team/autokeras,keras-team/autokeras,keras-team/autokeras | tests/autokeras/hyper_preprocessors_test.py | tests/autokeras/hyper_preprocessors_test.py | # Copyright 2020 The AutoKeras Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import tensorflow as tf
from autokeras import hyper_preprocessors
from autokeras import preprocessors
def test_serialize_and_deserialize_default_hpps():
preprocessor = preprocessors.AddOneDimension()
hyper_preprocessor = hyper_preprocessors.DefaultHyperPreprocessor(preprocessor)
hyper_preprocessor = hyper_preprocessors.deserialize(
hyper_preprocessors.serialize(hyper_preprocessor)
)
assert isinstance(hyper_preprocessor.preprocessor, preprocessors.AddOneDimension)
def test_serialize_and_deserialize_default_hpps_categorical():
x_train = np.array([["a", "ab", 2.1], ["b", "bc", 1.0], ["a", "bc", "nan"]])
preprocessor = preprocessors.CategoricalToNumericalPreprocessor(
column_names=["column_a", "column_b", "column_c"],
column_types={
"column_a": "categorical",
"column_b": "categorical",
"column_c": "numerical",
},
)
hyper_preprocessor = hyper_preprocessors.DefaultHyperPreprocessor(preprocessor)
dataset = tf.data.Dataset.from_tensor_slices(x_train).batch(32)
hyper_preprocessor.preprocessor.fit(
tf.data.Dataset.from_tensor_slices(x_train).batch(32)
)
hyper_preprocessor = hyper_preprocessors.deserialize(
hyper_preprocessors.serialize(hyper_preprocessor)
)
assert isinstance(
hyper_preprocessor.preprocessor,
preprocessors.CategoricalToNumericalPreprocessor,
)
result = hyper_preprocessor.preprocessor.transform(dataset)
assert result[0][0] == result[2][0]
assert result[0][0] != result[1][0]
assert result[0][1] != result[1][1]
assert result[0][1] != result[2][1]
assert result[2][2] == 0
assert result.dtype == tf.float32
| # Copyright 2020 The AutoKeras Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import tensorflow as tf
from autokeras import hyper_preprocessors
from autokeras import preprocessors
def test_serialize_and_deserialize_default_hpps():
preprocessor = preprocessors.AddOneDimension()
hyper_preprocessor = hyper_preprocessors.DefaultHyperPreprocessor(preprocessor)
hyper_preprocessor = hyper_preprocessors.deserialize(
hyper_preprocessors.serialize(hyper_preprocessor)
)
assert isinstance(hyper_preprocessor.preprocessor, preprocessors.AddOneDimension)
def test_serialize_and_deserialize_default_hpps_categorical():
x_train = np.array([["a", "ab", 2.1], ["b", "bc", 1.0], ["a", "bc", "nan"]])
preprocessor = preprocessors.CategoricalToNumericalPreprocessor(
column_names=["column_a", "column_b", "column_c"],
column_types={
"column_a": "categorical",
"column_b": "categorical",
"column_c": "numerical",
},
)
hyper_preprocessor = hyper_preprocessors.DefaultHyperPreprocessor(preprocessor)
dataset = tf.data.Dataset.from_tensor_slices(x_train).batch(32)
hyper_preprocessor.preprocessor.fit(
tf.data.Dataset.from_tensor_slices(x_train).batch(32)
)
hyper_preprocessor = hyper_preprocessors.deserialize(
hyper_preprocessors.serialize(hyper_preprocessor)
)
assert isinstance(
hyper_preprocessor.preprocessor, preprocessors.CategoricalToNumerical
)
result = hyper_preprocessor.preprocessor.transform(dataset)
assert result[0][0] == result[2][0]
assert result[0][0] != result[1][0]
assert result[0][1] != result[1][1]
assert result[0][1] != result[2][1]
assert result[2][2] == 0
assert result.dtype == tf.float32
| apache-2.0 | Python |
26e7dc786d58dad94e9555fb20e6992c29533f0e | add pre-twisted-import stub to spyne.Address for _address_from_twisted_address | arskom/spyne,arskom/spyne,arskom/spyne | spyne/_base.py | spyne/_base.py |
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logger = logging.getLogger('spyne')
from collections import namedtuple
# When spyne.server.twisted gets imported, this type gets a static method named
# `from_twisted_address`. Dark magic.
Address = namedtuple("Address", ["type", "host", "port"])
class _add_address_types():
Address.TCP4 = 'TCP4'
Address.TCP6 = 'TCP6'
Address.UDP4 = 'UDP4'
Address.UDP6 = 'UDP6'
def address_str(self):
return ":".join((self.type, self.host, str(self.port)))
Address.__str__ = address_str
# this gets overwritten once spyne.server.twisted is imported
@staticmethod
def _fta(*a, **kw):
from spyne.server.twisted._base import _address_from_twisted_address
return _address_from_twisted_address(*a, **kw)
Address.from_twisted_address = _fta
|
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import logging
logger = logging.getLogger('spyne')
from collections import namedtuple
# When spyne.server.twisted gets imported, this type gets a static method named
# `from_twisted_address`. Dark magic.
Address = namedtuple("Address", ["type", "host", "port"])
class _add_address_types():
Address.TCP4 = 'TCP4'
Address.TCP6 = 'TCP6'
Address.UDP4 = 'UDP4'
Address.UDP6 = 'UDP6'
def address_str(self):
return ":".join((self.type, self.host, str(self.port)))
Address.__str__ = address_str
| lgpl-2.1 | Python |
23fbbb1e164360287b775ab33da321a29136b2a4 | Drop `six` module from coverage. | ebsaral/django-rest-framework,ezheidtmann/django-rest-framework,vstoykov/django-rest-framework,cheif/django-rest-framework,potpath/django-rest-framework,alacritythief/django-rest-framework,nryoung/django-rest-framework,akalipetis/django-rest-framework,hnakamur/django-rest-framework,davesque/django-rest-framework,rhblind/django-rest-framework,ajaali/django-rest-framework,krinart/django-rest-framework,jness/django-rest-framework,vstoykov/django-rest-framework,zeldalink0515/django-rest-framework,wwj718/django-rest-framework,kezabelle/django-rest-framework,nhorelik/django-rest-framework,nhorelik/django-rest-framework,iheitlager/django-rest-framework,bluedazzle/django-rest-framework,aericson/django-rest-framework,wangpanjun/django-rest-framework,justanr/django-rest-framework,edx/django-rest-framework,lubomir/django-rest-framework,jerryhebert/django-rest-framework,leeahoward/django-rest-framework,sbellem/django-rest-framework,iheitlager/django-rest-framework,dmwyatt/django-rest-framework,douwevandermeij/django-rest-framework,paolopaolopaolo/django-rest-framework,hnakamur/django-rest-framework,jtiai/django-rest-framework,delinhabit/django-rest-framework,mgaitan/django-rest-framework,damycra/django-rest-framework,ezheidtmann/django-rest-framework,adambain-vokal/django-rest-framework,MJafarMashhadi/django-rest-framework,hnarayanan/django-rest-framework,hnarayanan/django-rest-framework,jpulec/django-rest-framework,rhblind/django-rest-framework,VishvajitP/django-rest-framework,rubendura/django-rest-framework,leeahoward/django-rest-framework,lubomir/django-rest-framework,maryokhin/django-rest-framework,justanr/django-rest-framework,gregmuellegger/django-rest-framework,rafaelcaricio/django-rest-framework,mgaitan/django-rest-framework,ticosax/django-rest-framework,ticosax/django-rest-framework,aericson/django-rest-framework,rafaelang/django-rest-framework,krinart/django-rest-framework,buptlsl/django-res
t-framework,tomchristie/django-rest-framework,uruz/django-rest-framework,douwevandermeij/django-rest-framework,canassa/django-rest-framework,brandoncazander/django-rest-framework,thedrow/django-rest-framework-1,AlexandreProenca/django-rest-framework,cyberj/django-rest-framework,thedrow/django-rest-framework-1,callorico/django-rest-framework,atombrella/django-rest-framework,abdulhaq-e/django-rest-framework,cheif/django-rest-framework,d0ugal/django-rest-framework,rubendura/django-rest-framework,cyberj/django-rest-framework,pombredanne/django-rest-framework,qsorix/django-rest-framework,HireAnEsquire/django-rest-framework,kgeorgy/django-rest-framework,zeldalink0515/django-rest-framework,hnarayanan/django-rest-framework,andriy-s/django-rest-framework,VishvajitP/django-rest-framework,wzbozon/django-rest-framework,adambain-vokal/django-rest-framework,simudream/django-rest-framework,lubomir/django-rest-framework,arpheno/django-rest-framework,HireAnEsquire/django-rest-framework,tigeraniya/django-rest-framework,johnraz/django-rest-framework,brandoncazander/django-rest-framework,abdulhaq-e/django-rest-framework,ezheidtmann/django-rest-framework,sheppard/django-rest-framework,tcroiset/django-rest-framework,ashishfinoit/django-rest-framework,callorico/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,waytai/django-rest-framework,YBJAY00000/django-rest-framework,raphaelmerx/django-rest-framework,andriy-s/django-rest-framework,werthen/django-rest-framework,gregmuellegger/django-rest-framework,jpulec/django-rest-framework,bluedazzle/django-rest-framework,HireAnEsquire/django-rest-framework,simudream/django-rest-framework,paolopaolopaolo/django-rest-framework,nryoung/django-rest-framework,jpulec/django-rest-framework,pombredanne/django-rest-framework,akalipetis/django-rest-framework,leeahoward/django-rest-framework,gregmuellegger/django-rest-framework,buptlsl/django-rest-framework,aericson/django-rest-framework,andriy-s/django-rest-framework,sehm
aschine/django-rest-framework,elim/django-rest-framework,agconti/django-rest-framework,pombredanne/django-rest-framework,ashishfinoit/django-rest-framework,linovia/django-rest-framework,antonyc/django-rest-framework,sbellem/django-rest-framework,uruz/django-rest-framework,potpath/django-rest-framework,edx/django-rest-framework,tomchristie/django-rest-framework,sheppard/django-rest-framework,wzbozon/django-rest-framework,canassa/django-rest-framework,alacritythief/django-rest-framework,xiaotangyuan/django-rest-framework,atombrella/django-rest-framework,thedrow/django-rest-framework-1,AlexandreProenca/django-rest-framework,wedaly/django-rest-framework,fishky/django-rest-framework,jerryhebert/django-rest-framework,linovia/django-rest-framework,rafaelang/django-rest-framework,cheif/django-rest-framework,antonyc/django-rest-framework,kylefox/django-rest-framework,agconti/django-rest-framework,rafaelang/django-rest-framework,edx/django-rest-framework,ticosax/django-rest-framework,hunter007/django-rest-framework,maryokhin/django-rest-framework,kezabelle/django-rest-framework,delinhabit/django-rest-framework,wedaly/django-rest-framework,rafaelcaricio/django-rest-framework,dmwyatt/django-rest-framework,buptlsl/django-rest-framework,yiyocx/django-rest-framework,tomchristie/django-rest-framework,ambivalentno/django-rest-framework,paolopaolopaolo/django-rest-framework,fishky/django-rest-framework,callorico/django-rest-framework,wwj718/django-rest-framework,kgeorgy/django-rest-framework,jerryhebert/django-rest-framework,abdulhaq-e/django-rest-framework,potpath/django-rest-framework,sbellem/django-rest-framework,iheitlager/django-rest-framework,johnraz/django-rest-framework,uploadcare/django-rest-framework,elim/django-rest-framework,adambain-vokal/django-rest-framework,d0ugal/django-rest-framework,tcroiset/django-rest-framework,rafaelcaricio/django-rest-framework,tigeraniya/django-rest-framework,nhorelik/django-rest-framework,raphaelmerx/django-rest-framework,maryokhin/django-res
t-framework,yiyocx/django-rest-framework,James1345/django-rest-framework,tigeraniya/django-rest-framework,davesque/django-rest-framework,ajaali/django-rest-framework,agconti/django-rest-framework,jpadilla/django-rest-framework,douwevandermeij/django-rest-framework,dmwyatt/django-rest-framework,nryoung/django-rest-framework,jness/django-rest-framework,uploadcare/django-rest-framework,waytai/django-rest-framework,kylefox/django-rest-framework,antonyc/django-rest-framework,jtiai/django-rest-framework,damycra/django-rest-framework,wzbozon/django-rest-framework,kezabelle/django-rest-framework,ossanna16/django-rest-framework,krinart/django-rest-framework,mgaitan/django-rest-framework,jpadilla/django-rest-framework,d0ugal/django-rest-framework,kennydude/django-rest-framework,hnakamur/django-rest-framework,ambivalentno/django-rest-framework,sheppard/django-rest-framework,ossanna16/django-rest-framework,wangpanjun/django-rest-framework,rhblind/django-rest-framework,xiaotangyuan/django-rest-framework,hunter007/django-rest-framework,kylefox/django-rest-framework,sehmaschine/django-rest-framework,johnraz/django-rest-framework,James1345/django-rest-framework,justanr/django-rest-framework,arpheno/django-rest-framework,ajaali/django-rest-framework,alacritythief/django-rest-framework,wedaly/django-rest-framework,wangpanjun/django-rest-framework,xiaotangyuan/django-rest-framework,jpadilla/django-rest-framework,jness/django-rest-framework,wwj718/django-rest-framework,cyberj/django-rest-framework,raphaelmerx/django-rest-framework,delinhabit/django-rest-framework,ebsaral/django-rest-framework,linovia/django-rest-framework,kennydude/django-rest-framework,MJafarMashhadi/django-rest-framework,werthen/django-rest-framework,kgeorgy/django-rest-framework,damycra/django-rest-framework,vstoykov/django-rest-framework,simudream/django-rest-framework,brandoncazander/django-rest-framework,MJafarMashhadi/django-rest-framework,fishky/django-rest-framework,uruz/django-rest-framework,James1345/django-
rest-framework,VishvajitP/django-rest-framework,kennydude/django-rest-framework,ebsaral/django-rest-framework,jtiai/django-rest-framework,ambivalentno/django-rest-framework,YBJAY00000/django-rest-framework,sehmaschine/django-rest-framework,hunter007/django-rest-framework,zeldalink0515/django-rest-framework,YBJAY00000/django-rest-framework,ossanna16/django-rest-framework,ashishfinoit/django-rest-framework,canassa/django-rest-framework,bluedazzle/django-rest-framework,akalipetis/django-rest-framework,qsorix/django-rest-framework,arpheno/django-rest-framework,elim/django-rest-framework,waytai/django-rest-framework,werthen/django-rest-framework,AlexandreProenca/django-rest-framework,yiyocx/django-rest-framework,qsorix/django-rest-framework,davesque/django-rest-framework,atombrella/django-rest-framework,tcroiset/django-rest-framework | rest_framework/runtests/runcoverage.py | rest_framework/runtests/runcoverage.py | #!/usr/bin/env python
"""
Useful tool to run the test suite for rest_framework and generate a coverage report.
"""
# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/
# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/
# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py
import os
import sys
# fix sys path so we don't need to setup PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
os.environ['DJANGO_SETTINGS_MODULE'] = 'rest_framework.runtests.settings'
from coverage import coverage
def main():
"""Run the tests for rest_framework and generate a coverage report."""
cov = coverage()
cov.erase()
cov.start()
from django.conf import settings
from django.test.utils import get_runner
TestRunner = get_runner(settings)
if hasattr(TestRunner, 'func_name'):
# Pre 1.2 test runners were just functions,
# and did not support the 'failfast' option.
import warnings
warnings.warn(
'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.',
DeprecationWarning
)
failures = TestRunner(['tests'])
else:
test_runner = TestRunner()
failures = test_runner.run_tests(['tests'])
cov.stop()
# Discover the list of all modules that we should test coverage for
import rest_framework
project_dir = os.path.dirname(rest_framework.__file__)
cov_files = []
for (path, dirs, files) in os.walk(project_dir):
# Drop tests and runtests directories from the test coverage report
if os.path.basename(path) in ['tests', 'runtests', 'migrations']:
continue
# Drop the compat and six modules from coverage, since we're not interested in the coverage
# of modules which are specifically for resolving environment dependant imports.
# (Because we'll end up getting different coverage reports for it for each environment)
if 'compat.py' in files:
files.remove('compat.py')
if 'six.py' in files:
files.remove('six.py')
# Same applies to template tags module.
# This module has to include branching on Django versions,
# so it's never possible for it to have full coverage.
if 'rest_framework.py' in files:
files.remove('rest_framework.py')
cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')])
cov.report(cov_files)
if '--html' in sys.argv:
cov.html_report(cov_files, directory='coverage')
sys.exit(failures)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Useful tool to run the test suite for rest_framework and generate a coverage report.
"""
# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/
# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/
# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py
import os
import sys
# fix sys path so we don't need to setup PYTHONPATH
sys.path.append(os.path.join(os.path.dirname(__file__), "../.."))
os.environ['DJANGO_SETTINGS_MODULE'] = 'rest_framework.runtests.settings'
from coverage import coverage
def main():
"""Run the tests for rest_framework and generate a coverage report."""
cov = coverage()
cov.erase()
cov.start()
from django.conf import settings
from django.test.utils import get_runner
TestRunner = get_runner(settings)
if hasattr(TestRunner, 'func_name'):
# Pre 1.2 test runners were just functions,
# and did not support the 'failfast' option.
import warnings
warnings.warn(
'Function-based test runners are deprecated. Test runners should be classes with a run_tests() method.',
DeprecationWarning
)
failures = TestRunner(['tests'])
else:
test_runner = TestRunner()
failures = test_runner.run_tests(['tests'])
cov.stop()
# Discover the list of all modules that we should test coverage for
import rest_framework
project_dir = os.path.dirname(rest_framework.__file__)
cov_files = []
for (path, dirs, files) in os.walk(project_dir):
# Drop tests and runtests directories from the test coverage report
if os.path.basename(path) in ['tests', 'runtests', 'migrations']:
continue
# Drop the compat module from coverage, since we're not interested in the coverage
# of a module which is specifically for resolving environment dependant imports.
# (Because we'll end up getting different coverage reports for it for each environment)
if 'compat.py' in files:
files.remove('compat.py')
# Same applies to template tags module.
# This module has to include branching on Django versions,
# so it's never possible for it to have full coverage.
if 'rest_framework.py' in files:
files.remove('rest_framework.py')
cov_files.extend([os.path.join(path, file) for file in files if file.endswith('.py')])
cov.report(cov_files)
if '--html' in sys.argv:
cov.html_report(cov_files, directory='coverage')
sys.exit(failures)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
37b154952245a83c0645d0db180f16fe7dc1f29b | copy babel.messages.plural doctests as unit tests | felixonmars/babel,lepistone/babel,nickretallack/babel,skybon/babel,julen/babel,iamshubh22/babel,srisankethu/babel,masklinn/babel,st4lk/babel,moreati/babel,jespino/babel,iamshubh22/babel,nickretallack/babel,skybon/babel,xlevus/babel,st4lk/babel,gutsy/babel,jmagnusson/babel,yoloseem/babel,mgax/babel,xlevus/babel,upman/babel,srisankethu/babel,yoloseem/babel,julen/babel,nandoflorestan/babel,javacruft/babel,yoloseem/babel,hanteng/babel,mgax/babel,xlevus/babel,mitsuhiko/babel,skybon/babel,prmtl/babel,gutsy/babel,jmagnusson/babel,prmtl/babel,python-babel/babel,upman/babel,jmagnusson/babel,lepistone/babel,masklinn/babel,moreati/babel,mbirtwell/babel,st4lk/babel,hanteng/babel,KIT-XXI/babel,gutsy/babel,nandoflorestan/babel,srisankethu/babel,nickretallack/babel,felixonmars/babel,mbirtwell/babel,javacruft/babel,julen/babel,mitsuhiko/babel,nandoflorestan/babel,python-babel/babel,mitsuhiko/babel,prmtl/babel,lepistone/babel,hanteng/babel,KIT-XXI/babel,upman/babel,iamshubh22/babel,felixonmars/babel,KIT-XXI/babel,moreati/babel,mbirtwell/babel,javacruft/babel,jespino/babel,masklinn/babel | tests/messages/test_plurals.py | tests/messages/test_plurals.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import doctest
import unittest
from babel.messages import plurals
def test_get_plural():
assert plurals.get_plural(locale='en') == (2, '(n != 1)')
assert plurals.get_plural(locale='ga') == (3, '(n==1 ? 0 : n==2 ? 1 : 2)')
tup = plurals.get_plural("ja")
assert tup.num_plurals == 1
assert tup.plural_expr == '0'
assert tup.plural_forms == 'npurals=1; plural=0'
assert str(tup) == 'npurals=1; plural=0'
def suite():
suite = unittest.TestSuite()
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2011 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import doctest
import unittest
from babel.messages import plurals
def suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocTestSuite(plurals))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| bsd-3-clause | Python |
9f58f2ec2d3bf5a42e2d81e12d30d8bb37c4e04b | fix python 2.6 tests | cyberdelia/metrology,zenoss/metrology,zenoss/metrology | tests/reporter/test_librato.py | tests/reporter/test_librato.py | import requests
import sys
import pytest
from mock import patch
from unittest import TestCase
from metrology import Metrology
from metrology.reporter.librato import LibratoReporter
@pytest.mark.skipif('"java" in sys.version.lower()')
class LibratoReporterTest(TestCase):
def setUp(self):
self.reporter = LibratoReporter("<email>", "<token>")
Metrology.meter('meter').mark()
Metrology.counter('counter').increment()
Metrology.timer('timer').update(1.5)
Metrology.utilization_timer('utimer').update(1.5)
def tearDown(self):
self.reporter.stop()
Metrology.stop()
@patch.object(requests, "post")
def test_write(self, mock):
self.reporter.write()
self.assertTrue(mock.send.assert_called())
self.assertTrue("gauges" in mock.call_args_list[0][1]['data'])
self.assertTrue("counters" in mock.call_args_list[0][1]['data'])
| import requests
import sys
from mock import patch
from unittest import TestCase, skipIf
from metrology import Metrology
from metrology.reporter.librato import LibratoReporter
@skipIf("java" in sys.version.lower(), "doesn't support jython")
class LibratoReporterTest(TestCase):
def setUp(self):
self.reporter = LibratoReporter("<email>", "<token>")
Metrology.meter('meter').mark()
Metrology.counter('counter').increment()
Metrology.timer('timer').update(1.5)
Metrology.utilization_timer('utimer').update(1.5)
def tearDown(self):
self.reporter.stop()
Metrology.stop()
@patch.object(requests, "post")
def test_write(self, mock):
self.reporter.write()
self.assertTrue(mock.send.assert_called())
self.assertTrue("gauges" in mock.call_args_list[0][1]['data'])
self.assertTrue("counters" in mock.call_args_list[0][1]['data'])
| mit | Python |
9397c176f6eb0a98ac901085465a9ef4377d2687 | Add comment about matplotlib backend | ESSS/pytest-regressions | tests/test_image_regression.py | tests/test_image_regression.py | import io
from functools import partial
from pytest_regressions.common import Path
from pytest_regressions.testing import check_regression_fixture_workflow
def test_image_regression(image_regression, datadir):
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
# this ensures matplot lib does not use a GUI backend (such as Tk)
matplotlib.use("Agg")
# Data for plotting
t = np.arange(0.0, 2.0, 0.01)
s = 1 + np.sin(2 * np.pi * t)
# Note that using plt.subplots below is equivalent to using
# fig = plt.figure() and then ax = fig.add_subplot(111)
fig, ax = plt.subplots()
ax.plot(t, s)
ax.set(
xlabel="time (s)",
ylabel="voltage (mV)",
title="About as simple as it gets, folks",
)
ax.grid()
image_filename = datadir / "test.png"
fig.savefig(str(image_filename))
image_regression.check(image_filename.read_bytes())
def test_image_regression_workflow(testdir, monkeypatch, datadir):
"""
:type testdir: _pytest.pytester.TmpTestdir
:type monkeypatch: _pytest.monkeypatch.monkeypatch
"""
import sys
from PIL import Image
def get_image(color):
f = io.BytesIO()
img = Image.new("RGB", (100, 100), color)
img.save(f, "PNG")
return f.getvalue()
monkeypatch.setattr(sys, "get_image", partial(get_image, "white"), raising=False)
source = """
import sys
def test_1(image_regression):
contents = sys.get_image()
image_regression.check(contents)
"""
def get_file_contents():
fn = Path(str(testdir.tmpdir)) / "test_file" / "test_1.png"
assert fn.is_file()
return fn.read_bytes()
check_regression_fixture_workflow(
testdir,
source,
data_getter=get_file_contents,
data_modifier=lambda: monkeypatch.setattr(
sys, "get_image", partial(get_image, "black"), raising=False
),
expected_data_1=partial(get_image, "white"),
expected_data_2=partial(get_image, "black"),
)
| import io
from functools import partial
from pytest_regressions.common import Path
from pytest_regressions.testing import check_regression_fixture_workflow
def test_image_regression(image_regression, datadir):
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
matplotlib.use("Agg")
# Data for plotting
t = np.arange(0.0, 2.0, 0.01)
s = 1 + np.sin(2 * np.pi * t)
# Note that using plt.subplots below is equivalent to using
# fig = plt.figure() and then ax = fig.add_subplot(111)
fig, ax = plt.subplots()
ax.plot(t, s)
ax.set(
xlabel="time (s)",
ylabel="voltage (mV)",
title="About as simple as it gets, folks",
)
ax.grid()
image_filename = datadir / "test.png"
fig.savefig(str(image_filename))
image_regression.check(image_filename.read_bytes())
def test_image_regression_workflow(testdir, monkeypatch, datadir):
"""
:type testdir: _pytest.pytester.TmpTestdir
:type monkeypatch: _pytest.monkeypatch.monkeypatch
"""
import sys
from PIL import Image
def get_image(color):
f = io.BytesIO()
img = Image.new("RGB", (100, 100), color)
img.save(f, "PNG")
return f.getvalue()
monkeypatch.setattr(sys, "get_image", partial(get_image, "white"), raising=False)
source = """
import sys
def test_1(image_regression):
contents = sys.get_image()
image_regression.check(contents)
"""
def get_file_contents():
fn = Path(str(testdir.tmpdir)) / "test_file" / "test_1.png"
assert fn.is_file()
return fn.read_bytes()
check_regression_fixture_workflow(
testdir,
source,
data_getter=get_file_contents,
data_modifier=lambda: monkeypatch.setattr(
sys, "get_image", partial(get_image, "black"), raising=False
),
expected_data_1=partial(get_image, "white"),
expected_data_2=partial(get_image, "black"),
)
| mit | Python |
3293783d0df47d864ecf2734869f8d23f0e83d0f | Integrate LLVM at llvm/llvm-project@81b51b61f849 | gautam1858/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorf
low,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "81b51b61f849fa91bdf5d0695918578abae846e3"
LLVM_SHA256 = "4efa8c26189f4b9d1d137d04f48895bae716320246108cf05885293be5d5b6bc"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = ["//third_party/llvm:macos_build_fix.patch", "//third_party/llvm:fix_ppc64le.patch", "//third_party/llvm:disable_typeid_check.patch"],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "deadda749aef22dba4727f5c4d76090ecca559ac"
LLVM_SHA256 = "ae39878b45d0047fc11569409355938b0254d1080e9aa5cfc50d97b498a6812f"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = ["//third_party/llvm:macos_build_fix.patch", "//third_party/llvm:fix_ppc64le.patch", "//third_party/llvm:disable_typeid_check.patch"],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
941ec9e2eb31686ecf51c753034b0d36fcc77f47 | Integrate LLVM at llvm/llvm-project@df7606a066b7 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "df7606a066b75ce55ae4a186c785f996e0985db1"
LLVM_SHA256 = "29a2854dab1f8a8282630d6475be7d59a03b78bd30c30f96ae0afe672a81995c"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "fa79dff8bc8b2da923d4935f4b2035f7e1e11e0a"
LLVM_SHA256 = "cf940b7e6f047fdbae94c8f01197400e81782685ff39736023bd07c249c6cf83"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
184fe1b24a6f5936f8688b81a05aa97fce31757f | Integrate LLVM at llvm/llvm-project@1830ec94ac02 | yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,gau
tam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "1830ec94ac022ae0b6d6876fc2251e6b91e5931e"
LLVM_SHA256 = "a85d5c8dd40fe7a94a8eec5d5d4794cad018435899b68aac079812686779a858"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:BUILD.bazel",
patch_file = "//third_party/llvm:macos_build_fix.patch",
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "6069a6a5049497a32a50a49661c2f4169078bdba"
LLVM_SHA256 = "695c18f11fde8a2aebd2f4144c04878439a382b303e62a2da0d64553b9611efb"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:BUILD.bazel",
patch_file = "//third_party/llvm:macos_build_fix.patch",
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
2719fa58a5decb4f71a8f36d929117ec729231c3 | Integrate LLVM at llvm/llvm-project@86cdb2929cce | tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gautam1858/t
ensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "86cdb2929ccea1968cbbac6387380a6162e78d21"
LLVM_SHA256 = "7ba716b33b48ba1da8cda299d1feb6a545aefdae67794a6da8a595473c6ed5ce"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = ["//third_party/llvm:macos_build_fix.patch", "//third_party/llvm:fix_ppc64le.patch", "//third_party/llvm:disable_typeid_check.patch"],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "bf59cd72447facdb7b17fc00c502d18a02135abb"
LLVM_SHA256 = "5321e1f1c16a7920f9e96c75bdf5bf112bf7bb5c086054b92e6f3008f4e787d5"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = ["//third_party/llvm:macos_build_fix.patch", "//third_party/llvm:fix_ppc64le.patch", "//third_party/llvm:disable_typeid_check.patch"],
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
17fadbfa00cffddd6abb6c558b4ffa2099814de8 | Integrate LLVM at llvm/llvm-project@0e92cbd6a652 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "0e92cbd6a652c4f86fa76a3af2820009d5b6c300"
LLVM_SHA256 = "cac34f6de9b704259b26cfb4fa52ed9563330fc1dc01e1594266d4e00bafb91d"
tfrt_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "8e5f3d04f269dbe791076e775f1d1a098cbada01"
LLVM_SHA256 = "51f4950108027260a6dfeac4781fdad85dfde1d8594f8d26faea504e923ebcf2"
tfrt_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
a22a8dd23ade41fcefab17328d38a966afa41ec7 | Integrate LLVM at llvm/llvm-project@42102bce98e5 | yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_
libraries_once,Intel-Corporation/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "42102bce98e527f994a7bc68b2255d9e0462f6eb"
LLVM_SHA256 = "946a0b227e7435531e5286d84cad60011e9613a3198e0e3c07d199babee0db5c"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = "//third_party/llvm:macos_build_fix.patch",
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "d56b171ee965eba9ba30f4a479a9f2e1703105cf"
LLVM_SHA256 = "bce89fe2ac52b1d3165d8e85c3df02da1a222f4f61dced63bbfe2fc35ad97d4a"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
build_file = "//third_party/llvm:llvm.BUILD",
patch_file = "//third_party/llvm:macos_build_fix.patch",
link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
)
| apache-2.0 | Python |
639232e16a85c7146f13b54a94ee388322129b27 | Integrate LLVM at llvm/llvm-project@03512ae9bf31 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "03512ae9bf31b725d8233c03094fd463b5f46285"
LLVM_SHA256 = "7022aac9ace736042b9363fb8becb5bc17efd78c045592506d866d88b34ba271"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "131f7bac63b8cc1700bbae908bdac60f438e69d1"
LLVM_SHA256 = "f3c917ddab6f8e629569ee59df37020098e718d7d7429e149d9df6e6aba211c4"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
b8b0a570c1762364d904e0e2fd82cafe8bee9726 | Integrate LLVM at llvm/llvm-project@54cc7de4bc01 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "54cc7de4bc01e6178213e4487d6ab49b809ba2b0"
LLVM_SHA256 = "a939c493893b2d27ea87d0d28efc79f33cdee4c56ac5954bc19ca2222a29af9d"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "077f90315bec29443784a1bb2c55f3d7fc2eab64"
LLVM_SHA256 = "b8204098753a27847e6084c95d7aa62083cc752ba29899b55058e38ea3f1c4f6"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
fe8e227f3e9b98341effa1ea92c4391cdc955c31 | Integrate LLVM at llvm/llvm-project@86bde99a9027 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "86bde99a9027f875383e38bfd3a863abae3d0e75"
LLVM_SHA256 = "bdd4964d0e0d389cc3a73f04b4ebbd2da3618622a0f3774dd97223d00c7f13a8"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "5e90f384243f64265814828896168316c80aabc0"
LLVM_SHA256 = "818a3264700fd07a24c6ff81fb64196638f17ab78de1afcfbdbd5138a087df77"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
0905a345b5a13ceffc3081f3579d4c0a4c5fe2f4 | Integrate LLVM at llvm/llvm-project@bf60a5af0a21 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "bf60a5af0a21323f257719a08d57b28a3389b283"
LLVM_SHA256 = "88a9441686bc19d9b559b4342af76c4757878197940a6dafa85b5022be384105"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "81b51b61f849fa91bdf5d0695918578abae846e3"
LLVM_SHA256 = "4efa8c26189f4b9d1d137d04f48895bae716320246108cf05885293be5d5b6bc"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
57b178cd6c81727feb6fad6a49831ab60de46a01 | Integrate LLVM at llvm/llvm-project@1fa4c188b5a4 | tensorflow/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-exp
erimental_link_static_libraries_once,gautam1858/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,yongtang/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "1fa4c188b5a4187dba7e3809d8fd6d6eccff99f4"
LLVM_SHA256 = "24155665d0537320a3b20c668c8cc3142106a0bc2e901d6c99d9ca807c1ef141"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "57dfa12e4ca8d8f5642a3c3a1e80040fd5cba3c9"
LLVM_SHA256 = "9f54c2946fa692c1fa699b782c240dd8a118c8acfd910624b208736c0024a35d"
tf_http_archive(
name = name,
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
link_files = {
"//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD",
"//third_party/mlir:BUILD": "mlir/BUILD",
"//third_party/mlir:build_defs.bzl": "mlir/build_defs.bzl",
"//third_party/mlir:linalggen.bzl": "mlir/linalggen.bzl",
"//third_party/mlir:tblgen.bzl": "mlir/tblgen.bzl",
"//third_party/mlir:test.BUILD": "mlir/test/BUILD",
},
)
| apache-2.0 | Python |
9fb08850e9b44bb420a74050a4ecd338fc686141 | Integrate LLVM at llvm/llvm-project@72136d8ba266 | tensorflow/runtime,tensorflow/runtime,tensorflow/runtime,tensorflow/runtime | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "72136d8ba266eea6ce30fbc0e521c7b01a13b378"
LLVM_SHA256 = "54d179116e7a79eb1fdf7819aad62b4d76bc0e15e8567871cae9b675f7dec5c1"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tfrt_http_archive")
def repo(name):
"""Imports LLVM."""
LLVM_COMMIT = "4b33ea052ab7fbceed4c62debf1145f80d66b0d7"
LLVM_SHA256 = "b6c61a6c81b1910cc34ccd9800fd18afc2dc1b9d76c640dd767f9e550f94c8d6"
tfrt_http_archive(
name = name,
build_file = "//third_party/llvm:BUILD",
sha256 = LLVM_SHA256,
strip_prefix = "llvm-project-" + LLVM_COMMIT,
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
"https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
],
)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.