commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
2b614aa7562563833ad17408966dee016be032af | Use correct deadline flag on darwin | mpolden/jarvis2,martinp/jarvis2,mpolden/jarvis2,mpolden/jarvis2,martinp/jarvis2,martinp/jarvis2 | jarvis/jobs/uptime.py | jarvis/jobs/uptime.py | #!/usr/bin/env python
from jobs import AbstractJob
from subprocess import Popen, PIPE
from sys import platform
class Uptime(AbstractJob):
def __init__(self, conf):
self.hosts = conf['hosts']
self.interval = conf['interval']
self.timeout = conf.get('timeout', 1)
def get(self):
hosts = []
for label, ip in self.hosts:
ping_cmd = 'ping6' if ':' in ip else 'ping'
deadline_flag = '-w'
# ping on darwin uses -t for deadline/timeout
if platform == 'darwin':
deadline_flag = '-t'
ping = '%s %s %d -c 1 %s' % (ping_cmd, deadline_flag, self.timeout,
ip)
p = Popen(ping.split(' '), stdout=PIPE, stderr=PIPE)
hosts.append({
'label': label,
'ip': ip,
'active': p.wait() == 0
})
return {'hosts': hosts}
| #!/usr/bin/env python
from jobs import AbstractJob
from subprocess import Popen, PIPE
class Uptime(AbstractJob):
def __init__(self, conf):
self.hosts = conf['hosts']
self.interval = conf['interval']
self.timeout = conf.get('timeout', 1)
def get(self):
hosts = []
for label, ip in self.hosts:
ping_cmd = 'ping6' if ':' in ip else 'ping'
ping = '%s -w %d -c 1 %s' % (ping_cmd, self.timeout, ip)
p = Popen(ping.split(' '), stdout=PIPE, stderr=PIPE)
hosts.append({
'label': label,
'ip': ip,
'active': p.wait() == 0
})
return {'hosts': hosts}
| mit | Python |
f9d1a34fdd4d8065acb2e9ab9ed4a7b0338b66bd | Fix nested HTML dictionaries. Closes #3314. | YBJAY00000/django-rest-framework,nhorelik/django-rest-framework,callorico/django-rest-framework,potpath/django-rest-framework,werthen/django-rest-framework,atombrella/django-rest-framework,potpath/django-rest-framework,sbellem/django-rest-framework,bluedazzle/django-rest-framework,agconti/django-rest-framework,sehmaschine/django-rest-framework,tcroiset/django-rest-framework,jpadilla/django-rest-framework,edx/django-rest-framework,sehmaschine/django-rest-framework,sehmaschine/django-rest-framework,potpath/django-rest-framework,sbellem/django-rest-framework,nhorelik/django-rest-framework,cyberj/django-rest-framework,dmwyatt/django-rest-framework,fishky/django-rest-framework,jerryhebert/django-rest-framework,sheppard/django-rest-framework,sbellem/django-rest-framework,xiaotangyuan/django-rest-framework,fishky/django-rest-framework,johnraz/django-rest-framework,davesque/django-rest-framework,ossanna16/django-rest-framework,davesque/django-rest-framework,uploadcare/django-rest-framework,jpadilla/django-rest-framework,kgeorgy/django-rest-framework,krinart/django-rest-framework,atombrella/django-rest-framework,werthen/django-rest-framework,waytai/django-rest-framework,ashishfinoit/django-rest-framework,wangpanjun/django-rest-framework,linovia/django-rest-framework,nryoung/django-rest-framework,linovia/django-rest-framework,gregmuellegger/django-rest-framework,James1345/django-rest-framework,gregmuellegger/django-rest-framework,dmwyatt/django-rest-framework,sheppard/django-rest-framework,yiyocx/django-rest-framework,abdulhaq-e/django-rest-framework,paolopaolopaolo/django-rest-framework,kylefox/django-rest-framework,paolopaolopaolo/django-rest-framework,wzbozon/django-rest-framework,nryoung/django-rest-framework,gregmuellegger/django-rest-framework,johnraz/django-rest-framework,jness/django-rest-framework,atombrella/django-rest-framework,rafaelcaricio/django-rest-framework,xiaotangyuan/django-rest-framework,abdulhaq-e/django-rest-framework,James1345/django-rest-framework,bluedazzle/django-rest-framework,xiaotangyuan/django-rest-framework,wedaly/django-rest-framework,davesque/django-rest-framework,jerryhebert/django-rest-framework,uruz/django-rest-framework,thedrow/django-rest-framework-1,jness/django-rest-framework,pombredanne/django-rest-framework,ossanna16/django-rest-framework,zeldalink0515/django-rest-framework,bluedazzle/django-rest-framework,YBJAY00000/django-rest-framework,wedaly/django-rest-framework,ebsaral/django-rest-framework,tcroiset/django-rest-framework,pombredanne/django-rest-framework,agconti/django-rest-framework,yiyocx/django-rest-framework,tomchristie/django-rest-framework,wzbozon/django-rest-framework,werthen/django-rest-framework,wzbozon/django-rest-framework,pombredanne/django-rest-framework,hunter007/django-rest-framework,thedrow/django-rest-framework-1,rhblind/django-rest-framework,waytai/django-rest-framework,ebsaral/django-rest-framework,paolopaolopaolo/django-rest-framework,rhblind/django-rest-framework,wangpanjun/django-rest-framework,wangpanjun/django-rest-framework,ashishfinoit/django-rest-framework,tcroiset/django-rest-framework,ossanna16/django-rest-framework,thedrow/django-rest-framework-1,cyberj/django-rest-framework,ebsaral/django-rest-framework,hunter007/django-rest-framework,abdulhaq-e/django-rest-framework,kgeorgy/django-rest-framework,linovia/django-rest-framework,johnraz/django-rest-framework,uploadcare/django-rest-framework,cyberj/django-rest-framework,kylefox/django-rest-framework,uruz/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,edx/django-rest-framework,douwevandermeij/django-rest-framework,callorico/django-rest-framework,tomchristie/django-rest-framework,edx/django-rest-framework,nryoung/django-rest-framework,jerryhebert/django-rest-framework,douwevandermeij/django-rest-framework,fishky/django-rest-framework,yiyocx/django-rest-framework,waytai/django-rest-framework,krinart/django-rest-framework,ashishfinoit/django-rest-framework,zeldalink0515/django-rest-framework,krinart/django-rest-framework,nhorelik/django-rest-framework,rafaelcaricio/django-rest-framework,hunter007/django-rest-framework,sheppard/django-rest-framework,wedaly/django-rest-framework,jpadilla/django-rest-framework,James1345/django-rest-framework,rhblind/django-rest-framework,douwevandermeij/django-rest-framework,zeldalink0515/django-rest-framework,dmwyatt/django-rest-framework,kylefox/django-rest-framework,kgeorgy/django-rest-framework,tomchristie/django-rest-framework,uruz/django-rest-framework,uploadcare/django-rest-framework,agconti/django-rest-framework,jness/django-rest-framework,callorico/django-rest-framework | rest_framework/utils/html.py | rest_framework/utils/html.py | """
Helpers for dealing with HTML input.
"""
import re
from django.utils.datastructures import MultiValueDict
def is_html_input(dictionary):
# MultiDict type datastructures are used to represent HTML form input,
# which may have more than one value for each key.
return hasattr(dictionary, 'getlist')
def parse_html_list(dictionary, prefix=''):
"""
Used to suport list values in HTML forms.
Supports lists of primitives and/or dictionaries.
* List of primitives.
{
'[0]': 'abc',
'[1]': 'def',
'[2]': 'hij'
}
-->
[
'abc',
'def',
'hij'
]
* List of dictionaries.
{
'[0]foo': 'abc',
'[0]bar': 'def',
'[1]foo': 'hij',
'[1]bar': 'klm',
}
-->
[
{'foo': 'abc', 'bar': 'def'},
{'foo': 'hij', 'bar': 'klm'}
]
"""
ret = {}
regex = re.compile(r'^%s\[([0-9]+)\](.*)$' % re.escape(prefix))
for field, value in dictionary.items():
match = regex.match(field)
if not match:
continue
index, key = match.groups()
index = int(index)
if not key:
ret[index] = value
elif isinstance(ret.get(index), dict):
ret[index][key] = value
else:
ret[index] = MultiValueDict({key: [value]})
return [ret[item] for item in sorted(ret.keys())]
def parse_html_dict(dictionary, prefix=''):
"""
Used to support dictionary values in HTML forms.
{
'profile.username': 'example',
'profile.email': 'example@example.com',
}
-->
{
'profile': {
'username': 'example',
'email': 'example@example.com'
}
}
"""
ret = MultiValueDict()
regex = re.compile(r'^%s\.(.+)$' % re.escape(prefix))
for field, value in dictionary.items():
match = regex.match(field)
if not match:
continue
key = match.groups()[0]
ret[key] = value
return ret
| """
Helpers for dealing with HTML input.
"""
import re
from django.utils.datastructures import MultiValueDict
def is_html_input(dictionary):
# MultiDict type datastructures are used to represent HTML form input,
# which may have more than one value for each key.
return hasattr(dictionary, 'getlist')
def parse_html_list(dictionary, prefix=''):
"""
Used to suport list values in HTML forms.
Supports lists of primitives and/or dictionaries.
* List of primitives.
{
'[0]': 'abc',
'[1]': 'def',
'[2]': 'hij'
}
-->
[
'abc',
'def',
'hij'
]
* List of dictionaries.
{
'[0]foo': 'abc',
'[0]bar': 'def',
'[1]foo': 'hij',
'[1]bar': 'klm',
}
-->
[
{'foo': 'abc', 'bar': 'def'},
{'foo': 'hij', 'bar': 'klm'}
]
"""
ret = {}
regex = re.compile(r'^%s\[([0-9]+)\](.*)$' % re.escape(prefix))
for field, value in dictionary.items():
match = regex.match(field)
if not match:
continue
index, key = match.groups()
index = int(index)
if not key:
ret[index] = value
elif isinstance(ret.get(index), dict):
ret[index][key] = value
else:
ret[index] = MultiValueDict({key: [value]})
return [ret[item] for item in sorted(ret.keys())]
def parse_html_dict(dictionary, prefix=''):
"""
Used to support dictionary values in HTML forms.
{
'profile.username': 'example',
'profile.email': 'example@example.com',
}
-->
{
'profile': {
'username': 'example',
'email': 'example@example.com'
}
}
"""
ret = {}
regex = re.compile(r'^%s\.(.+)$' % re.escape(prefix))
for field, value in dictionary.items():
match = regex.match(field)
if not match:
continue
key = match.groups()[0]
ret[key] = value
return ret
| bsd-2-clause | Python |
12fd6f6a4a2cc121e8fac071dede9e0b0d488908 | support django 1.11 | ieiayaobb/lushi8,ieiayaobb/lushi8,ieiayaobb/lushi8 | web/views.py | web/views.py | import requests
from django.http import Http404
from django.shortcuts import render, render_to_response, redirect
# Create your views here.
from django.template import RequestContext
from web.fetch import Fetcher
from settings import LEAN_CLOUD_ID, LEAN_CLOUD_SECRET
import leancloud
# @api_view(('GET',))
# def api_root(request, format=None):
# return Response({
# 'chairmans': reverse('chairman-list', request=request, format=format),
# })
def get_index(request):
# response = requests.get('http://127.0.0.1:8000/api/chairmans/')
# chairmans = response.json()
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
# Chairman = leancloud.Object.extend('Chairman')
query = leancloud.Query('Chairman')
chairmans = []
for chairman in query.add_descending('num').find():
chairman_view = {}
chairman_view.type = chairman.get('type')
chairman_view.href = chairman.get('href')
chairman_view.id = chairman.get('id')
chairman_view.title = chairman.get('title')
chairman_view.img = chairman.get('img')
chairman_view.name = chairman.get('name')
chairman_view.num = chairman.get('num')
chairmans.append(chairman_view)
return render_to_response('index.html', locals())
def fetch(request):
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
query = leancloud.Query('Chairman')
allDataCompleted = False
batch = 0
limit = 1000
while not allDataCompleted:
query.limit(limit)
query.skip(batch * limit)
query.add_ascending('createdAt')
resultList = query.find()
if len(resultList) < limit:
allDataCompleted = True
leancloud.Object.destroy_all(resultList)
batch += 1
fetcher = Fetcher()
fetcher.fetch_cc()
fetcher.fetch_douyu()
fetcher.fetch_longzhu()
fetcher.fetch_quanmin()
fetcher.fetch_xiongmao()
fetcher.fetch_zhanqi()
fetcher.fetch_huya()
for chairman in fetcher.chairmans:
try:
chairman.save()
except Exception, e:
print e
return redirect("/")
| import requests
from django.http import Http404
from django.shortcuts import render, render_to_response, redirect
# Create your views here.
from django.template import RequestContext
from web.fetch import Fetcher
from settings import LEAN_CLOUD_ID, LEAN_CLOUD_SECRET
import leancloud
# @api_view(('GET',))
# def api_root(request, format=None):
# return Response({
# 'chairmans': reverse('chairman-list', request=request, format=format),
# })
def get_index(request):
# response = requests.get('http://127.0.0.1:8000/api/chairmans/')
# chairmans = response.json()
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
# Chairman = leancloud.Object.extend('Chairman')
query = leancloud.Query('Chairman')
chairmans = []
for chairman in query.add_descending('num').find():
chairman_view = {}
chairman_view.type = chairman.get('type')
chairman_view.href = chairman.get('href')
chairman_view.id = chairman.get('id')
chairman_view.title = chairman.get('title')
chairman_view.img = chairman.get('img')
chairman_view.name = chairman.get('name')
chairman_view.num = chairman.get('num')
chairmans.append(chairman_view)
return render_to_response('index.html', locals(),
context_instance=RequestContext(request))
def fetch(request):
leancloud.init(LEAN_CLOUD_ID, LEAN_CLOUD_SECRET)
query = leancloud.Query('Chairman')
allDataCompleted = False
batch = 0
limit = 1000
while not allDataCompleted:
query.limit(limit)
query.skip(batch * limit)
query.add_ascending('createdAt')
resultList = query.find()
if len(resultList) < limit:
allDataCompleted = True
leancloud.Object.destroy_all(resultList)
batch += 1
fetcher = Fetcher()
fetcher.fetch_cc()
fetcher.fetch_douyu()
fetcher.fetch_longzhu()
fetcher.fetch_quanmin()
fetcher.fetch_xiongmao()
fetcher.fetch_zhanqi()
fetcher.fetch_huya()
for chairman in fetcher.chairmans:
try:
chairman.save()
except Exception, e:
print e
return redirect("/")
| mit | Python |
0254a3fe1180c57bd3596b4b5831398e99849c00 | Improve lagging message description (#24208) | commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot | common/realtime.py | common/realtime.py | """Utilities for reading real time clocks and keeping soft real time constraints."""
import gc
import os
import time
from typing import Optional
from setproctitle import getproctitle # pylint: disable=no-name-in-module
from common.clock import sec_since_boot # pylint: disable=no-name-in-module, import-error
from selfdrive.hardware import PC, TICI
# time step for each process
DT_CTRL = 0.01 # controlsd
DT_MDL = 0.05 # model
DT_TRML = 0.5 # thermald and manager
# driver monitoring
if TICI:
DT_DMON = 0.05
else:
DT_DMON = 0.1
class Priority:
# CORE 2
# - modeld = 55
# - camerad = 54
CTRL_LOW = 51 # plannerd & radard
# CORE 3
# - boardd = 55
CTRL_HIGH = 53
def set_realtime_priority(level: int) -> None:
if not PC:
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(level)) # type: ignore[attr-defined]
def set_core_affinity(core: int) -> None:
if not PC:
os.sched_setaffinity(0, [core,]) # type: ignore[attr-defined]
def config_realtime_process(core: int, priority: int) -> None:
gc.disable()
set_realtime_priority(priority)
set_core_affinity(core)
class Ratekeeper:
def __init__(self, rate: int, print_delay_threshold: Optional[float] = 0.0) -> None:
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative."""
self._interval = 1. / rate
self._next_frame_time = sec_since_boot() + self._interval
self._print_delay_threshold = print_delay_threshold
self._frame = 0
self._remaining = 0.0
self._process_name = getproctitle()
@property
def frame(self) -> int:
return self._frame
@property
def remaining(self) -> float:
return self._remaining
# Maintain loop rate by calling this at the end of each loop
def keep_time(self) -> bool:
lagged = self.monitor_time()
if self._remaining > 0:
time.sleep(self._remaining)
return lagged
# this only monitor the cumulative lag, but does not enforce a rate
def monitor_time(self) -> bool:
lagged = False
remaining = self._next_frame_time - sec_since_boot()
self._next_frame_time += self._interval
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold:
print(f"{self._process_name} lagging by {-remaining * 1000:.2f} ms")
lagged = True
self._frame += 1
self._remaining = remaining
return lagged
| """Utilities for reading real time clocks and keeping soft real time constraints."""
import gc
import os
import time
import multiprocessing
from typing import Optional
from common.clock import sec_since_boot # pylint: disable=no-name-in-module, import-error
from selfdrive.hardware import PC, TICI
# time step for each process
DT_CTRL = 0.01 # controlsd
DT_MDL = 0.05 # model
DT_TRML = 0.5 # thermald and manager
# driver monitoring
if TICI:
DT_DMON = 0.05
else:
DT_DMON = 0.1
class Priority:
# CORE 2
# - modeld = 55
# - camerad = 54
CTRL_LOW = 51 # plannerd & radard
# CORE 3
# - boardd = 55
CTRL_HIGH = 53
def set_realtime_priority(level: int) -> None:
if not PC:
os.sched_setscheduler(0, os.SCHED_FIFO, os.sched_param(level)) # type: ignore[attr-defined]
def set_core_affinity(core: int) -> None:
if not PC:
os.sched_setaffinity(0, [core,]) # type: ignore[attr-defined]
def config_realtime_process(core: int, priority: int) -> None:
gc.disable()
set_realtime_priority(priority)
set_core_affinity(core)
class Ratekeeper:
def __init__(self, rate: int, print_delay_threshold: Optional[float] = 0.0) -> None:
"""Rate in Hz for ratekeeping. print_delay_threshold must be nonnegative."""
self._interval = 1. / rate
self._next_frame_time = sec_since_boot() + self._interval
self._print_delay_threshold = print_delay_threshold
self._frame = 0
self._remaining = 0.0
self._process_name = multiprocessing.current_process().name
@property
def frame(self) -> int:
return self._frame
@property
def remaining(self) -> float:
return self._remaining
# Maintain loop rate by calling this at the end of each loop
def keep_time(self) -> bool:
lagged = self.monitor_time()
if self._remaining > 0:
time.sleep(self._remaining)
return lagged
# this only monitor the cumulative lag, but does not enforce a rate
def monitor_time(self) -> bool:
lagged = False
remaining = self._next_frame_time - sec_since_boot()
self._next_frame_time += self._interval
if self._print_delay_threshold is not None and remaining < -self._print_delay_threshold:
print(f"{self._process_name} lagging by {-remaining * 1000:.2f} ms")
lagged = True
self._frame += 1
self._remaining = remaining
return lagged
| mit | Python |
ade17d0c100d618c9306738df8dd1e467e3b8e93 | change label (plans to tasks) | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/djcelery_dbaas/admin.py | dbaas/djcelery_dbaas/admin.py | from django.contrib import admin
from djcelery.admin import PeriodicTaskAdmin
from djcelery.models import PeriodicTask
class PeriodicTaskDbaas(PeriodicTaskAdmin):
actions = ['action_enable_plans','action_disable_plans']
def action_enable_plans(self, request, queryset):
queryset.update(enabled=True)
action_enable_plans.short_description = "Enable selected tasks"
def action_disable_plans(self, request, queryset):
queryset.update(enabled=False)
action_disable_plans.short_description = "Disable selected tasks"
admin.site.unregister(PeriodicTask)
admin.site.register(PeriodicTask, PeriodicTaskDbaas)
| from django.contrib import admin
from djcelery.admin import PeriodicTaskAdmin
from djcelery.models import PeriodicTask
class PeriodicTaskDbaas(PeriodicTaskAdmin):
actions = ['action_enable_plans','action_disable_plans']
def action_enable_plans(self, request, queryset):
queryset.update(enabled=True)
action_enable_plans.short_description = "Enable selected plans"
def action_disable_plans(self, request, queryset):
queryset.update(enabled=False)
action_disable_plans.short_description = "Disable selected plans"
admin.site.unregister(PeriodicTask)
admin.site.register(PeriodicTask, PeriodicTaskDbaas)
| bsd-3-clause | Python |
9457f9ebff4d84f1a6f6b83669dc2aa6a3c1521e | remove .well-known prefix from path | cloudfleet/blimp-wellknown,cloudfleet/blimp-wellknown | wellknown.py | wellknown.py | from flask import Flask, jsonify, request
import settings, getopt, sys, os
app = Flask(__name__)
@app.route('/.well-known/host-meta.json', methods=['GET'])
def send_host_meta_json():
return jsonify({
"links": settings.LINKS
})
@app.route('/host-meta', methods=['GET'])
def send_host_meta_xrd():
return settings.JINJA_ENV.get_template("host-meta.xrd.tpl").render(links=settings.LINKS)
opts, args = getopt.getopt(sys.argv[1:], "h:p:", ["host=", "port="])
options = dict(opts)
hostname = "0.0.0.0"
port = 80
if "-h" in options:
hostname = options["-h"]
if "--host" in options:
hostname = options["--host"]
if "-p" in options:
port = int(options["-p"])
if "--port" in options:
port = int(options["--port"])
if __name__ == '__main__':
app.run(host=hostname, port=port)
| from flask import Flask, jsonify, request
import settings, getopt, sys, os
app = Flask(__name__)
@app.route('/.well-known/host-meta.json', methods=['GET'])
def send_host_meta_json():
return jsonify({
"links": settings.LINKS
})
@app.route('/.well-known/host-meta', methods=['GET'])
def send_host_meta_xrd():
return settings.JINJA_ENV.get_template("host-meta.xrd.tpl").render(links=settings.LINKS)
opts, args = getopt.getopt(sys.argv[1:], "h:p:", ["host=", "port="])
options = dict(opts)
hostname = "0.0.0.0"
port = 80
if "-h" in options:
hostname = options["-h"]
if "--host" in options:
hostname = options["--host"]
if "-p" in options:
port = int(options["-p"])
if "--port" in options:
port = int(options["--port"])
if __name__ == '__main__':
app.run(host=hostname, port=port)
| agpl-3.0 | Python |
d6e87778c82eecc07b73a91d50cc2d9034a4428c | Fix imports for Python 2 & 3 compatibility | suchow/judicious,suchow/judicious,suchow/judicious | judicious/__init__.py | judicious/__init__.py | # -*- coding: utf-8 -*-
"""Top-level package for judicious."""
__author__ = """Jordan W. Suchow"""
__email__ = 'jwsuchow@gmail.com'
__version__ = '0.1.0'
from .judicious import (
BASE_URL,
register,
)
__all__ = (
"BASE_URL",
"register",
)
| # -*- coding: utf-8 -*-
"""Top-level package for judicious."""
__author__ = """Jordan W. Suchow"""
__email__ = 'jwsuchow@gmail.com'
__version__ = '0.1.0'
from judicious import (
BASE_URL,
register,
)
__all__ = (
"BASE_URL",
"register",
)
| mit | Python |
0461581dae5d1f0cb922cddd4bc484e7a1e0dfb7 | remove __init__ from pipelines list | audy/cram | metacram/cram_cli.py | metacram/cram_cli.py | #!/usr/bin/env python
import sys
import os
from glob import glob
from metacram import *
# get list of pipelines
# this returns a list of paths to pipelines
# pipeline name is the full path minus directories and extension
# PIPELINES = { 'name': 'path', ... }
this_dir, this_filename = os.path.split(__file__)
PIPELINES = { os.path.basename(i).replace('.py', ''): i for i in glob(os.path.join(this_dir, 'pipelines', '*.py')) }
del PIPELINES['__init__']
def create_project(script, directory):
''' creates a new pipeline in directory given a pipeline module '''
# create directory
if os.path.exists(directory):
print >> sys.stderr, "%s exists. Move or delete yourself" % directory
# quit(-1)
else:
os.mkdir(directory)
# Copy pipeline script to directory
script_out_file = os.path.join(directory, os.path.basename(script))
with open(script_out_file, 'w') as output:
# open script for reading
with open(script) as handle:
output.write(handle.read())
ohai('%s created in %s' % (os.path.basename(script), directory))
ohai('run %s to execute pipeline' % os.path.basename(script))
def main():
''' meat & potatoes '''
# TODO add this to ARGV. Value gets sent to task function
if len(sys.argv) < 2:
print_usage()
quit(-1)
# run task or die
# forward argv to task
task = sys.argv[1]
directory = sys.argv[2]
path = PIPELINES.get(task, False)
# Do something
if path:
create_project(path, directory)
# Print task list
elif task in ['-h', '--help', 'tasks', 'list']:
print_usage()
# Break
else:
ohno('no task, %s' % task)
def print_usage():
''' prints usage '''
print "** MetaCRAM **\n\nPipelines:"
print " Type metacram <name> <directory> to create a new project.\n"
for name in PIPELINES:
print " %s" % (name)
if __name__ == '__main__':
main() | #!/usr/bin/env python
import sys
import os
from glob import glob
from metacram import *
# get list of pipelines
# this returns a list of paths to pipelines
# pipeline name is the full path minus directories and extension
# PIPELINES = { 'name': 'path', ... }
this_dir, this_filename = os.path.split(__file__)
PIPELINES = { os.path.basename(i).replace('.py',''): i for i in glob(os.path.join(this_dir, 'pipelines', '*.py')) }
def create_project(script, directory):
''' creates a new pipeline in directory given a pipeline module '''
# create directory
if os.path.exists(directory):
print >> sys.stderr, "%s exists. Move or delete yourself" % directory
# quit(-1)
else:
os.mkdir(directory)
# Copy pipeline script to directory
script_out_file = os.path.join(directory, os.path.basename(script))
with open(script_out_file, 'w') as output:
# open script for reading
with open(script) as handle:
output.write(handle.read())
ohai('%s created in %s' % (os.path.basename(script), directory))
ohai('run %s to execute pipeline' % os.path.basename(script))
def main():
''' meat & potatoes '''
# TODO add this to ARGV. Value gets sent to task function
if len(sys.argv) < 2:
print_usage()
quit(-1)
# run task or die
# forward argv to task
task = sys.argv[1]
directory = sys.argv[2]
path = PIPELINES.get(task, False)
# Do something
if path:
create_project(path, directory)
# Print task list
elif task in ['-h', '--help', 'tasks', 'list']:
print_usage()
# Break
else:
ohno('no task, %s' % task)
def print_usage():
''' prints usage '''
print "** MetaCRAM **\n\nPipelines:"
print " Type metacram <name> <directory> to create a new project.\n"
for name in PIPELINES:
print " %s" % (name)
if __name__ == '__main__':
main() | bsd-3-clause | Python |
758b7de1b15cf85df30190c63634167df1a114fb | Clean up | Eric89GXL/vispy,Eric89GXL/vispy,Eric89GXL/vispy | examples/basics/scene/mesh_texture.py | examples/basics/scene/mesh_texture.py | import argparse
import numpy as np
from vispy import app, scene
from vispy.io import imread, load_data_file, read_mesh
from vispy.scene.visuals import Mesh
from vispy.visuals.filters import TextureFilter
parser = argparse.ArgumentParser()
parser.add_argument('--shading', default='smooth',
choices=['none', 'flat', 'smooth'],
help="shading mode")
args = parser.parse_args()
mesh_path = load_data_file('spot/spot.obj.gz')
texture_path = load_data_file('spot/spot.png')
vertices, faces, normals, texcoords = read_mesh(mesh_path)
texture = np.flipud(imread(texture_path))
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600))
view = canvas.central_widget.add_view()
view.camera = 'arcball'
# Adapt the depth to the scale of the mesh to avoid rendering artefacts.
view.camera.depth_value = 10 * (vertices.max() - vertices.min())
shading = None if args.shading == 'none' else args.shading
mesh = Mesh(vertices, faces, shading=shading, color='lightgreen')
mesh.shininess = 1e-3
view.add(mesh)
texture_filter = TextureFilter(texture, texcoords)
mesh.attach(texture_filter)
@canvas.events.key_press.connect
def on_key_press(event):
if event.key == "t":
texture_filter.enabled = not texture_filter.enabled
mesh.update()
def attach_headlight(mesh, view, canvas):
light_dir = (0, -1, 0, 0)
mesh.light_dir = light_dir[:3]
initial_light_dir = view.camera.transform.imap(light_dir)
@view.scene.transform.changed.connect
def on_transform_change(event):
transform = view.camera.transform
mesh.light_dir = transform.map(initial_light_dir)[:3]
attach_headlight(mesh, view, canvas)
canvas.show()
if __name__ == "__main__":
app.run()
| import argparse
import numpy as np
from vispy import app, scene
from vispy.io import imread, load_data_file, read_mesh
from vispy.scene.visuals import Mesh
from vispy.visuals.filters import TextureFilter
parser = argparse.ArgumentParser()
parser.add_argument('--shading', default='smooth',
choices=['none', 'flat', 'smooth'],
help="shading mode")
args = parser.parse_args()
mesh_path = load_data_file('spot/spot.obj.gz')
texture_path = load_data_file('spot/spot.png')
vertices, faces, normals, texcoords = read_mesh(mesh_path)
texture = np.flipud(imread(texture_path))
canvas = scene.SceneCanvas(keys='interactive', bgcolor='white',
size=(800, 600))
view = canvas.central_widget.add_view()
view.camera = 'arcball'
# Adapt the depth to the scale of the mesh to avoid rendering artefacts.
view.camera.depth_value = 10 * (vertices.max() - vertices.min())
shading = None if args.shading == 'none' else args.shading
mesh = Mesh(vertices, faces, shading=shading, color='lightgreen')
mesh.shininess = 1e-3
view.add(mesh)
texture_filter = TextureFilter(texture, texcoords)
mesh.attach(texture_filter)
@canvas.events.key_press.connect
def on_key_press(event):
if event.key == "t":
texture_filter.enabled = not texture_filter.enabled
mesh.update()
def attach_headlight(mesh, view, canvas):
light_dir = (0, -1, 0, 0)
mesh.light_dir = light_dir[:3]
initial_light_dir = view.camera.transform.imap(light_dir)
initial_light_dir[3] = 0
@view.scene.transform.changed.connect
def on_transform_change(event):
transform = view.camera.transform
mesh.light_dir = transform.map(initial_light_dir)[:3]
attach_headlight(mesh, view, canvas)
canvas.show()
if __name__ == "__main__":
app.run()
| bsd-3-clause | Python |
938ea70e284a7ee0562468471ad0368d01112b5d | Allow null hstore fields | alukach/django-hstore-mixin,OspreyInformatics/django-hstore-mixin | django_hstore_mixin/models.py | django_hstore_mixin/models.py | import datetime
import json
from django.core.exceptions import ValidationError
from django.db import models
from django_hstore import hstore
from django_hstore_mixin.data_types import JsonDict
from django_hstore_mixin.serializers import toJson
class HstoreMixin(models.Model):
""" Data field to be added to model to enable Hstore field. Actual
hstore field hidden with underscore, property field serializes and
deserializes data upon setting/getting. """
_data = hstore.DictionaryField(
'KeyValueStore',
db_index=True,
default={},
blank=True,
null=True
)
objects = hstore.HStoreManager()
class Meta:
abstract = True
def clean(self):
""" Ensure that all Hstore data is stored as valid JSON.
NOTE: By default, this is not called automatically when you call
save() method. """
for key, value in self._data.items():
try:
json.loads(value)
except ValueError:
msg = "The value of key \"%s\" does not appear to be valid JSON: %s. " % (key, value)
msg += "Hstore values must be stored as JSON. Maybe you meant to use %s?" % json.dumps(value)
raise ValidationError(msg)
return super(HstoreMixin, self).clean()
@property
def data(self):
""" Decode data from JSON """
return JsonDict(self._data, modelInstance=self)
@data.setter
def data(self, value):
""" Encode data to JSON """
if not self._data:
self._data = {k: toJson(v) for k, v in value.items()}
else:
self._data = JsonDict(value, modelInstance=self)
| import datetime
import json
from django.core.exceptions import ValidationError
from django.db import models
from django_hstore import hstore
from django_hstore_mixin.data_types import JsonDict
from django_hstore_mixin.serializers import toJson
class HstoreMixin(models.Model):
""" Data field to be added to model to enable Hstore field. Actual
hstore field hidden with underscore, property field serializes and
deserializes data upon setting/getting. """
_data = hstore.DictionaryField(
'KeyValueStore',
db_index=True,
default={},
blank=True
)
objects = hstore.HStoreManager()
class Meta:
abstract = True
def clean(self):
""" Ensure that all Hstore data is stored as valid JSON.
NOTE: By default, this is not called automatically when you call
save() method. """
for key, value in self._data.items():
try:
json.loads(value)
except ValueError:
msg = "The value of key \"%s\" does not appear to be valid JSON: %s. " % (key, value)
msg += "Hstore values must be stored as JSON. Maybe you meant to use %s?" % json.dumps(value)
raise ValidationError(msg)
return super(HstoreMixin, self).clean()
@property
def data(self):
""" Decode data from JSON """
return JsonDict(self._data, modelInstance=self)
@data.setter
def data(self, value):
""" Encode data to JSON """
if not self._data:
self._data = {k: toJson(v) for k, v in value.items()}
else:
self._data = JsonDict(value, modelInstance=self)
| mit | Python |
b12e23dbb70cad868fc07f0686f765865a4d0ca3 | Remove redundant sublime.Region call | everyonesdesign/OpenSearchInNewTab | OpenSearchInNewTab.py | OpenSearchInNewTab.py | import re
from threading import Timer
import sublime_plugin
import sublime
DEFAULT_NAME = 'Find Results'
ALT_NAME_BASE = DEFAULT_NAME + ' '
MAX_QUERY = 16
NEXT_LINE_SYMBOL = '↲'
ELLIPSIS = '…'
def truncate(str):
return str[:MAX_QUERY].rstrip() + ELLIPSIS if len(str) > MAX_QUERY else str
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
t = Timer(.001, self.update_view, (view,))
t.start()
def get_alt_name(self, view):
first_line_coords = view.full_line(sublime.Region(0, 0))
first_line = view.substr(first_line_coords)
match = re.search('^Searching \d+ files for "(.*?)(")?$', first_line)
if match:
query = match.group(1)
is_multiline = not match.group(2)
if is_multiline:
query = query.rstrip() + ' ' + NEXT_LINE_SYMBOL
query = truncate(query)
return DEFAULT_NAME + ': "' + query + '"'
return ALT_NAME_BASE
def update_view(self, view):
view.set_name(self.get_alt_name(view))
def is_search_view(self, view):
name = view.name()
return (
name == DEFAULT_NAME or
name == ALT_NAME_BASE or
name == self.get_alt_name(view)
)
| import re
from threading import Timer
import sublime_plugin
import sublime
DEFAULT_NAME = 'Find Results'
ALT_NAME_BASE = DEFAULT_NAME + ' '
MAX_QUERY = 16
NEXT_LINE_SYMBOL = '↲'
ELLIPSIS = '…'
def truncate(str):
return str[:MAX_QUERY].rstrip() + ELLIPSIS if len(str) > MAX_QUERY else str
class OpenSearchInNewTab(sublime_plugin.EventListener):
# set a bit changed name
# so the tab won't be bothered
# during new search
def on_activated(self, view):
if self.is_search_view(view):
t = Timer(.001, self.update_view, (view,))
t.start()
def get_alt_name(self, view):
first_line_coords = view.full_line(sublime.Region(0, 0))
first_line = view.substr(sublime.Region(*first_line_coords))
match = re.search('^Searching \d+ files for "(.*?)(")?$', first_line)
if match:
query = match.group(1)
is_multiline = not match.group(2)
if is_multiline:
query = query.rstrip() + ' ' + NEXT_LINE_SYMBOL
query = truncate(query)
return DEFAULT_NAME + ': "' + query + '"'
return ALT_NAME_BASE
def update_view(self, view):
view.set_name(self.get_alt_name(view))
def is_search_view(self, view):
name = view.name()
return (
name == DEFAULT_NAME or
name == ALT_NAME_BASE or
name == self.get_alt_name(view)
)
| mit | Python |
4c36b2470b7ef4c33a13cf4dfeb3088dce7c934c | Fix example typo | AlienVault-Labs/OTX-Python-SDK | examples/is_malicious/is_malicious.py | examples/is_malicious/is_malicious.py | # This script tells if a File, IP, Domain or URL may be malicious according to the data in OTX
from OTXv2 import OTXv2
import argparse
import get_malicious
import hashlib
# Your API key
API_KEY = ''
OTX_SERVER = 'https://otx.alienvault.com/'
otx = OTXv2(API_KEY, server=OTX_SERVER)
parser = argparse.ArgumentParser(description='OTX CLI Example')
parser.add_argument('-ip', help='IP eg; 4.4.4.4', required=False)
parser.add_argument('-host',
help='Hostname eg; www.alienvault.com', required=False)
parser.add_argument(
'-url', help='URL eg; http://www.alienvault.com', required=False)
parser.add_argument(
'-hash', help='Hash of a file eg; 7b42b35832855ab4ff37ae9b8fa9e571', required=False)
parser.add_argument(
'-file', help='Path to a file, eg; malware.exe', required=False)
args = vars(parser.parse_args())
if args['ip']:
alerts = get_malicious.ip(otx, args['ip'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['host']:
alerts = get_malicious.hostname(otx, args['host'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['url']:
alerts = get_malicious.url(otx, args['url'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['hash']:
alerts = get_malicious.file(otx, args['hash'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['file']:
hash = hashlib.md5(open(args['file'], 'rb').read()).hexdigest()
alerts = get_malicious.file(otx, hash)
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
| # This script tells if a File, IP, Domain or URL may be malicious according to the data in OTX
from OTXv2 import OTXv2
import argparse
import get_malicious
import hashlib
# Your API key
API_KEY = ''
OTX_SERVER = 'https://otx.alienvault.com/'
otx = OTXv2(API_KEY, server=OTX_SERVER)
parser = argparse.ArgumentParser(description='OTX CLI Example')
parser.add_argument('-ip', help='IP eg; 4.4.4.4', required=False)
parser.add_argument('-host',
help='Hostname eg; www.alienvault.com', required=False)
parser.add_argument(
'-url', help='URL eg; http://www.alienvault.com', required=False)
parser.add_argument(
'-hash', help='Hash of a file eg; 7b42b35832855ab4ff37ae9b8fa9e571', required=False)
parser.add_argument(
'-file', help='Path to a file, eg; malware.exe', required=False)
args = vars(parser.parse_args())
if args['ip']:
alerts = get_malicious.ip(otx, args['ip'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['host']:
alerts = get_malicious.hostname(otx, args['hostname'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['url']:
alerts = get_malicious.url(otx, args['url'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['hash']:
alerts = get_malicious.file(otx, args['hash'])
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
if args['file']:
hash = hashlib.md5(open(args['file'], 'rb').read()).hexdigest()
alerts = get_malicious.file(otx, hash)
if len(alerts) > 0:
print 'Identified as potentially malicious'
print str(alerts)
else:
print 'Unknown or not identified as malicious'
| apache-2.0 | Python |
317a5e903f5b1c3214c0a03b59cfb36314acd5ed | Allow commands that do not require a permission. | automatron/automatron | twisted/plugins/automatron_control.py | twisted/plugins/automatron_control.py | from twisted.internet import defer
from zope.interface import classProvides, implements
from automatron.command import IAutomatronCommandHandler
from automatron.plugin import IAutomatronPluginFactory, STOP
class AutomatronControlPlugin(object):
classProvides(IAutomatronPluginFactory)
implements(IAutomatronCommandHandler)
name = 'notify_control'
priority = 100
def __init__(self, controller):
self.controller = controller
command_map = {
#command: (help, min_args, max_args, permission)
'join': ('<channel> [key]', 1, 2, 'channel'),
'leave': ('<channel> [reason]', 1, 2, 'channel'),
'say': ('<channel> <message>', 2, 2, 'say'),
}
def on_command(self, client, user, command, args):
if command in self.command_map:
self._on_command(client, user, command, args)
return STOP
@defer.inlineCallbacks
def _on_command(self, client, user, command, args):
nickname = client.parse_user(user)[0]
config = self.command_map[command]
if config[3] is not None:
if not (yield self.controller.config.has_permission(client.server, None, user, config[3])):
client.msg(nickname, 'You\'re not authorized to do that.')
return
if not config[1] <= len(args) <= config[2]:
client.msg(nickname, 'Invalid syntax. Use: %s %s' % (command, config[0]))
return
getattr(self, '_on_command_%s' % command)(client, user, *args)
def _on_command_join(self, client, user, channel, key=None):
if key is not None:
self.controller.config.update_value('channel', client.server, channel, 'key', key)
client.join(channel, key)
else:
d = self.controller.config.get_value('channel', client.server, channel, 'key')
d.addCallback(lambda channel_key, _: client.join(channel, channel_key))
def _on_command_leave(self, client, user, channel, reason=None):
client.leave(channel, reason if reason is not None else 'Leaving...')
def _on_command_say(self, client, user, channel, message):
client.msg(channel, message)
| from twisted.internet import defer
from zope.interface import classProvides, implements
from automatron.command import IAutomatronCommandHandler
from automatron.plugin import IAutomatronPluginFactory, STOP
class AutomatronControlPlugin(object):
classProvides(IAutomatronPluginFactory)
implements(IAutomatronCommandHandler)
name = 'notify_control'
priority = 100
def __init__(self, controller):
self.controller = controller
command_map = {
#command: (help, min_args, max_args, permission)
'join': ('<channel> [key]', 1, 2, 'channel'),
'leave': ('<channel> [reason]', 1, 2, 'channel'),
'say': ('<channel> <message>', 2, 2, 'say'),
}
def on_command(self, client, user, command, args):
if command in self.command_map:
self._on_command(client, user, command, args)
return STOP
@defer.inlineCallbacks
def _on_command(self, client, user, command, args):
nickname = client.parse_user(user)[0]
config = self.command_map[command]
if not (yield self.controller.config.has_permission(client.server, None, user, config[3])):
client.msg(nickname, 'You\'re not authorized to do that.')
return
if not config[1] <= len(args) <= config[2]:
client.msg(nickname, 'Invalid syntax. Use: %s %s' % (command, config[0]))
return
getattr(self, '_on_command_%s' % command)(client, user, *args)
def _on_command_join(self, client, user, channel, key=None):
if key is not None:
self.controller.config.update_value('channel', client.server, channel, 'key', key)
client.join(channel, key)
else:
d = self.controller.config.get_value('channel', client.server, channel, 'key')
d.addCallback(lambda channel_key, _: client.join(channel, channel_key))
def _on_command_leave(self, client, user, channel, reason=None):
client.leave(channel, reason if reason is not None else 'Leaving...')
def _on_command_say(self, client, user, channel, message):
client.msg(channel, message)
| mit | Python |
24b10cbfcfb1334a096463a008c2ada4e23b03fe | Fix the tox tool | allmightyspiff/softlayer-python,softlayer/softlayer-python | SoftLayer/CLI/virt/capacity/__init__.py | SoftLayer/CLI/virt/capacity/__init__.py | """Manages Reserved Capacity."""
# :license: MIT, see LICENSE for more details.
import importlib
import os
import click
CONTEXT = {'help_option_names': ['-h', '--help'],
'max_content_width': 999}
class CapacityCommands(click.MultiCommand):
"""Loads module for capacity related commands.
Will automatically replace _ with - where appropriate.
I'm not sure if this is better or worse than using a long list of manual routes, so I'm trying it here.
CLI/virt/capacity/create_guest.py -> slcli vs capacity create-guest
"""
def __init__(self, **attrs):
click.MultiCommand.__init__(self, **attrs)
self.path = os.path.dirname(__file__)
def list_commands(self, ctx):
"""List all sub-commands."""
commands = []
for filename in os.listdir(self.path):
if filename == '__init__.py':
continue
if filename.endswith('.py'):
commands.append(filename[:-3].replace("_", "-"))
commands.sort()
return commands
def get_command(self, ctx, cmd_name):
"""Get command for click."""
path = "%s.%s" % (__name__, cmd_name)
path = path.replace("-", "_")
try:
module = importlib.import_module(path)
return getattr(module, 'cli')
except ModuleNotFoundError as ex:
print(ex.name)
# Required to get the sub-sub-sub command to work.
@click.group(cls=CapacityCommands, context_settings=CONTEXT)
def cli():
"""Base command for all capacity related concerns"""
| """Manages Reserved Capacity."""
# :license: MIT, see LICENSE for more details.
import importlib
import os
import click
CONTEXT = {'help_option_names': ['-h', '--help'],
'max_content_width': 999}
class CapacityCommands(click.MultiCommand):
"""Loads module for capacity related commands.
Will automatically replace _ with - where appropriate.
I'm not sure if this is better or worse than using a long list of manual routes, so I'm trying it here.
CLI/virt/capacity/create_guest.py -> slcli vs capacity create-guest
"""
def __init__(self, **attrs):
click.MultiCommand.__init__(self, **attrs)
self.path = os.path.dirname(__file__)
def list_commands(self, ctx):
"""List all sub-commands."""
commands = []
for filename in os.listdir(self.path):
if filename == '__init__.py':
continue
if filename.endswith('.py'):
commands.append(filename[:-3].replace("_", "-"))
commands.sort()
return commands
def get_command(self, ctx, cmd_name):
"""Get command for click."""
path = "%s.%s" % (__name__, cmd_name)
path = path.replace("-", "_")
try:
module = importlib.import_module(path)
return getattr(module, 'cli')
except ModuleNotFoundError as ex:
print(ex.name)
# Required to get the sub-sub-sub command to work.
@click.group(cls=CapacityCommands, context_settings=CONTEXT)
def cli():
"""Base command for all capacity related concerns"""
| mit | Python |
546bc3d6b25daba9619152183904031bf602f9f2 | test archaeo convnet | reinvantveer/Topology-Learning,reinvantveer/Topology-Learning,reinvantveer/Topology-Learning | model/grid_search.py | model/grid_search.py | import os
import socket
import sys
# import numpy as np
from sklearn.model_selection import ParameterGrid
from topoml_util.slack_send import notify
SCRIPT_NAME = os.path.basename(__file__)
SCRIPT_VERSION = '1.0.1'
SIGNATURE = '{} {} on {}'.format(SCRIPT_NAME, SCRIPT_VERSION, socket.gethostname())
N_TIMES = 1
if len(sys.argv) > 1:
script_name = sys.argv[1]
else: # resort to default, for
# script_name = 'neighborhood_dense.py'
# script_name = 'neighborhood_convnet.py'
# script_name = 'neighborhood_lstm.py'
# script_name = 'building_dense.py'
# script_name = 'building_convnet.py'
# script_name = 'building_lstm.py'
# script_name = 'archaeology_dense.py'
script_name = 'archaeology_convnet.py'
# script_name = 'archaeology_lstm.py'
HYPERPARAMS = {
# 'BATCH_SIZE': [512],
# 'REPEAT_DEEP_ARCH': [1, 2],
# 'KERNEL_SIZE': np.linspace(1, 8, 8, dtype=int),
# 'LSTM_SIZE': np.linspace(64, 128, 3, dtype=int),
# 'DENSE_SIZE': [64],
# 'EPOCHS': [200],
# 'LEARNING_RATE': [1e-3, 8e-4, 6e-4, 4e-4, 2e-4],
'LEARNING_RATE': [1e-2, 8e-3, 6e-3, 4e-3, 2e-3, 1e-3],
# 'GEOM_SCALE': [1e0, 1e-1, 1e-2, 1e-3],
# 'RECURRENT_DROPOUT': [0.0],
# 'PATIENCE': [0, 1, 4, 8, 16, 32],
# 'EARLY_STOPPING': [0],
}
grid = list(ParameterGrid(HYPERPARAMS))
for configuration in grid:
envs = []
# Set environment variables (this allows you to do hyperparam searches from any scripting environment)
for key, value in configuration.items():
os.environ[key] = str(value)
# repeat to get a sense of results spread
for _ in range(N_TIMES):
r_code = os.system('python3 {}'.format(script_name))
if not r_code == 0:
print('Grid search exited with error')
notify(SIGNATURE, 'error')
sys.exit(1)
notify(SIGNATURE, 'success')
print('Grid search {} finished successfully'.format(SIGNATURE))
| import os
import socket
import sys
# import numpy as np
from sklearn.model_selection import ParameterGrid
from topoml_util.slack_send import notify
SCRIPT_NAME = os.path.basename(__file__)
SCRIPT_VERSION = '1.0.1'
SIGNATURE = '{} {} on {}'.format(SCRIPT_NAME, SCRIPT_VERSION, socket.gethostname())
N_TIMES = 1
if len(sys.argv) > 1:
script_name = sys.argv[1]
else: # resort to default, for
# script_name = 'neighborhood_dense.py'
# script_name = 'neighborhood_convnet.py'
# script_name = 'neighborhood_lstm.py'
# script_name = 'building_dense.py'
# script_name = 'building_convnet.py'
# script_name = 'building_lstm.py'
script_name = 'archaeology_dense.py'
# script_name = 'archaeology_convnet.py'
# script_name = 'archaeology_lstm.py'
HYPERPARAMS = {
# 'BATCH_SIZE': [512],
# 'REPEAT_DEEP_ARCH': [1, 2],
# 'KERNEL_SIZE': np.linspace(1, 8, 8, dtype=int),
# 'LSTM_SIZE': np.linspace(64, 128, 3, dtype=int),
# 'DENSE_SIZE': [64],
# 'EPOCHS': [200],
'LEARNING_RATE': [1e-3, 8e-4, 6e-4, 4e-4, 2e-4],
# 'LEARNING_RATE': [1e-2, 8e-3, 4e-3, 2e-3, 1e-3],
# 'GEOM_SCALE': [1e0, 1e-1, 1e-2, 1e-3],
# 'RECURRENT_DROPOUT': [0.0],
# 'PATIENCE': [0, 1, 4, 8, 16, 32],
# 'EARLY_STOPPING': [0],
}
grid = list(ParameterGrid(HYPERPARAMS))
for configuration in grid:
envs = []
# Set environment variables (this allows you to do hyperparam searches from any scripting environment)
for key, value in configuration.items():
os.environ[key] = str(value)
# repeat to get a sense of results spread
for _ in range(N_TIMES):
r_code = os.system('python3 {}'.format(script_name))
if not r_code == 0:
print('Grid search exited with error')
notify(SIGNATURE, 'error')
sys.exit(1)
notify(SIGNATURE, 'success')
print('Grid search {} finished successfully'.format(SIGNATURE))
| mit | Python |
c5f85200315750fbd8fec1fd1eb020cc1216aaaf | Revise time/space complexity | bowen0701/algorithms_data_structures | lc046_permutations.py | lc046_permutations.py | """Leetcode 46. Permutations
Medium
URL: https://leetcode.com/problems/permutations/
Given a collection of distinct integers, return all possible permutations.
Example:
Input: [1,2,3]
Output:
[
[1,2,3],
[1,3,2],
[2,1,3],
[2,3,1],
[3,1,2],
[3,2,1]
]
"""
class Solution(object):
def _backtrack(self, permutations, temps, nums):
if len(temps) == len(nums):
# One of permutations is completed.
permutations.append(temps[:])
return None
for i in range(len(nums)):
# If num i was used, skip it; otherwise add it to temps.
if nums[i] in temps:
continue
temps.append(nums[i])
# Apply DFS by recursion with backtracking.
self._backtrack(permutations, temps, nums)
temps.pop()
def permute(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
Time complexity: O(n * n!), where
- the 1st component, n, is for copying temps,
- the 2nd component, n!, is for permutation.
Space complexity: O(n).
"""
permutations = []
self._backtrack(permutations, [], nums)
return permutations
def main():
nums = [1, 2, 3]
print Solution().permute(nums)
if __name__ == '__main__':
main()
| """Leetcode 46. Permutations
Medium
URL: https://leetcode.com/problems/permutations/
Given a collection of distinct integers, return all possible permutations.
Example:
Input: [1,2,3]
Output:
[
[1,2,3],
[1,3,2],
[2,1,3],
[2,3,1],
[3,1,2],
[3,2,1]
]
"""
class Solution(object):
def _backtrack(self, permutations, temps, nums):
if len(temps) == len(nums):
# One of permutations is completed.
permutations.append(temps[:])
return None
for i in range(len(nums)):
# Constraint: If num i was used, skip it; otherwise add it to temps.
if nums[i] in temps:
continue
temps.append(nums[i])
# Apply DFS by recursion with backtracking.
self._backtrack(permutations, temps, nums)
temps.pop()
def permute(self, nums):
"""
:type nums: List[int]
:rtype: List[List[int]]
Time complexity: O(n!).
Space complexity: O(n).
"""
permutations = []
self._backtrack(permutations, [], nums)
return permutations
def main():
nums = [1, 2, 3]
print Solution().permute(nums)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
d2a8e54aaf40a3c69a18fb0140ea0309fea713ee | Fix typo | harvitronix/rl-rc-car | rl-rc-car/rccar_server.py | rl-rc-car/rccar_server.py | """
This lets us connect to the Pi that controls the RCCar. It acts as an
interface to the RCcar class.
"""
import socket
from rccar import RCCar
class RCCarServer:
def __init__(self, host='', port=8888, size=1024, backlog=5):
print("Setting up server.")
self.size = size
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# self.s.bind((socket.gethostname(), port))
self.s.bind((host, port))
self.s.listen(backlog)
self.car = RCCar(apply_time=0.2, wait_time=0.4)
print("Ready.")
def cleanup_gpio(self):
self.car.cleanup_gpio()
def step(self, action):
self.car.step(action)
def recover(self):
self.car.recover()
if __name__ == '__main__':
car_server = RCCarServer()
while True:
conn, address = car_server.s.accept()
data = conn.recv(car_server.size)
data = data.decode()
print("Received: %s" % data)
conn.close()
if 'step' in data:
action = data.split('-')[1]
car_server.step(action)
elif data == 'cleanup_gpio':
car_server.cleanup_gpio()
elif data == 'recover':
car_server.recover()
| """
This lets us connect to the Pi that controls the RCCar. It acts as an
interface to the RCcar class.
"""
import socket
from rccar import RCCar
class RCCarServer:
def __init__(self, host='', port=8888, size=1024, backlog=5):
print("Setting up server.")
self.size = size
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# self.s.bind((socket.gethostname(), port))
self.s.bind((host, port))
self.s.listen(backlog)
self.car = RCCar(apply_time=0.2, wait_time=0.4)
print("Ready.")
def cleanup_gpio(self):
self.car.cleanup_gpio()
def step(self, action):
self.car.step(action)
def recover(self):
self.car.recover()
if __name__ == '__main__':
car_server = RCCarServer()
while True:
conn, address = car_server.s.accept()
data = conn.recv(car_server.size)
data = data.decode()
print("Received: %s" data)
conn.close()
if 'step' in data:
action = data.split('-')[1]
car_server.step(action)
elif data == 'cleanup_gpio':
car_server.cleanup_gpio()
elif data == 'recover':
car_server.recover()
| mit | Python |
977aeae2bfb8c7c70cd99db7aa23b368e52368b7 | Bump version to 0.11.1 | diefenbach/lfs-theme,diefenbach/lfs-theme | lfs_theme/__init__.py | lfs_theme/__init__.py | __version__ = "0.11.1"
| __version__ = "0.11"
| bsd-3-clause | Python |
615ca7c6ee8964779240656862e030c1d457e0db | use environment variables to set parameters for postgresql | armijnhemel/binaryanalysis | src/bat/batdb.py | src/bat/batdb.py | #!/usr/bin/python
## Binary Analysis Tool
## Copyright 2015 Armijn Hemel for Tjaldur Software Governance Solutions
## Licensed under Apache 2.0, see LICENSE file for details
'''
Abstraction class for BAT databases. Currently supported: sqlite3, postgresql
'''
import os.path
class BatDb():
def __init__(self, dbbackend):
self.conn = None
self.dbbackend = dbbackend
def getConnection(self, database, scanenv={}):
if self.dbbackend == 'sqlite3':
## check if the database file exists
if not os.path.exists(database):
return
import sqlite3
self.conn = sqlite3.connect(database)
self.conn.text_factory = str
elif self.dbbackend == 'postgresql':
import psycopg2
if not 'POSTGRESQL_USER' in scanenv:
return
if not 'POSTGRESQL_PASSWORD' in scanenv:
return
if not 'POSTGRESQL_DB' in scanenv:
return
try:
self.conn = psycopg2.connect("dbname=%s user=%s password=%s" % (scanenv['POSTGRESQL_DB'],scanenv['POSTGRESQL_USER'],scanenv['POSTGRESQL_PASSWORD']))
except Exception, e:
print e
return
return self.conn
| #!/usr/bin/python
## Binary Analysis Tool
## Copyright 2015 Armijn Hemel for Tjaldur Software Governance Solutions
## Licensed under Apache 2.0, see LICENSE file for details
'''
Abstraction class for BAT databases. Currently supported: sqlite3, postgresql
'''
import os.path
class BatDb():
def __init__(self, dbbackend):
self.conn = None
self.dbbackend = dbbackend
def getConnection(self, database):
if self.dbbackend == 'sqlite3':
## check if the database file exists
if not os.path.exists(database):
return
import sqlite3
self.conn = sqlite3.connect(database)
self.conn.text_factory = str
elif self.dbbackend == 'postgresql':
import psycopg2
## TODO: use environment variables for this instead of hardcoding
try:
self.conn = psycopg2.connect("dbname=bat user=bat password=bat")
except Exception, e:
return
return self.conn
| apache-2.0 | Python |
0038ad7cf15a1cb9cd29bc446e91e5cc0144d3c9 | Update tests_output.py | RonsenbergVI/trendpy,RonsenbergVI/trendpy | trendpy/tests/tests_output.py | trendpy/tests/tests_output.py | # -*- coding: utf-8 -*-
# tests_output.py
# MIT License
# Copyright (c) 2017 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(os.path.dirname(current_dir))
sys.path.insert(0,parent_dir)
import trendpy.output
class TestsOutput(unittest.testcase):
def setUp(self):
pass
def tearDown(self):
pass
| # -*- coding: utf-8 -*-
# tests_output.py
# MIT License
# Copyright (c) 2017 Rene Jean Corneille
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import sys
import inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(os.path.dirname(current_dir))
sys.path.insert(0,parent_dir)
import trendpy.output
class TestsOutputs(unittest.testcase):
def setUp(self):
pass
def tearDown(self):
pass
| mit | Python |
8e16ce1605201187e3618a2f3650951ac40e188e | Make it keyword-only. | mwchase/class-namespaces,mwchase/class-namespaces | class_namespaces/inspector.py | class_namespaces/inspector.py | """Inspector.
Wrapper around objects, helps expose protocols.
"""
import collections
class _Inspector(collections.namedtuple('_Inspector', ['object', 'dict'])):
"""Wrapper around objects. Provides access to protocold."""
__slots__ = ()
def __new__(cls, obj, *, mro):
dct = collections.ChainMap(*[vars(cls) for cls in mro])
return super().__new__(cls, obj, dct)
def get_as_attribute(self, key):
"""Return attribute with the given name, or raise AttributeError."""
try:
return self.dict[key]
# These lines will execute if an incomplete data descriptor is used for
# the operation it doesn't define.
except KeyError:
raise AttributeError(key)
| """Inspector.
Wrapper around objects, helps expose protocols.
"""
import collections
class _Inspector(collections.namedtuple('_Inspector', ['object', 'dict'])):
"""Wrapper around objects. Provides access to protocold."""
__slots__ = ()
def __new__(cls, obj, mro):
dct = collections.ChainMap(*[vars(cls) for cls in mro])
return super().__new__(cls, obj, dct)
def get_as_attribute(self, key):
"""Return attribute with the given name, or raise AttributeError."""
try:
return self.dict[key]
# These lines will execute if an incomplete data descriptor is used for
# the operation it doesn't define.
except KeyError:
raise AttributeError(key)
| mit | Python |
b6451ef1f00bdfe1ed2cbbc88526fee4cb50e6ca | Remove signxml requirement (#1081) | cloudify-cosmo/cloudify-manager,cloudify-cosmo/cloudify-manager,cloudify-cosmo/cloudify-manager | rest-service/setup.py | rest-service/setup.py | ########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from setuptools import setup, find_packages
install_requires = [
'Flask==0.10.1',
'flask-restful==0.2.5',
'flask-restful-swagger==0.12',
'flask-sqlalchemy==2.1',
'flask-security==1.7.5',
'flask-migrate==2.0.3',
'ldappy',
'supervise==1.1.1',
'cloudify-dsl-parser==4.3.dev1',
'requests>=2.7.0,<3.0.0',
'gunicorn==18.0',
'PyYAML==3.10',
'elasticsearch==1.6.0',
'celery==3.1.17',
'psutil==3.3.0',
'jsonpickle==0.9.2',
'wagon[venv]==0.6.1',
'python-dateutil==2.5.3',
'voluptuous==0.9.3',
'toolz==0.8.2',
'pyrabbit==1.1.0',
]
if os.environ.get('REST_SERVICE_BUILD'):
# since psycopg2 installation require postgres,
# we're adding it only to the build process,
# where we know there is postgresql..
# tests will use pg8000, which doesn't require postgres
install_requires.append('psycopg2==2.6.2')
setup(
name='cloudify-rest-service',
version='4.3.dev1',
author='Gigaspaces',
author_email='cosmo-admin@gigaspaces.com',
packages=find_packages(
include='manager_rest*', exclude=('manager_rest.test*',)
),
package_data={'manager_rest': ['VERSION']},
license='LICENSE',
description='Cloudify manager rest service',
zip_safe=False,
install_requires=install_requires,
extras_require={
'dbus': ['dbus-python==1.2.4'],
},
)
| ########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import os
from setuptools import setup, find_packages
install_requires = [
'Flask==0.10.1',
'flask-restful==0.2.5',
'flask-restful-swagger==0.12',
'flask-sqlalchemy==2.1',
'flask-security==1.7.5',
'flask-migrate==2.0.3',
'ldappy',
'supervise==1.1.1',
'cloudify-dsl-parser==4.3.dev1',
'requests>=2.7.0,<3.0.0',
'gunicorn==18.0',
'PyYAML==3.10',
'elasticsearch==1.6.0',
'celery==3.1.17',
'psutil==3.3.0',
'jsonpickle==0.9.2',
'wagon[venv]==0.6.1',
'python-dateutil==2.5.3',
'voluptuous==0.9.3',
'toolz==0.8.2',
'pyrabbit==1.1.0',
'signxml==2.4.0',
]
if os.environ.get('REST_SERVICE_BUILD'):
# since psycopg2 installation require postgres,
# we're adding it only to the build process,
# where we know there is postgresql..
# tests will use pg8000, which doesn't require postgres
install_requires.append('psycopg2==2.6.2')
setup(
name='cloudify-rest-service',
version='4.3.dev1',
author='Gigaspaces',
author_email='cosmo-admin@gigaspaces.com',
packages=find_packages(
include='manager_rest*', exclude=('manager_rest.test*',)
),
package_data={'manager_rest': ['VERSION']},
license='LICENSE',
description='Cloudify manager rest service',
zip_safe=False,
install_requires=install_requires,
extras_require={
'dbus': ['dbus-python==1.2.4'],
},
)
| apache-2.0 | Python |
7917dadd98207688fb1c40451310c953827c7589 | Add comments and contour line label to the decision plot | eliben/deep-learning-samples,eliben/deep-learning-samples | logistic-regression/plot_binary_decision.py | logistic-regression/plot_binary_decision.py | # Helper code to plot a binary decision region.
#
# Eli Bendersky (http://eli.thegreenplace.net)
# This code is in the public domain
from __future__ import print_function
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# Our input is (x,y) -- 2D. Output is the scalar y^ computed by the
# dot-product of (1, x, y) with theta (1 is for the bias, and theta_0 is the
# bias parameter). This is a plane equation in 3D.
# Therefore, the plot we aim to produce is 3D -- some scalar as a function
# of two parameters. The contours are then the equi-value lines of the 3D
# plot, and we're only interested in the main contour at value 0 -- meaning
# the line where the plane intersects the x/y plane.
#
# Note: if we flip all values here we get the same intersection.
theta = np.array([[-4], [0.5], [1]])
fig, ax = plt.subplots()
fig.set_tight_layout(True)
xs = np.linspace(-4, 8, 200)
ys = np.linspace(-4, 8, 200)
xsgrid, ysgrid = np.meshgrid(xs, ys)
plane = np.zeros_like(xsgrid)
for i in range(xsgrid.shape[0]):
for j in range(xsgrid.shape[1]):
plane[i, j] = np.array([1, xsgrid[i, j], ysgrid[i, j]]).dot(theta)
cs = ax.contour(xsgrid, ysgrid, plane, levels=[0])
cs.clabel(inline=1)
ax.grid(True)
ax.annotate(r'here $\hat{y}(x) > 0$', xy=(4, 4), fontsize=20)
ax.annotate(r'here $\hat{y}(x) < 0$', xy=(0, 0), fontsize=20)
fig.savefig('line.png', dpi=80)
plt.show()
| # Helper code to plot a binary decision region.
#
# Eli Bendersky (http://eli.thegreenplace.net)
# This code is in the public domain
from __future__ import print_function
import matplotlib.pyplot as plt
import numpy as np
if __name__ == '__main__':
# Note: if we flip all values here we get the same intersection.
theta = np.array([[-4], [0.5], [1]])
fig, ax = plt.subplots()
fig.set_tight_layout(True)
xs = np.linspace(-4, 8, 200)
ys = np.linspace(-4, 8, 200)
xsgrid, ysgrid = np.meshgrid(xs, ys)
plane = np.zeros_like(xsgrid)
for i in range(xsgrid.shape[0]):
for j in range(xsgrid.shape[1]):
plane[i, j] = np.array([1, xsgrid[i, j], ysgrid[i, j]]).dot(theta)
ax.contour(xsgrid, ysgrid, plane, levels=[0])
ax.grid(True)
ax.annotate(r'here $\hat{y}(x) > 0$', xy=(4, 4), fontsize=20)
ax.annotate(r'here $\hat{y}(x) < 0$', xy=(0, 0), fontsize=20)
fig.savefig('line.png', dpi=80)
plt.show()
| unlicense | Python |
8b27fd2b172373dc8fe59449076b01c332676cf8 | Update comment to match example code | matplotlib/basemap,matplotlib/basemap,guziy/basemap,guziy/basemap | doc/users/figures/contour1.py | doc/users/figures/contour1.py | from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
# set up orthographic map projection with
# perspective of satellite looking down at 45N, 100W.
# use low resolution coastlines.
map = Basemap(projection='ortho',lat_0=45,lon_0=-100,resolution='l')
# draw coastlines, country boundaries, fill continents.
map.drawcoastlines(linewidth=0.25)
map.drawcountries(linewidth=0.25)
map.fillcontinents(color='coral',lake_color='aqua')
# draw the edge of the map projection region (the projection limb)
map.drawmapboundary(fill_color='aqua')
# draw lat/lon grid lines every 30 degrees.
map.drawmeridians(np.arange(0,360,30))
map.drawparallels(np.arange(-90,90,30))
# make up some data on a regular lat/lon grid.
nlats = 73; nlons = 145; delta = 2.*np.pi/(nlons-1)
lats = (0.5*np.pi-delta*np.indices((nlats,nlons))[0,:,:])
lons = (delta*np.indices((nlats,nlons))[1,:,:])
wave = 0.75*(np.sin(2.*lats)**8*np.cos(4.*lons))
mean = 0.5*np.cos(2.*lats)*((np.sin(2.*lats))**2 + 2.)
# compute native map projection coordinates of lat/lon grid.
x, y = map(lons*180./np.pi, lats*180./np.pi)
# contour data over the map.
cs = map.contour(x,y,wave+mean,15,linewidths=1.5)
plt.title('contour lines over filled continent background')
plt.show()
| from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
# set up orthographic map projection with
# perspective of satellite looking down at 50N, 100W.
# use low resolution coastlines.
map = Basemap(projection='ortho',lat_0=45,lon_0=-100,resolution='l')
# draw coastlines, country boundaries, fill continents.
map.drawcoastlines(linewidth=0.25)
map.drawcountries(linewidth=0.25)
map.fillcontinents(color='coral',lake_color='aqua')
# draw the edge of the map projection region (the projection limb)
map.drawmapboundary(fill_color='aqua')
# draw lat/lon grid lines every 30 degrees.
map.drawmeridians(np.arange(0,360,30))
map.drawparallels(np.arange(-90,90,30))
# make up some data on a regular lat/lon grid.
nlats = 73; nlons = 145; delta = 2.*np.pi/(nlons-1)
lats = (0.5*np.pi-delta*np.indices((nlats,nlons))[0,:,:])
lons = (delta*np.indices((nlats,nlons))[1,:,:])
wave = 0.75*(np.sin(2.*lats)**8*np.cos(4.*lons))
mean = 0.5*np.cos(2.*lats)*((np.sin(2.*lats))**2 + 2.)
# compute native map projection coordinates of lat/lon grid.
x, y = map(lons*180./np.pi, lats*180./np.pi)
# contour data over the map.
cs = map.contour(x,y,wave+mean,15,linewidths=1.5)
plt.title('contour lines over filled continent background')
plt.show()
| mit | Python |
82a5b0262c3f8d2dd16a860b7d4689b76969b98a | bump version to 0.8.2 | ivilata/pymultihash | multihash/version.py | multihash/version.py | __version__ = '0.8.2'
| __version__ = '0.9.0.dev1'
| mit | Python |
d5f75398a80d466c5638b8af5ad254eb6229cd87 | Apply homography | superquadratic/beat-bricks | lego.py | lego.py | import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.homography = None
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index] = [x, y]
if self.has_roi():
self.compute_homography()
def has_roi(self):
return self.rect_index == 3
def compute_homography(self):
src_points = self.rect
dst_points = np.zeros_like(src_points)
dst_points[1][0] = 512
dst_points[2] = [512, 512]
dst_points[3][1] = 512
self.homography = cv2.findHomography(src_points, dst_points)[0]
def process_frame(self, frame):
if self.homography is not None:
return cv2.warpPerspective(frame, self.homography, (512, 512))
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
| import numpy as np
import cv2
WINDOW_NAME = 'hello'
def global_on_mouse(event, x, y, unknown, lego_player):
lego_player.on_mouse(event, x, y)
class LegoPlayer(object):
def __init__(self):
self.rect = np.empty((4, 2))
self.rect_index = -1
cv2.namedWindow(WINDOW_NAME)
cv2.setMouseCallback(WINDOW_NAME, global_on_mouse, self)
self.capture = cv2.VideoCapture(0)
def on_mouse(self, event, x, y):
if event == cv2.EVENT_LBUTTONDOWN:
if self.has_roi():
self.rect_index = -1
self.rect_index += 1
self.rect[self.rect_index] = [x, y]
if self.has_roi():
self.compute_homography()
def has_roi(self):
return self.rect_index == 3
def compute_homography(self):
src_points = self.rect
dst_points = np.zeros_like(src_points)
dst_points[1][0] = 512
dst_points[2] = [512, 512]
dst_points[3][1] = 512
print cv2.findHomography(src_points, dst_points)
def process_frame(self, frame):
return cv2.split(frame)[2]
def loop(self):
while True:
success, frame = self.capture.read()
result = self.process_frame(frame)
cv2.imshow(WINDOW_NAME, result)
cv2.waitKey(10)
if __name__ == '__main__':
lego_player = LegoPlayer()
lego_player.loop()
| mit | Python |
64d3bc5190ec101a59891c6a085056bc6a780ff4 | remove unnecessary import | CyberReboot/vcontrol,cglewis/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol | vcontrol/rest/commands/plugins/remove.py | vcontrol/rest/commands/plugins/remove.py | from ...helpers import get_allowed
import ast
import json
import subprocess
import web
class RemovePluginCommandR:
"""
This endpoint is for removing a new plugin repository on a Vent machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def OPTIONS(self):
return self.POST()
def POST(self):
web.header('Access-Control-Allow-Origin', self.allow_origin)
data = web.data()
payload = {}
try:
payload = ast.literal_eval(data)
if type(payload) != dict:
payload = ast.literal_eval(json.loads(data))
except:
return "malformed json body"
try:
if "machine" in payload.keys():
if "url" in payload.keys():
url = payload["url"]
cmd = "/usr/local/bin/docker-machine ssh "+payload["machine"]+" \"python2.7 /data/plugin_parser.py remove_plugins "+url+"\""
output = subprocess.check_output(cmd, shell=True)
if output == "":
output = "successfully removed "+url
else:
output = "failed to remove plugin -- no url specified"
else:
output = "failed to remove plugin -- no machine specified"
except Exception as e:
output = str(e)
return output
| from ...helpers import get_allowed
import ast
import json
import os
import subprocess
import web
class RemovePluginCommandR:
"""
This endpoint is for removing a new plugin repository on a Vent machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def OPTIONS(self):
return self.POST()
def POST(self):
web.header('Access-Control-Allow-Origin', self.allow_origin)
data = web.data()
payload = {}
try:
payload = ast.literal_eval(data)
if type(payload) != dict:
payload = ast.literal_eval(json.loads(data))
except:
return "malformed json body"
try:
if "machine" in payload.keys():
if "url" in payload.keys():
url = payload["url"]
cmd = "/usr/local/bin/docker-machine ssh "+payload["machine"]+" \"python2.7 /data/plugin_parser.py remove_plugins "+url+"\""
output = subprocess.check_output(cmd, shell=True)
if output == "":
output = "successfully removed "+url
else:
output = "failed to remove plugin -- no url specified"
else:
output = "failed to remove plugin -- no machine specified"
except Exception as e:
output = str(e)
return output
| apache-2.0 | Python |
6fe5a416ed229e7ec8efab9d6b3dac43f16515b6 | Add the new domains db | dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/domain/__init__.py | corehq/apps/domain/__init__.py | from corehq.preindex import ExtraPreindexPlugin
from django.conf import settings
ExtraPreindexPlugin.register('domain', __file__, (
settings.NEW_DOMAINS_DB,
settings.NEW_USERS_GROUPS_DB,
settings.NEW_FIXTURES_DB,
'meta',
))
| from corehq.preindex import ExtraPreindexPlugin
from django.conf import settings
ExtraPreindexPlugin.register('domain', __file__, (
settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta'))
| bsd-3-clause | Python |
1de53534c48d1eecc7fea5d2040977afd97dacb2 | Make sure AVAILABLE_LAYOUTS is a tuple | ghisvail/vispy,drufat/vispy,drufat/vispy,Eric89GXL/vispy,ghisvail/vispy,michaelaye/vispy,Eric89GXL/vispy,drufat/vispy,michaelaye/vispy,ghisvail/vispy,Eric89GXL/vispy,michaelaye/vispy | vispy/visuals/graphs/layouts/__init__.py | vispy/visuals/graphs/layouts/__init__.py | import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold,
'spring_layout': fruchterman_reingold
}
AVAILABLE_LAYOUTS = tuple(_layout_map.keys())
def get_layout(name, *args, **kwargs):
"""
Retrieve a graph layout
Some graph layouts accept extra options. Please refer to their
documentation for more information.
Parameters
----------
name : string
The name of the layout. The variable `AVAILABLE_LAYOUTS`
contains all available layouts.
*args
Positional arguments which are passed to the layout.
**kwargs
Keyword arguments which are passed to the layout.
Returns
-------
layout : callable
The callable generator which will calculate the graph layout
"""
if name not in _layout_map:
raise KeyError(
"Graph layout '{}' not found. Should be one of {}".format(
name, ", ".join(AVAILABLE_LAYOUTS)
)
)
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| import inspect
from .random import random
from .circular import circular
from .force_directed import fruchterman_reingold
_layout_map = {
'random': random,
'circular': circular,
'force_directed': fruchterman_reingold,
'spring_layout': fruchterman_reingold
}
AVAILABLE_LAYOUTS = _layout_map.keys()
def get_layout(name, *args, **kwargs):
"""
Retrieve a graph layout
Some graph layouts accept extra options. Please refer to their
documentation for more information.
Parameters
----------
name : string
The name of the layout. The variable `AVAILABLE_LAYOUTS`
contains all available layouts.
*args
Positional arguments which are passed to the layout.
**kwargs
Keyword arguments which are passed to the layout.
Returns
-------
layout : callable
The callable generator which will calculate the graph layout
"""
if name not in _layout_map:
raise KeyError(
"Graph layout '{}' not found. Should be one of {}".format(
name, ", ".join(AVAILABLE_LAYOUTS)
)
)
layout = _layout_map[name]
if inspect.isclass(layout):
layout = layout(*args, **kwargs)
return layout
| bsd-3-clause | Python |
023e3cd0a2ef5717c5923c986690c28d070bdaaa | Update 0001.py | rusia-rak/My-Solutions-For-Show-Me-The-Code | rusia-rak/0001/0001.py | rusia-rak/0001/0001.py | #!/usr/bin/env python3
# This script generates 200 codes of length 10 and writes them to file result.txt
# Number varies in range of 0 to 9 inclusive.
import random
f = open("result.txt", 'w')
for _ in range(200):
code = ''
for _ in range(10):
num = random.randint(0,9)
code += str(num)
f.write(code + '\n')
f.close()
| #!/usr/bin/env python3
# This script generates 200 codes of length 10 and writes them to file results.txt
# Number varies in range of 0 to 9 inclusive.
import random
f = open("result.txt", 'w')
for _ in range(200):
code = ''
for _ in range(10):
num = random.randint(0,9)
code += str(num)
f.write(code + '\n')
f.close()
| mpl-2.0 | Python |
742b35fad64fbf2019877b75bcc92af71ad0bd5a | Fix minor issue with linux install | hamnox/dotfiles,lahwran/dotfiles,lahwran/dotfiles | dotfiles/os_specific/linux.py | dotfiles/os_specific/linux.py | from dotfiles import wrap_process
debian_mapping = {
"pip": "python-pip",
"ntp-daemon": "ntp"
}
def install_packages(packages):
deps = [debian_mapping.get(package, package) for package in packages]
wrap_process.call("apt-get", ["apt-get", "install", "-y"] + deps)
| from dotfiles import wrap_process
debian_mapping = {
"git": "git",
"vim": "vim",
"pip": "python-pip",
"fail2ban": "fail2ban",
"build-essential": "build-essential",
"python-dev": "python-dev",
"ntp-daemon": "ntp"
}
def install_packages(packages):
deps = [debian_mapping[package] for package in packages]
wrap_process.call("apt-get", ["apt-get", "install", "-y"] + deps)
| mit | Python |
e12432b0c97d1ddebf16df821fe6c77bb8b6a66b | Move Sites to the settings menu (and use decorator syntax for hooks) | mixxorz/wagtail,wagtail/wagtail,KimGlazebrook/wagtail-experiment,gasman/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,jnns/wagtail,serzans/wagtail,hanpama/wagtail,iho/wagtail,marctc/wagtail,kurtw/wagtail,nilnvoid/wagtail,nrsimha/wagtail,gasman/wagtail,jorge-marques/wagtail,Toshakins/wagtail,rsalmaso/wagtail,takeflight/wagtail,Tivix/wagtail,chimeno/wagtail,nilnvoid/wagtail,hanpama/wagtail,torchbox/wagtail,takeflight/wagtail,marctc/wagtail,hanpama/wagtail,chimeno/wagtail,iho/wagtail,taedori81/wagtail,benjaoming/wagtail,takeshineshiro/wagtail,bjesus/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,Tivix/wagtail,torchbox/wagtail,Klaudit/wagtail,rv816/wagtail,m-sanders/wagtail,nealtodd/wagtail,kaedroho/wagtail,FlipperPA/wagtail,chrxr/wagtail,mayapurmedia/wagtail,nrsimha/wagtail,WQuanfeng/wagtail,zerolab/wagtail,KimGlazebrook/wagtail-experiment,benjaoming/wagtail,janusnic/wagtail,taedori81/wagtail,tangentlabs/wagtail,iho/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,mephizzle/wagtail,thenewguy/wagtail,Pennebaker/wagtail,mikedingjan/wagtail,gasman/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,timorieber/wagtail,bjesus/wagtail,m-sanders/wagtail,stevenewey/wagtail,kurtrwall/wagtail,marctc/wagtail,darith27/wagtail,inonit/wagtail,mixxorz/wagtail,chrxr/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,JoshBarr/wagtail,nutztherookie/wagtail,davecranwell/wagtail,mephizzle/wagtail,KimGlazebrook/wagtail-experiment,Tivix/wagtail,jordij/wagtail,janusnic/wagtail,mayapurmedia/wagtail,rjsproxy/wagtail,hamsterbacke23/wagtail,mixxorz/wagtail,davecranwell/wagtail,jorge-marques/wagtail,gasman/wagtail,davecranwell/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,dresiu/wagtail,chimeno/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,nealtodd/wagtail,mjec/wagtail,dresiu/wagtail,serzans/wagtail,quru/wagtail,jnns/wagtail,nimasmi/wagtail,tangentlabs/wagtail,kaedroho/wagtail,bjesus/wagtail,takeshineshiro/wagtail,takeshineshiro/wagtail,nutztherookie/wagtail,dresiu/wagtail,rjsproxy/wagtail,torchbox/wagtail,kaedroho/wagtail,nutztherookie/wagtail,rv816/wagtail,kurtw/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,timorieber/wagtail,quru/wagtail,Pennebaker/wagtail,kurtw/wagtail,jordij/wagtail,dresiu/wagtail,zerolab/wagtail,serzans/wagtail,Pennebaker/wagtail,thenewguy/wagtail,iho/wagtail,iansprice/wagtail,Toshakins/wagtail,gasman/wagtail,stevenewey/wagtail,m-sanders/wagtail,JoshBarr/wagtail,zerolab/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,inonit/wagtail,thenewguy/wagtail,rjsproxy/wagtail,wagtail/wagtail,taedori81/wagtail,jorge-marques/wagtail,jnns/wagtail,chrxr/wagtail,Klaudit/wagtail,iansprice/wagtail,nealtodd/wagtail,takeflight/wagtail,Klaudit/wagtail,takeflight/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,Klaudit/wagtail,FlipperPA/wagtail,Toshakins/wagtail,chimeno/wagtail,timorieber/wagtail,quru/wagtail,gogobook/wagtail,mixxorz/wagtail,rv816/wagtail,stevenewey/wagtail,nilnvoid/wagtail,jnns/wagtail,mikedingjan/wagtail,rjsproxy/wagtail,nimasmi/wagtail,darith27/wagtail,mayapurmedia/wagtail,takeshineshiro/wagtail,nealtodd/wagtail,wagtail/wagtail,bjesus/wagtail,mjec/wagtail,janusnic/wagtail,jordij/wagtail,thenewguy/wagtail,marctc/wagtail,rsalmaso/wagtail,zerolab/wagtail,janusnic/wagtail,kurtw/wagtail,quru/wagtail,inonit/wagtail,chrxr/wagtail,gogobook/wagtail,rv816/wagtail,FlipperPA/wagtail,darith27/wagtail,benjaoming/wagtail,taedori81/wagtail,davecranwell/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,hanpama/wagtail,rsalmaso/wagtail,inonit/wagtail,nimasmi/wagtail,WQuanfeng/wagtail,stevenewey/wagtail,thenewguy/wagtail,taedori81/wagtail,m-sanders/wagtail,Toshakins/wagtail,gogobook/wagtail,iansprice/wagtail,JoshBarr/wagtail,chimeno/wagtail,jordij/wagtail,nrsimha/wagtail,Tivix/wagtail,jorge-marques/wagtail,WQuanfeng/wagtail,darith27/wagtail,mephizzle/wagtail,mephizzle/wagtail,JoshBarr/wagtail,nrsimha/wagtail,gogobook/wagtail,iansprice/wagtail,benjaoming/wagtail,mjec/wagtail,mjec/wagtail,mikedingjan/wagtail,torchbox/wagtail,rsalmaso/wagtail,kurtrwall/wagtail,dresiu/wagtail,serzans/wagtail,KimGlazebrook/wagtail-experiment | wagtail/wagtailsites/wagtail_hooks.py | wagtail/wagtailsites/wagtail_hooks.py | from django.conf.urls import include, url
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailsites import urls
@hooks.register('register_admin_urls')
def register_admin_urls():
return [
url(r'^sites/', include(urls)),
]
class SitesMenuItem(MenuItem):
def is_shown(self, request):
return request.user.is_superuser
@hooks.register('register_settings_menu_item')
def register_sites_menu_item():
return MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602)
| from django.conf.urls import include, url
from django.core import urlresolvers
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore import hooks
from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailsites import urls
def register_admin_urls():
return [
url(r'^sites/', include(urls)),
]
hooks.register('register_admin_urls', register_admin_urls)
def construct_main_menu(request, menu_items):
if request.user.is_superuser:
menu_items.append(
MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602)
)
hooks.register('construct_main_menu', construct_main_menu)
| bsd-3-clause | Python |
89b567c199ccf4835afa90ac9370191038918ed2 | Create method to open and release a connection from pyramid to the db. | bm5w/learning-journal,bm5w/learning-journal | journal.py | journal.py | # -*- coding: utf-8 -*-
import os
import logging
from pyramid.config import Configurator
from pyramid.session import SignedCookieSessionFactory
from pyramid.view import view_config
from waitress import serve
import psycopg2
from contextlib import closing
from pyramid.events import NewRequest, subscriber
DB_SCHEMA = """
CREATE TABLE IF NOT EXISTS entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
# add this just below the SQL table definition we just created
logging.basicConfig()
log = logging.getLogger(__file__)
@view_config(route_name='home', renderer='string')
def home(request):
return "Hello World"
# connect to the db
def connect_db(settings):
"""Return a connection to the configured database"""
return psycopg2.connect(settings['db'])
# a function to initialize db
def init_db():
"""Create database dables defined by DB_SCHEMA
Warning: This function will not update existing table definitions
"""
settings = {}
settings['db'] = os.environ.get(
'DATABASE_URL', 'dbname=learning-journal user=mark'
)
with closing(connect_db(settings)) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@subscriber(NewRequest)
def open_connection(event):
request = event.request
settings = request.registry.settings
request.db = connect_db(settings)
request.add_finished_callback(close_connection)
def close_connection(request):
"""close the database connection for this request
If there has been an error in the processing of the request, abort any
open transactions.
"""
db = getattr(request, 'db', None)
if db is not None:
if request.exception is not None:
db.rollback()
else:
db.commit()
request.db.close()
def main():
"""Create a configured wsgi app"""
settings = {}
settings['reload_all'] = os.environ.get('DEBUG', True)
settings['debug_all'] = os.environ.get('DEBUG', True)
settings['db'] = os.environ.get(
'DATABASE_URL', 'dbname=learning-journal user=mark'
)
# secret value for session signing:
secret = os.environ.get('JOURNAL_SESSION_SECRET', 'itsaseekrit')
session_factory = SignedCookieSessionFactory(secret)
# configuration setup
config = Configurator(
settings=settings,
session_factory=session_factory
)
config.add_route('home', '/')
config.scan()
app = config.make_wsgi_app()
return app
if __name__ == '__main__':
app = main()
port = os.environ.get('PORT', 5000)
serve(app, host='0.0.0.0', port=port)
| # -*- coding: utf-8 -*-
import os
import logging
from pyramid.config import Configurator
from pyramid.session import SignedCookieSessionFactory
from pyramid.view import view_config
from waitress import serve
import psycopg2
from contextlib import closing
DB_SCHEMA = """
CREATE TABLE IF NOT EXISTS entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
# add this just below the SQL table definition we just created
logging.basicConfig()
log = logging.getLogger(__file__)
@view_config(route_name='home', renderer='string')
def home(request):
return "Hello World"
# connect to the db
def connect_db(settings):
"""Return a connection to the configured database"""
return psycopg2.connect(settings['db'])
# a function to initialize db
def init_db():
"""Create database dables defined by DB_SCHEMA
Warning: This function will not update existing table definitions
"""
settings = {}
settings['db'] = os.environ.get(
'DATABASE_URL', 'dbname=learning-journal user=mark'
)
with closing(connect_db(settings)) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def main():
"""Create a configured wsgi app"""
settings = {}
settings['reload_all'] = os.environ.get('DEBUG', True)
settings['debug_all'] = os.environ.get('DEBUG', True)
settings['db'] = os.environ.get(
'DATABASE_URL', 'dbname=learning-journal user=mark'
)
# secret value for session signing:
secret = os.environ.get('JOURNAL_SESSION_SECRET', 'itsaseekrit')
session_factory = SignedCookieSessionFactory(secret)
# configuration setup
config = Configurator(
settings=settings,
session_factory=session_factory
)
config.add_route('home', '/')
config.scan()
app = config.make_wsgi_app()
return app
if __name__ == '__main__':
app = main()
port = os.environ.get('PORT', 5000)
serve(app, host='0.0.0.0', port=port)
| mit | Python |
4103384be8a7407c92711bf2f6652cc36a0efbe4 | Add doc string to cmd module | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/functions/cmd.py | salt/functions/cmd.py | '''
Module for shelling out commands, inclusion of this module should be
configurable for security reasons
'''
| apache-2.0 | Python | |
377dcc76ebd986154070a220b2b035b275085090 | Change server error email domain. | bboe/update_checker_app | update_checker_app/helpers.py | update_checker_app/helpers.py | """Defines various one-off helpers for the package."""
from functools import wraps
from time import time
import requests
def configure_logging(app):
"""Send ERROR log to ADMINS emails."""
ADMINS = ['bbzbryce@gmail.com']
if not app.debug:
import logging
from logging.handlers import SMTPHandler
mail_handler = SMTPHandler(
'127.0.0.1', 'server-error@updatechecker.bryceboe.com',
ADMINS, 'UpdateChecker Failed')
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
def package_cache(function):
"""Memoize the wrapped function."""
CACHE_TIME = 600
stored = {}
@wraps(function)
def wrapped(package_name):
now = time()
if package_name in stored:
updated, data = stored[package_name]
if now < updated + CACHE_TIME:
return data
data = function(package_name)
stored[package_name] = (now, data)
return data
return wrapped
@package_cache
def get_current_version(package):
"""Return information about the current version of package."""
r = requests.get('http://pypi.python.org/pypi/{0}/json'.format(package))
if r.status_code != 200:
return {'success': False}
upload_time = None
json_data = r.json()
for file_info in json_data['urls']:
if file_info['packagetype'] == 'sdist':
upload_time = file_info['upload_time']
return {'success': True, 'data': {'upload_time': upload_time,
'version': json_data['info']['version']}}
| """Defines various one-off helpers for the package."""
from functools import wraps
from time import time
import requests
def configure_logging(app):
"""Send ERROR log to ADMINS emails."""
ADMINS = ['bbzbryce@gmail.com']
if not app.debug:
import logging
from logging.handlers import SMTPHandler
mail_handler = SMTPHandler(
'127.0.0.1', 'server-error@update_checker.bryceboe.com',
ADMINS, 'UpdateChecker Failed')
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
def package_cache(function):
"""Memoize the wrapped function."""
CACHE_TIME = 600
stored = {}
@wraps(function)
def wrapped(package_name):
now = time()
if package_name in stored:
updated, data = stored[package_name]
if now < updated + CACHE_TIME:
return data
data = function(package_name)
stored[package_name] = (now, data)
return data
return wrapped
@package_cache
def get_current_version(package):
"""Return information about the current version of package."""
r = requests.get('http://pypi.python.org/pypi/{0}/json'.format(package))
if r.status_code != 200:
return {'success': False}
upload_time = None
json_data = r.json()
for file_info in json_data['urls']:
if file_info['packagetype'] == 'sdist':
upload_time = file_info['upload_time']
return {'success': True, 'data': {'upload_time': upload_time,
'version': json_data['info']['version']}}
| bsd-2-clause | Python |
85db19ec07f70ed6fdc19b5c5eecfb45eedded28 | Update Adafruit16CServoDriverPi.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/Mats/Adafruit16CServoDriverPi.py | home/Mats/Adafruit16CServoDriverPi.py | webgui = Runtime.createAndStart("WebGui","WebGui")
# Create and start the RasPi
raspi = Runtime.createAndStart("RasPi","RasPi")
# Start the Adafruit16CServidriver that can be used for all PCA9685 devices
# and connect it to the Arduino i2c interface using the default bus and
# address
adaFruit16c = Runtime.createAndStart("AdaFruit16C","Adafruit16CServoDriver")
adaFruit16c.setController("RasPi")
# create a new servo
thumb = Runtime.createAndStart("Thumb", "Servo")
elbow = Runtime.createAndStart("Elbow", "Servo")
# attach it to the pwm board - pin 3 & 8
thumb.attach(adaFruit16c,3)
elbow.attach(adaFruit16c,8)
# When this script has been executed you should be able to
# move the servos using the GUI or using python
| webgui = Runtime.createAndStart("WebGui","WebGui")
# Create and start the RasPi
raspi = Runtime.createAndStart("RasPi","RasPi")
# Start the Adafruit16CServidriver that can be used for all PCA9685 devices
# and connect it to the Arduino i2c interface using the default bus and
# address
adaFruit16c = Runtime.createAndStart("AdaFruit16C","Adafruit16CServoDriver")
adaFruit16c.setController("RasPi")
# create a new servo
thumb = Runtime.createAndStart("Thumb", "Servo")
elbow = Runtime.createAndStart("Elbow", "Servo")
# attach it to the pwm board - pin 3 & 8
adaFruit16c.attach(thumb,3)
adaFruit16c.attach(elbow,8)
# When this script has been executed you should be able to
# move the servos using the GUI or using python
| apache-2.0 | Python |
cbb914476696c32988a3ab661c7c6babd5f436fd | Remove utests for exposed parsing APIs that have been removed | HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework | utest/api/test_exposed_api.py | utest/api/test_exposed_api.py | import unittest
from os.path import abspath, join
from robot import api, model, reporting, result, running
from robot.utils.asserts import assert_equal, assert_true
class TestExposedApi(unittest.TestCase):
def test_execution_result(self):
assert_equal(api.ExecutionResult, result.ExecutionResult)
def test_test_suite(self):
assert_equal(api.TestSuite, running.TestSuite)
def test_result_writer(self):
assert_equal(api.ResultWriter, reporting.ResultWriter)
def test_visitors(self):
assert_equal(api.SuiteVisitor, model.SuiteVisitor)
assert_equal(api.ResultVisitor, result.ResultVisitor)
class TestModelObjects(unittest.TestCase):
"""These model objects are part of the public API.
They are only seldom needed directly and thus not exposed via the robot.api
package. Tests just validate they are not removed accidentally.
"""
def test_running_objects(self):
assert_true(running.TestSuite)
assert_true(running.TestCase)
assert_true(running.Keyword)
def test_result_objects(self):
assert_true(result.TestSuite)
assert_true(result.TestCase)
assert_true(result.Keyword)
assert_true(result.Message)
class TestTestSuiteBuilder(unittest.TestCase):
sources = [join(abspath(__file__), '..', '..', '..', 'atest', 'testdata', 'misc', n)
for n in ('pass_and_fail.robot', 'normal.robot')]
def test_create_with_datasources_as_list(self):
suite = api.TestSuiteBuilder().build(*self.sources)
assert_equal(suite.name, 'Pass And Fail & Normal')
def test_create_with_datasource_as_string(self):
suite = api.TestSuiteBuilder().build(self.sources[0])
assert_equal(suite.name, 'Pass And Fail')
if __name__ == '__main__':
unittest.main()
| import unittest
from os.path import abspath, join
from robot import api, model, parsing, reporting, result, running
from robot.utils.asserts import assert_equal, assert_true
class TestExposedApi(unittest.TestCase):
def test_test_case_file(self):
assert_equal(api.TestCaseFile, parsing.TestCaseFile)
def test_test_data_directory(self):
assert_equal(api.TestDataDirectory, parsing.TestDataDirectory)
def test_resource_file(self):
assert_equal(api.ResourceFile, parsing.ResourceFile)
def test_test_data(self):
assert_equal(api.TestData, parsing.TestData)
def test_execution_result(self):
assert_equal(api.ExecutionResult, result.ExecutionResult)
def test_test_suite(self):
assert_equal(api.TestSuite, running.TestSuite)
def test_result_writer(self):
assert_equal(api.ResultWriter, reporting.ResultWriter)
def test_visitors(self):
assert_equal(api.SuiteVisitor, model.SuiteVisitor)
assert_equal(api.ResultVisitor, result.ResultVisitor)
class TestModelObjects(unittest.TestCase):
"""These model objects are part of the public API.
They are only seldom needed directly and thus not exposed via the robot.api
package. Tests just validate they are not removed accidentally.
"""
def test_running_objects(self):
assert_true(running.TestSuite)
assert_true(running.TestCase)
assert_true(running.Keyword)
def test_result_objects(self):
assert_true(result.TestSuite)
assert_true(result.TestCase)
assert_true(result.Keyword)
assert_true(result.Message)
class TestTestSuiteBuilder(unittest.TestCase):
sources = [join(abspath(__file__), '..', '..', '..', 'atest', 'testdata', 'misc', n)
for n in ('pass_and_fail.robot', 'normal.robot')]
def test_create_with_datasources_as_list(self):
suite = api.TestSuiteBuilder().build(*self.sources)
assert_equal(suite.name, 'Pass And Fail & Normal')
def test_create_with_datasource_as_string(self):
suite = api.TestSuiteBuilder().build(self.sources[0])
assert_equal(suite.name, 'Pass And Fail')
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
6b2fca4549783e7ca89583948a98434d0047136c | Update "unicode_to_ascii.py" utility. | colour-science/colour-hdri | utilities/unicode_to_ascii.py | utilities/unicode_to_ascii.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unicode to ASCII Utility
========================
"""
import sys
if sys.version_info[0] < 3:
# Smelly hack for Python 2.x: https://stackoverflow.com/q/3828723/931625
reload(sys) # noqa
sys.setdefaultencoding('utf-8')
import codecs
import os
import unicodedata
__copyright__ = 'Copyright (C) 2015-2019 - Colour Developers'
__license__ = 'New BSD License - http://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-science@googlegroups.com'
__status__ = 'Production'
__all__ = ['SUBSTITUTIONS', 'unicode_to_ascii']
SUBSTITUTIONS = {
'–': '-',
'“': '"',
'”': '"',
'‘': "'",
'’': "'",
'′': "'",
}
def unicode_to_ascii(root_directory):
"""
Recursively converts from unicode to ASCII *.py*, *.bib* and *.rst* files
in given directory.
Parameters
----------
root_directory : unicode
Directory to convert the files from unicode to ASCII.
"""
for root, dirnames, filenames in os.walk(root_directory):
for filename in filenames:
if (not filename.endswith('.tex') and not filename.endswith('.py')
and not filename.endswith('.bib')
and not filename.endswith('.rst')):
continue
if filename == 'unicode_to_ascii.py':
continue
filename = os.path.join(root, filename)
with codecs.open(filename, encoding='utf8') as file_handle:
content = file_handle.read()
with codecs.open(filename, 'w', encoding='utf8') as file_handle:
for key, value in SUBSTITUTIONS.items():
content = content.replace(key, value)
content = unicodedata.normalize('NFD', content)
file_handle.write(content)
if __name__ == '__main__':
unicode_to_ascii(os.path.join('..', 'colour_hdri'))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Unicode to ASCII Utility
========================
"""
import sys
if sys.version_info[0] < 3:
# Smelly hack for Python 2.x: https://stackoverflow.com/q/3828723/931625
reload(sys) # noqa
sys.setdefaultencoding('utf-8')
import codecs
import os
import unicodedata
__copyright__ = 'Copyright (C) 2015-2019 - Colour Developers'
__license__ = 'New BSD License - http://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-science@googlegroups.com'
__status__ = 'Production'
__all__ = ['SUBSTITUTIONS', 'unicode_to_ascii']
SUBSTITUTIONS = {
'–': '-',
'“': '"',
'”': '"',
'‘': "'",
'’': "'",
'′': "'",
}
def unicode_to_ascii(root_directory):
"""
Recursively converts from unicode to ASCII *.py*, *.bib* and *.rst* files
in given directory.
Parameters
----------
root_directory : unicode
Directory to convert the files from unicode to ASCII.
"""
for root, dirnames, filenames in os.walk(root_directory):
for filename in filenames:
if (not filename.endswith('.py') and
not filename.endswith('.bib') and
not filename.endswith('.rst')):
continue
if filename == 'unicode_to_ascii.py':
continue
filename = os.path.join(root, filename)
with codecs.open(filename, encoding='utf8') as file_handle:
content = file_handle.read()
with codecs.open(filename, 'w', encoding='utf8') as file_handle:
for key, value in SUBSTITUTIONS.items():
content = content.replace(key, value)
content = unicodedata.normalize('NFD', content).encode(
'ascii', 'ignore')
file_handle.write(content)
if __name__ == '__main__':
unicode_to_ascii(os.path.join('..', 'colour_hdri'))
| bsd-3-clause | Python |
efd4b69a95d4bd18e675a8211def3e8b29752024 | Fix type in authors. | rschnapka/hr,charbeljc/hr,vrenaville/hr,Eficent/hr,raycarnes/hr,VitalPet/hr,yelizariev/hr,microcom/hr,abstract-open-solutions/hr,damdam-s/hr,hbrunn/hr,Antiun/hr,yelizariev/hr,xpansa/hr,open-synergy/hr,microcom/hr,iDTLabssl/hr,Antiun/hr,VitalPet/hr,hbrunn/hr,Eficent/hr,charbeljc/hr,vrenaville/hr,abstract-open-solutions/hr,acsone/hr,Vauxoo/hr,thinkopensolutions/hr,iDTLabssl/hr,thinkopensolutions/hr,xpansa/hr,acsone/hr,rschnapka/hr,alanljj/oca_hr,feketemihai/hr,raycarnes/hr,feketemihai/hr,Vauxoo/hr,alanljj/oca_hr,open-synergy/hr,damdam-s/hr,Endika/hr,Endika/hr | hr_contract_multi_jobs/__openerp__.py | hr_contract_multi_jobs/__openerp__.py | # -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'HR Contract Multi Jobs',
'version': '1.0',
'license': 'AGPL-3',
'category': 'Generic Modules/Human Resources',
'author': 'Savoir-faire Linux, '
'Fekete Mihai (Forest and Biomass Services Romania), '
'Odoo Community Association (OCA)',
'website': 'https://www.savoirfairelinux.com/',
'depends': [
'hr_contract'
],
'data': [
'security/ir.model.access.csv',
'views/hr_contract_view.xml',
],
'test': [],
'demo': [],
'installable': True,
}
| # -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'HR Contract Multi Jobs',
'version': '1.0',
'license': 'AGPL-3',
'category': 'Generic Modules/Human Resources',
'author': 'Savoir-faire Linux, '
'Fekete Mihai (Forest and Biomass Services Romania, '
'Odoo Community Association (OCA)',
'website': 'https://www.savoirfairelinux.com/',
'depends': [
'hr_contract'
],
'data': [
'security/ir.model.access.csv',
'views/hr_contract_view.xml',
],
'test': [],
'demo': [],
'installable': True,
}
| agpl-3.0 | Python |
5bd8e58785773a9a43bba6956dd8d236da36ab66 | Remove debug print statement | SysTheron/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,SysTheron/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,SysTheron/adhocracy,phihag/adhocracy,liqd/adhocracy,phihag/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,phihag/adhocracy,liqd/adhocracy,alkadis/vcv,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,phihag/adhocracy,phihag/adhocracy,liqd/adhocracy,alkadis/vcv | adhocracy/lib/tiles/badge_tiles.py | adhocracy/lib/tiles/badge_tiles.py | def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
Render a <style>-block with dyamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
return render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
| def badge(badge):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badge', badge=badge,
cached=True)
def badges(badges):
from adhocracy.lib.templating import render_def
return render_def('/badge/tiles.html', 'badges', badges=badges,
cached=True)
def badge_styles():
'''
Render a <style>-block with dyamic badge styles
'''
from adhocracy.lib.templating import render_def
from adhocracy.model import Badge
badges = Badge.all()
r = render_def('/badge/tiles.html', 'badge_styles', badges=badges,
cached=True)
print r
return r
| agpl-3.0 | Python |
a3004d2de9c15b9d7efebb98ea6533a1a6e07062 | Improve dispatch of actions in main | fancystats/nhlstats | nhlstats/__init__.py | nhlstats/__init__.py |
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(updates=False):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
if updates:
# Get a list of recently finished games to check for updates on
pass
else:
# Get a list of active games.
pass
def main(action):
"""
The main entry point for the application
"""
GetDataForGames(GetGames(action))
| mit | Python |
39e6cc46486ef603db977eedf5d8a87ce526fe47 | Fix #4073, the versions, if from git, can be compared without | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/runners/manage.py | salt/runners/manage.py | '''
General management functions for salt, tools like seeing what hosts are up
and what hosts are down
'''
import distutils.version
# Import salt libs
import salt.key
import salt.client
import salt.output
def status(output=True):
'''
Print the status of all known salt minions
'''
client = salt.client.LocalClient(__opts__['conf_file'])
minions = client.cmd('*', 'test.ping', timeout=__opts__['timeout'])
key = salt.key.Key(__opts__)
keys = key.list_keys()
ret = {}
ret['up'] = sorted(minions)
ret['down'] = sorted(set(keys['minions']) - set(minions))
if output:
salt.output.display_output(ret, '', __opts__)
return ret
def down():
'''
Print a list of all the down or unresponsive salt minions
'''
ret = status(output=False).get('down', [])
for minion in ret:
salt.output.display_output(minion, '', __opts__)
return ret
def up(): # pylint: disable-msg=C0103
'''
Print a list of all of the minions that are up
'''
ret = status(output=False).get('up', [])
for minion in ret:
salt.output.display_output(minion, '', __opts__)
return ret
def versions():
'''
Check the version of active minions
'''
client = salt.client.LocalClient(__opts__['conf_file'])
minions = client.cmd('*', 'test.version', timeout=__opts__['timeout'])
labels = {
-1: 'Minion requires update',
0: 'Up to date',
1: 'Minion newer than master',
}
version_status = {}
comps = salt.__version__.split('-')
if len(comps) == 3:
master_version = '-'.join(comps[0:2])
else:
master_version = salt.__version__
for minion in minions:
comps = minions[minion].split('-')
if len(comps) == 3:
minion_version = '-'.join(comps[0:2])
else:
minion_version = minions[minion]
ver_diff = cmp(minion_version, master_version)
if ver_diff not in version_status:
version_status[ver_diff] = []
version_status[ver_diff].append(minion)
ret = {}
for key in version_status:
for minion in sorted(version_status[key]):
ret.setdefault(labels[key], []).append(minion)
salt.output.display_output(ret, '', __opts__)
return ret
| '''
General management functions for salt, tools like seeing what hosts are up
and what hosts are down
'''
import distutils.version
# Import salt libs
import salt.key
import salt.client
import salt.output
def status(output=True):
'''
Print the status of all known salt minions
'''
client = salt.client.LocalClient(__opts__['conf_file'])
minions = client.cmd('*', 'test.ping', timeout=__opts__['timeout'])
key = salt.key.Key(__opts__)
keys = key.list_keys()
ret = {}
ret['up'] = sorted(minions)
ret['down'] = sorted(set(keys['minions']) - set(minions))
if output:
salt.output.display_output(ret, '', __opts__)
return ret
def down():
'''
Print a list of all the down or unresponsive salt minions
'''
ret = status(output=False).get('down', [])
for minion in ret:
salt.output.display_output(minion, '', __opts__)
return ret
def up(): # pylint: disable-msg=C0103
'''
Print a list of all of the minions that are up
'''
ret = status(output=False).get('up', [])
for minion in ret:
salt.output.display_output(minion, '', __opts__)
return ret
def versions():
'''
Check the version of active minions
'''
client = salt.client.LocalClient(__opts__['conf_file'])
minions = client.cmd('*', 'test.version', timeout=__opts__['timeout'])
labels = {
-1: 'Minion requires update',
0: 'Up to date',
1: 'Minion newer than master',
}
version_status = {}
master_version = distutils.version.StrictVersion(salt.__version__)
for minion in minions:
minion_version = distutils.version.StrictVersion(minions[minion])
ver_diff = cmp(minion_version, master_version)
if ver_diff not in version_status:
version_status[ver_diff] = []
version_status[ver_diff].append(minion)
ret = {}
for key in version_status:
for minion in sorted(version_status[key]):
ret.setdefault(labels[key], []).append(minion)
salt.output.display_output(ret, '', __opts__)
return ret
| apache-2.0 | Python |
472dd5d5d97dbb9860e1d2a132217b4080db2a5c | Bump catalog version | edgedb/edgedb,edgedb/edgedb,edgedb/edgedb | edb/server/defines.py | edb/server/defines.py | #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
EDGEDB_PORT = 5656
EDGEDB_SUPERUSER = 'edgedb'
EDGEDB_TEMPLATE_DB = 'edgedb0'
EDGEDB_SUPERUSER_DB = 'edgedb'
EDGEDB_ENCODING = 'utf-8'
EDGEDB_VISIBLE_METADATA_PREFIX = r'EdgeDB metadata follows, do not modify.\n'
# Increment this whenever the database layout or stdlib changes.
EDGEDB_CATALOG_VERSION = 2020_01_11_00_00
# Resource limit on open FDs for the server process.
# By default, at least on macOS, the max number of open FDs
# is 256, which is low and can cause 'edb test' to hang.
# We try to bump the rlimit on server start if pemitted.
EDGEDB_MIN_RLIMIT_NOFILE = 2048
_MAX_QUERIES_CACHE = 1000
_QUERY_ROLLING_AVG_LEN = 10
_QUERIES_ROLLING_AVG_LEN = 300
DEFAULT_MODULE_ALIAS = 'default'
HTTP_PORT_QUERY_CACHE_SIZE = 500
HTTP_PORT_MAX_CONCURRENCY = 250
| #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
EDGEDB_PORT = 5656
EDGEDB_SUPERUSER = 'edgedb'
EDGEDB_TEMPLATE_DB = 'edgedb0'
EDGEDB_SUPERUSER_DB = 'edgedb'
EDGEDB_ENCODING = 'utf-8'
EDGEDB_VISIBLE_METADATA_PREFIX = r'EdgeDB metadata follows, do not modify.\n'
# Increment this whenever the database layout or stdlib changes.
EDGEDB_CATALOG_VERSION = 20200109_00_01
# Resource limit on open FDs for the server process.
# By default, at least on macOS, the max number of open FDs
# is 256, which is low and can cause 'edb test' to hang.
# We try to bump the rlimit on server start if pemitted.
EDGEDB_MIN_RLIMIT_NOFILE = 2048
_MAX_QUERIES_CACHE = 1000
_QUERY_ROLLING_AVG_LEN = 10
_QUERIES_ROLLING_AVG_LEN = 300
DEFAULT_MODULE_ALIAS = 'default'
HTTP_PORT_QUERY_CACHE_SIZE = 500
HTTP_PORT_MAX_CONCURRENCY = 250
| apache-2.0 | Python |
1ffaf904b2d66e5da2fd0d0171802b347273d0b4 | Update run.py | ngoduykhanh/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin,ngoduykhanh/PowerDNS-Admin | run.py | run.py | #!/usr/bin/env python3
from app import app
from config import PORT
from config import BIND_ADDRESS
if __name__ == '__main__':
app.run(debug = True, host=BIND_ADDRESS, port=PORT)
| #!/usr/bin/env python3
from app import app
from config import PORT
from config import BIND_ADDRESS
if __name__ == '__main__':
app.run(debug = True, host=BIND_ADDRESS, port=PORT, use_reloader=False)
| mit | Python |
3ef9916c99003ceba71d9d54a7ba86747f09b714 | fix comment | rapidpro/dash,peterayeni/dash,rapidpro/dash,peterayeni/dash,caktus/dash,caktus/dash | dash/orgs/tasks.py | dash/orgs/tasks.py | import logging
import time
from celery import shared_task
from django_redis import get_redis_connection
from .models import Invitation, Org
logger = logging.getLogger(__name__)
@shared_task(track_started=True, name='send_invitation_email_task')
def send_invitation_email_task(invitation_id):
invitation = Invitation.objects.get(pk=invitation_id)
invitation.send_email()
@shared_task(name='orgs.build_boundaries')
def build_boundaries():
start = time.time()
r = get_redis_connection()
key = 'build_boundaries'
if not r.get(key):
with r.lock(key, timeout=900):
active_orgs = Org.objects.filter(is_active=True)
for org in active_orgs:
logger.debug("=" * 40)
org.build_boundaries()
logger.debug("Task: build_boundaries took %ss" % (time.time() - start))
@shared_task(track_started=True, name='fetch_poll')
def rebuild_org_boundaries(org_id):
try:
# get our org
from .models import Org
org = Org.objects.get(pk=org_id)
org.build_boundaries()
except Exception as e:
logger.exception("Error building org boundaries refresh: %s" % str(e))
| import logging
import time
from celery import shared_task
from django_redis import get_redis_connection
from .models import Invitation, Org
logger = logging.getLogger(__name__)
@shared_task(track_started=True, name='send_invitation_email_task')
def send_invitation_email_task(invitation_id):
invitation = Invitation.objects.get(pk=invitation_id)
invitation.send_email()
@shared_task(name='orgs.build_boundaries')
def build_boundaries():
start = time.time()
r = get_redis_connection()
key = 'build_boundaries'
if not r.get(key):
with r.lock(key, timeout=900):
active_orgs = Org.objects.filter(is_active=True)
for org in active_orgs:
logger.debug("=" * 40)
org.build_boundaries()
logger.debug("Task: build_boundaries took %ss" % (time.time() - start))
@shared_task(track_started=True, name='fetch_poll')
def rebuild_org_boundaries(org_id):
try:
# get our poll
from .models import Org
org = Org.objects.get(pk=org_id)
org.build_boundaries()
except Exception as e:
logger.exception("Error building org boundaries refresh: %s" % str(e))
| bsd-3-clause | Python |
91065cd14998855853f207f81af93c747bb54753 | add daemon() option | USCC-LAB/Weekly | scheduler/scheduler.py | scheduler/scheduler.py | import sched, time, datetime, threading
class Scheduler:
def __init__(self, timef = time.time, delayf = time.sleep):
self.sched_obj = sched.scheduler(timef, delayf)
def show(self):
print('*' * 20)
print('Total Event Number: %d\n' %len(self.sched_obj.queue))
for index, item in enumerate(self.sched_obj.queue):
print('Event %d' %index, item)
print('*' * 20)
# @instance: would be date or delta timesec
# @argv: would be tuple as a pointer. It's quite similar with pthead_create
def regist(self, instance, act, argv, prio = 0):
if type(instance) == datetime.datetime:
self.sched_obj.enterabs(instance.timestamp(), prio, act, argv)
else: # include type of time.time
# Prototype: sched.enter(timesec, prio, act, *argv, **kwarg)
self.sched_obj.enter(instance, prio, act, argv)
def cancel(self, event_index):
self.sched_obj.cancel(self.sched_obj.queue[event_index])
def run(self, blocking = True):
self.sched_obj.run(blocking)
def daemon(self, blocking = True):
thrd = threading.Thread(target=self.run, args = [blocking])
thrd.start()
| import sched, time, datetime
class Scheduler:
def __init__(self, timef = time.time, delayf = time.sleep):
self.sched_obj = sched.scheduler(timef, delayf)
def show(self):
print('*' * 20)
print('Total Event Number: %d\n' %len(self.sched_obj.queue))
for index, item in enumerate(self.sched_obj.queue):
print('Event %d' %index, item)
print('*' * 20)
# @instance: would be date or delta timesec
# @argv: would be tuple as a pointer. It's quite similar with pthead_create
def regist(self, instance, act, argv, prio = 0):
if type(instance) == datetime.datetime:
self.sched_obj.enterabs(instance.timestamp(), prio, act, argv)
else: # include type of time.time
# Prototype: sched.enter(timesec, prio, act, *argv, **kwarg)
self.sched_obj.enter(instance, prio, act, argv)
def cancel(self, event_index):
self.sched_obj.cancel(self.sched_obj.queue[event_index])
def run(self, blocking = True):
self.sched_obj.run(blocking)
def daemon():
pass
| apache-2.0 | Python |
b4cdb38c92210fc0c909ea8f0622654972a4886a | use __all__ to bypass unused import | awesto/django-shop,nimbis/django-shop,nimbis/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,divio/django-shop,nimbis/django-shop,divio/django-shop,nimbis/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,khchine5/django-shop | shop/money/serializers.py | shop/money/serializers.py | # -*- coding: utf-8 -*-
"""
Override django.core.serializers.json.Serializer which renders our MoneyType as float.
"""
from __future__ import unicode_literals
import json
from django.core.serializers.json import DjangoJSONEncoder, Serializer as DjangoSerializer
from django.core.serializers.json import Deserializer
from .money_maker import AbstractMoney
__all__ = ['JSONEncoder', 'Serializer', 'Deserializer']
class JSONEncoder(DjangoJSONEncoder):
"""
Money type aware JSON encoder for reciprocal usage, such as import/export/dumpdata/loaddata.
"""
def default(self, obj):
if isinstance(obj, AbstractMoney):
return float(obj)
return super(JSONEncoder, self).default(obj)
class Serializer(DjangoSerializer):
"""
Money type aware JSON serializer.
"""
def end_object(self, obj):
# self._current has the field data
indent = self.options.get("indent")
if not self.first:
self.stream.write(",")
if not indent:
self.stream.write(" ")
if indent:
self.stream.write("\n")
json.dump(self.get_dump_object(obj), self.stream, cls=JSONEncoder, **self.json_kwargs)
self._current = None
| # -*- coding: utf-8 -*-
"""
Override django.core.serializers.json.Serializer which renders our MoneyType as float.
"""
from __future__ import unicode_literals
import json
from django.core.serializers.json import DjangoJSONEncoder, Serializer as DjangoSerializer
from django.core.serializers.json import Deserializer # nopyflakes
from .money_maker import AbstractMoney
class JSONEncoder(DjangoJSONEncoder):
"""
Money type aware JSON encoder for reciprocal usage, such as import/export/dumpdata/loaddata.
"""
def default(self, obj):
if isinstance(obj, AbstractMoney):
return float(obj)
return super(JSONEncoder, self).default(obj)
class Serializer(DjangoSerializer):
"""
Money type aware JSON serializer.
"""
def end_object(self, obj):
# self._current has the field data
indent = self.options.get("indent")
if not self.first:
self.stream.write(",")
if not indent:
self.stream.write(" ")
if indent:
self.stream.write("\n")
json.dump(self.get_dump_object(obj), self.stream, cls=JSONEncoder, **self.json_kwargs)
self._current = None
| bsd-3-clause | Python |
6689858b2364a668b362a5f00d4c86e57141dc37 | Reorder FloatModel checks in ascending order | cpcloud/numba,numba/numba,numba/numba,seibert/numba,cpcloud/numba,cpcloud/numba,seibert/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,numba/numba | numba/cuda/models.py | numba/cuda/models.py | from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| from llvmlite import ir
from numba.core.datamodel.registry import register_default
from numba.core.extending import register_model, models
from numba.core import types
from numba.cuda.types import Dim3, GridGroup, CUDADispatcher
@register_model(Dim3)
class Dim3Model(models.StructModel):
def __init__(self, dmm, fe_type):
members = [
('x', types.int32),
('y', types.int32),
('z', types.int32)
]
super().__init__(dmm, fe_type, members)
@register_model(GridGroup)
class GridGroupModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
be_type = ir.IntType(64)
super().__init__(dmm, fe_type, be_type)
@register_default(types.Float)
class FloatModel(models.PrimitiveModel):
def __init__(self, dmm, fe_type):
if fe_type == types.float32:
be_type = ir.FloatType()
elif fe_type == types.float16:
be_type = ir.IntType(16)
elif fe_type == types.float64:
be_type = ir.DoubleType()
else:
raise NotImplementedError(fe_type)
super(FloatModel, self).__init__(dmm, fe_type, be_type)
register_model(CUDADispatcher)(models.OpaqueModel)
| bsd-2-clause | Python |
385a88f22d0ff4f850a3ca6c60403d7d3e426ae3 | Bump version to 1.14.0 | open-io/oio-swift,open-io/oio-swift | oioswift/__init__.py | oioswift/__init__.py | __version__ = '1.14.0'
| __version__ = '1.13.0'
| apache-2.0 | Python |
5d9356ef82af6b377f8fe4c4982724cf273624a8 | Fix popular | RiiConnect24/File-Maker,RiiConnect24/File-Maker | Channels/Check_Mii_Out_Channel/popular.py | Channels/Check_Mii_Out_Channel/popular.py | from cmoc import QuickList, Prepare
import MySQLdb
from json import load
from time import sleep
with open("/var/rc24/File-Maker/Channels/Check_Mii_Out_Channel/config.json", "r") as f:
config = load(f)
# gets the most popular miis ordered by their volatile likes which resets to 0 when spot_list resets
ql = QuickList()
pr = Prepare()
db = MySQLdb.connect("localhost", config["dbuser"], config["dbpass"], "cmoc")
cursor = db.cursor()
cursor.execute("SELECT COUNT(*) FROM mii WHERE likes > 0")
count = int(cursor.fetchone()[0])
print("Popular Count:", count)
# popular is always sorted by volatile likes first, but we combine miis ordered by permlikes to fill in the rest to equal 100 total miis
if count >= 100:
extraCount = 0
count = 100
else:
extraCount = 100 - count
cursor.execute(
"SELECT mii.entryno, mii.initial, mii.permlikes, mii.skill, mii.country, mii.miidata, artisan.miidata, artisan.craftsno, artisan.master FROM mii, artisan WHERE mii.craftsno=artisan.craftsno ORDER BY mii.likes DESC LIMIT %s",
[count],
)
popularMiis = cursor.fetchall()
cursor.execute(
"SELECT mii.entryno, mii.initial, mii.permlikes, mii.skill, mii.country, mii.miidata, artisan.miidata, artisan.craftsno, artisan.master FROM mii, artisan WHERE mii.permlikes < 25 AND mii.craftsno=artisan.craftsno ORDER BY mii.permlikes DESC LIMIT %s",
[extraCount],
)
extraMiis = cursor.fetchall()
cursor.execute(
"UPDATE mii SET likes = 0"
) # reset everyone's likes, but not their permlikes
db.commit()
db.close()
for country in [0, 150]:
data = ql.build("SL", (popularMiis + extraMiis), country)
with open(
"{}/{}/spot_list.ces".format(config["miicontest_path"], country), "wb"
) as file:
file.write(pr.prepare(data))
with open("decfiles/spot_list.dec", "wb") as file:
file.write(data)
| from cmoc import QuickList, Prepare
import MySQLdb
from json import load
from time import sleep
with open("/var/rc24/File-Maker/Channels/Check_Mii_Out_Channel/config.json", "r") as f:
config = load(f)
# gets the most popular miis ordered by their volatile likes which resets to 0 when spot_list resets
ql = QuickList()
pr = Prepare()
db = MySQLdb.connect("localhost", config["dbuser"], config["dbpass"], "cmoc")
cursor = db.cursor()
cursor.execute("SELECT COUNT(*) FROM mii WHERE likes > 0")
count = int(cursor.fetchone()[0])
print("Popular Count:", count)
# popular is always sorted by volatile likes first, but we combine miis ordered by permlikes to fill in the rest to equal 100 total miis
if count >= 100:
extraCount = 0
count = 100
else:
extraCount = 100 - count
cursor.execute(
"SELECT mii.entryno, mii.initial, mii.permlikes, mii.skill, mii.country, mii.miidata, artisan.miidata, artisan.craftsno, artisan.master FROM mii, artisan WHERE mii.craftsno=artisan.craftsno ORDER BY mii.likes DESC LIMIT %s",
[count],
)
popularMiis = cursor.fetchall()
cursor.execute(
"SELECT mii.entryno, mii.initial, mii.permlikes, mii.skill, mii.country, mii.miidata, artisan.miidata, artisan.craftsno, artisan.master FROM mii, artisan WHERE mii.permlikes < 25 AND mii.craftsno=artisan.craftsno ORDER BY mii.permlikes DESC LIMIT %s",
[extraCount],
)
extraMiis = cursor.fetchall()
cursor.execute(
"UPDATE mii SET likes = 0"
) # reset everyone's likes, but not their permlikes
db.commit()
db.close()
for country in [0, 150]:
data = ql.build("SL", (popularMiis + extraMiis), country)
with open("{}/{}/spot_list.ces".format(config["miicontest_path"], country), "wb") as file:
file.write(pr.prepare(data))
with open("decfiles/spot_list.dec", "wb") as file:
file.write(data)
| agpl-3.0 | Python |
4dc7fef8d244ce0964ec3949fe99f70344f268e4 | Kill noisy debug log | HumanDynamics/OpenBadge,HumanDynamics/OpenBadge,HumanDynamics/OpenBadge,HumanDynamics/OpenBadge | IntegrationTests/test_5_record_no_gaps.py | IntegrationTests/test_5_record_no_gaps.py | import time
from integration_test import *
from BadgeFramework.badge import timestamps_to_time
TEST_LENGTH_SECONDS = 3 * 60;
SAMPLE_PERIOD_MS = 50
SAMPLES_PER_SECOND = 1000 / SAMPLE_PERIOD_MS
# Maximum allowed delay between recording start command sent and first sample recorded in seconds.
MAX_ALLOWED_STARTUP_DELAY = 10
class RecordNoGapsTestCase(IntegrationTest):
def testCase(self, badge, logger):
# Sync time
status = badge.get_status()
time.sleep(.25)
badge.start_recording()
test_start_time = time.time()
time.sleep(TEST_LENGTH_SECONDS)
badge.stop_recording()
mic_data = badge.get_mic_data(timestamp_seconds=test_start_time)
num_samples_taken = 0
# We give extra leway on the first chunk to allow for startup time.
first_chunk_header, first_chunk_data = mic_data[0]
first_chunk_time = timestamps_to_time(first_chunk_header.timestamp_seconds, first_chunk_header.timestamp_miliseconds)
self.assertAlmostEqual(test_start_time, first_chunk_time, delta=MAX_ALLOWED_STARTUP_DELAY)
# If we passed our startup delay test, use our first_chunk_time to calibrate all other expected times
expected_next_chunk_time = first_chunk_time
for header, data in mic_data:
# Check that there's the correct number of samples
self.assertEqual(header.num_samples_in_chunk, len(data))
num_samples_taken += header.num_samples_in_chunk
# Check that timestamps are continous
sample_time = timestamps_to_time(header.timestamp_seconds, header.timestamp_miliseconds)
self.assertAlmostEqual(expected_next_chunk_time, sample_time, delta=0.0005)
expected_next_chunk_time = sample_time + (float(header.num_samples_in_chunk) / SAMPLES_PER_SECOND)
# Check that there were the correct number of total samples for the amount of time spent recording
actual_test_duration = expected_next_chunk_time - first_chunk_time
expected_num_samples = actual_test_duration * SAMPLES_PER_SECOND
self.assertAlmostEqual(TEST_LENGTH_SECONDS, actual_test_duration, delta=2.5)
self.assertAlmostEqual(num_samples_taken, expected_num_samples, delta=1)
if __name__ == "__main__":
testCase = RecordNoGapsTestCase()
testCase.runTest() | import time
from integration_test import *
from BadgeFramework.badge import timestamps_to_time
TEST_LENGTH_SECONDS = 5 * 60;
SAMPLE_PERIOD_MS = 50
SAMPLES_PER_SECOND = 1000 / SAMPLE_PERIOD_MS
# Maximum allowed delay between recording start command sent and first sample recorded in seconds.
MAX_ALLOWED_STARTUP_DELAY = 10
class RecordNoGapsTestCase(IntegrationTest):
def testCase(self, badge, logger):
# Sync time
status = badge.get_status()
time.sleep(.25)
badge.start_recording()
test_start_time = time.time()
time.sleep(TEST_LENGTH_SECONDS)
badge.stop_recording()
mic_data = badge.get_mic_data(timestamp_seconds=test_start_time)
num_samples_taken = 0
# We give extra leway on the first chunk to allow for startup time.
first_chunk_header, first_chunk_data = mic_data[0]
first_chunk_time = timestamps_to_time(first_chunk_header.timestamp_seconds, first_chunk_header.timestamp_miliseconds)
self.assertAlmostEqual(test_start_time, first_chunk_time, delta=MAX_ALLOWED_STARTUP_DELAY)
# If we passed our startup delay test, use our first_chunk_time to calibrate all other expected times
expected_next_chunk_time = first_chunk_time
for header, data in mic_data:
# Check that there's the correct number of samples
self.assertEqual(header.num_samples_in_chunk, len(data))
num_samples_taken += header.num_samples_in_chunk
# Check that timestamps are continous
sample_time = timestamps_to_time(header.timestamp_seconds, header.timestamp_miliseconds)
self.assertAlmostEqual(expected_next_chunk_time, sample_time, delta=0.0005)
expected_next_chunk_time = sample_time + (float(header.num_samples_in_chunk) / SAMPLES_PER_SECOND)
# Check that there were the correct number of total samples for the amount of time spent recording
actual_test_duration = expected_next_chunk_time - first_chunk_time
expected_num_samples = actual_test_duration * SAMPLES_PER_SECOND
self.assertAlmostEqual(TEST_LENGTH_SECONDS, actual_test_duration, delta=2.5)
self.assertAlmostEqual(num_samples_taken, expected_num_samples, delta=1)
if __name__ == "__main__":
testCase = RecordNoGapsTestCase()
testCase.runTest() | mit | Python |
b865cfa3bdd19f93993f832b7218aead84532153 | Improve gdb build; depend on python. | BreakawayConsulting/xyz | rules/gdb.py | rules/gdb.py | import xyz
import os
import shutil
class Gdb(xyz.BuildProtocol):
crosstool = True
pkg_name = 'gdb'
supported_targets = ['arm-none-eabi']
deps = ['texinfo', 'python']
def check(self, config):
target = config.get('target')
if target not in self.supported_targets:
raise xyz.UsageError("Invalid target ({}) for {}".format(target, self.pkg_name))
def configure(self, builder, config):
if config['host'].endswith('darwin'):
ldflags = '{standard_ldflags} -F/Library/Frameworks -F/System/Library/Frameworks'
else:
ldflags = '{standard_ldflags}'
builder.cross_configure('--disable-nls', '--enable-lto', '--enable-ld=yes', '--without-zlib',
config=config, env={'LDFLAGS': ldflags})
# After configuring we need to ice python, but we need
# to ensure we do it using the built version of Python, not
# this Python
xyz.ensure_dir('gdb')
with xyz.chdir('gdb'):
builder.cmd('{devtree_dir_abs}/{host}/bin/python3', '{root_dir_abs}/ice/ice.py', 'stdlib', config=config)
rules = Gdb()
| import xyz
import os
import shutil
class Gdb(xyz.BuildProtocol):
pkg_name = 'gdb'
supported_targets = ['arm-none-eabi']
deps = ['expat']
def check(self, builder):
if builder.target not in self.supported_targets:
raise xyz.UsageError("Invalid target ({}) for {}".format(builder.target, self.pkg_name))
def configure(self, builder, config):
builder.cross_configure('--disable-nls',
'--disable-tui',
'--with-python=no',
config=config)
def install(self, builder, config):
super().install(builder, config)
# For some reason binutils plonks libiberty.a in the output directory
libdir = builder.j('{install_dir_abs}', config['eprefix'][1:], 'lib', config=config)
if os.path.exists(libdir):
shutil.rmtree(libdir)
rules = Gdb()
| mit | Python |
ae8a0416dcdbb908cd592cd43422a45333d567ff | Add missing import | mattupstate/flask-social-example,rmelly/flask-social-example,rmelly/flask-social-example,mattupstate/flask-social-example,mattupstate/flask-social-example,talizon/flask-social-example,talizon/flask-social-example,talizon/flask-social-example,rmelly/flask-social-example | wsgi.py | wsgi.py |
import os
from app import create_app
application = create_app()
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
application.run(host='0.0.0.0', port=port) | from app import create_app
application = create_app()
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
application.run(host='0.0.0.0', port=port) | mit | Python |
4a650922ee97b9cb54b203cab9709d511487d9ff | Add factory for the Provider model | PressLabs/silver,PressLabs/silver,PressLabs/silver | silver/tests/factories.py | silver/tests/factories.py | import factory
from silver.models import Provider
class ProviderFactory(factory.django.DjangoModelFactory):
class Meta:
model = Provider
| """Factories for the silver app."""
# import factory
# from .. import models
| apache-2.0 | Python |
bc279265f1e266ac2601c655d1ba37a8403aeba0 | test rsa | ffee21/samil,ffee21/samil,ffee21/samil | runserver.py | runserver.py | from waitress import serve
from samil import app
# app.run(debug=True, host="0.0.0.0", port=8080)
serve(app, port=8080)
# test | from waitress import serve
from samil import app
# app.run(debug=True, host="0.0.0.0", port=8080)
serve(app, port=8080) | mit | Python |
8c4c8e6bbee7cc73a94a566e2a89829e737ddc52 | update code stats script | commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot,commaai/openpilot | scripts/code_stats.py | scripts/code_stats.py | #!/usr/bin/env python3
import os
import ast
import stat
import subprocess
fouts = set([x.decode('utf-8') for x in subprocess.check_output(['git', 'ls-files']).strip().split()])
pyf = []
for d in ["cereal", "common", "scripts", "selfdrive", "tools"]:
for root, dirs, files in os.walk(d):
for f in files:
if f.endswith(".py"):
pyf.append(os.path.join(root, f))
imps = set()
class Analyzer(ast.NodeVisitor):
def visit_Import(self, node):
for alias in node.names:
imps.add(alias.name)
self.generic_visit(node)
def visit_ImportFrom(self, node):
imps.add(node.module)
self.generic_visit(node)
tlns = 0
carlns = 0
scriptlns = 0
testlns = 0
for f in sorted(pyf):
if f not in fouts:
continue
xbit = bool(os.stat(f)[stat.ST_MODE] & stat.S_IXUSR)
src = open(f).read()
lns = len(src.split("\n"))
tree = ast.parse(src)
Analyzer().visit(tree)
print("%5d %s %s" % (lns, f, xbit))
if 'test' in f:
testlns += lns
elif f.startswith('tools/') or f.startswith('scripts/') or f.startswith('selfdrive/debug'):
scriptlns += lns
elif f.startswith('selfdrive/car'):
carlns += lns
else:
tlns += lns
print("%d lines of openpilot python" % tlns)
print("%d lines of car ports" % carlns)
print("%d lines of tools/scripts/debug" % scriptlns)
print("%d lines of tests" % testlns)
#print(sorted(list(imps)))
| #!/usr/bin/env python3
import os
import ast
import stat
import subprocess
fouts = set([x.decode('utf-8') for x in subprocess.check_output(['git', 'ls-files']).strip().split()])
pyf = []
for d in ["cereal", "common", "scripts", "selfdrive", "tools"]:
for root, dirs, files in os.walk(d):
for f in files:
if f.endswith(".py"):
pyf.append(os.path.join(root, f))
imps = set()
class Analyzer(ast.NodeVisitor):
def visit_Import(self, node):
for alias in node.names:
imps.add(alias.name)
self.generic_visit(node)
def visit_ImportFrom(self, node):
imps.add(node.module)
self.generic_visit(node)
tlns = 0
for f in sorted(pyf):
if f not in fouts:
continue
xbit = bool(os.stat(f)[stat.ST_MODE] & stat.S_IXUSR)
src = open(f).read()
lns = len(src.split("\n"))
tree = ast.parse(src)
Analyzer().visit(tree)
print("%5d %s %s" % (lns, f, xbit))
tlns += lns
print("%d lines of parsed openpilot python" % tlns)
#print(sorted(list(imps)))
| mit | Python |
3a05e26f843867bbfe8d68c500b227c2d011b03f | Add response headers logging | steven-lee-qadium/django-request-logging,rhumbixsf/django-request-logging,Rhumbix/django-request-logging | request_logging/middleware.py | request_logging/middleware.py | import logging
import re
from django.utils.termcolors import colorize
from django.utils.deprecation import MiddlewareMixin
MAX_BODY_LENGTH = 50000 # log no more than 3k bytes of content
request_logger = logging.getLogger('django.request')
class LoggingMiddleware(MiddlewareMixin):
def process_request(self, request):
request_logger.info(colorize("{} {}".format(request.method, request.get_full_path()), fg="cyan"))
headers = {k: v for k, v in request.META.items() if k.startswith('HTTP')}
if request.body:
self.log(self.chunked_to_max(request.body))
if headers:
self.log(headers)
def process_response(self, request, response):
resp_log = "{} {} - {}".format(request.method, request.get_full_path(), response.status_code)
if (response.status_code in range(400, 600)):
request_logger.info(colorize(resp_log, fg="magenta"))
self.log_resp_body(response, level=logging.ERROR)
else:
request_logger.info(colorize(resp_log, fg="cyan"))
self.log_resp_body(response)
return response
def log_resp_body(self, response, level=logging.DEBUG):
if (not re.match('^application/json', response.get('Content-Type', ''), re.I)): # only log content type: 'application/xxx'
return
self.log(self.chunked_to_max(response.content), level)
self.log(response._headers, level)
@staticmethod
def log(msg, level=logging.DEBUG):
for line in str(msg).split('\n'):
line = colorize(line, fg="magenta") if (level >= logging.ERROR) else colorize(line, fg="cyan")
request_logger.log(level, line)
def chunked_to_max(self, msg):
if (len(msg) > MAX_BODY_LENGTH):
return "{0}\n...\n".format(msg[0:MAX_BODY_LENGTH])
else:
return msg
| import logging
import re
from django.utils.termcolors import colorize
from django.utils.deprecation import MiddlewareMixin
MAX_BODY_LENGTH = 50000 # log no more than 3k bytes of content
request_logger = logging.getLogger('django.request')
class LoggingMiddleware(MiddlewareMixin):
def process_request(self, request):
request_logger.info(colorize("{} {}".format(request.method, request.get_full_path()), fg="cyan"))
headers = {k: v for k, v in request.META.items() if k.startswith('HTTP')}
if request.body:
self.log(self.chunked_to_max(request.body))
if headers:
self.log(headers)
def process_response(self, request, response):
resp_log = "{} {} - {}".format(request.method, request.get_full_path(), response.status_code)
if (response.status_code in range(400, 600)):
request_logger.info(colorize(resp_log, fg="magenta"))
self.log_resp_body(response, level=logging.ERROR)
else:
request_logger.info(colorize(resp_log, fg="cyan"))
self.log_resp_body(response)
return response
def log_resp_body(self, response, level=logging.DEBUG):
if (not re.match('^application/json', response.get('Content-Type', ''), re.I)): # only log content type: 'application/xxx'
return
self.log(self.chunked_to_max(response.content), level)
@staticmethod
def log(msg, level=logging.DEBUG):
for line in str(msg).split('\n'):
line = colorize(line, fg="magenta") if (level >= logging.ERROR) else colorize(line, fg="cyan")
request_logger.log(level, line)
def chunked_to_max(self, msg):
if (len(msg) > MAX_BODY_LENGTH):
return "{0}\n...\n".format(msg[0:MAX_BODY_LENGTH])
else:
return msg
| mit | Python |
f63d451d5f3351c5f8f0b74285eaf7bebd1062bb | Move default config file location to /tmp to avoid access problems | fgrsnau/sipa,agdsn/sipa,agdsn/sipa,MarauderXtreme/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,lukasjuhrich/sipa,agdsn/sipa,fgrsnau/sipa,MarauderXtreme/sipa,fgrsnau/sipa | sipa/default_config.py | sipa/default_config.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
all configuration options and dicts of external
information (dormitory mapping etc.)
Project-specific options should be included in the `config_local.py`,
which is a file not tracked in git containing IPs, user names, passwords, etc.
"""
from flask.ext.babel import gettext
SECRET_KEY = ""
LOG_FILE = '/tmp/error.log'
FLATPAGES_ROOT = ""
FLATPAGES_EXTENSION = '.md'
FLATPAGES_MARKDOWN_EXTENSIONS = [
'sane_lists',
'sipa.utils.bootstraped_tables',
'nl2br',
'meta',
'attr_list'
]
LOGGING_CONFIG_LOCATION = "sipa/default_log_config"
# Mail configuration
MAILSERVER_HOST = "127.0.0.1"
MAILSERVER_PORT = 25
# LDAP configuration
LDAP_HOST = "127.0.0.1"
LDAP_PORT = 389
LDAP_SEARCH_BASE = ""
# MySQL configuration
DB_ATLANTIS_HOST = "127.0.0.1"
DB_ATLANTIS_USER = ""
DB_ATLANTIS_PASSWORD = ""
# MySQL Helios configuration
DB_HELIOS_HOST = "127.0.0.1"
DB_HELIOS_PORT = 3307 # alternative port for 2nd db
DB_HELIOS_USER = ""
DB_HELIOS_PASSWORD = ""
SQL_TIMEOUT = 15
# todo further modularization. id mappings are rather specific than generous.
# MySQL id mappings
DORMITORIES = [
u'Wundstraße 5',
u'Wundstraße 7',
u'Wundstraße 9',
u'Wundstraße 11',
u'Wundstraße 1',
u'Wundstraße 3',
u'Zellescher Weg 41',
u'Zellescher Weg 41A',
u'Zellescher Weg 41B',
u'Zellescher Weg 41C',
u'Zellescher Weg 41D'
]
STATUS = {
# todo vervollständigen oder mindestens fehlerresistent machen!
# (Hat ein Nutzer einen unten nicht enthaltenen Status, gibts einen Fehler)
1: gettext(u'Bezahlt, verbunden'),
2: gettext(u'Nicht bezahlt, Netzanschluss gesperrt'),
7: gettext(u'Verstoß gegen Netzordnung, Netzanschluss gesperrt'),
9: gettext(u'Exaktiv'),
12: gettext(u'Trafficlimit überschritten, Netzanschluss gesperrt')
}
WEEKDAYS = {
'0': gettext('Sonntag'),
'1': gettext('Montag'),
'2': gettext('Dienstag'),
'3': gettext('Mittwoch'),
'4': gettext('Donnerstag'),
'5': gettext('Freitag'),
'6': gettext('Samstag')
}
# Languages
LANGUAGES = {
'de': 'Deutsch',
'en': 'English'
}
# Bus & tram stops
BUSSTOPS = [
"Zellescher Weg",
"Strehlener Platz",
"Weberplatz"
]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
all configuration options and dicts of external
information (dormitory mapping etc.)
Project-specific options should be included in the `config_local.py`,
which is a file not tracked in git containing IPs, user names, passwords, etc.
"""
from flask.ext.babel import gettext
SECRET_KEY = ""
LOG_FILE = 'error.log'
FLATPAGES_ROOT = ""
FLATPAGES_EXTENSION = '.md'
FLATPAGES_MARKDOWN_EXTENSIONS = [
'sane_lists',
'sipa.utils.bootstraped_tables',
'nl2br',
'meta',
'attr_list'
]
LOGGING_CONFIG_LOCATION = "sipa/default_log_config"
# Mail configuration
MAILSERVER_HOST = "127.0.0.1"
MAILSERVER_PORT = 25
# LDAP configuration
LDAP_HOST = "127.0.0.1"
LDAP_PORT = 389
LDAP_SEARCH_BASE = ""
# MySQL configuration
DB_ATLANTIS_HOST = "127.0.0.1"
DB_ATLANTIS_USER = ""
DB_ATLANTIS_PASSWORD = ""
# MySQL Helios configuration
DB_HELIOS_HOST = "127.0.0.1"
DB_HELIOS_PORT = 3307 # alternative port for 2nd db
DB_HELIOS_USER = ""
DB_HELIOS_PASSWORD = ""
SQL_TIMEOUT = 15
# todo further modularization. id mappings are rather specific than generous.
# MySQL id mappings
DORMITORIES = [
u'Wundstraße 5',
u'Wundstraße 7',
u'Wundstraße 9',
u'Wundstraße 11',
u'Wundstraße 1',
u'Wundstraße 3',
u'Zellescher Weg 41',
u'Zellescher Weg 41A',
u'Zellescher Weg 41B',
u'Zellescher Weg 41C',
u'Zellescher Weg 41D'
]
STATUS = {
# todo vervollständigen oder mindestens fehlerresistent machen!
# (Hat ein Nutzer einen unten nicht enthaltenen Status, gibts einen Fehler)
1: gettext(u'Bezahlt, verbunden'),
2: gettext(u'Nicht bezahlt, Netzanschluss gesperrt'),
7: gettext(u'Verstoß gegen Netzordnung, Netzanschluss gesperrt'),
9: gettext(u'Exaktiv'),
12: gettext(u'Trafficlimit überschritten, Netzanschluss gesperrt')
}
WEEKDAYS = {
'0': gettext('Sonntag'),
'1': gettext('Montag'),
'2': gettext('Dienstag'),
'3': gettext('Mittwoch'),
'4': gettext('Donnerstag'),
'5': gettext('Freitag'),
'6': gettext('Samstag')
}
# Languages
LANGUAGES = {
'de': 'Deutsch',
'en': 'English'
}
# Bus & tram stops
BUSSTOPS = [
"Zellescher Weg",
"Strehlener Platz",
"Weberplatz"
]
| mit | Python |
ce386516eef2ded1ddd8ed45c07d7e0b5f751c3a | Fix a bug where batch creators were not stored properly | ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive | server/models/batch.py | server/models/batch.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
from girder.models.model_base import Model
class Batch(Model):
def initialize(self):
self.name = 'batch'
# TODO: add indexes
def createBatch(self, dataset, creator, signature):
now = datetime.datetime.utcnow()
batch = self.save({
'datasetId': dataset['_id'],
'created': now,
'creatorId': creator['_id'],
'signature': signature
})
return batch
def validate(self, doc, **kwargs):
# TODO: implement
return doc
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
from girder.models.model_base import Model
class Batch(Model):
def initialize(self):
self.name = 'batch'
# TODO: add indexes
def createBatch(self, dataset, creator, signature):
now = datetime.datetime.utcnow()
batch = self.save({
'datasetId': dataset['_id'],
'created': now,
'creatorId': creator,
'signature': signature
})
return batch
def validate(self, doc, **kwargs):
# TODO: implement
return doc
| apache-2.0 | Python |
8d7c283768dbb42487c3fc1fbe72c977a42ab165 | load configs from config file | googleinterns/cloud-monitoring-notification-delivery-integration-sample-code,googleinterns/cloud-monitoring-notification-delivery-integration-sample-code | main.py | main.py | # Copyright 2019 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Source code from https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/run/pubsub/main.py
# [START run_pubsub_server_setup]
import base64
import os
from flask import Flask, request
app = Flask(__name__)
app.config.from_object('config.DevConfig')
# [END run_pubsub_server_setup]
# [START run_pubsub_handler]
@app.route('/', methods=['POST'])
def index():
envelope = request.get_json()
if not envelope:
msg = 'no Pub/Sub message received'
print(f'error: {msg}')
return f'Bad Request: {msg}', 400
if not isinstance(envelope, dict) or 'message' not in envelope:
msg = 'invalid Pub/Sub message format'
print(f'error: {msg}')
return f'Bad Request: {msg}', 400
pubsub_message = envelope['message']
name = 'World'
if isinstance(pubsub_message, dict) and 'data' in pubsub_message:
name = base64.b64decode(pubsub_message['data']).decode('utf-8').strip()
print(f'Hello {name}!')
return ('', 204)
# [END run_pubsub_handler]
if __name__ == '__main__':
PORT = int(os.getenv('PORT')) if os.getenv('PORT') else 8080
# This is used when running locally. Gunicorn is used to run the
# application on Cloud Run. See entrypoint in Dockerfile.
app.run(host='127.0.0.1', port=PORT, debug=True) | # Copyright 2019 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Source code from https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/run/pubsub/main.py
# [START run_pubsub_server_setup]
import base64
import os
from flask import Flask, request
app = Flask(__name__)
# [END run_pubsub_server_setup]
# [START run_pubsub_handler]
@app.route('/', methods=['POST'])
def index():
envelope = request.get_json()
if not envelope:
msg = 'no Pub/Sub message received'
print(f'error: {msg}')
return f'Bad Request: {msg}', 400
if not isinstance(envelope, dict) or 'message' not in envelope:
msg = 'invalid Pub/Sub message format'
print(f'error: {msg}')
return f'Bad Request: {msg}', 400
pubsub_message = envelope['message']
name = 'World'
if isinstance(pubsub_message, dict) and 'data' in pubsub_message:
name = base64.b64decode(pubsub_message['data']).decode('utf-8').strip()
print(f'Hello {name}!')
return ('', 204)
# [END run_pubsub_handler]
if __name__ == '__main__':
PORT = int(os.getenv('PORT')) if os.getenv('PORT') else 8080
# This is used when running locally. Gunicorn is used to run the
# application on Cloud Run. See entrypoint in Dockerfile.
app.run(host='127.0.0.1', port=PORT, debug=True) | apache-2.0 | Python |
086b5ecd90c65e975d138cc3ac396dd3bb84a1df | Remove comment from codebase. I hate comments. | AmosGarner/PyInventory | main.py | main.py | from editCollection import editCollection
from ObjectFactories.ItemFactory import ItemFactory
from osOps import *
from collectionOps import *
import datetime, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=False)
parser.add_argument('--name', dest='collectionName', required=False)
parser.add_argument('--type', dest='collectionType', required=False)
parser.add_argument('--input', dest='inputData', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "/" + username + "_" + collectionName + "_collection.dat"
def installCollectionsDirectory(username):
createDirectory(CONST_COLLECTIONS_NAME + '/' + username)
def main():
arguments = generateArgumentsFromParser()
if arguments.action.lower() == "install":
installCollectionsDirectory(arguments.username)
return None
collectionFilePath = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create_collection":
baseCollection = Collection(arguments.username, arguments.collectionName, arguments.collectionType)
writeCollectionToFile(collectionFilePath, baseCollection)
elif arguments.action.lower() == "edit_collection":
editCollection(collectionFilePath, arguments.inputData)
elif arguments.action.lower() == "insert_item":
collectionLength = len(getCollection(collectionFilePath).items)
dateTime = datetime.datetime.now()
if arguments.collectionType.lower() == "item":
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, arguments.inputData, str(dateTime), str(dateTime)]))
else:
inputDataArr = arguments.inputData.split('~')
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime), inputDataArr[1]]))
elif arguments.action.lower() == "remove_collection":
removeFile(collectionFilePath)
if __name__ == '__main__':
main()
| from editCollection import editCollection
from ObjectFactories.ItemFactory import ItemFactory
from osOps import *
from collectionOps import *
import datetime, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=False)
parser.add_argument('--name', dest='collectionName', required=False)
parser.add_argument('--type', dest='collectionType', required=False)
parser.add_argument('--input', dest='inputData', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "/" + username + "_" + collectionName + "_collection.dat"
def installCollectionsDirectory(username):
createDirectory(CONST_COLLECTIONS_NAME + '/' + username)
def main():
arguments = generateArgumentsFromParser()
if arguments.action.lower() == "install":
installCollectionsDirectory(arguments.username)
return None
#placed this code here because I want this to run only if the user is not installing the program directories.
collectionFilePath = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create_collection":
baseCollection = Collection(arguments.username, arguments.collectionName, arguments.collectionType)
writeCollectionToFile(collectionFilePath, baseCollection)
elif arguments.action.lower() == "edit_collection":
editCollection(collectionFilePath, arguments.inputData)
elif arguments.action.lower() == "insert_item":
collectionLength = len(getCollection(collectionFilePath).items)
dateTime = datetime.datetime.now()
if arguments.collectionType.lower() == "item":
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, arguments.inputData, str(dateTime), str(dateTime)]))
else:
inputDataArr = arguments.inputData.split('~')
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime), inputDataArr[1]]))
elif arguments.action.lower() == "remove_collection":
removeFile(collectionFilePath)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
431c88741ef324ef4801d69ebad905ccfa096eab | Add reference to LICENSE | ohyou/vk-poster | main.py | main.py | '''
For licence, see: LICENSE
'''
import http.client, sys, json, os, vk, datetime, time, imghdr
import requests as r
from PIL import Image
class Connection:
def __init__(self):
self.established = False
self.attempts = 0
self.vkapi = None
# API requests here
def authorize(self):
if os.path.exists("access_token"):
file = open("access_token")
session = vk.Session(access_token=file.read())
self.vkapi = vk.API(session)
return self.vkapi != None
# def request
# return bool
# def get # wall/scheduled data
# return json?
def establish(self):
conn = http.client.HTTPConnection('www.google.com')
try:
conn.request("HEAD", "/")
self.established = True
except:
print("Connection error:", sys.exc_info()[0])
self.established = False
return self.established
class Group:
def __init__ (self, connection, name, id):
self.conn = connection
self.name = name
self.id = id
self.history = []
self.history_file = self.name + ".json"
self.scheduled_posts = []
#self.downloaded = False
# Downloading with os.system("downloader.py")
# Posting logic here
# Utils here or in a separate class?
# TODO: posting time management
# def download
# return len(arr)
# def getTime # find a hole or +1h from last post
# return string?
# def addTime # utils?
# return string?
def getScheduledPosts(self):
self.scheduled_posts = self.conn.vkapi.wall.get(owner_id=self.id, count=100, filter="postponed")
return self.scheduled_posts[0]
def loadHistory(self):
mode = 'a' if os.path.exists(self.history_file) else 'w'
with open(self.history_file, mode) as data_file:
try:
data = json.load(data_file)
self.history = data['files']
except ValueError:
print ("WARNING: Empty file or broken json structure")
return len(self.history)
def post(self):
if self.conn.establish() == False:
print ("ERROR: No connection, aborting")
return
if self.conn.authorize() == False:
print ("ERROR: API authorization failed, aborting")
return
if self.loadHistory() <= 0:
print ("WARNING: No history found")
else:
print (" History: ", len(self.history))
if self.getScheduledPosts() <= 0:
print ("WARNING: No scheduled posts")
else:
print (" Scheduled posts: ", self.scheduled_posts[0])
# Loop through files omitting ones in self.history and post them
# Posting with self.conn
print (" posting to", self.name)
if __name__ == "__main__":
conn = Connection()
g_meirl = Group(conn, 'me_irl', -99583108)
g_4chan = Group(conn, '4chan', -99632260)
g_bpt = Group(conn, 'BlackPeopleTwitter', -99632081)
g_meirl.post()
g_4chan.post()
g_bpt.post() | import http.client, sys, json, os, vk, datetime, time, imghdr
import requests as r
from PIL import Image
class Connection:
def __init__(self):
self.established = False
self.attempts = 0
self.vkapi = None
# API requests here
def authorize(self):
if os.path.exists("access_token"):
file = open("access_token")
session = vk.Session(access_token=file.read())
self.vkapi = vk.API(session)
return self.vkapi != None
# def request
# return bool
# def get # wall/scheduled data
# return json?
def establish(self):
conn = http.client.HTTPConnection('www.google.com')
try:
conn.request("HEAD", "/")
self.established = True
except:
print("Connection error:", sys.exc_info()[0])
self.established = False
return self.established
class Group:
def __init__ (self, connection, name, id):
self.conn = connection
self.name = name
self.id = id
self.history = []
self.history_file = self.name + ".json"
self.scheduled_posts = []
#self.downloaded = False
# Downloading with os.system("downloader.py")
# Posting logic here
# Utils here or in a separate class?
# TODO: posting time management
# def download
# return len(arr)
# def getTime # find a hole or +1h from last post
# return string?
# def addTime # utils?
# return string?
def getScheduledPosts(self):
self.scheduled_posts = self.conn.vkapi.wall.get(owner_id=self.id, count=100, filter="postponed")
return self.scheduled_posts[0]
def loadHistory(self):
mode = 'a' if os.path.exists(self.history_file) else 'w'
with open(self.history_file, mode) as data_file:
try:
data = json.load(data_file)
self.history = data['files']
except ValueError:
print ("WARNING: Empty file or broken json structure")
return len(self.history)
def post(self):
if self.conn.establish() == False:
print ("ERROR: No connection, aborting")
return
if self.conn.authorize() == False:
print ("ERROR: API authorization failed, aborting")
return
if self.loadHistory() <= 0:
print ("WARNING: No history found")
else:
print (" History: ", len(self.history))
if self.getScheduledPosts() <= 0:
print ("WARNING: No scheduled posts")
else:
print (" Scheduled posts: ", self.scheduled_posts[0])
# Loop through files omitting ones in self.history and post them
# Posting with self.conn
print (" posting to", self.name)
if __name__ == "__main__":
conn = Connection()
g_meirl = Group(conn, 'me_irl', -99583108)
g_4chan = Group(conn, '4chan', -99632260)
g_bpt = Group(conn, 'BlackPeopleTwitter', -99632081)
g_meirl.post()
g_4chan.post()
g_bpt.post() | mit | Python |
2ada5466f18c5f342dfece46281f806e8f9cdc8b | Use a threading instead of a forking server | Foxboron/HyREPL,Foxboron/HyREPL | main.py | main.py | import time
import sys
from HyREPL.session import Session
from HyREPL import bencode
import threading
from socketserver import ThreadingMixIn, ForkingMixIn, TCPServer, BaseRequestHandler
class ReplServer(ThreadingMixIn, TCPServer): pass
sessions = {}
class ReplRequestHandler(BaseRequestHandler):
session = None
def handle(self):
print("New client")
buf = b""
while True:
newstuff = self.request.recv(1024)
if len(newstuff) == 0:
break
buf += newstuff
try:
msg, rest = bencode.decode(buf)
buf = rest
except Exception as e:
print(e)
continue
if self.session is None:
if msg.get("session") in sessions:
self.session = sessions[msg.get("session")]
else:
self.session = Session(sessions)
sessions[self.session.uuid] = self.session
self.session.handle(msg, self.request)
print("Client gone")
def start_tcp_server(ip, port):
s = ReplServer((ip, port), ReplRequestHandler)
t = threading.Thread(target=s.serve_forever)
t.daemon = True
t.start()
return t
if __name__ == '__main__':
# start_server('127.0.0.1', 1337)
port = 1337
while True:
try:
t = start_tcp_server('127.0.0.1', port)
except OSError:
port += 1
else:
sys.stdout.write("Listening on {}\a\n".format(port))
sys.stdout.flush()
while True:
time.sleep(1)
t.join()
| import time
import sys
from HyREPL.session import Session
from HyREPL import bencode
import threading
from socketserver import ForkingMixIn, TCPServer, BaseRequestHandler
class ReplServer(ForkingMixIn, TCPServer): pass
sessions = {}
class ReplRequestHandler(BaseRequestHandler):
session = None
def handle(self):
print("New client")
buf = b""
while True:
newstuff = self.request.recv(1024)
if len(newstuff) == 0:
break
buf += newstuff
try:
msg, rest = bencode.decode(buf)
buf = rest
except Exception as e:
print(e)
continue
if self.session is None:
if msg.get("session") in sessions:
self.session = sessions[msg.get("session")]
else:
self.session = Session(sessions)
sessions[self.session.uuid] = self.session
self.session.handle(msg, self.request)
print("Client gone")
def start_tcp_server(ip, port):
s = ReplServer((ip, port), ReplRequestHandler)
t = threading.Thread(target=s.serve_forever)
t.daemon = True
t.start()
return t
if __name__ == '__main__':
# start_server('127.0.0.1', 1337)
port = 1337
while True:
try:
t = start_tcp_server('127.0.0.1', port)
except OSError:
port += 1
else:
sys.stdout.write("Listening on {}\a\n".format(port))
sys.stdout.flush()
while True:
time.sleep(1)
t.join()
| mit | Python |
752d3b5c1301d4acd1539831fd03abd63ac19cf5 | Switch between two different lists. | philsc/vbox-tui | main.py | main.py | #!/usr/bin/env python
import urwid
class VMWidget (urwid.WidgetWrap):
def __init__ (self, state, name):
self.state = state
self.content = name
self.item = urwid.AttrMap(
urwid.Text('%-15s %s' % (state, name)), 'body', 'focus'
)
self.__super.__init__(self.item)
def selectable (self):
return True
def keypress(self, size, key):
return key
def switch_list(listbox):
global current_listbox
current_listbox = listbox
main.contents.update(body=(listbox, None))
def handle_input(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
elif key in ('j',):
try:
current_listbox.set_focus(current_listbox.focus_position + 1)
except IndexError:
pass
elif key in ('k',):
try:
current_listbox.set_focus(current_listbox.focus_position - 1)
except IndexError:
pass
elif key in ('e',):
if current_listbox is listbox_props:
switch_list(listbox_vms)
else:
switch_list(listbox_props)
palette = [
('highlight', 'black', 'brown'),
('body','dark cyan', ''),
('focus','dark red', 'black'),
]
vms = 'foo lkj kj lskjdf jlklekj f'.split()
listbox_vms = urwid.ListBox(urwid.SimpleListWalker( \
[VMWidget('foo', v) for v in vms]))
listbox_props = urwid.ListBox(urwid.SimpleListWalker( \
[VMWidget('barr', v + ' me') for v in vms]))
current_listbox = listbox_vms
shortcuts = urwid.AttrMap(urwid.Text(' q: Quit'), 'highlight')
listbox_vms_map = urwid.AttrMap(listbox_vms, 'body')
listbox_props_map = urwid.AttrMap(listbox_props, 'body')
label = urwid.AttrMap(urwid.Text(' VM Selection'), 'highlight')
main = urwid.Frame(listbox_vms_map, header=shortcuts, footer=label)
loop = urwid.MainLoop(main, palette=palette, unhandled_input=handle_input)
loop.screen.set_terminal_properties(colors=16)
loop.run()
| #!/usr/bin/env python
import urwid
class VMWidget (urwid.WidgetWrap):
def __init__ (self, state, name):
self.state = state
self.content = name
self.item = urwid.AttrMap(
urwid.Text('%-15s %s' % (state, name)), 'body', 'focus'
)
self.__super.__init__(self.item)
def selectable (self):
return True
def keypress(self, size, key):
return key
def handle_input(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
elif key in ('j'):
try:
listbox.set_focus(listbox.focus_position + 1)
except IndexError:
pass
elif key in ('k'):
try:
listbox.set_focus(listbox.focus_position - 1)
except IndexError:
pass
palette = [
('highlight', 'black', 'brown'),
('body','dark cyan', ''),
('focus','dark red', 'black'),
]
vms = 'foo lkj kj lskjdf jlklekj f'.split()
shortcuts = urwid.AttrMap(urwid.Text(' q: Quit'), 'highlight')
listbox = urwid.ListBox(urwid.SimpleListWalker([VMWidget('foo', v) for v in vms]))
label = urwid.AttrMap(urwid.Text(' VM Selection'), 'highlight')
main = urwid.Frame(urwid.AttrMap(listbox, 'body'), header=shortcuts, \
footer=label)
loop = urwid.MainLoop(main, palette=palette, unhandled_input=handle_input)
loop.screen.set_terminal_properties(colors=16)
loop.run()
| mit | Python |
805536cc2e30da3eb676f86b3382e592e616b793 | rename install function | AmosGarner/PyInventory | main.py | main.py | from createCollection import createCollectionFile
from updateCollection import updateCollection, getCollectionLength
from removeCollection import removeCollection
from editCollection import editCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
from osOps import *
from collectionOps import *
import datetime, json, os, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=False)
parser.add_argument('--name', dest='collectionName', required=False)
parser.add_argument('--type', dest='collectionType', required=False)
parser.add_argument('--input', dest='inputData', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionDataToFile(collectionFilePath, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionName, arguments.collectionType)
collectionFile = open(collectionFilePath, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def installCollectionsDirectory(username):
createDirectory(CONST_COLLECTIONS_NAME + '/' + username)
def main():
arguments = generateArgumentsFromParser()
if arguments.action.lower() == "install":
installCollectionsDirectory(arguments.username)
return None
collectionFilePath = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create_collection":
createCollectionFile(collectionFilePath)
writeCollectionDataToFile(collectionFilePath, arguments)
elif arguments.action.lower() == "edit_collection":
editCollection(collectionFilePath, arguments.inputData)
elif arguments.action.lower() == "insert_item":
collectionLength = getCollectionLength(collectionFilePath)
inputDataArr = arguments.inputData.split('~')
dateTime = datetime.datetime.now()
if arguments.collectionType.lower() == "item":
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime)]))
else:
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime), inputDataArr[1]]))
elif arguments.action.lower() == "remove_collection":
removeCollection(collectionFilePath)
if __name__ == '__main__':
main()
| from createCollection import createCollectionFile
from updateCollection import updateCollection, getCollectionLength
from removeCollection import removeCollection
from editCollection import editCollection
from ObjectFactories.ItemFactory import ItemFactory
from DataObjects.Collection import Collection
from osOps import *
from collectionOps import *
import datetime, json, os, argparse
CONST_COLLECTIONS_NAME = 'collections'
def generateArgumentsFromParser():
parser = parser = argparse.ArgumentParser(description="Runs the PyInventory utility for creating a collection of items.")
parser.add_argument('--action', dest='action', required=True)
parser.add_argument('--user', dest='username', required=False)
parser.add_argument('--name', dest='collectionName', required=False)
parser.add_argument('--type', dest='collectionType', required=False)
parser.add_argument('--input', dest='inputData', required=False)
return parser.parse_args()
def generateFileName(username, collectionName):
return CONST_COLLECTIONS_NAME + "/" + username + "_" + CONST_COLLECTIONS_NAME + "/" + username + "_" + collectionName + "_collection.dat"
def generateNewCollection(username, collectionType, collectionName):
return Collection(username, collectionType, collectionName, [])
def writeCollectionDataToFile(collectionFilePath, arguments):
collection = generateNewCollection(arguments.username, arguments.collectionName, arguments.collectionType)
collectionFile = open(collectionFilePath, 'w')
collectionFile.write(collection.toJSON())
collectionFile.close()
def createCollectionsDirectory(username):
createDirectory(CONST_COLLECTIONS_NAME + '/' + username)
def main():
arguments = generateArgumentsFromParser()
if arguments.action.lower() == "install":
createCollectionsDirectory(arguments.username)
return None
collectionFilePath = generateFileName(arguments.username, arguments.collectionName)
if arguments.action.lower() == "create_collection":
createCollectionFile(collectionFilePath)
writeCollectionDataToFile(collectionFilePath, arguments)
elif arguments.action.lower() == "edit_collection":
editCollection(collectionFilePath, arguments.inputData)
elif arguments.action.lower() == "insert_item":
collectionLength = getCollectionLength(collectionFilePath)
inputDataArr = arguments.inputData.split('~')
dateTime = datetime.datetime.now()
if arguments.collectionType.lower() == "item":
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime)]))
else:
updateCollection(collectionFilePath, ItemFactory.factory(arguments.collectionType, [collectionLength+1, inputDataArr[0], str(dateTime), str(dateTime), inputDataArr[1]]))
elif arguments.action.lower() == "remove_collection":
removeCollection(collectionFilePath)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
ac16841a56450954fc566bd1a2997bf78c3ac195 | Fix error during start (related to sprofile folder) | UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine | main.py | main.py | import datetime
import shutil
from time import sleep
import pymongo
from celery.app.control import Control
from crawler import crawler, tasks
from crawler.celery import app
from crawler.db import db_mongodb as db
from crawler.settings import *
print("******************************")
print("UPOL-Crawler v" + CONFIG.get('Info', 'version'))
print("******************************")
print("LOADING..")
start_load_time = datetime.datetime.now()
# Start procedure
client = pymongo.MongoClient('localhost', 27017, maxPoolSize=None)
database = client.upol_crawler
# Init database
db.init(database)
crawler.load_seed(SEED_FILE, database)
end_load_time = datetime.datetime.now()
if CONFIG.getboolean('Debug', 'cprofile_crawl_task'):
os.makedirs(CPROFILE_DIR, exist_ok=True)
print("Deleting cprofile folder...")
# Cleaning cprofile folder
shutil.rmtree(CPROFILE_DIR)
print("DONE! " + str(end_load_time - start_load_time))
print("------------------------------")
print("Start crawling...")
print("******************************")
start_time = datetime.datetime.now()
last_sleep_1 = datetime.datetime.now()
sleeping = False
number_of_waiting = 0
number_of_added_links = 0
while True:
if sleeping is False:
last_sleep_2 = datetime.datetime.now()
last_sleep_delta = last_sleep_2 - last_sleep_1
if last_sleep_delta.seconds > 5:
sleeping = True
else:
sleeping = False
if CONFIG.getboolean('Settings', 'random_unvisited_url'):
url, value = db.get_random_url_for_crawl(database)
else:
url, value = db.get_url_for_crawl(database)
if url is not None:
number_of_added_links = number_of_added_links + 1
db.set_queued_url(database, url)
tasks.crawl_url_task.delay(url, value)
else:
print("------------------------------")
print("Added links:" + str(number_of_added_links))
number_of_added_links = 0
print("Workers are running - SLEEPING")
print("------------------------------")
sleep(20)
if not db.is_some_url_queued(database):
number_of_waiting = number_of_waiting + 1
else:
number_of_waiting = 0
if number_of_waiting > 5:
break
last_sleep_1 = datetime.datetime.now()
sleeping = False
end_time = datetime.datetime.now()
duration = end_time - start_time
print("------------------------------")
print("Crawl FINISHED")
print("Duration: " + str(duration))
print("------------------------------")
| import datetime
import shutil
from time import sleep
import pymongo
from celery.app.control import Control
from crawler import crawler, tasks
from crawler.celery import app
from crawler.db import db_mongodb as db
from crawler.settings import *
print("******************************")
print("UPOL-Crawler v" + CONFIG.get('Info', 'version'))
print("******************************")
print("LOADING..")
start_load_time = datetime.datetime.now()
# Start procedure
client = pymongo.MongoClient('localhost', 27017, maxPoolSize=None)
database = client.upol_crawler
# Init database
db.init(database)
crawler.load_seed(SEED_FILE, database)
end_load_time = datetime.datetime.now()
print("Deleting cprofile folder...")
# Cleaning cprofile folder
shutil.rmtree(CPROFILE_DIR)
print("DONE! " + str(end_load_time - start_load_time))
print("------------------------------")
print("Start crawling...")
print("******************************")
start_time = datetime.datetime.now()
last_sleep_1 = datetime.datetime.now()
sleeping = False
number_of_waiting = 0
number_of_added_links = 0
while True:
if sleeping is False:
last_sleep_2 = datetime.datetime.now()
last_sleep_delta = last_sleep_2 - last_sleep_1
if last_sleep_delta.seconds > 5:
sleeping = True
else:
sleeping = False
if CONFIG.getboolean('Settings', 'random_unvisited_url'):
url, value = db.get_random_url_for_crawl(database)
else:
url, value = db.get_url_for_crawl(database)
if url is not None:
number_of_added_links = number_of_added_links + 1
db.set_queued_url(database, url)
tasks.crawl_url_task.delay(url, value)
else:
print("------------------------------")
print("Added links:" + str(number_of_added_links))
number_of_added_links = 0
print("Workers are running - SLEEPING")
print("------------------------------")
sleep(20)
if not db.is_some_url_queued(database):
number_of_waiting = number_of_waiting + 1
else:
number_of_waiting = 0
if number_of_waiting > 5:
break
last_sleep_1 = datetime.datetime.now()
sleeping = False
end_time = datetime.datetime.now()
duration = end_time - start_time
print("------------------------------")
print("Crawl FINISHED")
print("Duration: " + str(duration))
print("------------------------------")
| mit | Python |
6c3561f43d5e3e5b9b824ac49898d831af81efd4 | change name | AliGhahraei/nao-classroom | main.py | main.py | import sys
import motion
import almath
import time
from naoqi import ALProxy
def StiffnessOn(proxy):
#We use the "Body" name to signify the collection of all joints
pNames = "Body"
pStiffnessLists = 1.0
pTimeLists = 1.0
proxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)
def main(robotIP):
""" Example showing a path of two positions
Warning: Needs a PoseInit before executing
"""
# Init proxies.
try:
motionProxy = ALProxy("ALMotion", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALMotion"
print "Error was: ", e
try:
postureProxy = ALProxy("ALRobotPosture", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALRobotPosture"
print "Error was: ", e
# Set NAO in Stiffness On
StiffnessOn(motionProxy)
# Send NAO to Pose Init
postureProxy.goToPosture("StandInit", 1)
postureProxy.goToPosture("Sit")
robotIp = "10.15.89.247"
main(robotIp) | import sys
import motion
import almath
import time
from naoqi import ALProxy
def StiffnessOn(proxy):
#We use the "Body" name to signify the collection of all joints
pNames = "Body"
pStiffnessLists = 1.0
pTimeLists = 1.0
proxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)
def main(robotIP):
""" Example showing a path of two positions
Warning: Needs a PoseInit before executing
"""
# Init proxies.
try:
motionProxy = ALProxy("ALMotion", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALMotion"
print "Error was: ", e
try:
postureProxy = ALProxy("ALRobotPosture", robotIP, 9559)
except Exception, e:
print "Could not create proxy to ALRobotPosture"
print "Error was: ", e
# Set NAO in Stiffness On
StiffnessOn(motionProxy)
# Send NAO to Pose Init
postureProxy.goToPosture("StandInit", 0.5)
motionProxy.angleInterpolation()
time.sleep(1)
# postureProxy.goToPosture("Sit")
robotIp = "10.15.89.247"
main(robotIp) | mit | Python |
9a51358871f04e2a5552621b6ac2c9dbe1ee8345 | Save temporary pictures to local directory | jollex/SnapchatBot | main.py | main.py | !/usr/bin/env python
from pysnap import Snapchat
import secrets
s = Snapchat()
s.login(secrets.USERNAME, secrets.PASSWORD)
friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1]
for friend in friends_to_add:
s.add_friend(friend)
snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0]
for snap in snaps:
with open('~/SnapchatBot/tmp.jpg', 'wb') as f:
f.write(s.get_blob(snap))
media_id = s.upload('~/SnapchatBot/tmp.jpg')
s.post_story(media_id, 5)
s.mark_viewed(snap)
| #!/usr/bin/env python
from pysnap import Snapchat
import secrets
s = Snapchat()
s.login(secrets.USERNAME, secrets.PASSWORD)
friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1]
for friend in friends_to_add:
s.add_friend(friend)
snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0]
for snap in snaps:
with open('tmp.jpg', 'wb') as f:
f.write(s.get_blob(snap))
media_id = s.upload('tmp.jpg')
s.post_story(media_id, 5)
s.mark_viewed(snap)
| mit | Python |
3ba97ec3f98d9b40c0681cd6550d0fbd1ec4f626 | create a base handler class to deal with templates | thiago6891/ai-experiments,thiago6891/ai-experiments | main.py | main.py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import jinja2
import os
import webapp2
template_dir = os.path.dirname(__file__)
jinja_env = jinja2.Environment(
loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
class BaseHandler(webapp2.RequestHandler):
def write(self, *a, **kw):
self.response.out.write(*a, **kw)
def render_str(self, file_name, **params):
template = jinja_env.get_template(file_name)
return template.render(params)
def render(self, file_name, **kw):
self.write(self.render_str(file_name, **kw))
class MainHandler(webapp2.RequestHandler):
def get(self):
self.response.write('Hello world!')
app = webapp2.WSGIApplication([
('/', MainHandler)
], debug=True)
| #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
class MainHandler(webapp2.RequestHandler):
def get(self):
self.response.write('Hello world!')
app = webapp2.WSGIApplication([
('/', MainHandler)
], debug=True)
| mit | Python |
da30c13891bc4180d7bf8d94059b5cdbc88db1ed | update main | HeavenH/teleGit | main.py | main.py |
from GitApi import GitHub
import configparser
from telegram.ext import Updater, CommandHandler
# Configuração do Bot
config = configparser.ConfigParser()
config.read_file(open('config.ini'))
# Conectando a API do telegram
# Updater pegará as informações e dispatcher conectará a mensagem ao bot
up = Updater(token=config['DEFAULT']['token'])
dispatcher = up.dispatcher
# Função Inicial
def start(bot, update):
me = bot.get_me()
# Mesagem Inicial
msg = "Bem vindo!\n"
msg += "Eu sou o TeleGit\n"
msg += "O que você gostaria de fazer?\n"
msg += "/listing +username - Listará seus repositórios\n"
msg += "Ex: /listing HeavenH"
# Envia a mensagem com o menu
bot.send_message(chat_id=update.message.chat_id, text=msg)
# Função para listar os repositórios
def listing(bot, update):
user = update.message.text.split()[1]
bot.send_message(chat_id=update.message.chat_id, text=user)
re = GitHub()
bot.send_message(chat_id=update.message.chat_id,
text=re.GetRepos(user))
def info(bot, update):
user = update.message.text.split()[1]
bot.send_message(chat_id=update.message.chat_id, text=user)
msg = GitHub()
bot.send_message(chat_id=update.message.chat_id, text=msg.GetInfo(user))
# Transforma as funções em Comandos
start_handler = CommandHandler('start', start)
listing_handler = CommandHandler('listing', listing)
info_handler = CommandHandler('info', info)
# Envia os Comandos para o telegram
dispatcher.add_handler(start_handler)
dispatcher.add_handler(listing_handler)
dispatcher.add_handler(info_handler)
# Desenvolvido by Heaven,CliTrix,Cerberu5 all rights reserved
| import configparser
from requests import get
from json import loads
from telegram.ext import Updater, CommandHandler
#Configuração do Bot
config = configparser.ConfigParser()
config.read_file(open('config.ini'))
#Conectando a API do telegram
#Updater pegará as informações e dispatcher conectará a mensagem ao bot
up = Updater(token=config['DEFAULT']['token'])
dispatcher = up.dispatcher
#Função Inicial
def start(bot, update):
me = bot.get_me()
#Mesagem Inicial
msg = "Bem vindo!\n"
msg += "Eu sou o TeleGit\n"
msg += "O que você gostaria de fazer?\n"
msg += "/listing +username - Listará seus repositórios\n"
msg += "Ex: /listing HeavenH"
#Envia a mensagem para o telegram
bot.send_message(chat_id=update.message.chat_id,text=msg)
#Função para listar os repositórios
def listing(bot, update):
user = update.message.text.split()[1]
bot.send_message(chat_id=update.message.chat_id,text=user)
r = get('https://api.github.com/users/' + user + '/repos').text
r = loads(r)
for repo in range(len(r)):
bot.send_message(chat_id=update.message.chat_id,text=r[repo]['html_url'])
#Função para mostrar as informações do usuário
def info(bot, update):
user = update.message.text.split()[1]
bot.send_message(chat_id=update.message.chat_id,text=user)
r = get('https://api.github.com/users/' + user).text
r = loads(r)
msg = 'Name: ' + str(r['name'] +'\n')
msg += 'Email: ' + str(r['email'] +'\n')
msg += 'Company: ' + str(r['company'] +'\n')
msg += 'Blog: ' + str(r['blog'] + '\n')
msg += 'Location: ' + str(r['location'] + '\n')
msg += 'Bio: ' + str(r['bio'] + '\n')
#Envia a mensagem para o telegram
bot.send_message(chat_id=update.message.chat_id,text=msg)
#Transforma as funções em Comandos
start_handler = CommandHandler('start', start)
listing_handler = CommandHandler('listing', listing)
info_handler = CommandHandler('info', info)
#Envia os Comandos para o telegram
dispatcher.add_handler(start_handler)
dispatcher.add_handler(listing_handler)
dispatcher.add_handler(info_handler)
#Desenvolvido by Heaven,CliTrix,Cerberu5 all rights reserved
| mit | Python |
565861256c9cf0f41217df13c4244315b4ebd74d | Remove deprecation warning by using new style item pipeline definition | verylasttry/portia,amikey/portia,pombredanne/portia,chennqqi/portia,NoisyText/portia,NoisyText/portia,hmilywb/portia,flip111/portia,NoisyText/portia,nju520/portia,asa1253/portia,NicoloPernigo/portia,amikey/portia,CENDARI/portia,naveenvprakash/portia,PrasannaVenkadesh/portia,CENDARI/portia,lodow/portia-proxy,nju520/portia,flip111/portia,Youwotma/portia,Youwotma/portia,NoisyText/portia,Suninus/portia,SouthStar/portia,sntran/portia,verylasttry/portia,naveenvprakash/portia,asa1253/portia,chennqqi/portia,amikey/portia,hmilywb/portia,hanicker/portia,PrasannaVenkadesh/portia,hanicker/portia,livepy/portia,flip111/portia,hmilywb/portia,livepy/portia,pombredanne/portia,anjuncc/portia,Youwotma/portia,anjuncc/portia,amikey/portia,CENDARI/portia,asa1253/portia,PrasannaVenkadesh/portia,lodow/portia-proxy,Suninus/portia,SouthStar/portia,sntran/portia,naveenvprakash/portia,flip111/portia,hanicker/portia,anjuncc/portia,naveenvprakash/portia,livepy/portia,NicoloPernigo/portia,SouthStar/portia,verylasttry/portia,pombredanne/portia,livepy/portia,asa1253/portia,PrasannaVenkadesh/portia,CENDARI/portia,SouthStar/portia,nju520/portia,pombredanne/portia,verylasttry/portia,sntran/portia,lodow/portia-proxy,sntran/portia,hmilywb/portia,anjuncc/portia,chennqqi/portia,Suninus/portia,hanicker/portia,Suninus/portia,NicoloPernigo/portia,nju520/portia,NicoloPernigo/portia,chennqqi/portia,Youwotma/portia | slybot/slybot/settings.py | slybot/slybot/settings.py | SPIDER_MANAGER_CLASS = 'slybot.spidermanager.SlybotSpiderManager'
EXTENSIONS = {'slybot.closespider.SlybotCloseSpider': 1}
ITEM_PIPELINES = {'slybot.dupefilter.DupeFilterPipeline': 1}
SPIDER_MIDDLEWARES = {'slybot.spiderlets.SpiderletsMiddleware': 999} # as close as possible to spider output
SLYDUPEFILTER_ENABLED = True
PROJECT_DIR = 'slybot-project'
try:
from local_slybot_settings import *
except ImportError:
pass
| SPIDER_MANAGER_CLASS = 'slybot.spidermanager.SlybotSpiderManager'
EXTENSIONS = {'slybot.closespider.SlybotCloseSpider': 1}
ITEM_PIPELINES = ['slybot.dupefilter.DupeFilterPipeline']
SPIDER_MIDDLEWARES = {'slybot.spiderlets.SpiderletsMiddleware': 999} # as close as possible to spider output
SLYDUPEFILTER_ENABLED = True
PROJECT_DIR = 'slybot-project'
try:
from local_slybot_settings import *
except ImportError:
pass
| bsd-3-clause | Python |
04a5d2fb004625379ef58a4e5ea10a837c28c1bc | Sort articles in main page by time | xenx/recommendation_system,xenx/recommendation_system | data/data_utils.py | data/data_utils.py | #!/usr/bin/env python
# coding=utf-8
import os
from pymongo import MongoClient
import random
class TvrainData():
def __init__(self):
"""
Just load data from Mongo.
"""
self.sequences = MongoClient(os.environ['MONGODB_URL']).tvrain.sequences
self.collection = MongoClient(os.environ['MONGODB_URL']).tvrain.articles
self.collection.create_index("time")
def get_random_articles(self, n):
"""Returns N of topics for index.html"""
articles = self.collection.find().sort("time", 1).skip(random.randint(0, self.collection.count())).limit(n)
return list(articles)
def get_article_id(self, url):
"""Get id by url"""
return self.collection.find_one({'url': url})['_id']
def get_articles_data(self, articles_urls):
"""
Get data from MongoDB for articles urls
:param articles_urls: ['article_url', ...]
:return: list of MongoDB documents
"""
articles = []
for url in articles_urls:
articles.append(self.collection.find_one({'url': url}))
return articles
def iterate_articles(self, except_articles, skip=0, limit=None, query={}):
"""
Iteate throw all articles without ids of except articles
:param except_articles: list of ids
:return:
"""
if limit is None:
data = self.collection.find(query).skip(skip)
else:
data = self.collection.find(query).skip(skip).limit(limit)
print(data.count())
for value in data:
if value not in except_articles:
yield value
def get_sequences(self):
"""Return all sequences for train"""
return list(self.sequences.find().limit(-1))
| #!/usr/bin/env python
# coding=utf-8
import os
from pymongo import MongoClient
import random
class TvrainData():
def __init__(self):
"""
Just load data from Mongo.
"""
self.sequences = MongoClient(os.environ['MONGODB_URL']).tvrain.sequences
self.collection = MongoClient(os.environ['MONGODB_URL']).tvrain.articles
self.collection.create_index("time")
def get_random_articles(self, n):
"""Returns N of topics for index.html"""
articles = self.collection.find().skip(random.randint(0, self.collection.count())).limit(n)
return list(articles)
def get_article_id(self, url):
"""Get id by url"""
return self.collection.find_one({'url': url})['_id']
def get_articles_data(self, articles_urls):
"""
Get data from MongoDB for articles urls
:param articles_urls: ['article_url', ...]
:return: list of MongoDB documents
"""
articles = []
for url in articles_urls:
articles.append(self.collection.find_one({'url': url}))
return articles
def iterate_articles(self, except_articles, skip=0, limit=None, query={}):
"""
Iteate throw all articles without ids of except articles
:param except_articles: list of ids
:return:
"""
if limit is None:
data = self.collection.find(query).skip(skip)
else:
data = self.collection.find(query).skip(skip).limit(limit)
print(data.count())
for value in data:
if value not in except_articles:
yield value
def get_sequences(self):
"""Return all sequences for train"""
return list(self.sequences.find().limit(-1))
| mit | Python |
67d63a4a5bc4f1d61f50d3f6e933f2898f218caf | check for updates every day | dmpetrov/dataversioncontrol,efiop/dvc,dataversioncontrol/dvc,dmpetrov/dataversioncontrol,dataversioncontrol/dvc,efiop/dvc | dvc/updater.py | dvc/updater.py | import os
import time
import requests
from dvc import VERSION_BASE
from dvc.logger import Logger
class Updater(object):
URL = 'https://4ki8820rsf.execute-api.us-east-2.amazonaws.com/' \
'prod/latest-version'
UPDATER_FILE = 'updater'
TIMEOUT = 24 * 60 * 60 # every day
TIMEOUT_GET = 10
def __init__(self, dvc_dir):
self.dvc_dir = dvc_dir
self.updater_file = os.path.join(dvc_dir, self.UPDATER_FILE)
@staticmethod
def init(dvc_dir):
return Updater(dvc_dir)
def check(self):
current = VERSION_BASE
if os.getenv('CI'):
return
if os.path.isfile(self.updater_file):
ctime = os.path.getctime(self.updater_file)
if time.time() - ctime < self.TIMEOUT:
msg = '{} is not old enough to check for updates'
Logger.debug(msg.format(self.UPDATER_FILE))
return
os.unlink(self.updater_file)
Logger.info('Checking for updates...')
try:
r = requests.get(self.URL, timeout=self.TIMEOUT_GET)
j = r.json()
latest = j['version']
open(self.updater_file, 'w+').close()
except Exception as exc:
msg = 'Failed to obtain latest version: {}'.format(str(exc))
Logger.debug(msg)
return
l_major, l_minor, l_patch = [int(x) for x in latest.split('.')]
c_major, c_minor, c_patch = [int(x) for x in current.split('.')]
if l_major <= c_major and \
l_minor <= c_minor and \
l_patch <= c_patch:
return
msg = 'You are using dvc version {}, however version {} is ' \
'available. Consider upgrading.'
Logger.warn(msg.format(current, latest))
| import os
import time
import requests
from dvc import VERSION_BASE
from dvc.logger import Logger
class Updater(object):
URL = 'https://4ki8820rsf.execute-api.us-east-2.amazonaws.com/' \
'prod/latest-version'
UPDATER_FILE = 'updater'
TIMEOUT = 7 * 24 * 60 * 60 # every week
TIMEOUT_GET = 10
def __init__(self, dvc_dir):
self.dvc_dir = dvc_dir
self.updater_file = os.path.join(dvc_dir, self.UPDATER_FILE)
@staticmethod
def init(dvc_dir):
return Updater(dvc_dir)
def check(self):
current = VERSION_BASE
if os.getenv('CI'):
return
if os.path.isfile(self.updater_file):
ctime = os.path.getctime(self.updater_file)
if time.time() - ctime < self.TIMEOUT:
msg = '{} is not old enough to check for updates'
Logger.debug(msg.format(self.UPDATER_FILE))
return
os.unlink(self.updater_file)
Logger.info('Checking for updates...')
try:
r = requests.get(self.URL, timeout=self.TIMEOUT_GET)
j = r.json()
latest = j['version']
open(self.updater_file, 'w+').close()
except Exception as exc:
msg = 'Failed to obtain latest version: {}'.format(str(exc))
Logger.debug(msg)
return
l_major, l_minor, l_patch = [int(x) for x in latest.split('.')]
c_major, c_minor, c_patch = [int(x) for x in current.split('.')]
if l_major <= c_major and \
l_minor <= c_minor and \
l_patch <= c_patch:
return
msg = 'You are using dvc version {}, however version {} is ' \
'available. Consider upgrading.'
Logger.warn(msg.format(current, latest))
| apache-2.0 | Python |
a42d4f2c90ae604d6cb98bfc37c1fef9840b5ff9 | bump to 0.82.3 | dmpetrov/dataversioncontrol,dmpetrov/dataversioncontrol,efiop/dvc,efiop/dvc | dvc/version.py | dvc/version.py | # Used in setup.py, so don't pull any additional dependencies
#
# Based on:
# - https://github.com/python/mypy/blob/master/mypy/version.py
# - https://github.com/python/mypy/blob/master/mypy/git.py
import os
import subprocess
_BASE_VERSION = "0.82.3"
def _generate_version(base_version):
"""Generate a version with information about the git repository"""
pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if not _is_git_repo(pkg_dir) or not _have_git():
return base_version
if _is_release(pkg_dir, base_version) and not _is_dirty(pkg_dir):
return base_version
return "{base_version}+{short_sha}{dirty}".format(
base_version=base_version,
short_sha=_git_revision(pkg_dir).decode("utf-8")[0:6],
dirty=".mod" if _is_dirty(pkg_dir) else "",
)
def _is_git_repo(dir_path):
"""Is the given directory version-controlled with git?"""
return os.path.exists(os.path.join(dir_path, ".git"))
def _have_git():
"""Can we run the git executable?"""
try:
subprocess.check_output(["git", "--help"])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
def _is_release(dir_path, base_version):
try:
output = subprocess.check_output(
["git", "describe", "--tags", "--exact-match"],
cwd=dir_path,
stderr=subprocess.STDOUT,
).decode("utf-8")
tag = output.strip()
return tag == base_version
except subprocess.CalledProcessError:
return False
def _git_revision(dir_path):
"""Get the SHA-1 of the HEAD of a git repository."""
return subprocess.check_output(
["git", "rev-parse", "HEAD"], cwd=dir_path
).strip()
def _is_dirty(dir_path):
"""Check whether a git repository has uncommitted changes."""
try:
subprocess.check_call(["git", "diff", "--quiet"], cwd=dir_path)
return False
except subprocess.CalledProcessError:
return True
__version__ = _generate_version(_BASE_VERSION)
| # Used in setup.py, so don't pull any additional dependencies
#
# Based on:
# - https://github.com/python/mypy/blob/master/mypy/version.py
# - https://github.com/python/mypy/blob/master/mypy/git.py
import os
import subprocess
_BASE_VERSION = "0.82.2"
def _generate_version(base_version):
"""Generate a version with information about the git repository"""
pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if not _is_git_repo(pkg_dir) or not _have_git():
return base_version
if _is_release(pkg_dir, base_version) and not _is_dirty(pkg_dir):
return base_version
return "{base_version}+{short_sha}{dirty}".format(
base_version=base_version,
short_sha=_git_revision(pkg_dir).decode("utf-8")[0:6],
dirty=".mod" if _is_dirty(pkg_dir) else "",
)
def _is_git_repo(dir_path):
"""Is the given directory version-controlled with git?"""
return os.path.exists(os.path.join(dir_path, ".git"))
def _have_git():
"""Can we run the git executable?"""
try:
subprocess.check_output(["git", "--help"])
return True
except subprocess.CalledProcessError:
return False
except OSError:
return False
def _is_release(dir_path, base_version):
try:
output = subprocess.check_output(
["git", "describe", "--tags", "--exact-match"],
cwd=dir_path,
stderr=subprocess.STDOUT,
).decode("utf-8")
tag = output.strip()
return tag == base_version
except subprocess.CalledProcessError:
return False
def _git_revision(dir_path):
"""Get the SHA-1 of the HEAD of a git repository."""
return subprocess.check_output(
["git", "rev-parse", "HEAD"], cwd=dir_path
).strip()
def _is_dirty(dir_path):
"""Check whether a git repository has uncommitted changes."""
try:
subprocess.check_call(["git", "diff", "--quiet"], cwd=dir_path)
return False
except subprocess.CalledProcessError:
return True
__version__ = _generate_version(_BASE_VERSION)
| apache-2.0 | Python |
9f8bd896ddbde017574ef98e09cbc76b21e9a4b0 | swap documentation for lerp and norm (oops) | davelab6/drawbotlab,djrrb/drawbotlab | math.py | math.py | # MATH HELPERS
def lerp(start, stop, amt):
"""
Return the interpolation factor (between 0 and 1) of a VALUE between START and STOP.
https://processing.org/reference/lerp_.html
"""
return float(amt-start) / float(stop-start)
def norm(value, start, stop):
"""
Interpolate.
Get Interpolated value, between zero and one.
See also: https://processing.org/reference/norm_.html
"""
return start + (stop-start) * value
| # MATH HELPERS
def lerp(start, stop, amt):
"""
Get Interpolated value.
https://processing.org/reference/lerp_.html
"""
return float(amt-start) / float(stop-start)
def norm(value, start, stop):
"""
Interpolate.
Return the interpolation factor (between 0 and 1) of a VALUE between START and STOP.
See also: https://processing.org/reference/norm_.html
"""
return start + (stop-start) * value
| mit | Python |
c596b30999a8743e6f16cf7eadceca72a0f82c2b | fix if statements to exit properly. | constanthatz/network_tools | echo_server.py | echo_server.py | #!/usr/bin/env python
from __future__ import print_function
import socket
import email.utils
def server_socket_function():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
try:
while True:
conn, addr = server_socket.accept()
message = conn.recv(32)
if message:
conn.sendall("I recieved your message. Stop talking to me. You are annoying.")
except KeyboardInterrupt:
conn.close()
server_socket.close()
def response_ok():
first_line = 'HTTP/1.1 200 OK'
timestamp = email.utils.formatdate(usegmt=True)
content_header = 'Content-Type: text/plain'
crlf = '<CRLF>'
response = ('{}\nDate: {}\n{}\n{}').format(
first_line, timestamp, content_header, crlf)
return response
def response_error(error_code, error_message):
first_line = 'HTTP/1.1 {} {}'.format(error_code, error_text)
timestamp = email.utils.formatdate(usegmt=True)
content_header = 'Content-Type: text/plain'
crlf = '<CRLF>'
response = ('{}\nDate: {}\n{}\n{}').format(
first_line, timestamp, content_header, crlf)
return response
def parse_request(request):
mup_line = request.splitlines()[0]
mup = mup_line.split(' ')
if mup[0] != 'GET':
response_error('405', 'Method Not Allowed')
return
elif mup[2] != 'HTTP/1.1':
response_error('505', 'HTTP Version Not Supported')
return
return mup[1]
if __name__ == '__main__':
server_socket_function()
| #!/usr/bin/env python
from __future__ import print_function
import socket
import email.utils
def server_socket_function():
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(1)
try:
while True:
conn, addr = server_socket.accept()
message = conn.recv(32)
if message:
conn.sendall("I recieved your message. Stop talking to me. You are annoying.")
except KeyboardInterrupt:
conn.close()
server_socket.close()
def response_ok():
first_line = 'HTTP/1.1 200 OK'
timestamp = email.utils.formatdate(usegmt=True)
content_header = 'Content-Type: text/plain'
crlf = '<CRLF>'
response = ('{}\nDate: {}\n{}\n{}').format(
first_line, timestamp, content_header, crlf)
return response
def response_error(error_code, error_message):
first_line = 'HTTP/1.1 {} {}'.format(error_code, error_text)
timestamp = email.utils.formatdate(usegmt=True)
content_header = 'Content-Type: text/plain'
crlf = '<CRLF>'
response = ('{}\nDate: {}\n{}\n{}').format(
first_line, timestamp, content_header, crlf)
return response
def parse_request(request):
mup_line = request.splitlines()[0]
mup = mup_line.split(' ')
if mup[0] != 'GET':
response_error('405', 'Method Not Allowed')
elif mup[2] != 'HTTP/1.1':
response_error('505', 'HTTP Version Not Supported')
return mup[1]
if __name__ == '__main__':
server_socket_function()
| mit | Python |
a68f4ddf71584a53fca39a28f008f7657a485f44 | add extra fields in seller_form | tejesh95/Zubio.in,tejesh95/Zubio.in,tejesh95/Zubio.in | sellapp/app1/views.py | sellapp/app1/views.py | from django.shortcuts import render
from django.http import HttpResponse
from datetime import datetime
from elasticsearch import Elasticsearch
from django import forms
es = Elasticsearch()
class SellerForm(forms.Form):
prod_description = forms.CharField(
label='Looking for buyers interested in..', max_length=5000)
months_used = forms.IntegerField()
selling_price = forms.IntegerField()
is_negotiable = forms.BooleanField(required=False)
# Create your views here.
def seller_form(request):
if request.method == "POST":
# create a form instance and populate with data
form = SellerForm(request.POST)
# check whether form details are valid:
if form.is_valid():
# print request.POST
es.index(index="my-index", doc_type="test-type", id=request.user.id, body={"prod_description": request.POST['prod_description'], "selling_price": request.POST["selling_price"], "months_used": request.POST["months_used"], "is_negotiable": request.POST["is_negotiable"], "timestamp": datetime.now()})
return HttpResponse("Yo..!! Your item is attracting lot of buyers!!")
else:
form = SellerForm()
return render(request, 'seller_form.html', {'form': form})
def buyer_feed(request):
test = es.get(index="my-index", doc_type="test-type", id=42)['_source']
return render(request, 'buyer_feed.html', {'form_data': test})
| from django.shortcuts import render
from django.http import HttpResponse
from datetime import datetime
from elasticsearch import Elasticsearch
from django import forms
es = Elasticsearch()
class SellerForm(forms.Form):
prod_description = forms.CharField(label='Looking for buyers interested in..',max_length=5000)
# Create your views here.
def seller_form(request):
if request.method =="POST":
#create a form instance and populate with data
form = SellerForm(request.POST)
#check whether form details are valid:
if form.is_valid():
print "yooo!!!!!!!!!!!!"
print request.POST['prod_description']
es.index(index="my-index", doc_type="test-type", id=42, body={"prod_description":request.POST['prod_description'],"any": "data", "timestamp": datetime.now()})
HttpResponse("Yo..!! Your item is attracting lot of buyers!!")
else:
form = SellerForm()
return render(request, 'seller_form.html', {'form':form})
def buyer_feed(request):
test = es.get(index="my-index", doc_type="test-type", id=42)['_source']
return render(request, 'buyer_feed.html', {'form_data':test}) | mit | Python |
c1120bd8a1b91ba2903515b18ea50ca6aaf0bce7 | Add method custom.check.CheckBox.Reset | AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder | source/custom/check.py | source/custom/check.py | # -*- coding: utf-8 -*-
## \package custom.check
# MIT licensing
# See: LICENSE.txt
import wx
## wx.CheckBox class that defines a 'Default' attribute
class CheckBox(wx.CheckBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=False, label=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=0, validator=wx.DefaultValidator, name=wx.CheckBoxNameStr):
wx.CheckBox.__init__(self, parent, win_id, label, pos, size, style, validator, name)
self.SetValue(value)
self.Default = value
## Reset CheckBox to default value
def Reset(self):
self.SetValue(self.Default)
return self.Value == self.Default
| # -*- coding: utf-8 -*-
## \package custom.check
# MIT licensing
# See: LICENSE.txt
import wx
## wx.CheckBox class that defines a 'Default' attribute
class CheckBox(wx.CheckBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=False, label=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=0, validator=wx.DefaultValidator, name=wx.CheckBoxNameStr):
wx.CheckBox.__init__(self, parent, win_id, label, pos, size, style, validator, name)
self.SetValue(value)
self.Default = value
| mit | Python |
d0a907872749f1bb54d6e8e160ea170059289623 | Set ComboBox class default ID to wx.ID_ANY | AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder | source/custom/combo.py | source/custom/combo.py | # -*- coding: utf-8 -*-
## \package custom.combo
# MIT licensing
# See: LICENSE.txt
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
| # -*- coding: utf-8 -*-
## \package custom.combo
# MIT licensing
# See: LICENSE.txt
import wx
from wx.combo import OwnerDrawnComboBox
class ComboBox(OwnerDrawnComboBox):
def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition,
size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator,
name=wx.ComboBoxNameStr):
OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices,
style, validator, name)
self.Default = self.GetLabel()
self.Priority = []
## Resets ComboBox to defaults
def Reset(self):
if not self.Count:
self.SetValue(self.Default)
return self.Value == self.Default
return False
| mit | Python |
183bc364676ad463f90e49340a05987707a15f31 | Access accelerometer without bridge.m (#13) | kivy/pyobjus,kivy/pyobjus,kivy/pyobjus | examples/ball-example/main.py | examples/ball-example/main.py | from random import random
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from kivy.graphics import Color
from kivy.logger import Logger
from pyobjus import autoclass
class Accelerometer:
def __init__(self):
self.motion_manager = autoclass('CMMotionManager').alloc().init()
def start(self):
if(self.motion_manager.isAccelerometerAvailable() == True):
self.motion_manager.startAccelerometerUpdates()
else:
Logger.info('accelerometer: not available.')
@property
def values(self):
if self.motion_manager.accelerometerData:
acceleration = self.motion_manager.accelerometerData.acceleration;
return (acceleration.a, acceleration.b, acceleration.c)
else:
Logger.info('accelerometer: data not yet available.')
return (0, 0, 0)
def stop(self):
self.motion_manager.stopAccelerometerUpdates()
class Ball(Widget):
velocity_x = NumericProperty(0)
velocity_y = NumericProperty(0)
h = NumericProperty(0)
velocity = ReferenceListProperty(velocity_x, velocity_y)
def move(self):
self.pos = Vector(*self.velocity) + self.pos
class PyobjusGame(Widget):
ball = ObjectProperty(None)
screen = ObjectProperty(autoclass('UIScreen').mainScreen())
accelerometer = Accelerometer()
sensitivity = ObjectProperty(50)
br_slider = ObjectProperty(None)
def __init__(self, *args, **kwargs):
super(PyobjusGame, self).__init__()
self.accelerometer.start()
def __dealloc__(self, *args, **kwargs):
# self.bridge.stopAccelerometer()
self.accelerometer.stop()
super(PyobjusGame, self).__dealloc__()
def reset_ball_pos(self):
self.ball.pos = self.width / 2, self.height / 2
def on_bright_slider_change(self):
self.screen.brightness = self.br_slider.value
def update(self, dt):
self.ball.move()
val = self.accelerometer.values
self.ball.velocity_x = val[0] * self.sensitivity
self.ball.velocity_y = val[1] * self.sensitivity
if (self.ball.y < 0) or (self.ball.top >= self.height):
self.reset_ball_pos()
self.ball.h = random()
if (self.ball.x < 0) or (self.ball.right >= self.width):
self.reset_ball_pos()
self.ball.h = random()
class PyobjusBallApp(App):
def build(self):
game = PyobjusGame()
Clock.schedule_interval(game.update, 1.0/60.0)
return game
if __name__ == '__main__':
PyobjusBallApp().run()
| from random import random
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from kivy.graphics import Color
from pyobjus import autoclass
class Ball(Widget):
velocity_x = NumericProperty(0)
velocity_y = NumericProperty(0)
h = NumericProperty(0)
velocity = ReferenceListProperty(velocity_x, velocity_y)
def move(self):
self.pos = Vector(*self.velocity) + self.pos
class PyobjusGame(Widget):
ball = ObjectProperty(None)
screen = ObjectProperty(autoclass('UIScreen').mainScreen())
bridge = ObjectProperty(autoclass('bridge').alloc().init())
sensitivity = ObjectProperty(50)
br_slider = ObjectProperty(None)
def __init__(self, *args, **kwargs):
super(PyobjusGame, self).__init__()
self.bridge.startAccelerometer()
def __dealloc__(self, *args, **kwargs):
self.bridge.stopAccelerometer()
super(PyobjusGame, self).__dealloc__()
def reset_ball_pos(self):
self.ball.pos = self.width / 2, self.height / 2
def on_bright_slider_change(self):
self.screen.brightness = self.br_slider.value
def update(self, dt):
self.ball.move()
self.ball.velocity_x = self.bridge.ac_x * self.sensitivity
self.ball.velocity_y = self.bridge.ac_y * self.sensitivity
if (self.ball.y < 0) or (self.ball.top >= self.height):
self.reset_ball_pos()
self.ball.h = random()
if (self.ball.x < 0) or (self.ball.right >= self.width):
self.reset_ball_pos()
self.ball.h = random()
class PyobjusBallApp(App):
def build(self):
game = PyobjusGame()
Clock.schedule_interval(game.update, 1.0/60.0)
return game
if __name__ == '__main__':
PyobjusBallApp().run()
| mit | Python |
5fa489bcded8114c0fc9a440d00b47784daab4a1 | Update interface.py | gael-sng/PicToASCII | src/interface.py | src/interface.py |
#Initializes
op = 0;
edgechoice = "L8" #Default choice
save = True #Default
printText = False #Default
while(op != 3):
# Main menu
print("\n\n\n\n")
print("Image to ascii software.")
print("Choose an option:")
# Main menu choices
print("\t1- Transform image.")
print("\t2- Set options.")
print("\t3- Close program.")
op = int(input())
# Main menu switch
if op == 1:
#Transform image menu
print("Transform image:")
print("Give imagem filename.")
filename = input()
# Transform code
# end transform code
if save == True:
# Save text img in a file
#
if printText == True:
# print text img in the terminal
#
if op == 2:
# Set options menu
print("Choose options:")
# Set options choices
print("\t1- Choose the Edge Detection algorithm.")
print("\t2- Save resulting text in a file")
print("\t3- Print resulting text in the terminal")
op2 = int(input())
#Set options switch
if op2 == 1:
# Choose Edge Menu
print("Choose Edge Detection:")
# Choose Edge choices
print("\t1- Apply Laplacian 4")
print("\t2- Apply Laplacian 8 (Default)")
print("\t3- Apply Gaussian and Laplacian 4")
print("\t4- Apply Gaussian and Laplacian 8")
op3 = int(input())
# Choose Edges switch
if op3 == 1:
edgechoice = "L4"
if op3 == 2:
edgechoice = "L8"
if op3 == 3:
edgechoice = "G4"
if op3 == 4:
edgechoice = "G8"
if op2 == 2:
# Save Text Menu
print("Save resulting text in a file?")
# Save Text choices
print("\t1- Yes (Default)")
print("\t2- No")
op3 = int(input())
# Save Text Switch
if op3 == 1:
save = True
if op3 == 2:
save = False
if op2 == 3:
# Print Text Menu
print("Printing resulting text in the terminal?")
# Print Text choices
print("\t1- Yes")
print("\t2- No (Default)")
op3 = int(input())
# Print Text switch
if op3 == 1:
printText = True
if op3 == 2:
printText = False
if op == 3:
print ("Closing program...")
|
#Initializes
op = 0;
edgechoice = "L8" #Default choice
save = True #Default
printText = False #Default
while(op != 3):
# Main menu
print("\n\n\n\n")
print("Image to ascii software.")
print("Choose an option:")
# Main menu choices
print("\t1- Transform image.")
print("\t2- Set options.")
print("\t3- Close program.")
op = int(input())
# Main menu switch
if op == 1:
#Transform image menu
print("Transform image:")
print("Give imagem filename.")
filename = input()
# Transform code
# end transform code
if save == True:
# Save text img in a file
#
if printText == True:
# print text img in the terminal
#
if op == 2:
# Set options menu
print("Choose options:")
# Set options choices
print("\t1- Choose the Edge Detection algorithm.")
print("\t2- Save resulting text in a file")
print("\t3- Print resulting text in the terminal")
op2 = int(input())
#Set options switch
if op2 == 1:
# Choose Edge Menu
print("Choose Edge Detection:")
# Choose Edge choices
print("\t1- Apply Laplacian 4")
print("\t2- Apply Laplacian 8 (Default)")
print("\t3- Apply Gaussian and Laplacian 4")
print("\t4- Apply Gaussian and Laplacian 8")
op3 = input()
# Choose Edges switch
if op3 == 1:
edgechoice = "L4"
if op3 == 2:
edgechoice = "L8"
if op3 == 3:
edgechoice = "G4"
if op3 == 4:
edgechoice = "G8"
if op2 == 2:
# Save Text Menu
print("Save resulting text in a file?")
# Save Text choices
print("\t1- Yes (Default)")
print("\t2- No")
op3 = int(input())
# Save Text Switch
if op3 == 1:
save = True
if op3 == 2:
save = False
if op2 == 3:
# Print Text Menu
print("Printing resulting text in the terminal?")
# Print Text choices
print("\t1- Yes")
print("\t2- No (Default)")
op3 = int(input())
# Print Text switch
if op3 == 1:
printText = True
if op3 == 2:
printText = False
if op == 3:
print ("Closing program...")
| agpl-3.0 | Python |
f3f43500a2f07dafd7a6f00f5c59044e873d6478 | fix formatting | IanDCarroll/xox | source/player_chair.py | source/player_chair.py | from announcer_chair import *
class Player(object):
def __init__(self, marker_code):
self.announcer = Announcer()
self.marker_code = marker_code
def move(self, board):
choice = self.choose(board)
board[choice] = self.marker_code
return board
def choose(self, board):
options = self.get_legal_moves(board)
return options[0]
def get_legal_moves(self, board):
legal_moves = []
for i in range(0, len(board)):
if board[i] != 1 and board[i] != 10:
legal_moves.append(i)
return legal_moves
class Human(Player):
name = 'human'
def choose(self, board):
choice = int(raw_input("Which square do you choose? ")) -1
if self.check_conscience(choice, board):
return self.redo_move(board)
else:
return choice
def check_conscience(self, choice, board):
if choice not in self.get_legal_moves(board):
return True
def redo_move(self, board):
self.announcer.show(self.announcer.bad_move)
self.move(board)
return 1
class Computer(Player):
name = 'computer'
| from announcer_chair import *
class Player(object):
def __init__(self, marker_code):
self.announcer = Announcer()
self.marker_code = marker_code
def move(self, board):
choice = self.choose(board)
board[choice] = self.marker_code
return board
def choose(self, board):
options = self.get_legal_moves(board)
return options[0]
def get_legal_moves(self, board):
legal_moves = []
for i in range(0, len(board)):
if board[i] != 1 and board[i] != 10:
legal_moves.append(i)
return legal_moves
class Human(Player):
name = 'human'
def choose(self, board):
choice = int(raw_input("Which square do you choose? ")) -1
if self.check_conscience(choice, board):
return self.redo_move(board)
else:
return choice
def check_conscience(self, choice, board):
if choice not in self.get_legal_moves(board):
return True
def redo_move(self, board):
self.announcer.show(self.announcer.bad_move)
self.move(board)
return 1
class Computer(Player):
name = 'computer'
| mit | Python |
1f12445bd9b04c8d8ce3cb5c15c76e39e1e97781 | Fix absl cops flag names. (#307) | census-instrumentation/opencensus-cpp,census-instrumentation/opencensus-cpp,census-instrumentation/opencensus-cpp,census-instrumentation/opencensus-cpp | opencensus/copts.bzl | opencensus/copts.bzl | # Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compiler options for OpenCensus.
Flags specified here must not impact ABI. Code compiled with and without these
opts will be linked together, and in some cases headers compiled with and
without these options will be part of the same program.
We use the same flags as absl, plus turn some warnings into errors.
"""
load(
"@com_google_absl//absl:copts/configure_copts.bzl",
"ABSL_GCC_FLAGS",
"ABSL_GCC_TEST_FLAGS",
"ABSL_LLVM_FLAGS",
"ABSL_LLVM_TEST_FLAGS",
"ABSL_MSVC_FLAGS",
"ABSL_MSVC_TEST_FLAGS",
)
WERROR = ["-Werror=return-type", "-Werror=switch"]
DEFAULT_COPTS = select({
"//opencensus:llvm_compiler": ABSL_LLVM_FLAGS + WERROR,
"//opencensus:windows": ABSL_MSVC_FLAGS,
"//conditions:default": ABSL_GCC_FLAGS + WERROR,
})
TEST_COPTS = DEFAULT_COPTS + select({
"//opencensus:llvm_compiler": ABSL_LLVM_TEST_FLAGS + WERROR,
"//opencensus:windows": ABSL_MSVC_TEST_FLAGS,
"//conditions:default": ABSL_GCC_TEST_FLAGS + WERROR,
})
| # Copyright 2018, OpenCensus Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compiler options for OpenCensus.
Flags specified here must not impact ABI. Code compiled with and without these
opts will be linked together, and in some cases headers compiled with and
without these options will be part of the same program.
We use the same flags as absl, plus turn some warnings into errors.
"""
load(
"@com_google_absl//absl:copts/configure_copts.bzl",
"GCC_FLAGS",
"GCC_TEST_FLAGS",
"LLVM_FLAGS",
"LLVM_TEST_FLAGS",
"MSVC_FLAGS",
"MSVC_TEST_FLAGS",
)
WERROR = ["-Werror=return-type", "-Werror=switch"]
DEFAULT_COPTS = select({
"//opencensus:llvm_compiler": LLVM_FLAGS + WERROR,
"//opencensus:windows": MSVC_FLAGS,
"//conditions:default": GCC_FLAGS + WERROR,
})
TEST_COPTS = DEFAULT_COPTS + select({
"//opencensus:llvm_compiler": LLVM_TEST_FLAGS + WERROR,
"//opencensus:windows": MSVC_TEST_FLAGS,
"//conditions:default": GCC_TEST_FLAGS + WERROR,
})
| apache-2.0 | Python |
3971260dad534e4d3816fe580c55480acce26abe | refactor mock computer player move | IanDCarroll/xox | source/player_chair.py | source/player_chair.py | class Player(object):
def move(self, board):
board[4] = "spam"
return board
def get_legal_moves(self, board):
legal_moves = []
for i in range(0, len(board)):
if board[i] == 0:
legal_moves.append(i)
return legal_moves
class Human(Player):
def move(self, board):
square = raw_input("Which square do you choose? ")
board[square] = 10
return board
class Computer(Player):
def move(self, board):
options = self.get_legal_moves(board)
board[options[0]] = 1
return board
| class Player(object):
def move(self, board):
board[4] = "spam"
return board
def get_legal_moves(self, board):
legal_moves = []
for i in range(0, len(board)):
if board[i] == 0:
legal_moves.append(i)
return legal_moves
class Human(Player):
def move(self, board):
square = raw_input("Which square do you choose? ")
board[square] = 10
return board
class Computer(Player):
def move(self, board):
for i in range(0, len(board)):
if board[i] == 0:
board[i] = 1
break
return board
| mit | Python |
7c4cfca409d3df177658c427ab203e95bdb808d5 | Make kiosk mode sessions permanent | fsmi/odie-server,fjalir/odie-server,fjalir/odie-server,fsmi/odie-server,fjalir/odie-server,Kha/odie-server,fsmi/odie-server,Kha/odie-server,Kha/odie-server | odie.py | odie.py | #! /usr/bin/env python3
from functools import partial
from datetime import timedelta
import logging
from flask import Flask, session
from flask.ext.babelex import Babel
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.seasurf import SeaSurf # CSRF. Got it?
app = Flask("odie", template_folder='admin/templates', static_folder='admin/static')
import config # pylint: disable=unused-import
app.config.from_object('config.FlaskConfig')
babel = Babel(app)
csrf = SeaSurf(app)
sqla = SQLAlchemy(app)
@app.before_request
def make_session_permanent():
# We use flask sessions for remembering which client is in kiosk mode.
# By default, these sessions expire when the browser is closed. Prevent that.
session.permanent = True
app.permanent_session_lifetime = timedelta(days=2*365) # should be long enough...
if app.debug:
# allow requests from default broccoli server port
from flask.ext.cors import CORS
CORS(app, origins=['http://localhost:4200'], supports_credentials=True)
import flask_debugtoolbar
toolbar = flask_debugtoolbar.DebugToolbarExtension(app)
csrf.exempt(flask_debugtoolbar.panels.sqlalchemy.sql_select)
else:
# production logger to stderr
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s in %(module)s: %(message)s'))
app.logger.setLevel(logging.INFO)
app.logger.addHandler(handler)
# sqlalchemy treats columns as nullable by default, which we don't want.
Column = partial(sqla.Column, nullable=False)
from db.fsmi import Cookie
# errors that will be reported to the client
class ClientError(Exception):
def __init__(self, *errors, status=400):
super().__init__()
self.errors = errors
self.status = status
| #! /usr/bin/env python3
from functools import partial
import logging
from flask import Flask, session
from flask.ext.babelex import Babel
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.seasurf import SeaSurf # CSRF. Got it?
app = Flask("odie", template_folder='admin/templates', static_folder='admin/static')
import config # pylint: disable=unused-import
app.config.from_object('config.FlaskConfig')
babel = Babel(app)
csrf = SeaSurf(app)
sqla = SQLAlchemy(app)
if app.debug:
# allow requests from default broccoli server port
from flask.ext.cors import CORS
CORS(app, origins=['http://localhost:4200'], supports_credentials=True)
import flask_debugtoolbar
toolbar = flask_debugtoolbar.DebugToolbarExtension(app)
csrf.exempt(flask_debugtoolbar.panels.sqlalchemy.sql_select)
else:
# production logger to stderr
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s in %(module)s: %(message)s'))
app.logger.setLevel(logging.INFO)
app.logger.addHandler(handler)
# sqlalchemy treats columns as nullable by default, which we don't want.
Column = partial(sqla.Column, nullable=False)
from db.fsmi import Cookie
# errors that will be reported to the client
class ClientError(Exception):
def __init__(self, *errors, status=400):
super().__init__()
self.errors = errors
self.status = status
| mit | Python |
87d3d0597d075a6d5eb9d3866f6cc4ffb506b950 | Add version number. | hodgestar/overalls | overalls/__init__.py | overalls/__init__.py | __version__ = "0.1"
| bsd-3-clause | Python | |
c5ea439c86c0ed8e3a00a9c10e4a50a4c07fc136 | simplify the way to detect Py_TRACE_REFS enabled | hideaki-t/sqlite-fts-python | sqlitefts/tokenizer.py | sqlitefts/tokenizer.py | # coding: utf-8
'''
a proof of concept implementation of SQLite FTS tokenizers in Python
'''
import sys
import ctypes
from cffi import FFI # type: ignore
from typing import Any, Union, TYPE_CHECKING
if TYPE_CHECKING:
import sqlite3
import apsw # type: ignore
SQLITE3DBHandle = Any # ffi.CData
SQLITE_OK = 0
SQLITE_DONE = 101
ffi = FFI()
if sys.platform == 'win32':
dll = ffi.dlopen("sqlite3.dll")
else:
from ctypes.util import find_library
dll = ffi.dlopen(find_library("sqlite3"))
if hasattr(sys, 'getobjects'):
# for a python built with Py_TRACE_REFS
ffi.cdef('''
typedef struct sqlite3 sqlite3;
typedef struct {
void *_ob_next;
void *_ob_prev;
size_t ob_refcnt;
void *ob_type;
sqlite3 *db;
} PyObject;
''')
else:
ffi.cdef('''
typedef struct sqlite3 sqlite3;
typedef struct {
size_t ob_refcnt;
void *ob_type;
sqlite3 *db;
} PyObject;
''')
def get_db_from_connection(c: 'Union[sqlite3.Connection, apsw.Connection]') -> SQLITE3DBHandle:
db = getattr(c, '_db', None)
if db:
# pypy's SQLite3 connection has _db using cffi
db = ffi.cast('sqlite3*', db)
else:
db = ffi.cast('PyObject *', id(c)).db
return db
| # coding: utf-8
'''
a proof of concept implementation of SQLite FTS tokenizers in Python
'''
import sys
import ctypes
from cffi import FFI # type: ignore
from typing import Any, Union, TYPE_CHECKING
if TYPE_CHECKING:
import sqlite3
import apsw # type: ignore
SQLITE3DBHandle = Any # ffi.CData
SQLITE_OK = 0
SQLITE_DONE = 101
ffi = FFI()
if sys.platform == 'win32':
dll = ffi.dlopen("sqlite3.dll")
else:
from ctypes.util import find_library
dll = ffi.dlopen(find_library("sqlite3"))
if hasattr(ctypes, 'pythonapi') and \
hasattr(ctypes.pythonapi, '_Py_PrintReferences'):
# for a python built with Py_TRACE_REFS
ffi.cdef('''
typedef struct sqlite3 sqlite3;
typedef struct {
void *_ob_next;
void *_ob_prev;
size_t ob_refcnt;
void *ob_type;
sqlite3 *db;
} PyObject;
''')
else:
ffi.cdef('''
typedef struct sqlite3 sqlite3;
typedef struct {
size_t ob_refcnt;
void *ob_type;
sqlite3 *db;
} PyObject;
''')
def get_db_from_connection(c: 'Union[sqlite3.Connection, apsw.Connection]') -> SQLITE3DBHandle:
db = getattr(c, '_db', None)
if db:
# pypy's SQLite3 connection has _db using cffi
db = ffi.cast('sqlite3*', db)
else:
db = ffi.cast('PyObject *', id(c)).db
return db
| mit | Python |
fedbaa8fe93934d9632c0a6533eb18a201df1bdf | Make ots compile on Win64 | davelab6/ots,anthrotype/ots,anthrotype/ots,davelab6/ots,irori/ots,khaledhosny/ots,irori/ots | ots.gyp | ots.gyp | # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'ots-common.gypi',
],
'targets': [
{
'target_name': 'ots',
'type': '<(library)',
'sources': [
'<@(ots_sources)',
],
'include_dirs': [
'<@(ots_include_dirs)',
],
'direct_dependent_settings': {
'include_dirs': [
'<@(ots_include_dirs)',
],
},
'dependencies': [
'../zlib/zlib.gyp:zlib',
],
# TODO(jschuh): http://crbug.com/167187
'msvs_disabled_warnings': [
4267,
4334,
],
},
],
}
| # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'ots-common.gypi',
],
'targets': [
{
'target_name': 'ots',
'type': '<(library)',
'sources': [
'<@(ots_sources)',
],
'include_dirs': [
'<@(ots_include_dirs)',
],
'direct_dependent_settings': {
'include_dirs': [
'<@(ots_include_dirs)',
],
},
'dependencies': [
'../zlib/zlib.gyp:zlib',
],
},
],
}
| bsd-3-clause | Python |
763cb888eabe0e5e0bb42f77140e32f1df114e99 | update 14 to cache | ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler | p014.py | p014.py | cache = [0] * 10**6
def seq_length(n):
orig = n
iterations = 0
while n != 1:
if n < 10**6 and cache[n] > 0:
iterations += cache[n]
break
iterations += 1
if n % 2 == 0:
n /= 2
else:
n = 3*n + 1
iterations += 1
cache[orig] = iterations
return iterations
max = -1
maxi = 0
for i in xrange(1, 10**6):
length = seq_length(i)
if length > max:
max = length
maxi = i
print maxi, max | def seq_length(n):
iterations = 1
while n != 1:
if n % 2 == 0:
n /= 2
else:
n = 3*n + 1
iterations += 1
return iterations
max = -1
maxi = 0
for i in xrange(1, 10**6):
length = seq_length(i)
if length > max:
max = length
maxi = i
print maxi, max | bsd-3-clause | Python |
65b517b832c3b39da6ec6acb1f905c7ad5f633e7 | Remove unused import. | chrinide/theanets,lmjohns3/theanets,devdoer/theanets | examples/mnist-autoencoder.py | examples/mnist-autoencoder.py | #!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import tempfile
import urllib
import lmj.tnn
lmj.tnn.enable_default_logging()
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Autoencoder
def get_datasets(self):
return [x for x, _ in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(
tempfile.gettempdir(),
'mnist-autoencoder-%s.pkl.gz' % ','.join(str(n) for n in m.args.layers))
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
| #!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
lmj.tnn.enable_default_logging()
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Autoencoder
def get_datasets(self):
return [x for x, _ in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(
tempfile.gettempdir(),
'mnist-autoencoder-%s.pkl.gz' % ','.join(str(n) for n in m.args.layers))
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
| mit | Python |
4b0ef589466d4f0d154c7b52ac4eccdf7447d3c1 | make dicegame | imn00133/pythonSeminar17 | exercise/dicegame/dicegame.py | exercise/dicegame/dicegame.py | import random
def dice_make():
"""
주사위의 면을 받아 조건을 확인하는 함수
이 부분에서 dice_face및 dice_num은 모든 함수에서 사용되야되기 때문에 전역변수를 사용할 수 밖에 없다.
과제 잘못 만들었다...
"""
global dice_face, dice_num
while True:
dice_face = int(input("주사위 면의 개수를 입력하세요: "))
if not (dice_face == 4 or dice_face == 6
or dice_face == 8 or dice_face == 12 or dice_face == 20):
print("잘못 입력하셨습니다. 면의 개수는 정다면체(4, 6, 8, 12, 20)만 가능합니다.")
else:
break
while dice_num <= 0:
dice_num = int(input("주사위의 개수를 입력하세요: "))
def dice_sum(user):
"""
주사위 사용자, 면, 개수를 받아 합을 출력해준다. 주사위 개수는 1이 초기값이다.
:param user: string 사용자 이름
:param dice_num: int 주사위 개수
:return: int 총 계
"""
dice_return = []
for i in range(dice_num):
dice_return.append(random.randint(1, dice_face))
dice_total = 0
for i in dice_return:
dice_total += i
print("%s: " % user, end="")
for i in range(len(dice_return)):
print("%d" % dice_return[i], end="")
if i != len(dice_return)-1:
print(", ", end="")
print(" 합계: %d" % dice_total)
return dice_total
def sum_dice_game(game_win):
return game_win
def odd_even_dice_game(game_win):
return game_win
# dice_face 및 dice_num는 전역변수로 사용된다. dice_make가 언제든 사용될 수 있기 때문에 어쩔 수 없다.
# dice_num을 전역변수로 사용하지 않게 했었어야 되었는데... 잘못 만들었다.
# 주사위 다른 것을 여러 개 만든다면 어떤 일들이... 답은 객체지향인가보다.
# 주사위 만들기
dice_face = dice_num = 0
dice_make()
# 프로그램의 시작
game_list = ["주사위 합계 게임", "주사위 홀짝 게임"]
user_choice_game = 0
sum_dice_game_win = odd_even_game_win = 0
while True:
while user_choice_game <= 0 or user_choice_game > len(game_list):
print("주사위 게임 프로그램을 시작합니다.")
# 확장성을 위해 게임리스트를 리스트로 만들고 출력한다.
for i in range(len(game_list)):
print("%d. %s" % (i + 1, game_list[i]))
user_choice_game = int(input("선택해주세요(exit를 입력하면 종료됩니다.): "))
if str(user_choice_game) == "exit":
break
if user_choice_game == 1:
sum_dice_game_win += sum_dice_game(sum_dice_game_win)
elif user_choice_game == 2:
odd_even_game_win += odd_even_dice_game(odd_even_game_win)
elif user_choice_game == "exit":
print("사죵자는 주사위 합계 게임을 %d번, 홀짝게임을 %d번 이겼습니다." % (sum_dice_game_win, odd_even_game_win))
print("게임을 종료합니다.")
| mit | Python | |
1ce77f8efdccbb82e615c5e1d75d19d9f5b92e0f | put node modules bin in path | chriswatrous/scripts,chriswatrous/scripts,chriswatrous/scripts | path.py | path.py | #!/usr/bin/env python
import sys
import os
from os.path import exists
def uniq(L):
out = []
seen = set()
for item in L:
if item not in seen:
out.append(item)
seen.add(item)
return out
paths = [
'./node_modules/.bin',
'~/bin',
'~/stuff/bin',
'~/scripts/bin',
'/usr/local/opt/coreutils/libexec/gnubin',
'/home/chris/local-stuff/install/ghc/bin',
'/Applications/Emacs.app/Contents/MacOS/libexec-x86_64-10_9',
'/Applications/Emacs.app/Contents/MacOS/bin-x86_64-10_9',
'/usr/local/bin',
] + os.getenv('PATH', '').split(':') + [
'~/projects/cams/cams-test/tools',
'~/.go/bin',
'.',
]
if exists('/cygdrive/c/Users/Chris'):
paths = paths + [
'/cygdrive/c/Program Files (x86)/SMPlayer',
'/cygdrive/c/Program Files (x86)/Audacity',
'/cygdrive/c/Program Files (x86)/CSound6/bin',
]
paths = uniq(paths)
sys.stdout.write(':'.join(paths))
sys.stdout.flush()
| #!/usr/bin/env python
import sys
import os
from os.path import exists
def uniq(L):
out = []
seen = set()
for item in L:
if item not in seen:
out.append(item)
seen.add(item)
return out
paths = [
'~/bin',
'~/stuff/bin',
'~/scripts/bin',
'/usr/local/opt/coreutils/libexec/gnubin',
'/home/chris/local-stuff/install/ghc/bin',
'/Applications/Emacs.app/Contents/MacOS/libexec-x86_64-10_9',
'/Applications/Emacs.app/Contents/MacOS/bin-x86_64-10_9',
'/usr/local/bin',
] + os.getenv('PATH', '').split(':') + [
'~/projects/cams/cams-test/tools',
'~/.go/bin',
'.',
]
if exists('/cygdrive/c/Users/Chris'):
paths = paths + [
'/cygdrive/c/Program Files (x86)/SMPlayer',
'/cygdrive/c/Program Files (x86)/Audacity',
'/cygdrive/c/Program Files (x86)/CSound6/bin',
]
paths = uniq(paths)
sys.stdout.write(':'.join(paths))
sys.stdout.flush()
| mit | Python |
30e030ee548fe1a89efd749b29b812816c53478e | add version and author as metafields | PixelwarStudio/PyTree | Tree/__init__.py | Tree/__init__.py | """
Package for creating and drawing trees.
"""
__version__ = "0.2.2"
__author__ = "Pixelwar"
| """
Package for creating and drawing trees.
"""
| mit | Python |
e55452d2d0bf5d2c95b022a96e751b2d636dfbe8 | Fix config_path | rizar/attention-lvcsr,nke001/attention-lvcsr,nke001/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr,nke001/attention-lvcsr,nke001/attention-lvcsr | lvsr/firsttry/main.py | lvsr/firsttry/main.py | #!/usr/bin/env python
"""Learn to reverse the words in a text."""
import logging
import argparse
from lvsr.firsttry import main
if __name__ == "__main__":
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s: %(name)s: %(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
"Phoneme recognition on TIMIT",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"mode", choices=["train", "test"],
help="The mode to run")
parser.add_argument(
"save_path", default="chain",
help="The path to save the training process.")
parser.add_argument(
"config_path", default=None, nargs="?",
help="The configuration")
parser.add_argument(
"--num-batches", default=20000, type=int,
help="Train on this many batches.")
parser.add_argument(
"--from-dump", default=None,
help="Path to the dump to be loaded")
parser.add_argument(
"--use-old", default=False, action="store_true",
help="Use old model and log pickles")
args = parser.parse_args()
main(**vars(args))
| #!/usr/bin/env python
"""Learn to reverse the words in a text."""
import logging
import argparse
from lvsr.firsttry import main
if __name__ == "__main__":
logging.basicConfig(
level=logging.INFO,
format="%(asctime)s: %(name)s: %(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
"Phoneme recognition on TIMIT",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"mode", choices=["train", "test"],
help="The mode to run")
parser.add_argument(
"save_path", default="chain",
help="The path to save the training process.")
parser.add_argument(
"config_path", default=None, nargs="*",
help="The configuration")
parser.add_argument(
"--num-batches", default=20000, type=int,
help="Train on this many batches.")
parser.add_argument(
"--from-dump", default=None,
help="Path to the dump to be loaded")
parser.add_argument(
"--use-old", default=False, action="store_true",
help="Use old model and log pickles")
args = parser.parse_args()
main(**vars(args))
| mit | Python |
2560ca287e81cbefb6037e5688bfa4ef74d85149 | Change call method for Python2.7 | oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/dmm-eikaiwa-fft | clock.py | clock.py | from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.check_call(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
| from __future__ import print_function
from apscheduler.schedulers.blocking import BlockingScheduler
import logging
import subprocess
logging.basicConfig()
scheduler = BlockingScheduler()
@scheduler.scheduled_job('interval', minutes=1)
def timed_job_min1():
print("Run notifier")
subprocess.run(
"notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685",
shell=True,
check=True)
# @scheduler.scheduled_job('interval', minutes=10)
# def timed_job_min10():
# print("Run notifier")
# subprocess.run(
# "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685",
# shell=True,
# check=True)
scheduler.start()
| mit | Python |
1e4055da7d14355821d2d72b40b1e6d463fe4ecd | Fix Tracing mode TS export for LayerNorm layer | pytorch/fairseq,pytorch/fairseq,pytorch/fairseq | fairseq/modules/layer_norm.py | fairseq/modules/layer_norm.py | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import torch.nn as nn
import torch.nn.functional as F
try:
from apex.normalization import FusedLayerNorm as _FusedLayerNorm
has_fused_layernorm = True
class FusedLayerNorm(_FusedLayerNorm):
@torch.jit.unused
def forward(self, x):
if not x.is_cuda:
return super().forward(x)
else:
with torch.cuda.device(x.device):
return super().forward(x)
except ImportError:
has_fused_layernorm = False
def LayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True, export=False):
if torch.jit.is_scripting() or torch.jit.is_tracing():
export = True
if not export and torch.cuda.is_available() and has_fused_layernorm:
return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
class Fp32LayerNorm(nn.LayerNorm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def forward(self, input):
output = F.layer_norm(
input.float(),
self.normalized_shape,
self.weight.float() if self.weight is not None else None,
self.bias.float() if self.bias is not None else None,
self.eps,
)
return output.type_as(input)
| # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
import torch.nn as nn
import torch.nn.functional as F
try:
from apex.normalization import FusedLayerNorm as _FusedLayerNorm
has_fused_layernorm = True
class FusedLayerNorm(_FusedLayerNorm):
@torch.jit.unused
def forward(self, x):
if not x.is_cuda:
return super().forward(x)
else:
with torch.cuda.device(x.device):
return super().forward(x)
except ImportError:
has_fused_layernorm = False
def LayerNorm(normalized_shape, eps=1e-5, elementwise_affine=True, export=False):
if torch.jit.is_scripting():
export = True
if not export and torch.cuda.is_available() and has_fused_layernorm:
return FusedLayerNorm(normalized_shape, eps, elementwise_affine)
return torch.nn.LayerNorm(normalized_shape, eps, elementwise_affine)
class Fp32LayerNorm(nn.LayerNorm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def forward(self, input):
output = F.layer_norm(
input.float(),
self.normalized_shape,
self.weight.float() if self.weight is not None else None,
self.bias.float() if self.bias is not None else None,
self.eps,
)
return output.type_as(input)
| mit | Python |
ca16ca52992c162832c1aaf878788ecf8bdcc4f7 | Add message for previewing | sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer | falmer/content/models/core.py | falmer/content/models/core.py | from django.conf import settings
from django.db import models
from django.http import HttpResponse
from django.shortcuts import redirect
from wagtail.admin.edit_handlers import FieldPanel, PageChooserPanel
from wagtail.core.models import Page as WagtailPage
from falmer.content.utils import get_public_path_for_page
class Page(WagtailPage):
is_creatable = False
def serve(self, request, *args, **kwargs):
if not settings.MSL_SITE_HOST:
return HttpResponse('MSL_SITE_HOST not set in settings')
return redirect(f'{settings.MSL_SITE_HOST}{self.public_path}')
def serve_preview(self, request, mode_name):
return HttpResponse('Apologies, previewing articles that aren\'t published currently '
'isn\'t supported')
#return HttpResponse(f'{self.title}, {self.draft_title}, {self.pk}')
def get_live_children(self):
return self.get_children().live()
class Meta:
proxy = True
def get_assumption_path(self):
return None
@property
def public_path(self):
return get_public_path_for_page(self)
class ClickThrough(Page):
target_page = models.ForeignKey('content.Page', models.SET_NULL, 'click_throughs', blank=True, null=True)
target_link = models.TextField(blank=True, default='')
@property
def public_path(self):
if self.target_page:
return self.target_page.public_path
return self.target_link
content_panels = Page.content_panels + [
PageChooserPanel('target_page'),
FieldPanel('target_link'),
]
| from django.conf import settings
from django.db import models
from django.http import HttpResponse
from django.shortcuts import redirect
from wagtail.admin.edit_handlers import FieldPanel, PageChooserPanel
from wagtail.core.models import Page as WagtailPage
from falmer.content.utils import get_public_path_for_page
class Page(WagtailPage):
is_creatable = False
def serve(self, request, *args, **kwargs):
if not settings.MSL_SITE_HOST:
return HttpResponse('MSL_SITE_HOST not set in settings')
return redirect(f'{settings.MSL_SITE_HOST}{self.public_path}')
def get_live_children(self):
return self.get_children().live()
class Meta:
proxy = True
def get_assumption_path(self):
return None
@property
def public_path(self):
return get_public_path_for_page(self)
class ClickThrough(Page):
target_page = models.ForeignKey('content.Page', models.SET_NULL, 'click_throughs', blank=True, null=True)
target_link = models.TextField(blank=True, default='')
@property
def public_path(self):
if self.target_page:
return self.target_page.public_path
return self.target_link
content_panels = Page.content_panels + [
PageChooserPanel('target_page'),
FieldPanel('target_link'),
]
| mit | Python |
042748899edaa217fbfd4022c8cd49de97161af4 | Fix import of `trim_whitespace`. | potatolondon/fluent-2.0,potatolondon/fluent-2.0 | fluent/templatetags/fluent.py | fluent/templatetags/fluent.py | from __future__ import absolute_import
from django import template
from django.templatetags.i18n import do_translate, do_block_translate
from django.utils.translation import get_language, trim_whitespace
register = template.Library()
@register.tag("trans")
def trans_override(parser, token):
"""
Wraps around Django's trans tag, but allows for 'group "Thing"'
to be specified
"""
contents = token.split_contents()
if "group" in contents:
#Remove the group tag from the token
idx = contents.index("group")
group = contents[idx + 1]
contents.remove("group")
contents.remove(group)
token.contents = " ".join(contents)
return do_translate(parser, token)
def _trim_text(tokens):
for i, token in enumerate(tokens):
token.contents = trim_whitespace(token.contents)
if i == 0 and token.contents[0] == " ": # first tag
token.contents = token.contents[1:]
elif i == len(tokens) - 1 and token.contents[-1] == " ": # last tag
token.contents = token.contents[:-1]
@register.tag("blocktrans")
def blocktrans_override(parser, token):
"""
Wraps around Django's trans tag, but allows for 'group "Thing"'
to be specified
"""
contents = token.split_contents()
trimmed = ("trimmed" in contents)
if "group" in contents:
#Remove the group tag from the token
idx = contents.index("group")
group = contents[idx + 1]
contents.remove("group")
contents.remove(group)
token.contents = " ".join(contents)
node = do_block_translate(parser, token)
if trimmed:
_trim_text(node.singular)
if node.plural:
_trim_text(node.plural)
return node
@register.filter
def translate(value, language_code=None):
language_code = language_code or get_language()
return value.translation(language_code)
| from __future__ import absolute_import
from django import template
from django.templatetags.i18n import do_translate, do_block_translate
from django.utils.translation import get_language
from fluent.utils import trim_whitespace
register = template.Library()
@register.tag("trans")
def trans_override(parser, token):
"""
Wraps around Django's trans tag, but allows for 'group "Thing"'
to be specified
"""
contents = token.split_contents()
if "group" in contents:
#Remove the group tag from the token
idx = contents.index("group")
group = contents[idx + 1]
contents.remove("group")
contents.remove(group)
token.contents = " ".join(contents)
return do_translate(parser, token)
def _trim_text(tokens):
for i, token in enumerate(tokens):
token.contents = trim_whitespace(token.contents)
if i == 0 and token.contents[0] == " ": # first tag
token.contents = token.contents[1:]
elif i == len(tokens) - 1 and token.contents[-1] == " ": # last tag
token.contents = token.contents[:-1]
@register.tag("blocktrans")
def blocktrans_override(parser, token):
"""
Wraps around Django's trans tag, but allows for 'group "Thing"'
to be specified
"""
contents = token.split_contents()
trimmed = ("trimmed" in contents)
if "group" in contents:
#Remove the group tag from the token
idx = contents.index("group")
group = contents[idx + 1]
contents.remove("group")
contents.remove(group)
token.contents = " ".join(contents)
node = do_block_translate(parser, token)
if trimmed:
_trim_text(node.singular)
if node.plural:
_trim_text(node.plural)
return node
@register.filter
def translate(value, language_code=None):
language_code = language_code or get_language()
return value.translation(language_code)
| mit | Python |
9ac8b664e4bd084cbb64c961b85c2b0fb1bf12e6 | add log | hiroyuki-kasuga/street-movie,hiroyuki-kasuga/street-movie,hiroyuki-kasuga/street-movie | src/main/street_movie/web/views.py | src/main/street_movie/web/views.py | # coding: utf-8
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.template import loader
from django.views.decorators.csrf import csrf_exempt
from api.models import Movie
from api.services import CreateMovieService
from decorator.decorators import add_log
logger = logging.getLogger(__name__)
@csrf_exempt
@add_log()
def index(request):
service = CreateMovieService()
model = None
if 'id' in request.GET:
try:
model = service.get_movie(request.GET['id'])
model = model.as_json
model['sns_url'] = request.build_absolute_uri(
reverse('street_movie_site_views_ogp', kwargs={'m_id': model['id']}))
except Movie.DoesNotExist:
pass
service = CreateMovieService()
count = service.get_count()
c = RequestContext(request, {'count': count, 'model': model})
t = loader.get_template('web/index.html')
return HttpResponse(t.render(c))
@csrf_exempt
@add_log()
def ogp(request, m_id):
service = CreateMovieService()
try:
model = service.get_movie(m_id)
except Movie.DoesNotExist:
raise Http404
url = request.build_absolute_uri(reverse('street_movie_site_views_ogp', kwargs={'m_id': model.id}))
next_url = request.build_absolute_uri(reverse('street_movie_site_views_index')) + '?id=' + m_id
image = settings.FB_OGP_IMAGE % (
str(model.center_lat), str(model.center_lon), str(model.start_lat), str(model.start_lon), str(model.end_lat),
str(model.end_lon))
logger.info(image)
description = settings.FB_OGP_DESCRIPTION % (model.start_name, model.end_name)
ogp = dict(title=settings.FB_OGP_TITLE, description=description, next_url=next_url, url=url,
app_id=settings.FB_APP_ID, image=image)
c = RequestContext(request, {'ogp': ogp})
t = loader.get_template('web/ogp.html')
return HttpResponse(t.render(c))
| # coding: utf-8
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.template import RequestContext
from django.template import loader
from django.views.decorators.csrf import csrf_exempt
from api.models import Movie
from api.services import CreateMovieService
from decorator.decorators import add_log
logger = logging.getLogger(__name__)
@csrf_exempt
@add_log()
def index(request):
service = CreateMovieService()
model = None
if 'id' in request.GET:
try:
model = service.get_movie(request.GET['id'])
model = model.as_json
model['sns_url'] = request.build_absolute_uri(
reverse('street_movie_site_views_ogp', kwargs={'m_id': model['id']}))
except Movie.DoesNotExist:
pass
service = CreateMovieService()
count = service.get_count()
c = RequestContext(request, {'count': count, 'model': model})
t = loader.get_template('web/index.html')
return HttpResponse(t.render(c))
@csrf_exempt
@add_log()
def ogp(request, m_id):
service = CreateMovieService()
try:
model = service.get_movie(m_id)
except Movie.DoesNotExist:
raise Http404
url = request.build_absolute_uri(reverse('street_movie_site_views_ogp', kwargs={'m_id': model.id}))
next_url = request.build_absolute_uri(reverse('street_movie_site_views_index')) + '?id=' + model.id
image = settings.FB_OGP_IMAGE % (
str(model.center_lat), str(model.center_lon), str(model.start_lat), str(model.start_lon), str(model.end_lat),
str(model.end_lon))
logger.info(image)
description = settings.FB_OGP_DESCRIPTION % (model.start_name, model.end_name)
ogp = dict(title=settings.FB_OGP_TITLE, description=description, next_url=next_url, url=url,
app_id=settings.FB_APP_ID, image=image)
c = RequestContext(request, {'ogp': ogp})
t = loader.get_template('web/ogp.html')
return HttpResponse(t.render(c))
| mit | Python |
c27a22d6750c1c8dfacd018aa932896489bab20f | Improve readability of error | getavalon/core,mindbender-studio/core,mindbender-studio/core,getavalon/core | avalon/tools/projectmanager/lib.py | avalon/tools/projectmanager/lib.py | """Utility script for updating database with configuration files
Until assets are created entirely in the database, this script
provides a bridge between the file-based project inventory and configuration.
- Migrating an old project:
$ python -m avalon.inventory --extract --silo-parent=f02_prod
$ python -m avalon.inventory --upload
- Managing an existing project:
1. Run `python -m avalon.inventory --load`
2. Update the .inventory.toml or .config.toml
3. Run `python -m avalon.inventory --save`
"""
from avalon import schema, io
def create_asset(data):
"""Create asset
Requires:
{"name": "uniquecode",
"silo": "assets"}
Optional:
{"data": {}}
"""
data = data.copy()
project = io.find_one({"type": "project"})
if project is None:
raise RuntimeError("Project must exist prior to creating assets")
asset = {
"schema": "avalon-core:asset-2.0",
"parent": project['_id'],
"name": data.pop("name"),
"silo": data.pop("silo"),
"type": "asset",
"data": data
}
# Asset *must* have a name and silo
assert asset['name'], "Asset has no name"
assert asset['silo'], "Asset has no silo"
# Ensure it has a unique name
asset_doc = io.find_one({
"name": asset['name'],
"type": "asset",
})
if asset_doc is not None:
raise RuntimeError("Asset '{}' already exists.".format(asset['name']))
schema.validate(asset)
io.insert_one(asset)
def list_project_tasks():
"""List the projec task types available in the current project"""
project = io.find_one({"type": "project"})
return [task['name'] for task in project['config']['tasks']]
| """Utility script for updating database with configuration files
Until assets are created entirely in the database, this script
provides a bridge between the file-based project inventory and configuration.
- Migrating an old project:
$ python -m avalon.inventory --extract --silo-parent=f02_prod
$ python -m avalon.inventory --upload
- Managing an existing project:
1. Run `python -m avalon.inventory --load`
2. Update the .inventory.toml or .config.toml
3. Run `python -m avalon.inventory --save`
"""
from avalon import schema, io
def create_asset(data):
"""Create asset
Requires:
{"name": "uniquecode",
"silo": "assets"}
Optional:
{"data": {}}
"""
data = data.copy()
project = io.find_one({"type": "project"})
if project is None:
raise RuntimeError("Project must exist prior to creating assets")
asset = {
"schema": "avalon-core:asset-2.0",
"parent": project['_id'],
"name": data.pop("name"),
"silo": data.pop("silo"),
"type": "asset",
"data": data
}
# Asset *must* have a name and silo
assert asset['name'], "Asset has no name"
assert asset['silo'], "Asset has no silo"
# Ensure it has a unique name
asset_doc = io.find_one({
"name": asset['name'],
"type": "asset",
})
if asset_doc is not None:
raise RuntimeError("Asset named {} already "
"exists.".format(asset['name']))
schema.validate(asset)
io.insert_one(asset)
def list_project_tasks():
"""List the projec task types available in the current project"""
project = io.find_one({"type": "project"})
return [task['name'] for task in project['config']['tasks']]
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.