commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
ac28cf005e8f4e800b5dee2f0ede36fa56890b64 | Add top-level issues. | sloede/pyglab,sloede/pyglab | pyglab/issues.py | pyglab/issues.py | from .apirequest import RequestType
class Issues(object):
    """Accessor for the GitLab top-level ``/issues`` API endpoint.

    Fix: the class previously inherited from ``objects`` (undefined name),
    which raised NameError as soon as the module was imported; the intended
    base is the builtin ``object``.
    """

    def __init__(self, pyglab):
        # Keep a reference to the client that performs the actual HTTP calls.
        self._pyglab = pyglab

    def get(self, sudo=None, page=None, per_page=None):
        """Fetch issues via ``GET /issues``.

        :param sudo: optional user to impersonate (forwarded to the API).
        :param page: optional page number for pagination.
        :param per_page: optional page size for pagination.
        :return: whatever the underlying client returns for the request.
        """
        url = '/issues'
        # RequestType comes from the module-level import of .apirequest.
        r = self._pyglab.request(RequestType.GET, url,
                                 sudo=sudo, page=page, per_page=per_page)
        return r
| mit | Python | |
cb5c500af5ca25f4cd96a35114dde59578d15170 | Create tests_diff_between_times.py | davidone/misc,davidone/misc | tests_diff_between_times.py | tests_diff_between_times.py | import pytest
import diff_between_times
def test_validate_date():
    """Exercise validate_date() and dateDiff() from diff_between_times."""
    well_formed = '2021-09-15 00:00:00'
    # A fully formed timestamp is accepted and returned unchanged.
    assert diff_between_times.validate_date(None, None, well_formed) == well_formed

    # A truncated timestamp must be rejected.
    with pytest.raises(Exception):
        assert diff_between_times.validate_date(None, None, '2021-09-15 00:')

    # Timestamps one minute apart produce a difference of 1.
    start = '2021-09-15 00:00:00'
    end = '2021-09-15 00:01:00'
    assert diff_between_times.dateDiff(start, end) == 1
| mit | Python | |
c3b7a84cbc7b557909a00990c976b4abdeefea35 | Use olpc matchbox theme | puneetgkaur/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,godiard/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,sugarlabs/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,Daksh/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,gusDuarte/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit | shell/session/Session.py | shell/session/Session.py | import os
import gtk
import gobject
import time
import re
import dbus
import dbus.dbus_bindings
from sugar.presence import PresenceService
from Shell import Shell
from ConsoleWindow import ConsoleWindow
from session.Process import Process
import sugar.env
class DbusProcess(Process):
    """Launches a per-session D-Bus daemon and publishes its bus address.

    The daemon prints its address on stdout (``--print-address``); that
    address is stored in DBUS_SESSION_BUS_ADDRESS so that every process
    spawned afterwards connects to this session bus.
    """

    def __init__(self):
        config = sugar.env.get_dbus_config()
        # --print-address makes the daemon emit its bus address on stdout.
        cmd = "dbus-daemon --print-address --config-file %s" % config
        Process.__init__(self, cmd)

    def get_name(self):
        return 'Dbus'

    def start(self):
        Process.start(self, True)
        # Read the single address line the daemon prints, then export it to
        # the environment for all subsequently spawned processes.
        dbus_file = os.fdopen(self._stdout)
        addr = dbus_file.readline().strip()
        dbus_file.close()
        os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
class MatchboxProcess(Process):
    """Launches the Matchbox window manager using the OLPC theme."""

    def __init__(self):
        # Keyboard bindings come from the kbdconfig file in the data dir.
        kbd_config = os.path.join(sugar.env.get_data_dir(), 'kbdconfig')
        options = '-kbdconfig %s ' % kbd_config
        options += '-theme olpc '
        command = 'matchbox-window-manager %s ' % options
        Process.__init__(self, command)

    def get_name(self):
        return 'Matchbox'
class Session:
    """Takes care of running the shell and all the sugar processes"""

    def __init__(self, registry):
        # Activity registry handed over to the Shell on start().
        self._registry = registry

    def start(self):
        """Start the session"""
        # The session D-Bus daemon must be up before anything talks to it.
        process = DbusProcess()
        process.start()

        PresenceService.start()

        # Window manager for the Sugar shell.
        process = MatchboxProcess()
        process.start()

        console = ConsoleWindow()
        sugar.logger.start('Shell', console)

        shell = Shell(self._registry)
        shell.set_console(console)
        shell.start()

        # Imported late; installs a traceback helper for the main loop.
        from sugar import TracebackUtils
        tbh = TracebackUtils.TracebackHelper()
        try:
            gtk.main()
        except KeyboardInterrupt:
            # Python 2 print statement; graceful shutdown on Ctrl+C.
            print 'Ctrl+C pressed, exiting...'

        del tbh
| import os
import gtk
import gobject
import time
import re
import dbus
import dbus.dbus_bindings
from sugar.presence import PresenceService
from Shell import Shell
from ConsoleWindow import ConsoleWindow
from session.Process import Process
import sugar.env
class DbusProcess(Process):
    """Launches a per-session D-Bus daemon and publishes its bus address."""

    def __init__(self):
        config = sugar.env.get_dbus_config()
        # --print-address makes the daemon emit its bus address on stdout.
        cmd = "dbus-daemon --print-address --config-file %s" % config
        Process.__init__(self, cmd)

    def get_name(self):
        return 'Dbus'

    def start(self):
        Process.start(self, True)
        # Capture the daemon's address line and export it so child
        # processes join this session bus.
        dbus_file = os.fdopen(self._stdout)
        addr = dbus_file.readline().strip()
        dbus_file.close()
        os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
class MatchboxProcess(Process):
    """Launches the Matchbox window manager (pre-OLPC-theme version)."""

    def __init__(self):
        # Disable window titlebars for the full-screen Sugar shell.
        options = '-use_titlebar no'

        kbd_config = os.path.join(sugar.env.get_data_dir(), 'kbdconfig')
        options += ' -kbdconfig %s' % kbd_config

        command = 'matchbox-window-manager %s' % options
        # Python 2 print statement: echo the full command for debugging.
        print command

        Process.__init__(self, command)

    def get_name(self):
        return 'Matchbox'
class Session:
    """Takes care of running the shell and all the sugar processes"""

    def __init__(self, registry):
        # Activity registry handed over to the Shell on start().
        self._registry = registry

    def start(self):
        """Start the session"""
        # Session bus daemon first, then presence, then the window manager.
        process = DbusProcess()
        process.start()

        PresenceService.start()

        process = MatchboxProcess()
        process.start()

        console = ConsoleWindow()
        sugar.logger.start('Shell', console)

        shell = Shell(self._registry)
        shell.set_console(console)
        shell.start()

        # Imported late; installs a traceback helper for the main loop.
        from sugar import TracebackUtils
        tbh = TracebackUtils.TracebackHelper()
        try:
            gtk.main()
        except KeyboardInterrupt:
            # Python 2 print statement; graceful shutdown on Ctrl+C.
            print 'Ctrl+C pressed, exiting...'

        del tbh
| lgpl-2.1 | Python |
984381d614f181f1d48104fe6043191b626e636b | Create soundtest.py | averywallis/COMAPtriathlon,averywallis/Radar,averywallis/Yahtzeething,averywallis/Final-Project | soundtest.py | soundtest.py | mit | Python | ||
c7138bd5ae465d072f86b89bff166cca34c82cdd | 添加 escape 的测试 | alingse/jsoncsv | tests/test_escape.py | tests/test_escape.py | # coding=utf-8
# author@alingse
# 2016.11.21
import unittest
from jsoncsv.utils import encode_safe_key, decode_safe_key
class Testescape(unittest.TestCase):
    """Tests for encode_safe_key()/decode_safe_key() separator escaping.

    Fix: the literals previously relied on unrecognized escape sequences
    such as ``'\\.'`` written as ``'\.'``, which raise DeprecationWarning /
    SyntaxWarning on modern Python.  Raw strings with identical values are
    used instead.
    """

    def test_all(self):
        # Round-trip: decoding must invert encoding for several separators,
        # including separators that also occur inside path components.
        path = ['A', 'B', '..', r'\.\ww']
        for sep in 'AB.w':
            key = encode_safe_key(path, sep)
            _path = decode_safe_key(key, sep)
            self.assertListEqual(path, _path)

    def test_encode(self):
        path = ['A', 'B', 'C', 'www.xxx.com']
        sep = '.'
        key = encode_safe_key(path, sep)
        # Separator dots between components are backslash-escaped; dots
        # inside a component are left alone.
        self.assertEqual(key, r'A\.B\.C\.www.xxx.com')

    def test_decode(self):
        key = r'A\.B\.C\.www.xxx.com'
        sep = '.'
        path = decode_safe_key(key, sep)
        self.assertEqual(path[0], 'A')
        self.assertEqual(path[1], 'B')
        self.assertEqual(path[2], 'C')
        self.assertEqual(path[3], 'www.xxx.com')
| apache-2.0 | Python | |
4a70f9ed2f19cba08208fa9f2a3cafe38ee283b6 | Add simple test for column expansion | qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/userreports/tests/test_columns.py | corehq/apps/userreports/tests/test_columns.py | from django.test import SimpleTestCase
from jsonobject.exceptions import BadValueError
from sqlagg import SumWhen
from corehq.apps.userreports.sql import _expand_column
from corehq.apps.userreports.reports.specs import ReportColumn
class TestReportColumn(SimpleTestCase):

    def testBadAggregation(self):
        """An unknown aggregation name is rejected at wrap time."""
        spec = {
            "aggregation": "simple_",
            "field": "doc_id",
            "type": "field",
        }
        with self.assertRaises(BadValueError):
            ReportColumn.wrap(spec)

    def testGoodFormat(self):
        """Every supported format value produces a ReportColumn."""
        for fmt in ('default', 'percent_of_total'):
            column = ReportColumn.wrap({
                "aggregation": "simple",
                "field": "doc_id",
                "format": fmt,
                "type": "field",
            })
            self.assertEquals(ReportColumn, type(column))

    def testBadFormat(self):
        """An unknown format name is rejected at wrap time."""
        spec = {
            "aggregation": "simple",
            "field": "doc_id",
            "format": "default_",
            "type": "field",
        }
        with self.assertRaises(BadValueError):
            ReportColumn.wrap(spec)
class TestExpandReportColumn(SimpleTestCase):

    def test_expansion(self):
        """Expanding a column yields one SumWhen sub-column per value."""
        column = ReportColumn(
            type="field",
            field="lab_result",
            display="Lab Result",
            format="default",
            aggregation="expand",
            description="foo",
        )
        expanded = _expand_column(column, ["positive", "negative"])

        self.assertEqual(2, len(expanded))
        self.assertEqual(SumWhen, type(expanded[0].view))
        self.assertEqual({'negative': 1}, expanded[1].view.whens)
| from django.test import SimpleTestCase
from jsonobject.exceptions import BadValueError
from corehq.apps.userreports.reports.specs import ReportColumn
class TestReportColumn(SimpleTestCase):
    """Validation tests for ReportColumn.wrap() (pre-expansion version)."""

    def testBadAggregation(self):
        # Unknown aggregation names must be rejected by jsonobject validation.
        with self.assertRaises(BadValueError):
            ReportColumn.wrap({
                "aggregation": "simple_",
                "field": "doc_id",
                "type": "field",
            })

    def testGoodFormat(self):
        # Both supported format values wrap into a ReportColumn instance.
        for format in [
            'default',
            'percent_of_total',
        ]:
            self.assertEquals(ReportColumn, type(
                ReportColumn.wrap({
                    "aggregation": "simple",
                    "field": "doc_id",
                    "format": format,
                    "type": "field",
                })
            ))

    def testBadFormat(self):
        # Unknown format names must be rejected by jsonobject validation.
        with self.assertRaises(BadValueError):
            ReportColumn.wrap({
                "aggregation": "simple",
                "field": "doc_id",
                "format": "default_",
                "type": "field",
            })
| bsd-3-clause | Python |
643456f6f1bb9f264dbe6d3ad48a84af4e4dd91c | Add data migrations for rulesets | pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro | temba/flows/migrations/0087_fix_open_ended_ruleset_with_timeout.py | temba/flows/migrations/0087_fix_open_ended_ruleset_with_timeout.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-26 08:42
from __future__ import unicode_literals
import json
from django.db import migrations
def fix_ruleset_categories_open_ended(RuleSet):
    """Relabel the open-ended rule of timeout rulesets as 'All Responses'.

    A ruleset with exactly two rules whose second rule is a timeout test is
    an open-ended prompt; its first rule's category (in the flow's base
    language) is renamed and the ruleset saved.
    """
    all_rulesets = list(RuleSet.objects.all())
    if not all_rulesets:
        return

    touched_flow_ids = []
    for rule_set in all_rulesets:
        language = rule_set.flow.base_language
        rules = json.loads(rule_set.rules)

        # Only "response + timeout" pairs need their category renamed.
        if len(rules) == 2 and rules[1]['test']['type'] == 'timeout':
            rules[0]['category'][language] = 'All Responses'
            rule_set.rules = json.dumps(rules)
            rule_set.save()

            if rule_set.flow.pk not in touched_flow_ids:
                touched_flow_ids.append(rule_set.flow.pk)

            print("Adjusted ruleset %d from flow %d" % (rule_set.id, rule_set.flow.id))

    print("Update oped ended categories with timeout on %d flows" % len(touched_flow_ids))
def apply_as_migration(apps, schema_editor):
    # Entry point for Django's migration runner: resolve the historical
    # RuleSet model and run the data fix against it.
    RuleSet = apps.get_model('flows', 'RuleSet')
    fix_ruleset_categories_open_ended(RuleSet)
def apply_manual():
    # Convenience entry point for running the same fix outside of a
    # migration, against the live model.
    from temba.flows.models import RuleSet
    fix_ruleset_categories_open_ended(RuleSet)
class Migration(migrations.Migration):
    # Data-only migration: no schema changes, just the RunPython fix.

    dependencies = [
        ('flows', '0086_is_squashed'),
    ]

    operations = [
        migrations.RunPython(apply_as_migration)
    ]
| agpl-3.0 | Python | |
d2b6abe7535635d2bf8f0c13b87e3ca382f1559d | Add an echo server program for local testing | git-commit/iot-gatekeeper,git-commit/iot-gatekeeper | setup/tcp-echo-server/echo.py | setup/tcp-echo-server/echo.py | # Echo server program
import socket

HOST = ''                 # Symbolic name meaning all available interfaces
PORT = 4444               # Arbitrary non-privileged port

# Accept a single client and echo back every chunk it sends until it
# disconnects; recv() returning an empty bytes object means the peer
# closed the connection.
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.bind((HOST, PORT))
    s.listen(1)
    conn, addr = s.accept()
    with conn:
        print('Connected by', addr)
        while True:
            data = conn.recv(1024)
            if not data: break
            conn.sendall(data)
| mit | Python | |
e4bec0203c19f5e9204a41de530f93fa46de565f | update - possible test for wednesday | dracaether/python | highestnumber.py | highestnumber.py | number1=int(input("Please enter the first number: "))
number2=int(input("Please enter the second number: "))
number3=int(input("Please enter the third number: "))

# Branch order matters: a unique maximum is handled first, then the cases
# where the maximum is shared by exactly two inputs, then all-three-equal.
# The final else should be unreachable for integer input.
if (number1>number2) and (number1>number3):
    print("The highest number is",number1)
elif (number2>number1) and (number2>number3):
    print("The highest number is",number2)
elif (number3>number1) and (number3>number2):
    print("The highest number is",number3)
elif (number1>number2) and (number1==number3):
    print("Both the first and third number - which is",number1,"- are the highest.")
elif (number2>number1) and (number2==number3):
    print("Both the second and third number - which is",number2,"- are the highest.")
elif (number2>number3) and (number2==number1):
    print("Both the first and second number - which is",number2,"- are the highest.")
elif (number1==number2) and (number1==number3):
    print("Woah there, you typed the same number three times in a row - you sure 'bout that?")
else:
    print("Can't compute.")
| mit | Python | |
ce912bfb028633a32c8dbb882000182e7b6ccb80 | Initialize P01_boxPrint | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/AutomateTheBoringStuffWithPython/Chapter10/P01_boxPrint.py | books/AutomateTheBoringStuffWithPython/Chapter10/P01_boxPrint.py | # This program prints out a box based on input sizes
def boxPrint(symbol, width, height):
    """Print a hollow box made of *symbol* to stdout.

    :param symbol: single-character string used to draw the border
    :param width: total box width in characters; must be greater than 2
    :param height: total box height in lines; must be greater than 2
    :raises ValueError: if any argument is out of range.  ValueError is a
        subclass of Exception, so existing ``except Exception`` callers
        (like the driver loop below) keep working.
    """
    if len(symbol) != 1:
        raise ValueError("Symbol must be a single character string.")
    if width <= 2:
        raise ValueError("Width must be greater than 2.")
    if height <= 2:
        raise ValueError("Height must be greater than 2.")
    print(symbol * width)
    # Interior rows: border symbol, (width - 2) spaces, border symbol.
    for _ in range(height - 2):
        print(symbol + (' ' * (width - 2)) + symbol)
    print(symbol * width)
# Exercise boxPrint with valid and deliberately invalid argument sets; the
# invalid ones ('x' with width 1, "ZZ" being two characters) demonstrate
# the exception messages instead of crashing the script.
for sym, w, h in (('*', 4, 4), ('O', 20, 5), ('x', 1, 3), ("ZZ", 3, 3)):
    try:
        boxPrint(sym, w, h)
    except Exception as err:
        print("An exception happened: " + str(err))
| mit | Python | |
8e20c0f4844b366c43bb39e14ab6af5299ab03f0 | Make java_util.create_single_jar function visible also for Bazel | cushon/bazel,cushon/bazel,bazelbuild/bazel,katre/bazel,katre/bazel,cushon/bazel,bazelbuild/bazel,cushon/bazel,bazelbuild/bazel,katre/bazel,bazelbuild/bazel,cushon/bazel,cushon/bazel,katre/bazel,katre/bazel,katre/bazel,bazelbuild/bazel,bazelbuild/bazel | src/main/starlark/builtins_bzl/common/java/java_util.bzl | src/main/starlark/builtins_bzl/common/java/java_util.bzl | # Copyright 2021 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common util functions for java_* rules"""
load(":common/java/java_semantics.bzl", "semantics")
def create_single_jar(ctx, output, *input_depsets):
    """Register action for the output jar.

    Args:
      ctx: (RuleContext) Used to register the action.
      output: (Artifact) Output file of the action.
      *input_depsets: (list[depset[Artifact]]) Input files of the action.

    Returns:
      (File) Output file which was used for registering the action.
    """
    toolchain = semantics.find_java_toolchain(ctx)
    args = ctx.actions.args()
    # Always spill the argument list to a shell-quoted params file; source
    # lists can exceed the OS command-line length limit.
    args.set_param_file_format("shell").use_param_file("@%s", use_always = True)
    args.add("--output", output)
    args.add_all(
        [
            "--compression",
            "--normalize",
            "--exclude_build_data",
            "--warn_duplicate_resources",
        ],
    )
    # Merge all input depsets lazily; flattening happens at execution time.
    all_inputs = depset(transitive = input_depsets)
    args.add_all("--sources", all_inputs)
    ctx.actions.run(
        mnemonic = "JavaSingleJar",
        progress_message = "Building singlejar jar %s" % output.short_path,
        executable = toolchain.single_jar,
        inputs = all_inputs,
        # singlejar is also listed as a tool so its runfiles are staged.
        tools = [toolchain.single_jar],
        outputs = [output],
        arguments = [args],
    )
    return output
| apache-2.0 | Python | |
3aac0952d5c13e67c812f00f7a35d508dd1c1f5c | Test OEmbed XML parsing. | kgaughan/adjunct,kgaughan/adjunct | tests/test_oembed.py | tests/test_oembed.py | import io
import unittest
from adjunct.oembed import parse_xml_oembed_response
class OEmbedXMLParserTest(unittest.TestCase):
    """Tests for parse_xml_oembed_response()."""

    def test_parse(self):
        # Parse a minimal 'photo' oEmbed response.  Note the expected dict
        # below contains only version/type/title — url/height/width from the
        # document are not part of the parsed fields here.
        fh = io.StringIO(
            """<?xml version="1.0" encoding="utf-8"?>
<oembed>
<version>1.0</version>
<type>photo</type>
<title>This is a title</title>
<url>http://example.com/foo.png</url>
<height>300</height>
<width>300</width>
</oembed>
"""
        )
        fields = parse_xml_oembed_response(fh)
        self.assertDictEqual(
            fields, {"version": "1.0", "type": "photo", "title": "This is a title"}
        )
| mit | Python | |
4a3faad1bd26054742f871729b25b5a4a37ba127 | Create projective coordinates. | ofgulban/tetrahydra | examples/conversions/rgb_to_v1v2.py | examples/conversions/rgb_to_v1v2.py | """Create isometric logratio transformed coordinates for MRI data."""
import os
import numpy as np
import tetrahydra.core as tet
from tetrahydra.utils import truncate_and_scale
from nibabel import load, save, Nifti1Image

"""Load Data"""
#
# Three bias-corrected contrasts of the same anatomy form the composition.
nii1 = load('/home/faruk/Data/Faruk/bias_corr/T1_restore.nii.gz')
nii2 = load('/home/faruk/Data/Faruk/bias_corr/PD_restore.nii.gz')
nii3 = load('/home/faruk/Data/Faruk/bias_corr/T2s_restore.nii.gz')

basename = nii1.get_filename().split(os.extsep, 1)[0]
dirname = os.path.dirname(nii1.get_filename())

vol1 = nii1.get_data()
vol2 = nii2.get_data()
vol3 = nii3.get_data()
dims = vol1.shape + (3,)

# Stack the three volumes into one 4D array, then flatten so each row is a
# 3-part composition (T1, PD, T2s) per voxel.
comp = np.zeros(dims)
comp[..., 0] = vol1
comp[..., 1] = vol2
comp[..., 2] = vol3
comp = comp.reshape(dims[0]*dims[1]*dims[2], dims[3])

# Impute: zeros would make the logratios undefined, so replace them with 1.
comp[comp == 0] = 1.
# Closure (normalization of each composition row, via tetrahydra).
comp = tet.closure(comp)

# Isometric logratio transformed: 3 parts -> 2 coordinates per voxel.
ilr = tet.ilr_transformation(comp)
ilr = ilr.reshape(dims[0], dims[1], dims[2], dims[3]-1)

# save the new coordinates
for i in range(ilr.shape[-1]):
    img = ilr[..., i]
    # scale is done for FSL-FAST otherwise it cannot find clusters
    img = truncate_and_scale(img, percMin=0, percMax=100, zeroTo=2000)
    out = Nifti1Image(img, affine=nii1.affine)
    save(out, os.path.join(dirname, 'ilr_coord_'+str(i+1)+'.nii.gz'))
| bsd-3-clause | Python | |
339655b82c7a1d307ca69b0666a3a150a5b04aa6 | Create basic_tweepy.py | agusmakmun/Some-Examples-of-Simple-Python-Script,agusmakmun/Some-Examples-of-Simple-Python-Script | tweepy/basic_tweepy.py | tweepy/basic_tweepy.py | """
See tutorial:
- https://python.web.id/blog/cara-menggunakan-tweepy/
- https://python.web.id/blog/filter-timeline-twitter-menggunakan-tweepy/
"""
import tweepy

# Consumer keys and access tokens, used for OAuth
# NOTE(review): credentials are hard-coded; real keys should be loaded from
# the environment or a config file and never committed to source control.
consumer_key = 'JNT2qhC2noSTSya'
consumer_secret = 'YibVMiytGm2qWCun83cYjU4'
access_token = '1746506726-Jx7xPaH63tNC40JC'
access_token_secret = 'LDg6sOkOKlMEO9TSchDDb'

# OAuth process, using the keys and tokens
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)

# Creation of the actual interface, using authentication
api = tweepy.API(auth)

# Sample method, used to update a status
api.update_status('Test Status using tweepy, hurray!!')
| agpl-3.0 | Python | |
5b9941d67f9975ced1a628d315165f03d22d8b0d | Create 4sqr2instconverter.py | haddadi/Instagram,haddadi/Instagram | 4sqr2instconverter.py | 4sqr2instconverter.py | #!/usr/bin/python
import httplib2
import os
import sys
import urllib2
import re
import time
import subprocess
from time import sleep

# For each Foursquare venue id in the input file, query the Instagram
# location-search API via curl and write the JSON response together with
# the originating Foursquare id to the output file.  (Python 2 script.)
dataFile = open('4sq_USA_food_location_ids.txt', 'r')
outputfile = open ('4sq2inst_mapped_with_4sq_id_at_end.txt', 'w')

for line in dataFile:
    location_id=line
    # NOTE(review): the access token is hard-coded (sanitized as XXXXXXX)
    # and the command runs through the shell (shell=True) with file-derived
    # input — the already-imported urllib2 would be a safer alternative.
    string="curl https://api.instagram.com/v1/locations/search?access_token=XXXXXXX\&foursquare_v2_id="
    # print location_id
    cmd=string+location_id
    output = subprocess.check_output(cmd, shell=True)
    # os.system(cmd)
    # Python 2 print-to-file; trailing comma suppresses the extra newline.
    print >> outputfile, output+' '+line,
    #os.system('echo')
    #os.system('curl "https://api.instagram.com/v1/locations/search?access_token=XXXXXXX&foursquare_v2_id="+location_id ')
    # time.sleep(1)

dataFile.close()
| mit | Python | |
165aae202a9a0e350fd235e558a18710126200ac | Create acp.py | Asyncode/ACR2.0,GluuIO/ACR2.0 | ACR/components/acp.py | ACR/components/acp.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Properitary
# Copyright (C) 2014 Asyncode
from ACR import acconfig
from ACR.components import *
from ACR.components.interpreter import makeTree
from ACR.errors import *
from ACR.utils import HTTP
import os
import subprocess
import shutil
from bson import ObjectId
def exe(command):
    """Run *command*, close its stdin, and return everything it wrote to stdout.

    The command string is split on whitespace and executed without a shell.

    :param command: command line, e.g. ``"git pull"``.
    :raises Error: when the executable cannot be started.  (Bug fix: the
        previous handler referenced an undefined variable ``res``, so the
        error path itself crashed with a NameError.)
    """
    try:
        p = subprocess.Popen(command.split(), bufsize=2024,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    except OSError as err:
        raise Error("SubProcessError", "%s failed with error code %s" % (command, err))
    # Close stdin so the child sees EOF, then drain its stdout.
    p.stdin.close()
    return p.stdout.read()
class ACP(Component):
    """Asyncode Control Panel component: provisions hosted projects.

    A project is created by pulling and copying a git template into
    PROJECTS_PATH, installing an IDE skeleton next to it, writing its
    config.xml and seeding the new project's MongoDB database with the
    creating user as admin.  (Python 2 code: note ``except OSError,e:``.)
    """

    PROJECTS_PATH="/var/apps/"
    GIT_PATH="/home/ubuntu/git/"

    def __init__(self,config):
        # Ensure every directory component of PROJECTS_PATH exists.
        currPath="/"
        for d in self.PROJECTS_PATH.split("/")[:-1]:
            currPath=os.path.join(currPath, d)
            if not os.path.isdir(currPath):
                os.mkdir(currPath)

    def isAvailable(self,acenv,config):
        # A project name is free when no directory exists for it yet.
        name=config["name"].execute(acenv)
        return not os.path.isdir(self.PROJECTS_PATH+name+".asyncode.com")

    def create(self,acenv,config):
        """Create a new project from a template; returns a status dict."""
        if not self.isAvailable(acenv,config):
            return {
                "@status":"error",
                "@error":"NameTaken"
            }
        name=config["name"].execute(acenv)
        template=config["template"].execute(acenv)
        r={}
        # Refresh the template checkout before copying it.
        savedPath = os.getcwd()
        os.chdir(self.GIT_PATH+template)
        r["gitPull"]=str(exe("git pull"))
        os.chdir(savedPath)
        # Copy the template into place; an existing target directory is
        # silently tolerated (see the commented-out error returns).
        try:
            shutil.copytree(self.GIT_PATH+template, self.PROJECTS_PATH+name+".asyncode.com")
        except OSError,e:
            pass
            # return {
            # "@status":"error",
            # "@error":"NameNotAvailable",
            # "@message":"Project name is not available."+str(e)
            # }
        # Install the IDE skeleton next to the project (symlinks preserved).
        try:
            shutil.copytree(self.GIT_PATH+"IDEskel", self.PROJECTS_PATH+"ide."+name+".asyncode.com",symlinks=True)
        except OSError,e:
            pass
            # return {
            # "@status":"error",
            # "@error":"NameNotAvailable",
            # "@message":"Project name is not available."+str(e)
            # }
        # shutil.copyfile(self.GIT_PATH+"symlinktoIDE",self.PROJECTS_PATH+"ide."+name+".asyncode.com")
        # Write the IDE's config.xml pointing at the project's data paths.
        conf=open(self.PROJECTS_PATH+"ide."+name+".asyncode.com/config.xml","w")
        conf.write("""<?xml version="1.0" encoding="UTF-8"?>
<config>
<!-- Core configuration -->
<name>%s</name>
<debug enable="f" level="debug"/>
<profiler enable="f"/>
<lang default="en"/>
<mongo db="%s_asyncode_com"/>
<timezone>UTC</timezone>
<component name="filesystem">
<absolutePath>%s</absolutePath>
</component>
</config>"""%(name,name,self.PROJECTS_PATH+name+".asyncode.com"))
        conf.close()
        # Copy the current user into the new project's database, stripping
        # fields that must not carry over (id, approval key, last login).
        user=acenv.app.storage.users.find({"_id":ObjectId(acenv.sessionStorage.data["ID"])})
        user=user[0]
        try:
            del user["_id"]
        except:
            pass
        try:
            del user["approvalKey"]
        except:
            pass
        try:
            del user["last_login"]
        except:
            pass
        user["loggedIn"]=False
        user["role"]="admin"
        acenv.app.storage.connection[name+"_asyncode_com"].users.insert(user)
        r.update({"@status":"ok"})
        return r

    def generate(self,env,config):
        # Dispatch: the last ':'-separated token of "command" names the
        # method to invoke with the parsed parameters.
        return self.__getattribute__(config["command"].split(":").pop())(env,config["params"])

    def parseAction(self, conf):
        # Compile each raw parameter value into an interpreter tree.
        params={}
        for i in conf["params"]:
            params[i]=makeTree(conf["params"][i])
        ret=conf
        ret["params"]=params
        return ret
def getObject(config):
    """Component factory entry point expected by the ACR component loader."""
    component = ACP(config)
    return component
| agpl-3.0 | Python | |
523fac76d0de41b09aab7b42ec0a975ccc99a532 | add AE1 | QULab/sound_field_analysis-py | AE1_IdealPlaneWave.py | AE1_IdealPlaneWave.py | # SOFiA example 1: Ideal unity plane wave simulation
# Additionally requires vispy, see http://vispy.org
import numpy as np
from sofia import gen, plot

pi = np.pi

# Simulation parameters.
N = 9     # Order
Nrf = N     # Radial filter order
Nviz = N     # Visualization order
krViz = 30    # Select kr bin for vizualisation

r = 0.5    # Array radius
ac = 2      # Array configuration, 2: Rigid sphere array
FS = 48000  # Sampling Frequency
NFFT = 128    # FFT-Bins
AZ = pi / 3 # Azimuth angle
EL = pi / 3 # Elevation angle

# Generate an ideal plane wave using W/G/C (Wave Generator Core)
Pnm, kr = gen.wgc(N, r, ac, FS, NFFT, AZ, EL)

# Make radial filters for the rigid sphere array
dn, _ = gen.mf(Nrf, kr, ac)

# Generate visualization data for the selected kr bin
vizMTX = plot.makeMTX(Pnm, dn, Nviz, krViz)

# Visualize (disabled by default; requires vispy)
#plot.visualize3D(vizMTX, style='sphere')
| mit | Python | |
fda3c12070404a6f209f8b0ae18c6712d71a88ad | update to query and print GeoJSON results | mdiener21/python-geospatial-analysis-cookbook,mdiener21/python-geospatial-analysis-cookbook,mdiener21/python-geospatial-analysis-cookbook,mdiener21/python-geospatial-analysis-cookbook | ch04/code/ch04-06_postgis_complex_query.py | ch04/code/ch04-06_postgis_complex_query.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import psycopg2
import pprint
from geojson import loads, Feature, FeatureCollection

# Database Connection Info
# NOTE(review): credentials are hard-coded; acceptable for a cookbook
# example, but real code should read them from configuration/environment.
db_host = "localhost"
db_user = "postgres"
db_passwd = "air"
db_database = "py_geoan_cb"
db_port = "5432"

# connect to DB
conn = psycopg2.connect(host=db_host, user=db_user, port=db_port, password=db_passwd, database=db_database)

# create a cursor
cur = conn.cursor()

# Golf courses within 4 km of a city AND within 5 km of a park; distances
# are computed on geographies, i.e. in meters, and the course centroid is
# returned as GeoJSON.
complex_query = """
    SELECT
      st_asgeojson(st_centroid(g.wkb_geometry)) as geom, c.name AS city, g.name AS golfclub, p.name_en AS park,
      ST_Distance(geography(c.wkb_geometry), geography(g.wkb_geometry)) AS distance,
      ST_Distance(geography(p.geom), geography(g.wkb_geometry)) AS distance
    FROM
      geodata.parks_pa_canada AS p,
      geodata.cities_bc_alberta AS c
    JOIN
      geodata.golf_courses_bc_alberta AS g
    ON
      ST_DWithin(geography(c.wkb_geometry), geography(g.wkb_geometry),4000)
    WHERE
      ST_DWithin(geography(p.geom), geography(g.wkb_geometry),5000)
"""

# WHERE c.population is not null and e.name is not null
# execute the query
cur.execute(complex_query)

# return all the rows, we expect more than one
validity_results = cur.fetchall()

# an empty list to hold each feature of our feature collection
new_geom_collection = []

# loop through each row in result query set and add to my feature collection
# assign name field to the GeoJSON properties
for each_result in validity_results:
    geom = each_result[0]
    city_name = each_result[1]
    course_name = each_result[2]
    park_name = each_result[3]
    dist_city_to_golf = each_result[4]
    dist_park_to_golf = each_result[5]
    geoj_geom = loads(geom)
    myfeat = Feature(geometry=geoj_geom, properties={'city': city_name, 'golf_course': course_name,
                     'park_name': park_name, 'dist_to city': dist_city_to_golf,
                     'dist_to_park': dist_park_to_golf})
    new_geom_collection.append( myfeat) # use the geojson module to create the final Feature Collection of features created from for loop above

my_geojson = FeatureCollection(new_geom_collection)

pprint.pprint(my_geojson)

# close cursor
cur.close()

# close connection
conn.close()
| mit | Python | |
11f270fa0d962aa7d22c647c195a8db9d8c2cdd0 | Convert RGB color space to CMYK and split the channels. | danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,stiphyMT/plantcv | plantcv/plantcv/rgb2gray_cmyk.py | plantcv/plantcv/rgb2gray_cmyk.py | # RGB -> CMYK -> Gray
import cv2
import os
from plantcv.plantcv import print_image
from plantcv.plantcv import plot_image
from plantcv.plantcv import fatal_error
from plantcv.plantcv import params
import numpy as np
def rgb2gray_cmyk(rgb_img, channel):
    """Convert image from RGB colorspace to CMYK colorspace. Returns the specified subchannel as a gray image.

    Inputs:
    rgb_img = RGB image data (OpenCV channel order, i.e. B, G, R)
    channel = color subchannel (c = cyan, m = magenta, y = yellow, k = black)

    Returns:
    c | m | y | k = grayscale image from one CMYK color channel

    :param rgb_img: numpy.ndarray
    :param channel: str
    :return channel: numpy.ndarray
    """
    # Auto-increment the device counter
    params.device += 1

    # The allowable channel inputs are c, m, y or k
    names = {"c": "cyan", "m": "magenta", "y": "yellow", "k": "black"}
    channel = channel.lower()
    if channel not in names:
        fatal_error("Channel " + str(channel) + " is not c, m, y or k!")

    # Vectorized conversion: the previous per-pixel double Python loop cost
    # O(width * height) interpreter iterations; numpy performs the same
    # arithmetic in a handful of whole-array operations.
    bgr = rgb_img.astype(float) / 255.
    B_ = bgr[:, :, 0]
    G_ = bgr[:, :, 1]
    R_ = bgr[:, :, 2]

    # K ("blackness") is 1 minus the brightest of the three channels.
    K = 1. - np.max(bgr, axis=2)

    C = np.zeros_like(K)
    M = np.zeros_like(K)
    Y = np.zeros_like(K)

    # Pure black pixels (all channels 0, hence K == 1) keep C = M = Y = 0,
    # matching the explicit special case of the scalar formulation and
    # avoiding division by zero.
    nonblack = K < 1.
    denom = 1. - K[nonblack]
    C[nonblack] = (1. - R_[nonblack] - K[nonblack]) / denom
    M[nonblack] = (1. - G_[nonblack] - K[nonblack]) / denom
    Y[nonblack] = (1. - B_[nonblack] - K[nonblack]) / denom

    # Scale to 8 bit and split into the four grayscale channels.
    CMYK = (np.dstack((C, M, Y, K)) * 255).astype(np.uint8)
    channels = {"c": CMYK[:, :, 0], "m": CMYK[:, :, 1],
                "y": CMYK[:, :, 2], "k": CMYK[:, :, 3]}

    if params.debug == "print":
        print_image(channels[channel], os.path.join(params.debug_outdir,
                    str(params.device) + "_cmyk_" + names[channel] + ".png"))
    elif params.debug == "plot":
        plot_image(channels[channel], cmap="gray")

    return channels[channel]
| mit | Python | |
df7d2393ebb10ddbfb44d6cacb9c4ec3d07eb381 | Add initial Gunicorn configuration. | rcutmore/vinotes-api,rcutmore/vinotes-api | vinotes/config/gunicorn.py | vinotes/config/gunicorn.py | command = '/home/rc/projects/vinotes-api/env/bin/gunicorn'
pythonpath = '/home/rc/projects/vinotes-api/vinotes'  # project root added to sys.path
bind = '127.0.0.1:8001'  # loopback only; presumably fronted by a reverse proxy
workers = 1  # single worker process
user = 'vinotes-gunicorn'  # system user the workers run as
| unlicense | Python | |
9669a45df9f37477bee1a154a9fe4f375e719151 | add default models.py | rctay/satchmo-payment-dumb | models.py | models.py | import config
# Flag marking this module as a payment-processor backend — presumably
# checked by the framework's payment-module discovery; verify against the
# Satchmo payment registration code.
PAYMENT_PROCESSOR=True
36fb0255a4037a9fe7b6d61868f8666325fea944 | Test recipient address formatting in user message e-mails | m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | tests/blueprints/user_message/test_address_formatting.py | tests/blueprints/user_message/test_address_formatting.py | """
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.database import db
from byceps.services.email.models import EmailConfig
from byceps.services.user_message import service as user_message_service
from testfixtures.brand import create_brand as _create_brand
from testfixtures.user import create_user as _create_user
from tests.helpers import app_context
def test_recipient_formatting(application, params):
    """The message's recipient list uses 'Screen Name <address>' format."""
    screen_name, email_address, expected = params

    brand = create_brand()
    user = create_user(screen_name, email_address=email_address)

    # Sender and recipient are the same user; subject/body are irrelevant
    # to recipient formatting, hence the empty strings.
    message = user_message_service.create_message(user.id, user.id, '', '', brand.id)

    assert message.recipients == [expected]
@pytest.fixture(params=[
    ('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
    ('Bob', 'bob@example.com', 'Bob <bob@example.com>'),
])
def params(request):
    # Parametrized fixture: (screen name, e-mail address, expected mailbox).
    yield request.param
@pytest.fixture
def application():
    # Fresh application context with a clean database for every test.
    with app_context():
        set_up_database()
        yield
        tear_down_database()
# helpers
def set_up_database():
    # Drop any leftover tables before recreating the full schema.
    db.reflect()
    db.drop_all()
    db.create_all()


def tear_down_database():
    # Dispose of the session first so no connection holds the tables open.
    db.session.remove()
    db.drop_all()
def create_brand():
    # Persist a brand plus the e-mail sender configuration the message
    # service requires for that brand.
    brand = _create_brand()
    db.session.add(brand)
    db.session.commit()

    sender_address = '{}@example.com'.format(brand.id)
    email_config = EmailConfig(brand.id, sender_address)
    db.session.add(email_config)
    db.session.commit()

    return brand
def create_user(*args, **kwargs):
    # Build a user via the test fixture factory and persist it.
    user = _create_user(*args, **kwargs)
    db.session.add(user)
    db.session.commit()
    return user
| bsd-3-clause | Python | |
a0b4e074d5ca13634a1671ea074ffa7a6ad9fea1 | Add missing change | Just-D/chromium-1,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,Chilledheart/chromium,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,rogerwang/chromium,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,ltilve/chromium,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,keishi/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,robclark/chromium,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,littlstar/chromium.src,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,junmin-zhu/chromium-rivertrail,rogerwang/chromium,mogoweb/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,keishi/chromium,jaruba/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,keishi/chromium,Jonekee/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,hujiajie/pa-chromium,rogerwang/chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,M4sse/chromium.
src,nacl-webkit/chrome_deps,dushu1203/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,robclark/chromium,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,markYoungH/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,hujiajie/pa-chromium,M4sse/chromium.src,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,Jonekee/chromium.src,robclark/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,M4sse/chromium.src,dednal/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,jaruba/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,patrickm/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,patrickm/chromium.src,robclark/chromium,mogoweb/
chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,robclark/chromium,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,dushu1203/chromium.src,nacl-webkit/chrome_deps,keishi/chromium,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,rogerwang/chromium,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,rogerwang/chromium,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,robclark/chromium,ondra-novak/chromium.src,keishi/chromium,timopulkkinen/BubbleFish,Just-D/chromium-1,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,rogerwang/chromium,nacl-webkit/chrome_deps,robclark/chromium,hgl888/chromium-crosswalk-efl,dednal/chromium.src,zcbenz/cefode-chromium,keishi/chromium,hujiajie/pa-chromium,keishi/chromium,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,ltilve/chromium,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,mogoweb/chromium-crossw
alk,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,anirudhSK/chromium,nacl-webkit/chrome_deps,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,rogerwang/chromium,M4sse/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk,anirudhSK/chromium,ChromiumWebApps/chromium,patrickm/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,littlstar/chromium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,robclark/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,patrickm/chromium.src,rogerwang/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,mogoweb/chromium-crosswalk,ltilve/chromium,keishi/chromium,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,mogoweb/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chr
omium-crosswalk,Chilledheart/chromium,patrickm/chromium.src,hujiajie/pa-chromium,axinging/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,keishi/chromium,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,robclark/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,keishi/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,ltilve/chromium,robclark/chromium,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,rogerwang/chromium,nacl-webkit/chrome_deps,littlstar/chromium.src,dednal/chromium.src,ltilve/chromium,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,rogerwang/chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,Jonekee/chromium.src,littlstar/chromium.src,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,ltilve/chromium | ppapi/PRESUBMIT.py | ppapi/PRESUBMIT.py | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
def CheckChange(input_api, output_api):
  """Check that touched PPAPI .idl and .h files stay in sync.

  Warns when a changed .idl has no matching C header (or vice versa), then
  runs generator.py in --test/--diff mode over the touched files; a nonzero
  exit from the generator is reported as a presubmit error.
  """
  results = []
  header_names = []
  idl_names = []
  # Collect the stems of every touched ppapi/c/*.h and ppapi/api/*.idl.
  for path in input_api.LocalPaths():
    stem, ext = os.path.splitext(path)
    parts = stem.split(os.sep)
    if parts[0:2] == ['ppapi', 'c'] and ext == '.h':
      header_names.append('/'.join(parts[2:]))
    if parts[0:2] == ['ppapi', 'api'] and ext == '.idl':
      idl_names.append('/'.join(parts[2:]))
  touched = header_names + idl_names
  # Nothing PPAPI-related in this CL: done.
  if not touched:
    return results
  # Every .idl needs a matching .h and every .h a matching .idl.
  header_set = set(header_names)
  idl_set = set(idl_names)
  missing = ['  ppapi/c/%s.h' % stem
             for stem in idl_names if stem not in header_set]
  missing.extend('  ppapi/api/%s.idl' % stem
                 for stem in header_names if stem not in idl_set)
  if missing:
    results.append(
        output_api.PresubmitPromptWarning('Missing matching PPAPI definition:',
                                          long_text='\n'.join(missing)))
  # Diff the generated headers against the checked-in ones:
  #   --test keeps the generator from writing to disk,
  #   --diff emits a unified diff,
  #   --out restricts it to the files touched by this CL.
  generators_dir = os.path.join(input_api.PresubmitLocalPath(), 'generators')
  cmd = [sys.executable, 'generator.py',
         '--wnone', '--diff', '--test', '--cgen', '--range=M13,M16']
  cmd.append('--out=' + ','.join(stem + '.idl' for stem in touched))
  proc = subprocess.Popen(cmd, cwd=generators_dir,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  _stdout, stderr = proc.communicate()
  if proc.returncode:
    results.append(
        output_api.PresubmitError('PPAPI IDL Diff detected: Run the generator.',
                                  long_text=stderr))
  return results
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point for upload; delegates to CheckChange."""
#  return []
  return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point for commit; delegates to CheckChange."""
#  return []
  return CheckChange(input_api, output_api)
| # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
def CheckChange(input_api, output_api):
  """Verify touched PPAPI .idl files match their C headers and vice versa,
  then run the IDL generator in diff mode to detect stale headers."""
  results = []
  # Verify all modified *.idl have a matching *.h
  files = input_api.LocalPaths()
  h_files = []
  idl_files = []
  # Classify every touched path: stems under ppapi/c/ (headers) and
  # ppapi/api/ (IDL definitions).
  for filename in files:
    name, ext = os.path.splitext(filename)
    name_parts = name.split(os.sep)
    if name_parts[0:2] == ['ppapi', 'c'] and ext == '.h':
      h_files.append('/'.join(name_parts[2:]))
    if name_parts[0:2] == ['ppapi', 'api'] and ext == '.idl':
      idl_files.append('/'.join(name_parts[2:]))
  # Generate a list of all appropriate *.h and *.idl changes in this CL.
  both = h_files + idl_files
  # If there aren't any, we are done checking.
  if not both: return results
  missing = []
  # Each changed .idl should come with its generated header, and each
  # changed header with its .idl source; warn about any unpaired file.
  for filename in idl_files:
    if filename not in set(h_files):
      missing.append('  ppapi/c/%s.h' % filename)
  for filename in h_files:
    if filename not in set(idl_files):
      missing.append('  ppapi/api/%s.idl' % filename)
  if missing:
    results.append(
        output_api.PresubmitPromptWarning('Missing matching PPAPI definition:',
                                          long_text='\n'.join(missing)))
  # Verify all *.h files match *.idl definitions, use:
  #   --test to prevent output to disk
  #   --diff to generate a unified diff
  #   --out to pick which files to examine (only the ones in the CL)
  ppapi_dir = input_api.PresubmitLocalPath()
  cmd = [ sys.executable, 'generator.py',
          '--wnone', '--diff', '--test','--cgen', '--range=M13,M14']
  # Only generate output for IDL files references (as *.h or *.idl) in this CL
  cmd.append('--out=' + ','.join([name + '.idl' for name in both]))
  # A nonzero generator exit code means the checked-in headers differ from
  # what the generator would produce; surface its stderr to the author.
  p = subprocess.Popen(cmd, cwd=os.path.join(ppapi_dir, 'generators'),
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
  (p_stdout, p_stderr) = p.communicate()
  if p.returncode:
    results.append(
      output_api.PresubmitError('PPAPI IDL Diff detected: Run the generator.',
                                long_text=p_stderr))
  return results
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point for upload; delegates to CheckChange."""
#  return []
  return CheckChange(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point for commit; delegates to CheckChange."""
#  return []
  return CheckChange(input_api, output_api)
| bsd-3-clause | Python |
009ac6b912a00191f69b32791cdb7e74fee83752 | Add config reader | rajikaimal/emma,rajikaimal/emma | src/config.py | src/config.py | import configparser
import os
# Shared parser instance; the read_* functions below (re)load the config
# file into it on every call.
Config = configparser.ConfigParser()
# Directory containing this module; the config file lives under ./config/.
full_path = os.path.dirname(os.path.realpath(__file__))
def read_credentials():
    """Read the [Credentials] section of config/config_details.ini.

    Returns a dict with 'username' and 'password' keys, or None (after
    printing an error) when the section or either option is missing.
    """
    Config.read(full_path + '/config/config_details.ini')
    try:
        username = Config['Credentials']['username']
        password = Config['Credentials']['password']
        return {
            'username': username,
            'password': password
        }
    except KeyError:
        # configparser's mapping access raises KeyError (not ValueError)
        # for a missing section/option, so the old ValueError handler
        # could never fire.
        print('Error !')
def read_repo():
    """Read the [Repository] section of config/config_details.ini.

    Returns a dict with 'org' and 'repo' keys, or None (after printing an
    error) when the section or either option is missing.
    """
    Config.read(full_path + '/config/config_details.ini')
    try:
        org = Config['Repository']['org']
        repo = Config['Repository']['repo']
        return {
            'org': org,
            'repo': repo
        }
    except KeyError:
        # KeyError, not ValueError, is what configparser mapping access
        # raises for missing sections/options.
        print('Error !')
| mit | Python | |
f290dd020b2cb3e586c8de6c4e8e3c1bc80f3583 | Add new class to compute the colourmap and the node filters accordingly to did | amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt | evaluation/packages/colours.py | evaluation/packages/colours.py | """@package Colours
This module provides the colourmaps used in globOpt to display primitives
according to their gid
"""
import packages.primitive as primitive
import packages.orderedSet as orderedSet
class Colours(object):
    """Colour palettes used to display globOpt primitives by group id."""

    def __init__(self):
        # Two nine-entry hex palettes; entries are reused cyclically when
        # there are more group ids than colours.
        self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377']
        self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377']

    def getDIDColourMap(self, primArray):
        """Compute the colourmap associating one colour per group id.

        Also builds the masks associating the node uids for each group id
        (usable directly as filters in networkX display functions).
        Returns (cmap, gfilter): did -> colour string, did -> [uid, ...].
        """
        ids = orderedSet.OrderedSet()
        gfilter = {}
        for p in primArray:
            ids.add(p.did)
            if p.did not in gfilter:
                gfilter[p.did] = []
            gfilter[p.did].append(p.uid)

        cmap = {}
        nbCol = len(self.colListMedium)
        # Assign palette colours in first-seen order, wrapping around.
        # (A leftover Python-2-only debug `print` was removed here; it was
        # a syntax error under Python 3 and spammed stdout.)
        for idx, did in enumerate(ids):
            cmap[did] = self.colListMedium[idx % nbCol]
        return cmap, gfilter
| apache-2.0 | Python | |
902fe0435367a55b218265f11f0c444cba9a2cfa | Add files via upload | tijme/angularjs-csti-scanner,tijme/angularjs-sandbox-escape-scanner | source.py | source.py | from bs4 import BeautifulSoup
import urllib
import re
import math
import html5lib
from html5lib import treebuilders
import urlparse
def print_parameter_tabel(parameters):
    """Pretty-print discovered parameter names as a numbered ASCII table.

    parameters is a list of single-element lists (as built by
    find_parameters, e.g. [['user'], ['id']]); each entry becomes one
    pipe-framed, left-padded row. Returns None; output goes to stdout.

    Fixes over the previous version: the unused nested helpers cs() and
    houndred() (dead code) were removed, and the function no longer
    mutates the caller's inner lists in place while padding.
    """
    for number, item in enumerate(parameters):
        label = "".join(item)
        # Pad the label to 29 columns (no padding if it is longer).
        row = label + " " * (29 - len(label)) + "|"
        print("+" + "-" * 32 + "+")
        print("|" + str(number) + ") " + row)
        print("+" + "-" * 32 + "+")
def correct_hypertext_scheme(site):
    """Return site with an 'http://' scheme prepended when none is present.

    Strings already containing 'http://' or 'https://' are returned
    unchanged. (Three lines of unreachable urlparse/domain-printing code
    that followed the return statement were removed as dead code.)
    """
    if 'https://' not in site and 'http://' not in site:
        site = "http://" + site
    return site
def find_parameters(site):
    """Fetch `site` and report the text <input> fields found in its HTML.

    Normalizes the URL scheme, downloads the page, and prints each
    input's name via print_parameter_tabel. Side effects only (network +
    stdout); returns None. A duplicate call to correct_hypertext_scheme
    whose result was discarded has been removed.
    """
    url = correct_hypertext_scheme(site)
    parameters = []
    mhtml = urllib.urlopen(url)
    soup = BeautifulSoup(mhtml, 'html5lib')
    inputs = soup.findAll("input", {'type':'text'})
    print("[+] No parameters in your url detected, but here are some input fields the page has")
    if len(inputs) > 0:
        for elem in inputs:
            parameter = elem['name']
            parameters.append([parameter])
        print("[+] Please fill in those fields and see if you can get a url with parameters included ")
        print_parameter_tabel(parameters)
    else:
        print("[+] No input entery points found on this page...")
def run():
    """Prompt the user for a URL and scan it for input entry points."""
    # TODO: detect the no-parameter case explicitly; wrap this flow in a
    # class eventually.
    target = raw_input("[+] Please enter a url ")
    find_parameters(target)
# Script entry point.
if __name__ == "__main__":
    run()
| mit | Python | |
833d114bd1bc396dc7c6b0434782f9e326319e88 | Add file to read .RAW images from Aptina | habi/GlobalDiagnostiX,habi/GlobalDiagnostiX,habi/GlobalDiagnostiX | readAptinaRAW.py | readAptinaRAW.py | import os
import numpy
import matplotlib.pyplot as plt
# Location of the Aptina .RAW capture to display.
Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/'
Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm'
File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw'
# Image size parsed from the "1280x1024" token of the file name,
# as [height, width] for reshaping the flat pixel stream.
Size = [int(File.split('_')[1].split('x')[1]),
        int(File.split('_')[1].split('x')[0])]
# fromfile
FileToLoad = os.path.join(Directory, Folder, File)
# Load the 16-bit pixels two ways: fromfile copies them into memory...
FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size)
#~ FromFile -= numpy.mean(FromFile)
# ...while memmap maps the file itself, so in-place writes go to disk.
MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1]))
#~ MemMap -= numpy.mean(MemMap)
# Show both loads side by side for visual comparison.
plt.figure(File)
plt.subplot(121)
plt.imshow(FromFile,cmap='gray')
plt.title('numpy.fromfile > leaves file')
plt.subplot(122)
plt.imshow(MemMap,cmap='gray')
plt.title('numpy.memmap > destroys file')
plt.show()
# Python 2 print statement: warn about memmap's write-through behaviour.
print 'Only use "numpy.memmap" for displaying files! If you perform some',\
    'calculations on the files (e.g "File -= numpy.mean(File)") these',\
    'calculations are immediately saved to disk, essentially destroying the',\
    'file! In this case use "numpy.fromfile"!'
| unlicense | Python | |
0755afbf47087aded357ca77c86e98f7243a53c7 | check that `{base_url}/nbextensions` page loads | jcb91/jupyter_nbextensions_configurator,jcb91/jupyter_nbextensions_configurator,jcb91/jupyter_nbextensions_configurator,jcb91/jupyter_nbextensions_configurator | tests/test_nbextensions_configurator.py | tests/test_nbextensions_configurator.py | import requests
from notebook.notebookapp import NotebookApp
from notebook.tests.launchnotebook import NotebookTestBase
from notebook.utils import url_path_join
from traitlets.config import Config
class ConfiguratorTest(NotebookTestBase):
    """Smoke test: the configurator's /nbextensions page must load."""

    # Enable the server extension under whichever config key this notebook
    # version understands (nbserver_extensions is the newer spelling).
    config = Config(log_level='DEBUG')
    if hasattr(NotebookApp, 'nbserver_extensions'):
        config.nbserver_extensions = Config(
            {'themysto.nbextensions_configurator': True})
    else:
        config.server_extensions = ['themysto.nbextensions_configurator']

    def test_load_nbextensions_page(self):
        """GET {base_url}/nbextensions and fail on any HTTP error status."""
        page_url = url_path_join(self.base_url(), 'nbextensions')
        reply = requests.request('GET', page_url)
        reply.raise_for_status()
| bsd-3-clause | Python | |
b3aaa3c9e9eccd1f8be82316713a613d16412f36 | add files module (mostly for images at the moment) | mahmoud/wapiti | wapiti/operations/files.py | wapiti/operations/files.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from base import QueryOperation
from params import MultiParam, StaticParam
from models import PageInfo, ImageInfo
from utils import OperationExample
# Property names requested for image info queries (presumably the
# MediaWiki imageinfo API's iiprop values -- confirm against the API).
DEFAULT_IMAGE_PROPS = ['timestamp', 'user', 'userid', 'comment', 'parsedcomment',
                       'url', 'size', 'dimensions', 'sha1', 'mime', 'mediatype',
                       'metadata', 'bitdepth']
# Extended set used by GetImageInfos: adds thumbnail MIME and archive name.
IMAGE_INFO_PROPS = DEFAULT_IMAGE_PROPS + ['thumbmime', 'archivename']
class GetImages(QueryOperation):
    """
    Fetch the images embedded on pages, as PageInfo records.
    """
    field_prefix = 'gim'
    input_field = MultiParam('titles', key_prefix=False)
    fields = [StaticParam('generator', 'images'),
              StaticParam('prop', 'info'),
              StaticParam('inprop', 'subjectid|talkid|protection')]
    output_type = [PageInfo]
    examples = [OperationExample('Coffee')]

    def extract_results(self, query_resp):
        """Convert the raw 'pages' mapping into PageInfo objects,
        skipping entries PageInfo.from_query rejects."""
        page_infos = []
        for page_id, info_dict in query_resp['pages'].iteritems():
            if page_id.startswith('-'):
                # Negative ids: blank the pageid. (TODO: breaks consistency :/)
                info_dict['pageid'] = None
            try:
                page_infos.append(PageInfo.from_query(info_dict,
                                                      source=self.source))
            except ValueError:
                continue
        return page_infos
class GetImageInfos(QueryOperation):
    """Fetch ImageInfo records (url, size, sha1, ...) for given titles."""
    field_prefix = 'ii'
    input_field = MultiParam('titles', key_prefix=False)
    fields = [StaticParam('prop', 'imageinfo'),
              StaticParam('iiprop', IMAGE_INFO_PROPS)]
    output_type = [ImageInfo]

    def extract_results(self, query_resp):
        """Convert the raw 'pages' mapping into ImageInfo objects,
        skipping entries ImageInfo.from_query rejects."""
        ret = []
        for k, pid_dict in query_resp['pages'].iteritems():
            # Negative page ids with a non-local repository mark files
            # hosted on a shared repo; tag pageid/revid as 'shared'.
            if int(k) < 0 and pid_dict['imagerepository'] != 'local':
                pid_dict['pageid'] = 'shared'
                pid_dict['revid'] = 'shared'
            try:
                pid_dict.update(pid_dict.get('imageinfo', [{}])[0])
                image_info = ImageInfo.from_query(pid_dict,
                                                  source=self.source)
            except ValueError:
                # Skip malformed entries silently, matching
                # GetImages.extract_results; a leftover debug `print e`
                # (Python-2-only syntax) was removed here.
                continue
            ret.append(image_info)
        return ret
class GetAllImageInfos(GetImageInfos):
    """Generator variant of GetImageInfos: enumerates every image via the
    'allimages' generator instead of taking explicit titles (hence the
    input_field of None)."""
    field_prefix = 'gai'
    input_field = None
    fields = [StaticParam('generator', 'allimages'),
              StaticParam('prop', 'imageinfo'),
              StaticParam('gaiprop', DEFAULT_IMAGE_PROPS)]
    examples = [OperationExample()]
| bsd-3-clause | Python | |
56fd675e5bf0bd68a73e21c244807c39a87a3eee | Implement the command handler framework | Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot | heufybot/modules/util/commandhandler.py | heufybot/modules/util/commandhandler.py | from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
    """Skeleton bot module that will route chat messages to command
    handlers; the message/command handlers are still unimplemented stubs."""
    implements(IPlugin, IBotModule)
    # Module name, presumably used by the bot's module loader -- confirm.
    name = "CommandHandler"
    def hookBot(self, bot):
        """Store a reference to the owning bot instance."""
        self.bot = bot
    def actions(self):
        """Declare subscriptions: (event name, priority, callback) tuples
        for channel and private messages."""
        return [ ("message-channel", 1, self.handleChannelMessage),
                 ("message-user", 1, self.handlePrivateMessage) ]
    def handleChannelMessage(self, server, channel, user, messageBody):
        """Handle a channel message; not implemented yet."""
        pass
    def handlePrivateMessage(self, server, user, messageBody):
        """Handle a private message; not implemented yet."""
        pass
    def handleCommand(self, message):
        """Parse and dispatch a bot command; not implemented yet."""
        pass
| mit | Python | |
378f98885fb7ea2eebb7307afded05cd3706647b | make server sleep 60s | zenanhu/pluto,zenanhu/pluto,zenanhu/pluto,zenanhu/pluto | hydra/server1.py | hydra/server1.py | import os
import socket
import time
# Listen on all interfaces, port 8888 (HOST/PORT also bound individually).
SERVER_ADDRESS = (HOST, PORT) = '', 8888
# Backlog size passed to listen(): pending connections queue here while
# the single-threaded handler is busy.
REQUEST_QUEUE_SIZE = 1
def handle_request(client_connection):
    """Serve one connection: read the request, echo it, send a canned reply.

    Sleeps 60 seconds before returning to simulate a slow handler; since
    the server is single-threaded, other clients queue up meanwhile.
    """
    request = client_connection.recv(1024)
    # Parenthesized so the statement parses (and prints identically) on
    # both Python 2 and 3.
    print(request.decode())
    # Minimal HTTP reply. The blank line after the status line is required
    # by HTTP to separate headers from the body; it was previously missing,
    # so clients would parse "Hello World!" as a malformed header. Bare LFs
    # are kept for simplicity although HTTP strictly specifies CRLF.
    http_response = '''\
HTTP/1.1 200 OK

Hello World!
'''
    client_connection.sendall(http_response)
    time.sleep(60)
def serve_forever():
    """Accept connections forever, serving each one sequentially."""
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without waiting out TIME_WAIT.
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(SERVER_ADDRESS)
    listener.listen(REQUEST_QUEUE_SIZE)
    print('Serving HTTP on port {port} ...\n'.format(port=PORT))
    while True:
        conn, _client_address = listener.accept()
        handle_request(conn)
        conn.close()
# Run the blocking server when executed as a script.
if __name__ == '__main__':
    serve_forever()
| apache-2.0 | Python | |
26cad83ebb6466d66f1e9fd87e963af4b5247ecc | Add Heap sort implemented in python | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | sort/heap_sort/python/heap_sort_ccsc.py | sort/heap_sort/python/heap_sort_ccsc.py | # Python program for implementation of heap Sort
def heapify(arr, n, i):
    """Sift arr[i] down until the subtree rooted at i is a max-heap.

    Only the first n entries of arr are treated as part of the heap.
    Iterative equivalent of the usual recursive sift-down.
    """
    node = i
    while True:
        top = node
        left = 2 * node + 1
        right = 2 * node + 2
        # Pick the largest of the node and its existing children.
        if left < n and arr[top] < arr[left]:
            top = left
        if right < n and arr[top] < arr[right]:
            top = right
        if top == node:
            # Heap property already holds here; done.
            return
        arr[node], arr[top] = arr[top], arr[node]
        # Continue sifting from the child we swapped into.
        node = top
def heapSort(arr):
    """Sort the list arr in place, ascending, using heapsort.

    Relies on the module-level heapify() for the sift-down step.
    """
    size = len(arr)
    # Build a max-heap: sift down every parent, starting from the last
    # one (index size//2 - 1) back to the root.
    for parent in range(size // 2 - 1, -1, -1):
        heapify(arr, size, parent)
    # Repeatedly move the current maximum (root) past the end of the
    # shrinking heap, then restore the heap property on the prefix.
    for boundary in range(size - 1, 0, -1):
        arr[boundary], arr[0] = arr[0], arr[boundary]
        heapify(arr, boundary, 0)
# Driver code to test above
arr = [ 12, 11, 13, 5, 6, 7]
heapSort(arr)
n = len(arr)
print ("Sorted array is")
for i in range(n):
    # NOTE(review): the trailing comma is a Python 2 idiom (suppress the
    # newline); under Python 3 it builds a throwaway (None,) tuple and
    # each number prints on its own line instead.
    print ("%d" %arr[i]),
# This code is contributed by Chirag Chopra
| cc0-1.0 | Python | |
ee9e31b8a8d93288009ee8d9a846dcaf930edb7a | Create solutions.py | tonylixu/leetcode | unique-number-of-occurrences/solutions.py | unique-number-of-occurrences/solutions.py | class Solution(object):
def uniqueOccurrences(self, arr):
"""
:type arr: List[int]
:rtype: bool
"""
arr_dict = {}
for x in arr:
if x in arr_dict:
arr_dict[x] += 1
else:
arr_dict[x] = 1
if len(set(arr_dict.values())) != len(arr_dict.values()):
return False
else:
return True
| mit | Python | |
f4a4b0ed743b1c56b884d9364759adeca5d64479 | Change PRESUBMIT to allow COMPILE_ASSERT | fujunwei/chromium-crosswalk,ltilve/chromium,ChromiumWebApps/chromium,Just-D/chromium-1,ChromiumWebApps/chromium,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,hujiajie/pa-chromium,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,ltilve/chromium,timopulkkinen/BubbleFish,Just-D/chromium-1,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chrom
ium-rivertrail,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,Jonekee/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,zcbenz/cefode-chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,M4sse/chromium.src,littlstar/chromium.src,patrickm/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,jaruba/chromium.src,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,ondra-novak/ch
romium.src,dednal/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,zcbenz/cefode-chromium,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,mogoweb/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,axinging/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ondra-novak/chromium.src,littlstar/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,Chilledheart/chromium,hgl888/chrom
ium-crosswalk-efl,dushu1203/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,littlstar/chromium.src,anirudhSK/chromium,Just-D/chromium-1,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,Just-D/chromium-1,nacl-webkit/chrome_deps,dushu1203/chromium.src,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,hujiajie/pa-chromium,nacl-webkit/chrome_deps,anirudhSK/chromium,dednal/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,jaruba/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,markYoungH/chromium.src,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,mogoweb/chromium-crosswalk,littlstar/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,littlstar/chromium.src,ltilve/chromium,littlstar/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,ltilve/chromium,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,chuan9/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,timopulkkinen/BubbleFish,ondra-novak/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Chilledhe
art/chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,axinging/chromium-crosswalk,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,ondra-novak/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,ltilve/chromium,Just-D/chromium-1,ltilve/chromium,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,patrickm/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src | cc/PRESUBMIT.py | cc/PRESUBMIT.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for cc.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
import re
CC_SOURCE_FILES=(r'^cc/.*\.(cc|h)$',)
def CheckAsserts(input_api, output_api, white_list=CC_SOURCE_FILES, black_list=None):
  """Reject cc/ sources that use WebKit ASSERT macros instead of DCHECK.

  Scans every affected file matching white_list (minus black_list) and
  returns a list with a single PresubmitError naming the offenders, or an
  empty list when all files are clean.
  """
  black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
  source_file_filter = lambda x: input_api.FilterSourceFile(x, white_list, black_list)
  assert_files = []
  notreached_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # WebKit ASSERT() is not allowed.  The \b anchor keeps substring hits
    # such as COMPILE_ASSERT( from matching.
    if re.search(r"\bASSERT\(", contents):
      assert_files.append(f.LocalPath())
    # WebKit ASSERT_NOT_REACHED() is not allowed.
    if re.search(r"ASSERT_NOT_REACHED\(", contents):
      notreached_files.append(f.LocalPath())
  if assert_files:
    return [output_api.PresubmitError(
        'These files use ASSERT instead of using DCHECK:',
        items=assert_files)]
  if notreached_files:
    return [output_api.PresubmitError(
        'These files use ASSERT_NOT_REACHED instead of using NOTREACHED:',
        items=notreached_files)]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run when a change is uploaded for review."""
  return list(CheckAsserts(input_api, output_api))
def GetPreferredTrySlaves(project, change):
  """Extra trybots to run by default for cc/ changes."""
  preferred = ['linux_layout_rel']
  return preferred
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for cc.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
import re
CC_SOURCE_FILES=(r'^cc/.*\.(cc|h)$',)
def CheckAsserts(input_api, output_api, white_list=CC_SOURCE_FILES, black_list=None):
  """Reject cc/ sources that use WebKit ASSERT macros instead of DCHECK.

  Returns a list with a single PresubmitError naming the offending files,
  or an empty list when every affected file is clean.
  """
  black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
  source_file_filter = lambda x: input_api.FilterSourceFile(x, white_list, black_list)
  assert_files = []
  notreached_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # WebKit ASSERT() is not allowed.  Fix: anchor with \b so substring
    # hits such as COMPILE_ASSERT( no longer trigger false positives.
    if re.search(r"\bASSERT\(", contents):
      assert_files.append(f.LocalPath())
    # WebKit ASSERT_NOT_REACHED() is not allowed.
    if re.search(r"ASSERT_NOT_REACHED\(", contents):
      notreached_files.append(f.LocalPath())
  if assert_files:
    return [output_api.PresubmitError(
        'These files use ASSERT instead of using DCHECK:',
        items=assert_files)]
  if notreached_files:
    return [output_api.PresubmitError(
        'These files use ASSERT_NOT_REACHED instead of using NOTREACHED:',
        items=notreached_files)]
  return []
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run when a change is uploaded for review."""
  results = []
  results += CheckAsserts(input_api, output_api)
  return results
def GetPreferredTrySlaves(project, change):
  """Extra trybots to run by default for cc/ changes."""
  return [
      'linux_layout_rel',
  ]
| bsd-3-clause | Python |
072fd08dd89cc03aad0508c2e16e7551f5b27de0 | Create info.py | jurandysoares/admintera,jurandysoares/admintera | cgi-bin/info.py | cgi-bin/info.py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Este arquivo pode ser salvo com qualquer extensão, tipo info.html
# A ser salvo em /usr/lib/cgi-bin no Debian/Ubuntu e
# em /var/www/cgi-bin no Fedora/RedHat/CentOS.
import os
# Emit the CGI response: a Content-type header, the mandatory blank
# separator line, then an HTML table of every environment variable the
# web server passed to this script.
print('Content-type: text/html')
print()
print('''<table border="1">
<tr>
<th>Variavel</th>
<th>Valor</th>
</tr>
''')
# Sort the variable names so the table order is stable between requests.
chaves_amb = list(os.environ.keys())
chaves_amb.sort()
# One table row per environment variable.
# NOTE(review): values are interpolated into HTML without escaping;
# consider html.escape() if any value can contain markup.
for var in chaves_amb:
    print('<tr>')
    print('<td>{}</td>'.format(var))
    print('<td>{}</td>'.format(os.environ[var]))
    print('</tr>')
print('</table>')
| apache-2.0 | Python | |
a5e35f1b19259addf325d5b2b3545e0f10fbf5b6 | Create string2.py | dimir2/hse12pi2-scripts,dimir2/hse12pi2-scripts,dimir2/hse12pi2-scripts,dimir2/hse12pi2-scripts | BaydakovaE/string2.py | BaydakovaE/string2.py | import math
# D. verbing
def verbing(s):
    """Return s suffixed per the 'verbing' exercise.

    A string already ending in 'ing' gets 'ly' appended; any other string
    longer than 3 characters gets 'ing'; short strings pass through.
    """
    if s.endswith('ing'):
        return s + "ly"
    if len(s) > 3:
        return s + "ing"
    return s
# E. not_bad
def not_bad(s):
    """Replace the first 'not' ... 'bad' span in s with 'good'.

    The replacement only happens when 'not' appears before 'bad'; text
    after 'bad' is preserved.  Otherwise s is returned unchanged.

    Fixes over the original: anchor on 'not' rather than the looser 'no'
    (which could match an earlier unrelated 'no'), accept 'not' at index
    0 (the old '> 0' tests rejected it), and keep the text following
    'bad' instead of silently dropping it.
    """
    n = s.find('not')
    b = s.find('bad')
    if n != -1 and b != -1 and n < b:
        return s[:n] + 'good' + s[b + 3:]
    return s
# F. front_back
def front_back(a, b):
    """Interleave two strings as a-front + b-front + a-back + b-back.

    Each front half rounds up for odd lengths (equivalent to
    math.ceil(len/2)).
    """
    half_a = (len(a) + 1) // 2
    half_b = (len(b) + 1) // 2
    return a[:half_a] + b[:half_b] + a[half_a:] + b[half_b:]
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print ('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
def main():
print ('verbing')
test(verbing('hail'), 'hailing')
test(verbing('swiming'), 'swimingly')
test(verbing('do'), 'do')
print
print ('not_bad')
test(not_bad('This movie is not so bad'), 'This movie is good')
test(not_bad('This dinner is not that bad'), 'This dinner is good')
test(not_bad('This tea is not hot'), 'This tea is not hot')
test(not_bad("It's bad yet not"), "It's bad yet not")
print
print ('front_back')
test(front_back('abcd', 'xy'), 'abxcdy')
test(front_back('abcde', 'xyz'), 'abcxydez')
test(front_back('Kitten', 'Donut'), 'KitDontenut')
if __name__ == '__main__':
main()
| mit | Python | |
a12ced781c91b1d553a7e4e93d3df258cabbe63e | Add new package: jansi-native (#18547) | iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/jansi-native/package.py | var/spack/repos/builtin/packages/jansi-native/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class JansiNative(MavenPackage):
    """Jansi is a small ASl 2.0 licensed Java library that allows you to
    use ANSI escape sequences to format your console output which works even
    on windows."""

    homepage = "https://fusesource.github.io/jansi/"
    url = "https://github.com/fusesource/jansi-native/archive/jansi-native-1.8.tar.gz"

    # sha256 checksum of the 1.8 release tarball above.
    version('1.8', sha256='053808f58495a5657c7e7f388008b02065fbbb3f231454bfcfa159adc2e2fcea')

    # Maven build; a JDK 8 is needed both to build and at run time.
    depends_on('java@8', type=('build', 'run'))
| lgpl-2.1 | Python | |
34853f9a99ef385b912c4fe7936594bb70008293 | Add asyncs.utils.CircuitBreaker | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | py/garage/garage/asyncs/utils.py | py/garage/garage/asyncs/utils.py | __all__ = [
'CircuitBreaker',
]
import collections
import time
class CircuitBreaker:
    """Break (disconnect) when no less than `count` errors happened
    within last `period` seconds.
    """

    class Disconnected(Exception):
        """Raised by err() once the breaker has tripped."""
        pass

    def __init__(self, *, count, period, clock=None):
        # Ring buffer holding the `count` most recent error timestamps.
        self.timestamps = collections.deque(maxlen=count)
        self.period = period
        self.clock = clock or time.monotonic

    @property
    def connected(self):
        # Fewer than `count` errors recorded so far: still healthy.
        if len(self.timestamps) < self.timestamps.maxlen:
            return True
        # Healthy again once the oldest of the last `count` errors has
        # aged out of the window.
        return self.timestamps[0] + self.period < self.clock()

    def err(self, raises=Disconnected):
        """Record an error; raise `raises` if the breaker just tripped."""
        self.timestamps.append(self.clock())
        if raises and not self.connected:
            raise raises
| mit | Python | |
083957302452bdd966286bfd8d37d53dce8db7d3 | Add utility methods for facebook stuff. | Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server | pykeg/contrib/facebook/fbutil.py | pykeg/contrib/facebook/fbutil.py | import facebook
def profile_for_user(user):
    """Return the user's first FacebookProfile, or None if they have none."""
    profiles = user.facebookprofile_set.all()
    return profiles[0] if profiles else None
def session_for_user(user):
    """Return the user's first stored Facebook session, or None."""
    profile = profile_for_user(user)
    if not profile:
        return None
    sessions = profile.session.all()
    return sessions[0] if sessions else None
def stream_publish(user, **kwargs):
    """Publish to the user's Facebook stream using their stored session.

    Raises ValueError when the user has no stored session.
    """
    session = session_for_user(user)
    if not session:
        raise ValueError, "No session."
    # NOTE(review): `settings` is never imported in this module (only
    # `facebook` is), so reaching this line raises NameError at runtime.
    fb = facebook.Facebook(settings.FACEBOOK_API_KEY,
        settings.FACEBOOK_SECRET_KEY)
    # Reuse the stored session key; presumably 0 disables expiry
    # tracking -- confirm against the facebook library.
    fb.session_key = session.session_id
    fb.session_key_expires = 0
    return fb.stream.publish(**kwargs)
| mit | Python | |
5f2d0b5c9dbb288ee279e7158ad0e0aa2f5d4037 | Add config wrapper. | ohsu-qin/qipipe | qipipe/staging/sarcoma_config.py | qipipe/staging/sarcoma_config.py | import os
import ConfigParser
_CFG_FILE = os.path.join(os.path.dirname(__file__), '..', '..', 'conf', 'sarcoma.cfg')
_CONFIG = ConfigParser()
_CONFIG.read(_CFG_FILE)
def sarcoma_location(pt_id):
return _CONFIG.get('Tumor Location', pt_id)
| bsd-2-clause | Python | |
a002a78843c6324df94790c6185064e9ac2fb08d | Add utils | Sherlock-Holo/Holosocket | src/utils.py | src/utils.py | import base64
import hashlib
import secrets
import struct
def gen_data_len(mask_flag, data):
    """Build the WebSocket payload-length header bytes for `data`.

    Returns a pair (first, second): payloads up to 125 bytes encode the
    whole length in `first` with `second` set to 0; longer payloads return
    a one-byte prefix (126 or 127) plus a 16- or 64-bit big-endian
    extended length.  When mask_flag is true the MASK bit (0x80) is set
    on the first byte.
    """
    mask_bit = 128 if mask_flag else 0
    length = len(data)
    if length <= 125:
        return struct.pack('>B', length | mask_bit), 0
    if length <= 65535:
        return struct.pack('>B', 126 | mask_bit), struct.pack('>H', length)
    return struct.pack('>B', 127 | mask_bit), struct.pack('>Q', length)
def gen_request(addr):
    """Build a WebSocket opening-handshake request for host `addr`.

    Returns (request_bytes, sec_websocket_key).  Per RFC 6455 section
    4.1 the Sec-WebSocket-Key must be the base64 encoding of a random
    16-byte nonce; the original code base64-encoded a ~22-character
    urlsafe *text* token, producing a nonstandard key.
    """
    nonce = secrets.token_bytes(16)
    Sec_WebSocket_Key = base64.b64encode(nonce)
    data = b'GET /chat HTTP/1.1\r\n'
    data += b'Host: ' + addr.encode() + b':8000\r\n'
    data += b'Upgrade: websocket\r\n'
    data += b'Connection: Upgrade\r\n'
    data += b'Sec-WebSocket-Key: ' + Sec_WebSocket_Key + b'\r\n'
    data += b'Sec-WebSocket-Version: 13\r\n\r\n'
    return data, Sec_WebSocket_Key
def certificate_key(Sec_WebSocket_Key1, Sec_WebSocket_Key2):
    """Verify a WebSocket handshake accept value (RFC 6455).

    Returns True when Sec_WebSocket_Key2 equals
    base64(SHA-1(Sec_WebSocket_Key1 + magic GUID)), else False.
    """
    guid = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
    expected = base64.b64encode(hashlib.sha1(Sec_WebSocket_Key1 + guid).digest())
    return Sec_WebSocket_Key2 == expected
def gen_response(Sec_WebSocket_Key):
    """Build the server's WebSocket handshake response (RFC 6455 4.2.2).

    Returns the 101 Switching Protocols status plus the
    Sec-WebSocket-Accept value derived from the client's key and the
    magic GUID.
    """
    guid = b'258EAFA5-E914-47DA-95CA-C5AB0DC85B11'
    accept = base64.b64encode(hashlib.sha1(Sec_WebSocket_Key + guid).digest())
    return (b'HTTP/1.1 101 Switching Protocols\r\n'
            b'Upgrade: websocket\r\n'
            b'Connection: Upgrade\r\n'
            b'Sec-WebSocket-Accept: ' + accept)
| mpl-2.0 | Python | |
3699e8e412c637c9a36cb59e5647d8ff54782200 | Add an integration test for #480 | slackapi/python-slackclient,slackhq/python-slackclient,slackapi/python-slackclient,slackapi/python-slackclient | integration_tests/web/test_issue_480.py | integration_tests/web/test_issue_480.py | import logging
import multiprocessing
import os
import threading
import unittest
from integration_tests.env_variable_names import SLACK_SDK_TEST_USER_TOKEN
from integration_tests.helpers import async_test
from slack import WebClient
class TestWebClient(unittest.TestCase):
"""Runs integration tests with real Slack API
https://github.com/slackapi/python-slackclient/issues/480
"""
def setUp(self):
self.logger = logging.getLogger(__name__)
self.user_token = os.environ[SLACK_SDK_TEST_USER_TOKEN]
self.sync_client: WebClient = WebClient(token=self.user_token, run_async=False)
self.async_client: WebClient = WebClient(token=self.user_token, run_async=True)
def tearDown(self):
pass
def test_issue_480_processes(self):
client = self.sync_client
before = len(multiprocessing.active_children())
for idx in range(10):
response = client.api_test()
self.assertIsNotNone(response)
after = len(multiprocessing.active_children())
self.assertEqual(0, after - before)
@async_test
async def test_issue_480_processes_async(self):
client = self.async_client
before = len(multiprocessing.active_children())
for idx in range(10):
response = await client.api_test()
self.assertIsNotNone(response)
after = len(multiprocessing.active_children())
self.assertEqual(0, after - before)
# fails with Python 3.6
def test_issue_480_threads(self):
client = self.sync_client
before = threading.active_count()
for idx in range(10):
response = client.api_test()
self.assertIsNotNone(response)
after = threading.active_count()
self.assertEqual(0, after - before)
# fails with Python 3.6
@async_test
async def test_issue_480_threads_async(self):
client = self.async_client
before = threading.active_count()
for idx in range(10):
response = await client.api_test()
self.assertIsNotNone(response)
after = threading.active_count()
self.assertEqual(0, after - before)
| mit | Python | |
1609c5cd83fc99887ec45ff6beba2ee0dba712a8 | Create D_Velocity_components.py | Herpinemmanuel/Oceanography | Cas_1/D_Velocity_components.py | Cas_1/D_Velocity_components.py | import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
import cartopy.crs as ccrs
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
plt.ion()
dir1 = '/homedata/bderembl/runmit/test_southatlgyre'
ds1 = open_mdsdataset(dir1,prefix=['U','V'])
nt = -1
nz = 0
# Cartography U et V
plt.figure(1)
ax = plt.subplot(projection=ccrs.PlateCarree());
ds1['U'][nt,nz,:,:].plot.pcolormesh('XG', 'YC', ax=ax);
plt.title('Case 1 : Zonal Component - U')
ax.coastlines()
gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');
gl.xlabels_top = False
gl.ylabels_right = False
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
plt.savefig('U_Zonal_component_cas1'+'.png')
plt.clf()
plt.figure(2)
ax = plt.subplot(projection=ccrs.PlateCarree());
ds1['V'][nt,nz,:,:].plot.pcolormesh('XC', 'YG', ax=ax);
plt.title(' Case 1 : Meridional Component - V')
ax.coastlines()
gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');
gl.xlabels_top = False
gl.ylabels_right = False
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
plt.savefig('V_Meridional_Component_cas1'+'.png')
plt.clf()
# Averages
Average_U = ds1.U.mean().values
print('Case 1 : Average of Zonal Component - U')
print(Average_U,'m/s')
Average_U_mask = ds1.U.where(ds1.hFacW>0).mean().values
print('Case 1 : Average of Zonal Component without continents')
print(Average_U_mask,'m/s')
Average_V = ds1.V.mean().values
print('Case 1 :Average of Meridional Component - V')
print(Average_V,'m/s')
Average_V_mask = ds1.V.where(ds1.hFacS>0).mean().values
print('Case 1 : Average of Meridional Component without continents')
print(Average_V_mask,'m/s')
| mit | Python | |
5977d5f01a740150eebd01d8aa110e864a92da95 | Create 3-temperature.py | CamJam-EduKit/EduKit2 | Code/3-temperature.py | Code/3-temperature.py | # Import Libraries
import os
import glob
import time
# Initialize the GPIO Pins
os.system('modprobe w1-gpio') # Turns on the GPIO module
os.system('modprobe w1-therm') # Turns on the Temperature module
# Finds the correct device file that holds the temperature data
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
# A function that reads the sensors data
def read_temp_raw():
    """Return all lines from the 1-Wire sensor's device file.

    Uses a context manager so the file handle is closed even when the
    read raises (the original leaked the handle on error).
    """
    with open(device_file, 'r') as f:
        return f.readlines()
# Convert the value of the sensor into a temperature
def read_temp():
    """Parse the sensor output and return (celsius, fahrenheit).

    Implicitly returns None when the 't=' marker is missing from the
    second line of the device file.
    """
    lines = read_temp_raw() # Read the temperature 'device file'
    # While the first line does not contain 'YES', wait for 0.2s
    # and then read the device file again.
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
        lines = read_temp_raw()
    # Look for the position of the '=' in the second line of the
    # device file.
    equals_pos = lines[1].find('t=')
    # If the '=' is found, convert the rest of the line after the
    # '=' into degrees Celsius, then degrees Fahrenheit
    if equals_pos != -1:
        # The raw reading is in millidegrees Celsius.
        temp_string = lines[1][equals_pos+2:]
        temp_c = float(temp_string) / 1000.0
        temp_f = temp_c * 9.0 / 5.0 + 32.0
        return temp_c, temp_f
# Print out the temperature until the program is stopped.
# Poll and print the temperature once a second forever (Ctrl-C to stop).
while True:
    print(read_temp())
    time.sleep(1)
| mit | Python | |
d7854b71e778103ca14a488cc80e436aff46389b | Create Super_calculateur.py | Alumet/Codingame | Difficult/Super_calculateur.py | Difficult/Super_calculateur.py |
n = int(input())
liste=[]
for i in range(n):
j, d = [int(j) for j in input().split()]
liste.append([j,j+d-1])
liste.sort(key=lambda x: x[0])
liste.sort(key=lambda x: x[1])
J_max=0
count=0
for el in liste:
if el[0]>J_max:
J_max=el[1]
count+=1
print(count)
| mit | Python | |
f0ac1914790e69fe786d6d3182cf15fd09302c28 | Add test for missing building part seen in production. | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | integration-test/912-missing-building-part.py | integration-test/912-missing-building-part.py | # http://www.openstreetmap.org/way/287494678
z = 18
x = 77193
y = 98529
while z >= 16:
assert_has_feature(
z, x, y, 'buildings',
{ 'kind': 'building',
'id': 287494678 })
z -= 1
x /= 2
y /= 2
| mit | Python | |
95126bc5889e66ab7646e49f6ed773d3d4cd9a37 | save received fw in archive dir. Separate installer for new userpackage found | iottly/iottly-device-agent-py,iottly/iottly-device-agent-py | iottly-device-agent-py/install_userpackage.py | iottly-device-agent-py/install_userpackage.py | import os, shutil, logging, tarfile
from iottly.settings import settings
logging.basicConfig(level=logging.INFO,
format='%(asctime)s [%(levelname)s] (%(processName)-9s) %(message)s',)
userpackagepath = 'userpackage/'
# check that iottly service is not running
# check if a new fw is available
logging.info('Searching for firmware archive in {} ...'.format(settings.IOTTLY_USERPACKAGE_UPLOAD_DIR))
fws = os.listdir(settings.IOTTLY_USERPACKAGE_UPLOAD_DIR)
fwfilename = None
if len(fws) > 1:
raise Exception('Found more than 1 firmware available. Something weird happened here.')
elif len(fws) == 0:
logging.info('No firmware found. Exiting installer ...')
quit()
elif len(fws) == 1:
fwfilename = os.path.join(settings.IOTTLY_USERPACKAGE_UPLOAD_DIR, fws[0])
logging.info('Found firmware: \n{}.\nInstalling.'.format(fwfilename))
if fwfilename:
try:
# remove old fw
if os.path.exists(userpackagepath):
logging.info('Removing old package ...')
shutil.rmtree(userpackagepath)
# untar new fw into proper destination
logging.info('Extracting archive ...')
with tarfile.open(fwfilename) as tar:
tar.extractall()
logging.info('Installation successful!')
except Exception as e:
logging.error(e)
finally:
#always remove firmware fwfilename
logging.info('Removing archive ...')
os.remove(fwfilename)
logging.info('Done!') | apache-2.0 | Python | |
9f7296b34d1e65ac14cb9b98734e2a01aee345a2 | Add missing file | chargehound/chargehound-python | chargehound/models.py | chargehound/models.py | from collections import namedtuple
from bunch import Bunch
class ChargehoundObject(Bunch):
pass
class List(ChargehoundObject):
pass
class Dispute(ChargehoundObject):
pass
class Product(ChargehoundObject):
pass
Response = namedtuple('Response', 'status')
| mit | Python | |
3c2d85b4b7a497ceffac3e562ac1a468f1f6a4b0 | add solution for First Bad Version | zhyu/leetcode,zhyu/leetcode | algorithms/firstBadVersion/firstBadVersion.py | algorithms/firstBadVersion/firstBadVersion.py | # The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
class Solution(object):
def firstBadVersion(self, n):
"""
:type n: int
:rtype: int
"""
l, r = 1, n
while l < r:
mid = (l+r) >> 1
if isBadVersion(mid):
r = mid
else:
l = mid + 1
return r
| mit | Python | |
3ad0213e15e2ccb7894a1d0beb88bd86ae3d9e67 | Create setup.py | daxeel/pypixoto | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='pypixoto',
version='1.0',
description='Python SDK for Pixoto.com',
author='Daxeel Soni',
author_email='sayhi@daxeelsoni.in',
url='https://www.daxeelsoni.in',
)
| mit | Python | |
c5a7f18f3b97c40489f9b098e4822ba7ce3a5927 | Create ex4_3.py | laetrid/learning | First_course/ex4_3.py | First_course/ex4_3.py | #!/usr/bin/env python
'''
III. Create a program that converts the following uptime strings to a time in seconds.
uptime1 = 'twb-sf-881 uptime is 6 weeks, 4 days, 2 hours, 25 minutes'
uptime2 = '3750RJ uptime is 1 hour, 29 minutes'
uptime3 = 'CATS3560 uptime is 8 weeks, 4 days, 18 hours, 16 minutes'
uptime4 = 'rtr1 uptime is 5 years, 18 weeks, 8 hours, 23 minutes'
For each of these strings store the uptime in a dictionary using the device name as the key.
During this conversion process, you will have to convert strings to integers. For these string to integer conversions use try/except to catch any string to integer conversion exceptions.
For example:
int('5') works fine
int('5 years') generates a ValueError exception.
Print the dictionary to standard output.
'''
# Raw "show version"-style uptime lines to parse; splitting on newlines
# and slicing [1:-1] drops the empty first/last entries of the block.
uptime_in = '''
uptime1 = 'twb-sf-881 uptime is 6 weeks, 4 days, 2 hours, 25 minutes'
uptime2 = '3750RJ uptime is 1 hour, 29 minutes'
uptime3 = 'CATS3560 uptime is 8 weeks, 4 days, 18 hours, 16 minutes'
uptime4 = 'rtr1 uptime is 5 years, 18 weeks, 8 hours, 23 minutes'
'''
uptime_list = uptime_in.split('\n')[1:-1]
result_dic = {}
# Slice [11:-1] strips the "uptimeN = '" prefix and the trailing quote,
# then the device name is split off from the uptime description.
for line in uptime_list:
    a, b = line[11:-1].split(' uptime is ')
    result_dic[a] = b
# Convert each description to seconds; a "year" here is 31556926 s
# (about 365.24 days), a week 604800 s, etc.
for key in result_dic.keys():
    time_list = result_dic[key].split(', ')
    years = 0
    weeks = 0
    days = 0
    hours = 0
    minutes = 0
    # `time` is each ", "-separated chunk such as "6 weeks" (no time
    # module is imported here, so nothing is shadowed).
    for time in time_list:
        try:
            if 'year' in time: years = int(time.split()[0]) * 31556926
            if 'week' in time: weeks = int(time.split()[0]) * 604800
            if 'day' in time: days = int(time.split()[0]) * 86400
            if 'hour' in time: hours = int(time.split()[0]) * 3600
            if 'minute' in time: minutes = int(time.split()[0]) * 60
        except ValueError:
            print("Error with converting string")
    result_dic[key] = "%-10dsec (%s)" % (years + weeks + days + hours + minutes, result_dic[key])
# Pretty-print the result (Python 2 print statements).
print ""
print "=" * 70
for key in result_dic.keys():
    print "%-12s:%s" % (key, result_dic[key])
print "=" * 70
print ""
# The END
| apache-2.0 | Python | |
d8b6cbc9703dbb3f3b7fed17a9594148aae1d75e | Create Magic8Ball.py | Vlek/plugins | HexChat/Magic8Ball.py | HexChat/Magic8Ball.py | import hexchat
from random import choice
__module_name__ = 'Magic8ball'
__module_version__ = '0.0.1'
__module_description__ = 'Allows one ask magic8ball questions with answers'
__module_author__ = 'Vlek'
_8ball_answers = [
'It is certain', 'It is decidedly so',
'Without a doubt', 'Yes, definitely',
'You may rely on it', 'As I see it, yes',
'Most likely', 'Outlook good',
'Yes', 'Signs point to yes',
'Reply hazy, try again', 'Ask again later',
'Better not tell you now', 'Cannot predict now',
'Concentrate and ask again', "Don't count on it",
'My reply is no', 'My sources say no',
'Outlook not so good', 'Very doubtful']
def ask8ball(word, word_to_eol, userdata):
    """HexChat command callback: print a random Magic 8-Ball answer.

    The question is word[1:]; a single-element `word` means the command
    was given with no question.
    """
    if len(word) == 1:
        # NOTE(review): `say` is not defined or imported anywhere in this
        # module, so this branch raises NameError; note also that
        # execution would fall through and still answer below.
        say('/help 8ball')
    context = hexchat.find_context()
    #Magic8Ball... will this malware be more effective if it has a fancy GUI? ... 'Outlook good'
    context.prnt("Magic8Ball... {}? .. '{}'".format(' '.join(word[1:]), choice(_8ball_answers)))
    # EAT_ALL stops HexChat from processing the command any further.
    return hexchat.EAT_ALL

# Register both spellings of the command.
for command in ['magic8ball', '8ball']:
    hexchat.hook_command(command, ask8ball, help="/8ball (question)")
| mit | Python | |
5e81fca928862b1c9574f1092a131337735b63f4 | Add basic IAM integration test | lra/boto,jotes/boto,Asana/boto,nexusz99/boto,serviceagility/boto,ekalosak/boto,shipci/boto,yangchaogit/boto,varunarya10/boto,kouk/boto,podhmo/boto,j-carl/boto,janslow/boto,disruptek/boto,nikhilraog/boto,bryx-inc/boto,khagler/boto,TiVoMaker/boto,s0enke/boto,stevenbrichards/boto,acourtney2015/boto,weka-io/boto,rayluo/boto,awatts/boto,appneta/boto,zzzirk/boto,dimdung/boto,alex/boto,trademob/boto,revmischa/boto,appneta/boto,ramitsurana/boto,ddzialak/boto,felix-d/boto,shaunbrady/boto,abridgett/boto,kouk/boto,tpodowd/boto,vijaylbais/boto,ocadotechnology/boto,Pretio/boto,disruptek/boto,garnaat/boto,israelbenatar/boto,zachmullen/boto,alex/boto,campenberger/boto,alfredodeza/boto,weebygames/boto,SaranyaKarthikeyan/boto,bleib1dj/boto,nishigori/boto,ryansb/boto,darjus-amzn/boto,tpodowd/boto,elainexmas/boto,drbild/boto,pfhayes/boto,vishnugonela/boto,drbild/boto,jindongh/boto,rosmo/boto,clouddocx/boto | tests/integration/iam/test_connection.py | tests/integration/iam/test_connection.py | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
import time
from boto.compat import unittest
class TestIAM(unittest.TestCase):
    def test_group_users(self):
        # A very basic test to create a group, a user, add the user
        # to the group and then delete everything
        # NOTE: talks to the real AWS IAM service via boto; needs valid
        # credentials in the environment and network access.
        iam = boto.connect_iam()
        # time.time() suffix keeps the names unique across runs.
        name = 'boto-test-%d' % time.time()
        username = 'boto-test-user-%d' % time.time()
        iam.create_group(name)
        iam.create_user(username)
        iam.add_user_to_group(name, username)
        # Tear down in the reverse order of creation.
        iam.remove_user_from_group(name, username)
        iam.delete_user(username)
        iam.delete_group(name)
| mit | Python | |
21f34851a88480cc2e060361ee3119bb0d1c79ea | bump to 0.4.1 | opencivicdata/pupa,influence-usa/pupa,rshorey/pupa,rshorey/pupa,mileswwatkins/pupa,mileswwatkins/pupa,influence-usa/pupa,datamade/pupa,opencivicdata/pupa,datamade/pupa | pupa/__init__.py | pupa/__init__.py | __version__ = '0.4.1' # pragma: no cover
| __version__ = '0.4.0-dev' # pragma: no cover
| bsd-3-clause | Python |
1b2a1b0db5d09999f0461df3d9d565e7ba0b42f7 | Add reader module | wbolster/whip-neustar | whip_neustar/reader.py | whip_neustar/reader.py | # encoding: UTF-8
"""
Neustar (Quova) data set reader module.
"""
import csv
import datetime
import itertools
import logging
import math
import os
import re
import socket
import struct
logger = logging.getLogger(__name__)
ISO8601_DATETIME_FMT = '%Y-%m-%dT%H:%M:%S'
# Regular expression to match file names. From the docs:
#
# Data File V7 Naming Convention
#
# Every file is named with information that qualifies the intended
# recipient and data release information. The file name is named
# using the following components:
#
# <QuovaNet_customer_id>_v<data_version>_<internal_id>_<yyyymmdd>.csv.gz
#
# For example, a file created from release version 470.63, production
# job 15.27, on May 25, 2010 for customer quova would have the name:
# quova_v470.63_15.27_20100525.gz
#
# However, in reality, the suffix is '.csv.gz', not '.gz'.
#
DATA_FILE_RE = re.compile(r'''
^
(?P<customer_id>.+)
_v(?P<version>.+)
_(?P<internal_id>.+)
_(?P<year>\d{4})(?P<month>\d{2})(?P<day>\d{2})
\.csv(:?\.gz)?
$
''', re.VERBOSE)
FIELDS = (
'start_ip_int',
'end_ip_int',
'continent',
'country',
'country_code',
'country_cf',
'region',
'state',
'state_code',
'state_cf',
'city',
'city_cf',
'postal_code',
'area_code',
'time_zone',
'latitude',
'longitude',
'dma',
'msa',
'connection_type',
'line_speed',
'ip_routing_type',
'asn',
'sld',
'tld',
'organization',
'carrier',
'anonymizer_status',
)
INTEGER_FIELDS = frozenset(('asn', 'country_cf', 'state_cf', 'city_cf'))
FLOAT_FIELDS = frozenset(('latitude', 'longitude'))
IGNORED_FIELDS = frozenset(('dma', 'msa'))
def clean_field(v):
    """Map empty CSV strings to None; pass every other value through."""
    if v == '':
        return None
    return v
def format_ipv4_address(s, _inet_ntoa=socket.inet_ntoa,
                        _pack=struct.Struct('>L').pack):
    """Convert a decimal-string IPv4 integer (e.g. '16909060') into dotted
    quad form ('1.2.3.4').  The default arguments bind the converters once
    at definition time so per-call lookups are avoided."""
    packed = _pack(int(s))
    return _inet_ntoa(packed)
def iter_records(data_file):
    """Yield one dict per CSV row of a Neustar/Quova V7 data file.

    The file name itself carries the data version and date (DATA_FILE_RE);
    each yielded dict gets the file date under 'datetime', dotted-quad
    'begin'/'end' addresses, numeric fields coerced, and the time zone
    rewritten as +/-HH:MM.

    Raises RuntimeError for an unrecognized file name and ValueError when
    the first row is not the expected header.

    NOTE(review): itertools.izip below exists only on Python 2; under
    Python 3 this generator raises AttributeError (use zip instead).
    """
    logger.info("Using data file %r", data_file)
    match = DATA_FILE_RE.match(os.path.basename(data_file))
    if not match:
        raise RuntimeError(
            "Unrecognized data file name: %r (is it the correct file?)"
            % data_file)
    match_dict = match.groupdict()
    version = match_dict['version']
    dt = datetime.datetime(int(match_dict['year']),
                           int(match_dict['month']),
                           int(match_dict['day']))
    dt_as_str = dt.strftime(ISO8601_DATETIME_FMT)
    logger.info(
        "Detected date %s and version %s for data file %r",
        dt_as_str, version, data_file)
    # Prepare for reading the CSV data
    with open(data_file, 'rb') as fp:
        reader = csv.reader(fp)
        it = iter(reader)
        # Skip header line, but make sure it is actually a header line
        header_line = next(it)
        if header_line[0] != FIELDS[0]:
            raise ValueError(
                "First line of input file %r does not seem a header line"
                % data_file)
        for n, record in enumerate(it, 1):
            out = dict(itertools.izip(FIELDS, map(clean_field, record)))
            # Data file information
            out['datetime'] = dt_as_str
            # Drop unwanted fields
            for k in IGNORED_FIELDS:
                del out[k]
            # Network information
            out['begin'] = format_ipv4_address(out.pop('start_ip_int'))
            out['end'] = format_ipv4_address(out.pop('end_ip_int'))
            # Convert numeric fields (if not None)
            for key in INTEGER_FIELDS:
                if out[key] is not None:
                    out[key] = int(out[key])
            for key in FLOAT_FIELDS:
                if out[key] is not None:
                    out[key] = float(out[key])
            # Convert time zone string like '-3.5' into ±HH:MM format
            if out['time_zone'] is not None:
                tz_frac, tz_int = math.modf(float(out['time_zone']))
                out['time_zone'] = '%+03d:%02d' % (tz_int, abs(60 * tz_frac))
            yield out
    logger.info("Finished reading %r (%d records)", data_file, n)
| bsd-3-clause | Python | |
1528ac33ae3bfb81645fb45dece72b0f6f69b431 | Create sample.py | auth0/auth0-python,auth0/auth0-python | sample.py | sample.py | import webapp2
import urllib2
import urllib
import json
## CHANGE THIS
CLIENT_ID = "YOUR_CLIENT_ID"
CLIENT_SECRET = "YOUR_CLIENT_SECRET"
DOMAIN = "YOURS.auth0.com"
CALLBACK_URL = "http://localhost:8080/callback"
MAIN_PAGE_HTML = """\
<html>
<body>
<script src="https://d19p4zemcycm7a.cloudfront.net/w2/auth0-widget-2.4.min.js"></script>
<script type="text/javascript">
var widget = new Auth0Widget({
domain: '%s',
clientID: '%s',
callbackURL: '%s'
});
</script>
<button onclick="widget.signin()">Login</button>
</body>
</html>
""" % (DOMAIN, CLIENT_ID, CALLBACK_URL)
class MainPage(webapp2.RequestHandler):
    """Serves the static login page embedding the Auth0 widget."""

    def get(self):
        self.response.write(MAIN_PAGE_HTML)
class LoginCallback(webapp2.RequestHandler):
    """OAuth2 callback: swaps the auth code for a token and echoes the
    user profile.

    NOTE(review): no `state` parameter is generated or checked anywhere
    in this flow, leaving it open to login CSRF -- confirm before any
    production use.
    """

    def get(self):
        # Authorization code Auth0 appended when redirecting back here.
        code = self.request.get("code")
        base_url = "https://{domain}".format(domain=DOMAIN)
        # Exchange the code for an access token (authorization_code grant).
        data = urllib.urlencode([('client_id', CLIENT_ID),
                                 ('redirect_uri', CALLBACK_URL),
                                 ('client_secret', CLIENT_SECRET),
                                 ('code', code),
                                 ('grant_type', 'authorization_code')])
        req = urllib2.Request(base_url + "/oauth/token", data)
        response = urllib2.urlopen(req)
        oauth = json.loads(response.read())
        # Fetch the profile with the freshly issued access token.
        userinfo = base_url + "/userinfo?access_token=" + oauth['access_token']
        response = urllib2.urlopen(userinfo)
        data = response.read()
        ## print user data
        self.response.write(data)
# Route table: '/' shows the login page, '/callback' completes the flow.
application = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/callback', LoginCallback)
], debug=True)
| mit | Python | |
1b610ec0dafdb299e1a04a9be90156fafc40c5ba | Create mainmenu2.py | ChristinaHammer/Client_Database | mainmenu2.py | mainmenu2.py | #Christina Hammer
#main menu Gui
from tkinter import *
from tkinter import messagebox
def addnew():
import newclientinter2
return
def quitprogram():
mmGui.destroy()
return
def logoff():
import logGui
mmGui.destory()
return
def csearch():
#messagebox.INFO(title='Client Search', text='Searching Database...')
#connect to client database!
pass
return
def hsearch():
#connectto household database
pass
return
mmGui=Tk()
mmGui.geometry('700x400+200+200')
mmGui.title=('Main Menu -- Name of Food Pantry')
topframe=Frame(mmGui).pack()
toptitle=Label(topframe,text='Food Pantry Database', font=("Helvetica", 16)).pack()
#photo=PhotoImage(file="foodpantryimg.jpg")
#imagelabel=Label(topframe,image=photo).pack()
searchl=Label(mmGui, text='Client Search: ').pack()
seav=StringVar()
searchent=Entry(mmGui,textvariable=seav, width=60).pack()
seav.set('Please Enter a Name')
searb=Button(mmGui,text='Search!', command=csearch).pack()
houseav=StringVar()
housear=Label(mmGui, text='Household Search: ').pack()
housent=Entry(mmGui, textvariable=houseav, width=60).pack()
houseav.set('Please Enter an Address')
hsearb=Button(mmGui,text='Search!',command=hsearch).pack()
menubar=Menu(mmGui)
optionsmenu=Menu(menubar,tearoff=0)
optionsmenu.add_command(label='Quit',command=quitprogram)
optionsmenu.add_command(label='log off',command=logoff)
optionsmenu.add_command(label='Account Settings')
menubar.add_cascade(label='Options',menu=optionsmenu)
clientmenu=Menu(menubar,tearoff=0)
clientmenu.add_command(label='Add New Client', command=addnew)
clientmenu.add_command(label='Search Clients')
clientmenu.add_command(label='Add member to household')
menubar.add_cascade(label='Clients',menu=clientmenu)
mmGui.config(menu=menubar)
mmGui.mainloop()
| mit | Python | |
1c709f8bde2a304d0d8eb326e69a95d1a011efeb | add script for parsing all mat files into common matfile (structure of arrays) | vrsys/lamure,vrsys/lamure,vrsys/lamure,vrsys/lamure | apps/fem_vis_ssbo/parse_to_single_mat_file.py | apps/fem_vis_ssbo/parse_to_single_mat_file.py | #!/usr/bin/python
import scipy.io as sio
import numpy as np
import sys
import pathlib
import os
# --- CLI handling: expects the directory containing the *.mat timesteps.
number_of_arguments = len(sys.argv)
if number_of_arguments < 2:
    print("This program takes an *.mat-File with the FEM-Attributes as defined before and creates a binary stream of the relevant data as *.mat.bin file")
    print("Please provide an input *.mat-File to parse!")
    sys.exit(-1)
complete_path_string = sys.argv[1]
complete_path = pathlib.PurePath(complete_path_string)
directory_name = str(complete_path.name)
print("Directory name: " + directory_name )
print(sys.argv[1])
complete_out_path_base_name = complete_path_string
# Collect every "<dirname>*.mat" timestep file inside the directory,
# sorted so timesteps are processed in order.
mat_file_list = []
for mat_file in os.listdir( sys.argv[1] ):
    if mat_file.startswith(directory_name) and mat_file.endswith(".mat"):
        mat_file_list.append(complete_path_string + "/" + mat_file)
        #print(os.path.join("/mydir", mat_file))
mat_file_list.sort()
num_attributes_files_to_open = 0
open_attribute_file_paths = []
open_attribute_file_handles = []
all_attributes_file_handle = 0
# Running copies of the three magnitude components of the current timestep.
current_mag_x = 0
current_mag_y = 0
current_mag_z = 0
for mat_file_string in mat_file_list:
    in_current_mat_file_name = mat_file_string
    print("X: " + in_current_mat_file_name)
    curr_mat_contents = sio.loadmat(in_current_mat_file_name)
    curr_sim_array = curr_mat_contents['dataSave']
    curr_num_attributes_in_sim_array = curr_sim_array.shape[1]
    # The first timestep fixes the attribute count and opens one binary
    # output file per kept attribute (the first three columns are folded
    # into a single magnitude stream, hence the "- 3").
    if 0 == num_attributes_files_to_open:
        num_attributes_files_to_open = curr_num_attributes_in_sim_array
        print(num_attributes_files_to_open)
        for attrib_id in range(num_attributes_files_to_open - 3):
            open_attribute_file_paths.append(complete_path_string + "/attribute_" + str(attrib_id) + ".mat.bin")
            open_attribute_file_handles.append( open(open_attribute_file_paths[attrib_id], 'wb') )
    else:
        if num_attributes_files_to_open != curr_num_attributes_in_sim_array:
            print("Different number of attributes per timestep. Exiting.")
            sys.exit(-1)
    additional_mag_u_offset = 0
    for attrib_id in range(num_attributes_files_to_open-1):
        if 3 == attrib_id:
            additional_mag_u_offset = 1
        # Column 0 of each row is skipped; attributes start at column 1.
        curr_attrib_for_all_vertices = curr_sim_array[:,(1 + attrib_id)]
        curr_attrib_for_all_vertices = curr_attrib_for_all_vertices.astype(np.float32)
        #for now we assume that attribute 0 will be mag x
        if 0 == attrib_id:
            current_mag_x = curr_attrib_for_all_vertices
        elif 1 == attrib_id:
            current_mag_y = curr_attrib_for_all_vertices
        elif 2 == attrib_id:
            current_mag_z = curr_attrib_for_all_vertices
            # All three components seen: write |u| into the first output file.
            current_mag_u = np.sqrt(current_mag_x*current_mag_x + current_mag_y * current_mag_y + current_mag_z * current_mag_z)
            open_attribute_file_handles[0].write(current_mag_u.tobytes())
        if(attrib_id > 2):
            open_attribute_file_handles[additional_mag_u_offset + attrib_id - 3].write(curr_attrib_for_all_vertices.tobytes())
#path = os.path.dirname(os.path.realpath(sys.argv[1]))
#print(mat_contents)
#print(sim_array.shape[1])
#in_mat_file_name = sys.argv[1]
#mat_contents = sio.loadmat(in_mat_file_name)
#sim_array = mat_contents['dataSave']
for attrib_id in range(num_attributes_files_to_open - 3):
    open_attribute_file_handles[attrib_id].close()
append_count = 0
all_attribs_file_path = complete_path_string + "/all_attributes.mat.bin"
# Concatenate the per-attribute streams into one structure-of-arrays file
# via shell `cat` (first file truncates with ">", the rest append).
for attrib_file_path in open_attribute_file_paths:
    print(str(attrib_file_path) + " " + str(all_attribs_file_path))
    if append_count > 0:
        os.system("cat " + attrib_file_path + " >> " + all_attribs_file_path)
    else:
        os.system("cat " + attrib_file_path + " > " + all_attribs_file_path)
    append_count += 1
#all_attributes_file_handle = open(complete_path_string + "/all_attributes.mat.bin", 'wb')
#all_attributes_file_handle.close()
sys.exit(-1) | bsd-3-clause | Python | |
8613deaffe5de066075141e577faa578169d3b41 | add progress reporting module | gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine | openquake/utils/progress.py | openquake/utils/progress.py | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010-2012, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Utility functions related to reporting the progress of a calculation.
"""
import logging
# Place the new level between info and warning
logging.STATUS = 25
logging.addLevelName(logging.STATUS, "STATUS")

LOG = logging.getLogger(__name__)


def report_status(msg, *args, **kwargs):
    """Log `msg` (with %-style `args`) at the custom STATUS level (25).

    Fix: use the public Logger.log() instead of the private _log().
    _log()'s signature expects `args` as a single tuple, so unpacking
    *args into it shoved extra arguments into exc_info/extra slots and
    broke %-formatting for multi-argument messages.
    """
    LOG.log(logging.STATUS, msg, *args, **kwargs)
| agpl-3.0 | Python | |
06a0070bc20d18eec6d2b065a6e143c45323fbe6 | Implement sinus generation script | Rookfighter/fft-spartan6,Rookfighter/fft-spartan6 | scripts/sin_gen.py | scripts/sin_gen.py | # twiddle.py
#
# Created on: 15 May 2017
# Author: Fabian Meyer
import argparse
import math
VERSION = '0.1.0'
N = 16
def parse_args():
    '''Parse command line arguments.'''
    # -f and -a are mandatory integers; --version prints VERSION and exits.
    parser = argparse.ArgumentParser(
        description="Calculate twiddle factor.")
    parser.add_argument('--version', action='version', version=VERSION)
    parser.add_argument('-f', dest='f', type=int, required=True,
                        help='Frequency of sinus.')
    parser.add_argument('-a', dest='a', type=int, required=True,
                        help='Amplitude of sinus.')
    return parser.parse_args()
def calc_sin(a, f, n=16):
    """Sample one period of a sine wave as (real, imag) pairs.

    a -- amplitude, f -- frequency in cycles per table.
    n -- number of samples (default 16, matching the module constant N).

    Fix/generalisation: the original iterated `range(N)` but divided by a
    hard-coded 16.0, so sample count and sample spacing could silently
    diverge if N changed; both now derive from the `n` parameter.

    Returns a list of n tuples (sin value rounded to 4 decimals, 0.0).
    """
    result = []
    for i in range(n):
        val = a * math.sin(2 * math.pi * f * (float(i) / n))
        result.append((round(val, 4), 0.0))
    return result
def sin_to_str(mysin):
    """Render a list of (real, imag) tuples as a VHDL-style array literal."""
    formatted = [str(sample) for sample in mysin]
    body = ',\n    '.join(formatted)
    return '(\n    {}\n)'.format(body)
if __name__ == '__main__':
    # Generate the requested sine table and print it to stdout.
    cfg = parse_args()
    mysin = calc_sin(cfg.a, cfg.f)
    print(sin_to_str(mysin))
| mit | Python | |
5d22c6ddf0c2534df4bef02fbbf386a43c8f6203 | make some migrations yo | jumbocodespring2017/bostonathleticsassociation,jumbocodespring2017/bostonathleticsassociation,jumbocodespring2017/bostonathleticsassociation | back-end/interface/migrations/0001_initial.py | back-end/interface/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-12-04 20:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated initial migration: creates the Document model with a
    # single FileField whose uploads land under MEDIA_ROOT/uploads/.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Document',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('upload', models.FileField(upload_to='uploads/')),
            ],
        ),
    ]
| mit | Python | |
8dfb5792ef73d822b16dde55bb090c1e613cc2ed | add refresh_creds.py | sso2712/python | refresh_creds.py | refresh_creds.py | #!/usr/bin/env python3
import sys
import gimme_aws_creds.main
import gimme_aws_creds.ui
# Map of friendly AWS account aliases to their 12-digit account ids.
account_alias_id_map = {
    "master": "123456789012",
    "shared-services": "123456789012",
    "network": "123456789012",
    "security": "123456789012",
    "logging": "123456789012",
    "dev": "123456789012",
    "test": "123456789012",
    "stage": "123456789012",
    "prod": "123456789012"
}

# Both CLI arguments are optional (empty alias means "all accounts", empty
# role means "any role").  Fix: the original indexed sys.argv unconditionally
# and crashed with IndexError whenever an argument was omitted, even though
# the code below explicitly handles the empty case.
account_alias = sys.argv[1] if len(sys.argv) > 1 else ''
role_name = sys.argv[2] if len(sys.argv) > 2 else ''

if account_alias:
    # An unknown alias simply yields an empty id list, as before.
    account_ids = [v for k, v in account_alias_id_map.items() if k == account_alias]
else:
    account_ids = list(account_alias_id_map.values())

# Build a gimme-aws-creds role-matching regex, e.g. /:(111|222):role\/Admin/.
account_id_pattern = "|".join(sorted(set(account_ids)))
role_name_pattern = r"role\/{}".format(role_name) if role_name else ''
pattern = '/:({}):{}/'.format(account_id_pattern, role_name_pattern)

ui = gimme_aws_creds.ui.CLIUserInterface(
    argv=[sys.argv[0], '--profile', 'acme', '--roles', pattern])
creds = gimme_aws_creds.main.GimmeAWSCreds(ui=ui)

# Write every selected credential set to the local AWS credentials file.
for data in creds.iter_selected_aws_credentials():
    creds.write_aws_creds_from_data(data)
| mit | Python | |
c1c3624b1247bf6c7b939549059866165bf5776f | test case with expected murmur hashes from the original murmur hash library | jcaberio/MurmurV3,jcaberio/MurmurV3,jcaberio/MurmurV3 | test_hash.py | test_hash.py | import murmur
import string
expected_dict = {'a': 2456313694, 'c': 754329161, 'b': 2260187636, 'e': 3115762238, 'd': 4163039750, 'g': 1545794298, 'f': 4226522672, 'i': 3451942824, 'h': 1069002520, 'k': 3288208012, 'j': 3131388162, 'm': 3020367812, 'l': 2169669117, 'o': 1720432690, 'n': 1785613168, 'q': 2083633015, 'p': 834694889, 's': 389143345, 'r': 744399309, 'u': 1479000828, 't': 2418444476, 'w': 1340422676, 'v': 3414904798, 'y': 3657681515, 'x': 372604132, 'z': 2195360465}
# Hash each lowercase letter with our binding; `expected_dict` above holds
# the values produced by the reference MurmurHash3 implementation.
actual_dict = {letter : murmur.string_hash(letter) for letter in string.ascii_lowercase}
# Symmetric difference is empty only when every letter hashed identically.
unmatched_items = set(expected_dict.items()) ^ set(actual_dict.items())
assert len(unmatched_items) == 0
| mit | Python | |
7813663002d618abccfe12f189e9351e18afcdbb | Create GPS1.py | jancelin/geo-poppy,jancelin/geo-poppy | tracking/GPS1.py | tracking/GPS1.py | import time
import serial
import os
import sys
import psycopg2
import psycopg2.extras
from datetime import datetime
firstFixFlag = False # this will go true after the first GPS fix.
firstFixDate = ""
DEBUG = False
SLEEP = 10  # seconds to wait between database inserts
# Try to connect
try:
    conn=psycopg2.connect("host='postgis' port='5432' dbname='geopoppy' user='docker' password='docker'")
except:
    # NOTE(review): on failure this only prints -- `conn` stays undefined
    # and the script keeps running regardless.
    print "I am unable to connect to the database."
# Set up serial:
# NMEA GPS receiver on ttyUSB0, standard 4800 8N1 framing.
ser = serial.Serial(
    port='/dev/ttyUSB0',\
    baudrate=4800,\
    parity=serial.PARITY_NONE,\
    stopbits=serial.STOPBITS_ONE,\
    bytesize=serial.EIGHTBITS,\
    timeout=1)
# Helper function to take HHMM.SS, Hemisphere and make it decimal:
def degrees_to_decimal(data, hemisphere):
    """Convert an NMEA (d)ddmm.mmmm coordinate string to decimal degrees.

    `data` is the raw NMEA field (e.g. '4916.45'); `hemisphere` is one of
    'N'/'S'/'E'/'W'.  South and west come back negative.  Returns ''
    (the original sentinel, kept for callers) when `data` cannot be parsed.
    """
    try:
        decimalPointPosition = data.index('.')
        degrees = float(data[:decimalPointPosition-2])
        minutes = float(data[decimalPointPosition-2:])/60
        output = degrees + minutes
        # Fix: compare strings with ==, not `is` (identity).  The original
        # only worked because CPython interns short string literals.
        if hemisphere == 'N' or hemisphere == 'E':
            return output
        if hemisphere == 'S' or hemisphere == 'W':
            return -output
    except (ValueError, IndexError):
        # Narrowed from a bare `except:` so real bugs are not swallowed.
        return ""
# Helper function to take a $GPRMC sentence, and turn it into a Python dictionary.
# This also calls degrees_to_decimal and stores the decimal values as well.
def parse_GPRMC(data):
    """Split a raw $GPRMC sentence into a dict of named string fields.

    Also adds 'decimal_latitude'/'decimal_longitude' computed via
    degrees_to_decimal().  Assumes a well-formed sentence with at least
    13 comma-separated fields -- a shorter one raises IndexError.
    """
    data = data.split(',')
    dict = {
        # $GPRMC,161854.000,
        'fix_time': data[1],
        'validity': data[2],
        'latitude': data[3],
        'latitude_hemisphere' : data[4],
        'longitude' : data[5],
        'longitude_hemisphere' : data[6],
        'speed': data[7],
        'true_course': data[8],
        'fix_date': data[9],
        'variation': data[10],
        'variation_e_w' : data[11],
        'checksum' : data[12]
    }
    dict['decimal_latitude'] = degrees_to_decimal(dict['latitude'], dict['latitude_hemisphere'])
    dict['decimal_longitude'] = degrees_to_decimal(dict['longitude'], dict['longitude_hemisphere'])
    return dict
print "Start : %s" % time.ctime()
while True:
line = ser.readline()
if "$GPRMC" in line: # This will exclude other NMEA sentences the GPS unit provides.
if DEBUG is True:
print "GPRMC entering..." + line
try:
gpsData = parse_GPRMC(line) # Turn a GPRMC sentence into a Python dictionary called gpsData
except parseerror as error:
print "!!! Error catched... stopping program "
print "!!! -------------------------------- !!!! "
print "!!! Parsing GPRMC error : " + error
print "Ending at : %s" % time.ctime()
sys.exit(0)
if gpsData['validity'] == "A": # If the sentence shows that there's a fix, then we can log the line
if DEBUG is True:
print "Writing into database..."
cus_date = datetime.strptime(gpsData['fix_date'], "%d%m%Y").date()
# Data to insert
os.system("docker exec -u postgres pirate_postgis_1 psql geopoppy -c " +
"\"insert into trame ( jour, heure, latitude, longitude, geom ) " +
"values ( current_date ,'"+ gpsData['fix_time']+"',"+str(gpsData['decimal_latitude'])+","+ str(gpsData['decimal_longitude'])+",st_setsrid( st_makepoint( "+ str( gpsData['decimal_longitude']) +","+ str(gpsData['decimal_latitude']) +" ), 4326 ) );\" ")
time.sleep( SLEEP )
| agpl-3.0 | Python | |
8d7f2b4dbdc64a28f6864e607b1a18e1f2018b11 | move to ini file and config parser | vignanl/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,kkampardi/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,jvalleroy/plinth-debian,jvalleroy/plinth-debian,vignanl/Plinth,harry-7/Plinth,jvalleroy/plinth-debian,vignanl/Plinth,jvalleroy/plinth-debian,kkampardi/Plinth,jvalleroy/plinth-debian,harry-7/Plinth,vignanl/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth | cfg.py | cfg.py | from menu import Menu
import os
from ConfigParser import SafeConfigParser
# Single shared parser; every option gets a default so a missing key never
# raises NoOptionError.
parser = SafeConfigParser(
    defaults={
        'root':os.path.dirname(os.path.realpath(__file__)),
        'product_name':"",
        'box_name':"",
        'file_root':"",
        'data_dir':"",
        'store_file':"",
        'user_db':"",
        'status_log_file':"",
        'access_log_file':"",
        'users_dir':"",
        'host':"127.0.0.1",
        'port':""
    })
# NOTE(review): parser.read() silently returns an empty list when
# plinth.config is missing; everything then falls back to the defaults
# above, and the int() on `port` below would fail on the empty default.
parser.read('plinth.config')
product_name = parser.get('Name', 'product_name')
box_name = parser.get('Name', 'box_name')
root = parser.get('Path', 'root')
file_root = parser.get('Path', 'file_root')
data_dir = parser.get('Path', 'data_dir')
store_file = parser.get('Path', 'store_file')
user_db = parser.get('Path', 'user_db')
status_log_file = parser.get('Path', 'status_log_file')
access_log_file = parser.get('Path', 'access_log_file')
users_dir = parser.get('Path', 'users_dir')
host = parser.get('Network', 'host')
port = int(parser.get('Network', 'port'))
# Mutable application state initialised at startup.
html_root = None
main_menu = Menu()
base_href = ""
c3d87e837c85284baa132104e4843c3fd8f429d3 | Complete day 4 part 2 | foxscotch/advent-of-code,foxscotch/advent-of-code | day-04-2.py | day-04-2.py | import hashlib
# Advent of Code 2015, day 4 part 2: find the lowest integer suffix whose
# MD5 digest of (secret + suffix) starts with six zeros.
puzzle_input = b'iwrupvqb'
# NOTE(review): the search starts at 100000 rather than 0 -- see the
# author's trailing comments below about why that was a gamble.
number = 100000
while True:
    key = puzzle_input + str(number).encode()
    if hashlib.md5(key).hexdigest()[:6] == '000000':
        break
    number += 1
print(number)
# Now that I think about it, starting with 100,000 was probably not the right
# thing to do. I could've easily never found my answer. But I did, and I guess
# it probably saved a little time. So okay.
# My answer: 9958218
| mit | Python | |
d30b61a82533347f8ea2d0250e3d5346e4dcbc07 | Create variables | josuemontano/blender_wrapper | variables.py | variables.py | LAYER_1 = (True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False)
# One-hot visibility masks for Blender's 20 scene layers (LAYER_1, with
# only index 0 enabled, is defined above).
LAYER_2 = (False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False)
LAYER_3 = (False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False)
LAYER_4 = (False, False, False, True, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False)
# Default object location in world coordinates.
ORIGIN = (0, 0, 0)
| mit | Python | |
754b9d76e8a02040c423d7c15737073d48458eb0 | fix non-fastbuild chrome_split_dll | littlstar/chromium.src,dednal/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,Jonekee/chromium.src,ltilve/chromium,ondra-novak/chromium.src,chuan9/chromium-crosswalk,patrickm/chromium.src,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,littlstar/chromium.src,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,anirudhSK/chromium,hujiajie/pa-chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,M4sse/chromium.src,hujiajie/pa-chromium,M4sse/chromium.src,dednal/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,mohamed--ab
del-maksoud/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,bright-sparks/chromium-spacewalk,Chilledheart/chromium,chuan9/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,patrickm/chromium.src,patrickm/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hujiajie/pa-chromium,hujiajie/pa-chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,anirudhSK/ch
romium,ltilve/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,markYoungH/chromium.src,dednal/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Just-D/chromium-1,anirudhSK/chromium,jaruba/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,anirudhSK/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_ch
romium.src,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,littlstar/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,dushu1203/chromium.src,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,krieger-od/nwjs_chromium.src,dednal/chromium.src,patrickm/chromium.src,markYoungH/chromium.src,anirudhSK/chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,jaruba/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,patrickm/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,Fireblend/chromium-crosswalk | chrome/installer/mini_installer_syzygy.gyp | chrome/installer/mini_installer_syzygy.gyp | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'version_py': '<(DEPTH)/chrome/tools/build/version.py',
'version_path': '<(DEPTH)/chrome/VERSION',
'lastchange_path': '<(DEPTH)/build/util/LASTCHANGE',
# 'branding_dir' is set in the 'conditions' section at the bottom.
'msvs_use_common_release': 0,
'msvs_use_common_linker_extras': 0,
},
'includes': [
'../../build/win_precompile.gypi',
],
'conditions': [
# This target won't build in fastbuild, since there are no PDBs.
['OS=="win" and fastbuild==0 and chrome_split_dll==0', {
'targets': [
{
'target_name': 'mini_installer_syzygy',
'type': 'executable',
'product_name': 'mini_installer',
'variables': {
'chrome_dll_project': '../chrome_syzygy.gyp:chrome_dll_syzygy',
'chrome_dll_path': '<(PRODUCT_DIR)/syzygy/chrome.dll',
'output_dir': '<(PRODUCT_DIR)/syzygy',
},
# Bulk of the build configuration comes from here.
'includes': [ 'mini_installer.gypi', ],
},
],
},{
'targets': [],
}],
[ 'branding == "Chrome"', {
'variables': {
'branding_dir': '../app/theme/google_chrome',
},
}, { # else branding!="Chrome"
'variables': {
'branding_dir': '../app/theme/chromium',
},
}],
],
}
| # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'version_py': '<(DEPTH)/chrome/tools/build/version.py',
'version_path': '<(DEPTH)/chrome/VERSION',
'lastchange_path': '<(DEPTH)/build/util/LASTCHANGE',
# 'branding_dir' is set in the 'conditions' section at the bottom.
'msvs_use_common_release': 0,
'msvs_use_common_linker_extras': 0,
},
'includes': [
'../../build/win_precompile.gypi',
],
'conditions': [
# This target won't build in fastbuild, since there are no PDBs.
['OS=="win" and fastbuild==0', {
'targets': [
{
'target_name': 'mini_installer_syzygy',
'type': 'executable',
'product_name': 'mini_installer',
'variables': {
'chrome_dll_project': '../chrome_syzygy.gyp:chrome_dll_syzygy',
'chrome_dll_path': '<(PRODUCT_DIR)/syzygy/chrome.dll',
'output_dir': '<(PRODUCT_DIR)/syzygy',
},
# Bulk of the build configuration comes from here.
'includes': [ 'mini_installer.gypi', ],
},
],
}],
[ 'branding == "Chrome"', {
'variables': {
'branding_dir': '../app/theme/google_chrome',
},
}, { # else branding!="Chrome"
'variables': {
'branding_dir': '../app/theme/chromium',
},
}],
],
}
| bsd-3-clause | Python |
685347e9f7fbf629e09427d5c63c1e81dfe43446 | Create config file | victormartinez/shub_cli | shub_cli/config.py | shub_cli/config.py | from prompt_toolkit.styles import style_from_dict
from pygments.token import Token
# Pygments token styles used by prompt_toolkit when printing CLI messages.
error_style = style_from_dict({
    Token.ErrorMessage: '#ff0066',
    Token.ShubFileModel: '#ccaa33',
})
tokens = [
(Token.ErrorMessage, 'You need to set up your .scrapinghub.yml with a default project and api key:\n'),
(Token.ShubFileModel,
'''
~/.scrapinghub.yml
apikeys:
default: v65a787a987k08k9s797d7s8l98298sw
projects:
default: 89090
'''
)
]
| mit | Python | |
1dd1111bd1bab62ed900d74f347a7fe10d03eb03 | Test that changelog is not outdated | lorien/user_agent | test/release.py | test/release.py | from __future__ import absolute_import
import user_agent
import re
def test_changelog():
    """
    Parse changelog and ensure that it contains
    * unreleased version younger than release date
    * release version has a date
    """
    re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$')
    ver_dates = {}
    ver_history = []
    # Collect "## [x.y.z] - date" headings in file order.  Fix: use a
    # context manager so the file handle is closed (the original leaked
    # the handle returned by open()); also drop the debug print()s.
    with open('CHANGELOG.md') as changelog:
        for line in changelog:
            if line.startswith('## ['):
                ver = line.split('[')[1].split(']')[0]
                date = line.split('-', 1)[1].strip().lower()
                ver_dates[ver] = date
                ver_history.append(ver)
    release = user_agent.__version__
    # The released version must carry a real YYYY-MM-DD date...
    assert 'unreleased' not in ver_dates[release]
    assert re_date.match(ver_dates[release])
    # ...and sit directly under the single "unreleased" heading.
    assert ver_history.index(release) == 1
| mit | Python | |
fb02defe8ba3bb5d7a76cfd515ebe1a7369a02da | Add time unit and duration to string function | Harmon758/Harmonbot,Harmon758/Harmonbot | units/time.py | units/time.py |
import datetime
from .errors import UnitExecutionError
def duration_to_string(duration, weeks = True, milliseconds = False, microseconds = False,
                       abbreviate = False, separator = ' '):
    """Format a datetime.timedelta as human-readable text, e.g. '1 year 2 days'.

    weeks        -- break days >= 7 into weeks when True
    milliseconds -- include a milliseconds component
    microseconds -- include a microseconds component
    abbreviate   -- use one-letter unit suffixes ('1y 2d') instead of words
    separator    -- string placed between the rendered components
    Raises UnitExecutionError when `duration` is not a timedelta.
    """
    # TODO: Support colon format
    if not isinstance(duration, datetime.timedelta):
        raise UnitExecutionError("duration must be datetime.timedelta")
    # Decompose into ordered (unit, amount) pairs, largest unit first.
    components = [("year", duration.days // 365)]
    leftover_days = duration.days % 365
    if weeks:
        components.append(("week", leftover_days // 7))
        components.append(("day", leftover_days % 7))
    else:
        components.append(("day", leftover_days))
    components.append(("hour", duration.seconds // 3600))
    components.append(("minute", duration.seconds // 60 % 60))
    components.append(("second", duration.seconds % 60))
    if milliseconds:
        components.append(("millisecond", duration.microseconds // 1000))
    if microseconds:
        components.append(("microsecond", duration.microseconds % 1000))
    rendered = []
    for unit, amount in components:
        if not amount:
            continue  # zero-valued units are omitted entirely
        if abbreviate:
            if unit == "millisecond":
                text = f"{amount}ms"
            elif unit == "microsecond":
                text = f"{amount}μs"
            else:
                text = f"{amount}{unit[0]}"
        else:
            text = f"{amount} {unit}"
            if amount > 1:
                text += 's'
        rendered.append(text)
    return separator.join(rendered)
| mit | Python | |
df87fde0862a2d3725505ca709367ae74faa53fd | Create Battleship.py | SamGriffith3/PyLearning-Projects,SamGriffith3/PyLearning-Projects | Battleship.py | Battleship.py | #Text Based Battle Ship
Print(" Are You Ready to Play?")
| mit | Python | |
e9a37e7c4db4278c1fbdce3ec5041929910f9e3a | Add patterns module. | damonkelley/pattern-matcher | pattern_matcher/patterns.py | pattern_matcher/patterns.py | class Node(object):
    # Token value that matches any input element.
    WILDCARD = '*'
    def __init__(self, value):
        self.value = value
    def is_wildcard(self):
        # True when this node is the '*' wildcard.
        return self.value == self.WILDCARD
    def __str__(self):
        return self.value
    def __repr__(self):
        return '<Node: \'{0}\'>'.format(str(self))
class Pattern(object):
    """A sequence of Node elements scored by its wildcard positions."""
    def __init__(self, nodes):
        self.score = 0
        self.num_wildcards = 0
        self.nodes = [Node(value) for value in nodes]
        self.length = len(self.nodes)
        self.__score_pattern()
    def __score_pattern(self):
        # Each wildcard adds length/(position+2): earlier wildcards cost
        # more, so lower scores mean later (more specific) wildcards.
        for position, node in enumerate(self.nodes):
            if not node.is_wildcard():
                continue
            self.num_wildcards += 1
            self.score += self.length / float(position + 2)
    def has_wildcard(self):
        return bool(self.num_wildcards)
    def __str__(self):
        return ','.join([str(node) for node in self.nodes])
    def __repr__(self):
        return '<Pattern: \'{0}\'>'.format(str(self))
class Patterns(object):
    """A collection of Pattern objects that tracks the best (lowest) metrics."""
    def __init__(self, patterns):
        self.min_wildcards = None
        self.min_score = None
        self.patterns = []
        self.__add_patterns(patterns)
    def __add_patterns(self, patterns):
        # Wrap each raw sequence in a Pattern and fold it into the minima.
        for raw in patterns:
            wrapped = Pattern(raw)
            self.patterns.append(wrapped)
            self._update_min_wildcards(wrapped)
            self._update_min_score(wrapped)
    def _update_min_wildcards(self, p):
        is_first_or_lower = self.min_wildcards is None or p.num_wildcards < self.min_wildcards
        if is_first_or_lower:
            self.min_wildcards = p.num_wildcards
    def _update_min_score(self, p):
        is_first_or_lower = self.min_score is None or p.score < self.min_score
        if is_first_or_lower:
            self.min_score = p.score
    def get_best_patterns(self):
        # Candidates with the fewest wildcards; ties broken by lowest score.
        # NOTE(review): returns a list on a tie but a single Pattern
        # otherwise -- callers must cope with both shapes.
        candidates = [p for p in self.patterns if p.num_wildcards == self.min_wildcards]
        if len(candidates) > 1:
            return [p for p in candidates if p.score == self.min_score]
        return candidates.pop()
| mit | Python | |
83d6fb2636a6b94a725ba469bbebedfe42f71a80 | Add verbatim lat lon (WGS84) to british national grid (OSBG36) | fmalina/bng_latlon,fmalina/bng_latlon,fmalina/bng_latlon,fmalina/bng_latlon | latlon_to_bng.py | latlon_to_bng.py | #This code converts lat lon (WGS84) to british national grid (OSBG36)
from scipy import *
import csv
def WGS84toOSGB36(lat, lon):
    """Convert WGS84 latitude/longitude (degrees) to OSGB36 easting/northing (m).

    Steps: degrees -> radians on the GRS80/WGS84 ellipsoid, geodetic ->
    cartesian, Helmert datum transform onto the Airy 1830 ellipsoid,
    cartesian -> geodetic, then the Ordnance Survey transverse Mercator
    projection.  Returns the tuple (E, N) in metres.
    """
    # First convert to radians.  These are on the "wrong" ellipsoid for the
    # National Grid: GRS80 (denoted by the _1 suffix).
    lat_1 = lat*pi/180
    lon_1 = lon*pi/180
    # GRS80 semi-major and semi-minor axes used for WGS84 (m).
    a_1, b_1 = 6378137.000, 6356752.3141
    e2_1 = 1 - (b_1*b_1)/(a_1*a_1)  # eccentricity^2 of the GRS80 ellipsoid
    nu_1 = a_1/sqrt(1 - e2_1*sin(lat_1)**2)
    # Geodetic -> cartesian coordinates (ellipsoidal height assumed 0).
    H = 0
    x_1 = (nu_1 + H)*cos(lat_1)*cos(lon_1)
    y_1 = (nu_1 + H)*cos(lat_1)*sin(lon_1)
    z_1 = ((1 - e2_1)*nu_1 + H)*sin(lat_1)
    # Helmert transform between GRS80 (_1) and Airy 1830 (_2).
    s = 20.4894*10**-6                        # scale factor - 1
    tx, ty, tz = -446.448, 125.157, -542.060  # translations along x,y,z (m)
    rxs, rys, rzs = -0.1502, -0.2470, -0.8421 # rotations about x,y,z (arc-seconds)
    rx, ry, rz = rxs*pi/(180*3600.), rys*pi/(180*3600.), rzs*pi/(180*3600.)
    x_2 = tx + (1+s)*x_1 + (-rz)*y_1 + (ry)*z_1
    y_2 = ty + (rz)*x_1 + (1+s)*y_1 + (-rx)*z_1
    z_2 = tz + (-ry)*x_1 + (rx)*y_1 + (1+s)*z_1
    # Cartesian -> geodetic on the Airy 1830 ellipsoid.
    a, b = 6377563.396, 6356256.909  # Airy 1830 semi-major/semi-minor axes (m)
    e2 = 1 - (b*b)/(a*a)             # eccentricity^2 of the Airy 1830 ellipsoid
    p = sqrt(x_2**2 + y_2**2)
    # Latitude requires an iterative solution.
    lat = arctan2(z_2, (p*(1-e2)))  # initial value
    latold = 2*pi
    while abs(lat - latold) > 10**-16:
        lat, latold = latold, lat
        nu = a/sqrt(1 - e2*sin(latold)**2)
        lat = arctan2(z_2 + e2*nu*sin(latold), p)
    # Longitude and height are then direct.
    lon = arctan2(y_2, x_2)
    H = p/cos(lat) - nu
    # Transverse Mercator projection constants for the British National Grid.
    F0 = 0.9996012717    # scale factor on the central meridian
    lat0 = 49*pi/180     # latitude of true origin (radians)
    lon0 = -2*pi/180     # longitude of true origin / central meridian (radians)
    N0, E0 = -100000, 400000  # northing and easting of true origin (m)
    n = (a-b)/(a+b)
    # Meridional radius of curvature.
    rho = a*F0*(1-e2)*(1-e2*sin(lat)**2)**(-1.5)
    eta2 = nu*F0/rho - 1
    # BUGFIX: the fractional coefficients below must be floats.  The original
    # integer literals (5/4, 21/8, 15/8, 35/24) truncate to 1, 2, 1 and 1
    # under Python 2 integer division, biasing the meridional arc.
    M1 = (1 + n + (5./4)*n**2 + (5./4)*n**3) * (lat-lat0)
    M2 = (3*n + 3*n**2 + (21./8)*n**3) * sin(lat-lat0) * cos(lat+lat0)
    M3 = ((15./8)*n**2 + (15./8)*n**3) * sin(2*(lat-lat0)) * cos(2*(lat+lat0))
    M4 = (35./24)*n**3 * sin(3*(lat-lat0)) * cos(3*(lat+lat0))
    # Meridional arc.
    M = b * F0 * (M1 - M2 + M3 - M4)
    I = M + N0
    II = nu*F0*sin(lat)*cos(lat)/2
    III = nu*F0*sin(lat)*cos(lat)**3*(5 - tan(lat)**2 + 9*eta2)/24
    IIIA = nu*F0*sin(lat)*cos(lat)**5*(61 - 58*tan(lat)**2 + tan(lat)**4)/720
    IV = nu*F0*cos(lat)
    V = nu*F0*cos(lat)**3*(nu/rho - tan(lat)**2)/6
    VI = nu*F0*cos(lat)**5*(5 - 18*tan(lat)**2 + tan(lat)**4 + 14*eta2 - 58*eta2*tan(lat)**2)/120
    # E, N are the British National Grid coordinates - eastings and northings.
    N = I + II*(lon-lon0)**2 + III*(lon-lon0)**4 + IIIA*(lon-lon0)**6
    E = E0 + IV*(lon-lon0) + V*(lon-lon0)**3 + VI*(lon-lon0)**5
    return E, N
#Read in from a file
# NOTE(review): Python 2 only -- csv.reader(...).next() and mode 'rU' are gone
# in Python 3, and the input file handle is never closed.
LatLon = csv.reader(open('LatLon.csv', 'rU'), delimiter = ',')
LatLon.next()  # skip the header row
#Get the output file ready
outputFile = open('LatLonandBNG.csv', 'wb')
output=csv.writer(outputFile,delimiter=',')
output.writerow(['Lat', 'Lon', 'E', 'N'])
#Loop through the data
for line in LatLon:
    lat = line[0]
    lon = line[1]
    E, N = WGS84toOSGB36(float(lat), float(lon))
    output.writerow([str(lat), str(lon), str(E), str(N)])
#Close the output file
outputFile.close()
1b83a31090cd803d2eca0b9caed0f4cc9a149fbd | Raise ConfigurationError error that causes server to fail and dump whole stacktrace | noyeitan/cubes,jell0720/cubes,zejn/cubes,jell0720/cubes,she11c0de/cubes,cesarmarinhorj/cubes,jell0720/cubes,she11c0de/cubes,cesarmarinhorj/cubes,she11c0de/cubes,zejn/cubes,pombredanne/cubes,ubreddy/cubes,cesarmarinhorj/cubes,pombredanne/cubes,pombredanne/cubes,noyeitan/cubes,ubreddy/cubes,zejn/cubes,ubreddy/cubes,noyeitan/cubes | cubes/stores.py | cubes/stores.py | from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
    """Instantiate and return the store backend registered as *name*.

    Keyword arguments are forwarded to the backend factory.  Raises
    ConfigurationError when no such store is registered.
    """
    namespace = get_namespace("stores")
    if not namespace:
        namespace = initialize_namespace("stores", root_class=Store,
                                         suffix="_store")
    if name not in namespace:
        raise ConfigurationError("Unknown store '%s'" % name)
    return namespace[name](**options)
def create_browser(type_, cube, store, locale, **options):
    """Instantiate an aggregation browser of *type_* for *cube* over *store*.

    Extra keyword arguments are passed through to the browser factory.
    Raises ConfigurationError for an unknown browser type.
    """
    namespace = get_namespace("browsers")
    if not namespace:
        namespace = initialize_namespace("browsers",
                                         root_class=AggregationBrowser,
                                         suffix="_browser")
    if type_ not in namespace:
        raise ConfigurationError("Unable to find browser of type '%s'" % type_)
    factory = namespace[type_]
    return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
    """Abstract class to find other stores through the class hierarchy."""
    # Purely a common root for backend discovery; carries no behaviour.
    pass
| from .errors import *
from .browser import AggregationBrowser
from .extensions import get_namespace, initialize_namespace
__all__ = (
"open_store",
"Store"
)
def open_store(name, **options):
    """Gets a new instance of a model provider with name `name`."""
    # Lazily create the "stores" extension namespace on first use.
    ns = get_namespace("stores")
    if not ns:
        ns = initialize_namespace("stores", root_class=Store,
                                  suffix="_store")
    try:
        factory = ns[name]
    except KeyError:
        raise CubesError("Unable to find store '%s'" % name)
    return factory(**options)
def create_browser(type_, cube, store, locale, **options):
    """Creates a new browser."""
    # Lazily create the "browsers" extension namespace on first use.
    ns = get_namespace("browsers")
    if not ns:
        ns = initialize_namespace("browsers", root_class=AggregationBrowser,
                                  suffix="_browser")
    try:
        factory = ns[type_]
    except KeyError:
        raise CubesError("Unable to find browser of type '%s'" % type_)
    return factory(cube=cube, store=store, locale=locale, **options)
class Store(object):
    """Abstract class to find other stores through the class hierarchy."""
    # Purely a common root for backend discovery; carries no behaviour.
    pass
| mit | Python |
bd31cd36db0a2780047caee02076c6dd4e44cc3f | Create MinStack_001.py | Chasego/codi,Chasego/codi,cc13ny/Allin,cc13ny/Allin,Chasego/cod,cc13ny/algo,Chasego/codirit,Chasego/codi,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/codirit,Chasego/codirit,Chasego/codi,Chasego/cod,cc13ny/algo,cc13ny/Allin,cc13ny/Allin,Chasego/cod,Chasego/cod,cc13ny/algo,Chasego/cod,Chasego/codirit,cc13ny/algo | leetcode/155-Min-Stack/MinStack_001.py | leetcode/155-Min-Stack/MinStack_001.py | class MinStack:
# @param x, an integer
    def __init__(self):
        # Main stack plus a parallel stack of minima (ties are duplicated so
        # pop() can keep the two in sync).
        self.stack = []
        self.min_stack = []
# @return an integer
def push(self, x):
self.stack.append(x)
if len(self.min_stack) == 0 or self.min_stack[-1] >= x:
self.min_stack.append(x)
# @return nothing
def pop(self):
num = self.stack.pop(-1)
if self.min_stack[-1] == num:
self.min_stack.pop(-1)
# @return an integer
    def top(self):
        """Return (without removing) the most recently pushed value."""
        return self.stack[-1]
# @return an integer
    def getMin(self):
        """Return the current minimum in O(1) via the auxiliary stack."""
        return self.min_stack[-1]
| mit | Python | |
d78c14cd8f6329f14628ac67345781c7eda6240c | add lists services script | vtcsec/wargame-scorer | list_services.py | list_services.py | import services
from services import *
# Report how many Service plugins were discovered, then list them by name.
# NOTE(review): Python 2 print statements.
print len(services.Service.plugins)
for p in services.Service.plugins:
    print p.name
| bsd-3-clause | Python | |
5b8bf127ed7bbb3ea8e0bf05b9e4fc6d00962402 | add a script to use the data converter | agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft | data_convert.py | data_convert.py | # Copyright (c) 2012 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from legacy import convert
from pycroft import model
if __name__ == "__main__":
    # Rebuild the database schema from scratch, then run the legacy import.
    print "drop old db model"
    model.drop_db_model()
    print "create new db model"
    model.create_db_model()
    print "convert data"
    convert.do_convert()
    print "complete"
| apache-2.0 | Python | |
d0ff03be32a7325f310237b49d06f622a751e448 | add easy-rdf.py | Dans-labs/easy-lod | easy-rdf.py | easy-rdf.py | import itertools
import re
import os
import rdflib
from oaipmh.client import Client
from oaipmh.metadata import MetadataRegistry, oai_dc_reader
dc11 = rdflib.Namespace('http://purl.org/dc/elements/1.1/')
geo = rdflib.Namespace('http://www.w3.org/2003/01/geo/wgs84_pos#')
virtrdf = rdflib.Namespace('http://www.openlinksw.com/schemas/virtrdf#')
easy_id = rdflib.Namespace('https://easy.dans.knaw.nl/ui/datasets/id/easy-dataset:')
def easy_url(oai_id):
    """Translate an OAI identifier into the EASY dataset landing-page URI."""
    prefix, record = oai_id.rsplit(':', 1)
    if prefix != 'oai:easy.dans.knaw.nl:easy-dataset':
        raise Exception("Unknown namespace: {0}".format(prefix))
    return easy_id[record]
def make_graphs(oai_records):
    """Yield one rdflib Graph of Dublin Core triples per OAI record.

    Records with no metadata payload are skipped.
    """
    for header, metadata, _ in oai_records:
        if metadata is None:
            continue
        graph = rdflib.Graph()
        s = easy_url(header.identifier())
        # NOTE(review): iteritems() is Python 2 only.
        metadata_fields = metadata.getMap().iteritems()
        for p, vv in metadata_fields:
            for v in vv:
                graph.add((s, dc11[p], rdflib.Literal(v)))
        yield graph
def oai_metadata(oai_endpoint):
    """Harvest Dublin Core records from *oai_endpoint* as a stream of graphs."""
    registry = MetadataRegistry()
    registry.registerReader('oai_dc', oai_dc_reader)
    client = Client(oai_endpoint, registry)
    return make_graphs(client.listRecords(metadataPrefix='oai_dc'))
def add_geo(graph):
    """Add a WKT point triple for each parsable dc:coverage value; return graph."""
    for s, o in graph.subject_objects(dc11.coverage):
        # Coverage values look like u"\u03c6\u03bb=<lat> <lon>; projection=...EPSG/0/4326;"
        coord = re.findall(u'\u03c6\u03bb=([\d.]+\s[\d.]+);\sprojection=http://www.opengis.net/def/crs/EPSG/0/4326;', o)
        if coord:
            wkt_point = "point({0})".format(coord[0])
            graph.add((s, geo.geometry, rdflib.Literal(wkt_point, datatype=virtrdf.Geometry)))
    return graph
def tranform(transformation, records):
    """Lazily apply *transformation* to every record."""
    for record in records:
        yield transformation(record)
def limit(records, max_records=None):
    """Take at most *max_records* items from *records* (everything when None)."""
    truncated = itertools.islice(records, max_records)
    return truncated
def dump_nt(records, filename, mode='w'):
    """Serialize every graph in *records* to *filename* in N-Triples format.

    NOTE(review): the output file is opened but never explicitly closed.
    """
    fout = open(filename, mode)
    for record in records:
        record.serialize(fout, format='nt')
# Build the harvest pipeline at import time.  Whether this contacts the
# endpoint immediately depends on pyoai's listRecords laziness -- verify.
easy_rdf = tranform(add_geo, oai_metadata('http://easy.dans.knaw.nl/oai/'))
if __name__ == '__main__':
    dump_nt(easy_rdf, 'easy.nt')
| bsd-3-clause | Python | |
2234214adc252e37ff6e83776a32a2826a37f79f | add test for labelers | ntucllab/libact,ntucllab/libact,ntucllab/libact | libact/labelers/tests/test_labelers.py | libact/labelers/tests/test_labelers.py | import unittest
import numpy as np
from libact.base.dataset import Dataset
from libact.labelers import IdealLabeler
class TestDatasetMethods(unittest.TestCase):
    # Fixed 5x3 feature matrix and matching labels shared by the tests.
    initial_X = np.arange(15).reshape((5, 3))
    initial_y = np.array([1, 2, 3, 1, 4])

    def setup_dataset(self):
        """Build a fresh Dataset from the fixture arrays."""
        return Dataset(self.initial_X, self.initial_y)

    def test_label(self):
        """IdealLabeler returns the known label for each feature row."""
        dataset = self.setup_dataset()
        lbr = IdealLabeler(dataset)
        ask_id = lbr.label(np.array([0, 1, 2]))
        self.assertEqual(ask_id, 1)
        ask_id = lbr.label(np.array([6, 7, 8]))
        self.assertEqual(ask_id, 3)
        ask_id = lbr.label([12, 13, 14])
        self.assertEqual(ask_id, 4)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| bsd-2-clause | Python | |
dda0f1ba84feac1cf7cd54769efbb543defa173a | add epinions data reader | Evfro/polara | polara/datasets/epinions.py | polara/datasets/epinions.py | import numpy as np
import scipy as sp
import pandas as pd
def compute_graph_laplacian(edges, index):
    """Build an undirected sparse adjacency matrix and its graph Laplacian.

    *edges* is an iterable of (a, b) node labels; *index* is a pandas Index
    mapping labels to integer positions.  Labels missing from *index* and
    self-loops are dropped; surviving edges are added in both directions.
    Returns (laplacian_csr, adjacency_csr).
    """
    all_edges = set()
    for a, b in edges:
        try:
            a = index.get_loc(a)
            b = index.get_loc(b)
        except KeyError:
            continue
        if a == b: # exclude self links
            continue
        # make graph undirectional
        all_edges.add((a, b))
        all_edges.add((b, a))
    # NOTE(review): zip(*all_edges) is a lazy object on Python 3; scipy may
    # require explicit row/col sequences here -- verify on the target version.
    sp_edges = sp.sparse.csr_matrix((np.ones(len(all_edges)), zip(*all_edges)))
    assert (sp_edges.diagonal() == 0).all()
    return sp.sparse.csgraph.laplacian(sp_edges).tocsr(), sp_edges
def get_epinions_data(ratings_path=None, trust_data_path=None):
    """Load the Epinions ratings and/or trust-edge files.

    Returns a single DataFrame when exactly one path is given, otherwise a
    list of the requested frames (ratings first, then edges).
    """
    res = []
    if ratings_path:
        # Whitespace-separated triples; the first and last lines are skipped.
        ratings = pd.read_csv(ratings_path,
                              delim_whitespace=True,
                              skiprows=[0],
                              skipfooter=1,
                              engine='python',
                              header=None,
                              skipinitialspace=True,
                              names=['user', 'film', 'rating'],
                              usecols=['user', 'film', 'rating'])
        res.append(ratings)
    if trust_data_path:
        # Edge list, same first/last-line skipping; only two columns kept.
        edges = pd.read_table(trust_data_path,
                              delim_whitespace=True,
                              skiprows=[0],
                              skipfooter=1,
                              engine='python',
                              header=None,
                              skipinitialspace=True,
                              usecols=[0, 1])
        res.append(edges)
    if len(res)==1: res = res[0]
    return res
| mit | Python | |
22382726fa69f40c74611a79b99845df1bd3076f | Add rackHd inventory script | thaim/ansible,thaim/ansible | contrib/inventory/rackhd.py | contrib/inventory/rackhd.py | #!/usr/bin/python
import json
import requests
import os
import argparse
import types
MONORAIL_URL = 'http://localhost:8080'
class OnRackInventory(object):
    """Fetch node catalog/lookup data from a RackHD/Monorail API and print an
    Ansible-style inventory JSON document (all work happens in __init__)."""

    def __init__(self, nodeids):
        self._inventory = {}
        for nodeid in nodeids:
            self._load_inventory_data(nodeid)
        # Assemble the JSON document by hand; see NOTE in _format_output.
        output = '{\n'
        for nodeid,info in self._inventory.iteritems():
            output += self._format_output(nodeid, info)
            output += ',\n'
        output = output[:-2]
        output += '}\n'
        print output

    def _load_inventory_data(self, nodeid):
        """Fetch the 'ohai' catalog and lookup records for one node id."""
        info = {}
        info['ohai'] = MONORAIL_URL + '/api/common/nodes/{0}/catalogs/ohai'.format(nodeid )
        info['lookup'] = MONORAIL_URL + '/api/common/lookups/?q={0}'.format(nodeid)
        results = {}
        for key,url in info.iteritems():
            r = requests.get( url, verify=False)
            results[key] = r.text
        self._inventory[nodeid] = results

    def _format_output(self, nodeid, info):
        """Render one node's inventory group as a JSON fragment string.

        NOTE(review): builds JSON via string concatenation instead of
        json.dumps on a dict, and silently returns '' on KeyError.
        """
        output = ''
        try:
            node_info = json.loads(info['lookup'])
            ipaddress = ''
            if len(node_info) > 0:
                ipaddress = node_info[0]["ipAddress"]
            output += '    "' + nodeid + '" : {\n'
            output += '        "hosts": [ "' + ipaddress + '" ],\n'
            output += '        "vars" : {\n'
            for key,result in info.iteritems():
                output += '        "' + key + '": ' + json.dumps(json.loads(result), sort_keys=True, indent=2) + ',\n'
            output += '        "ansible_ssh_user": "renasar"\n'
            output += '        }\n'
            output += '    }\n'
        except KeyError:
            pass
        return output
try:
    #check if monorail url(ie:10.1.1.45:8080) is specified in the environment
    MONORAIL_URL = 'http://' + str(os.environ['MONORAIL'])
except:
    #use default values
    # NOTE(review): bare except hides real errors; KeyError would suffice.
    pass
# Use the nodeid specified in the environment to limit the data returned
# or return data for all available nodes
nodeids = []
try:
    nodeids += os.environ['nodeid'].split(',')
except KeyError:
    # No explicit node list: query the API for all compute-type nodes.
    url = MONORAIL_URL + '/api/common/nodes'
    r = requests.get( url, verify=False)
    data = json.loads(r.text)
    for entry in data:
        if entry['type'] == 'compute':
            nodeids.append(entry['id'])
OnRackInventory(nodeids)
| mit | Python | |
4046c743323a4357864afcac482a5625ed71c184 | Add solution for problem 6 | cifvts/PyEuler | euler006.py | euler006.py | #!/usr/bin/python
# Project Euler 6: difference between the square of the sum and the sum of
# the squares of the first `limit` natural numbers.
limit = 100
sum_sq = sum(range(1, limit + 1)) ** 2            # square of the sum
sq_sum = sum(i * i for i in range(1, limit + 1))  # sum of the squares
print (int (sum_sq - sq_sum))
| mit | Python | |
e9a7d806030fc87ce63554a96c485ecf197e9efd | Create Accel_to_Pos.py | purduerov/X9-Core,purduerov/X9-Core,purduerov/X9-Core,purduerov/X9-Core,purduerov/X9-Core,purduerov/X9-Core | control/PID/Accel_to_Pos.py | control/PID/Accel_to_Pos.py | import time
class AccelConversion(object):
    """Derive a position estimate from an acceleration sample and elapsed time."""

    def __init__(self):
        # Reference instant; elapsed time is measured from construction.
        self.time_start = time.time()

    def integration(self, accel):
        """Return accel * dt**2 where dt is the seconds since construction.

        NOTE(review): true double integration from rest would be
        0.5 * accel * dt**2 and would need velocity accumulated between
        samples -- confirm the intended motion model.
        """
        delta_t = time.time() - self.time_start
        pos = accel * (delta_t ** 2) # assumes the ROV has 0 velocity at the time of the switch to autopilot
        return pos
| mit | Python | |
66f5ec45798b21996bf5216cd9b8bce4d7d831fd | Add missing migration | patjouk/djangogirls,patjouk/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls | core/migrations/0034_auto_20170124_1754.py | core/migrations/0034_auto_20170124_1754.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-24 17:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: relabels Event.is_page_live as
    # "Website is ready" (verbose_name change only).

    dependencies = [
        ('core', '0033_auto_20170124_1300'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='is_page_live',
            field=models.BooleanField(default=False, verbose_name='Website is ready'),
        ),
    ]
| bsd-3-clause | Python | |
a051b41773c5ea25b3ff5791544fa3b38bdfec4a | add test | snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx | awx/main/tests/unit/models/test_inventory.py | awx/main/tests/unit/models/test_inventory.py | import pytest
import mock
from awx.main.models import (
UnifiedJob,
InventoryUpdate,
Job,
)
@pytest.fixture
def dependent_job(mocker):
    """Single-element list holding a Job whose cancel() is a stub returning True."""
    j = Job(id=3, name='I_am_a_job')
    j.cancel = mocker.MagicMock(return_value=True)
    return [j]
def test_cancel(mocker, dependent_job):
    """cancel() should cascade to dependent jobs with a built explanation."""
    with mock.patch.object(UnifiedJob, 'cancel', return_value=True) as parent_cancel:
        iu = InventoryUpdate()
        iu.get_dependent_jobs = mocker.MagicMock(return_value=dependent_job)
        iu.save = mocker.MagicMock()
        build_job_explanation_mock = mocker.MagicMock()
        iu._build_job_explanation = mocker.MagicMock(return_value=build_job_explanation_mock)
        iu.cancel()
        # Parent cancel gets no explanation; dependents get the built one.
        parent_cancel.assert_called_with(job_explanation=None)
        dependent_job[0].cancel.assert_called_with(job_explanation=build_job_explanation_mock)
def test__build_job_explanation():
    """_build_job_explanation embeds type, name and id in a fixed template."""
    iu = InventoryUpdate(id=3, name='I_am_an_Inventory_Update')
    job_explanation = iu._build_job_explanation()
    assert job_explanation == 'Previous Task Canceled: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
                              ('inventory_update', 'I_am_an_Inventory_Update', 3)
| apache-2.0 | Python | |
17725f25fa8ecd235d3c9a0b08320af680e3b8fc | Create Networking.py | Elviond/A4TL | Networking.py | Networking.py | import socket
import ast
class Server(object):
    """Blocking single-client TCP server using _send/_recv literal framing."""
    backlog = 5
    client = None

    def __init__(self, host, port):
        self.socket = socket.socket()
        self.socket.bind((host, port))
        self.socket.listen(self.backlog)

    def __del__(self):
        # Best-effort cleanup; close() tolerates repeated calls.
        self.close()

    def accept(self):
        """Wait for the next client, replacing any existing connection."""
        # if a client is already connected, disconnect it
        if self.client:
            self.client.close()
        self.client, self.client_addr = self.socket.accept()
        return self

    def send(self, data):
        """Send *data* to the connected client; chainable."""
        if not self.client:
            raise Exception('Cannot send data, no client is connected')
        _send(self.client, data)
        return self

    def recv(self):
        """Receive and parse one message from the connected client."""
        if not self.client:
            raise Exception('Cannot receive data, no client is connected')
        return _recv(self.client)

    def close(self):
        """Close the client connection (if any) and the listening socket."""
        if self.client:
            self.client.close()
            self.client = None
        if self.socket:
            self.socket.close()
            self.socket = None
class Client(object):
    """TCP client counterpart to Server, using the same literal framing."""
    socket = None

    def __del__(self):
        # Best-effort cleanup; close() tolerates repeated calls.
        self.close()

    def connect(self, host, port):
        """Open a connection to (host, port); chainable."""
        self.socket = socket.socket()
        self.socket.connect((host, port))
        return self

    def send(self, data):
        """Send *data* over the open connection; chainable."""
        if not self.socket:
            raise Exception('You have to connect first before sending data')
        _send(self.socket, data)
        return self

    def recv(self):
        """Receive and parse one message."""
        if not self.socket:
            raise Exception('You have to connect first before receiving data')
        return _recv(self.socket)

    def recv_and_close(self):
        """Receive one message, then close the connection."""
        data = self.recv()
        self.close()
        return data

    def close(self):
        if self.socket:
            self.socket.close()
            self.socket = None
def _send(socket, data):
    """Serialize *data* with str() and push it down the socket.

    NOTE(review): parameter shadows the `socket` module; the receiver uses
    ast.literal_eval, so *data* must render as a Python literal.
    """
    socket.sendall(str(data).encode('utf8'))
def _recv(socket):
    """Read up to 4096 bytes and parse them as a Python literal.

    NOTE(review): a single recv() call assumes the whole message arrives in
    one segment; larger or fragmented payloads will fail to parse.
    """
    data = socket.recv(4096)
    data = ast.literal_eval(data)
    return data
| mit | Python | |
1c745d9414b0a734ede929cd6c2698a68dd014e5 | Add timeout the runner | chilland/scraper,openeventdata/scraper | scraper_sched.py | scraper_sched.py | from functools import wraps
import errno
import os
import signal
from scraper import run_scraper
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger
class TimeoutError(Exception):
    """Raised by the timeout decorator when the wrapped call overruns.

    NOTE(review): shadows the Python 3 builtin of the same name.
    """
    pass
def timeout(seconds=10, error_message=os.strerror(errno.ETIME)):
    """Decorator factory: abort the wrapped call with TimeoutError after
    *seconds*, using SIGALRM (main thread only, Unix only)."""
    def decorator(func):
        def _handle_timeout(signum, frame):
            raise TimeoutError(error_message)

        @wraps(func)
        def wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _handle_timeout)
            signal.alarm(seconds)
            try:
                return func(*args, **kwargs)
            finally:
                # Always cancel the pending alarm, success or failure.
                signal.alarm(0)

        return wrapper
    return decorator
@timeout(seconds=90 * 60)
def timeout_runner():
    """Run one scrape, aborted after 90 minutes if it hangs."""
    run_scraper()
if __name__ == '__main__':
    # Fire the (timeout-guarded) scraper at the top of every hour.
    scheduler = BlockingScheduler()
    scheduler.add_job(timeout_runner, CronTrigger(minute=0))
    scheduler.start()
| from scraper import run_scraper
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger
if __name__ == '__main__':
    # Run the scraper hourly with no watchdog around it.
    scheduler = BlockingScheduler()
    scheduler.add_job(run_scraper, CronTrigger(minute=0))
    scheduler.start()
| mit | Python |
29470e8e8d1709a3b40122cc79eca864e60d898e | Add benchmark script for recomennder bolt | cutoffthetop/recommender,cutoffthetop/recommender,cutoffthetop/recommender | scripts/bench.py | scripts/bench.py | #!/usr/bin/env python
"""
SYNOPSIS
bench [-b,--base] [-h,--help] [-k,--rank] [-r,--ratio] [-s,--size]
[-t,--threshold]
DESCRIPTION
Benchmark the RecommendationBolt class from ../storm/src/py/resources.
-b int, --base int
Configure the size of the user base to influence the underlying SVD.
Defaults to 2500.
-h, --help
Show this message.
-k int, --rank int
Configure the rank of the matrix approximation. Defaults to 100.
-r float, --ratio float
        Configure what percentage of the testing data to use for
verification. Defaults to 0.5.
-s int, --size int
Configure how many users to generate recommendations for. The actual
size may vary depending on threshold setting. Defaults to 5000.
-t int, --threshold int
        Set the minimum amount of observations that must be on record for a user
to be considered for testing. Defaults to 5.
AUTHOR
Nicolas Drebenstedt <nicolas.drebenstedt@zeit.de>
LICENSE
This script is BSD licenced, see LICENSE file for more info.
VERSION
0.1
"""
__import__('sys').path.append('../storm/src/py/resources')
from recommendation import RecommendationBolt
import optparse
import os
import traceback
import numpy as np
def main(base, rank, ratio, size, threshold):
    """Measure the recommender's mean absolute error on synthetic users.

    Seeds a RecommendationBolt with a *base*-user SVD of rank *rank*,
    generates up to *size* test users with at least *threshold* observations,
    hides a *ratio* fraction of each user's events, predicts them back and
    prints the mean absolute error of the reconstruction.

    NOTE(review): the summary prints below read the module-global `options`
    parsed in __main__, not this function's parameters.
    """
    # Supress floating-point error warnings.
    np.seterr(all='ignore')
    # Configure mock recommendation bolt.
    rb = RecommendationBolt()
    conf = {
        'zeit.recommend.svd.base': base,
        'zeit.recommend.svd.rank': rank
    }
    rb.initialize(conf, None)
    # Generate test user base with a minimum observation count of 'threshold'.
    goal = dict(
        rb.generate_seed(from_=base + 1000, size=size, threshold=threshold)
        )
    # Omit observations from user base so they can be predicted.
    # NOTE(review): when int(len * ratio) is 0, [:-0] empties the list -- verify.
    test = goal.copy()
    for user in test:
        limit = int(len(test[user]) * ratio)
        test[user] = list(test[user])[:-limit]
    # Generate recommendations for incomplete test dict.
    prediction = test.copy()
    for user, events in prediction.items():
        vector = rb.expand({user: events}).next()
        for col, value in rb.recommend(np.array(vector)):
            prediction[user].append(rb.cols[col])
    # Expand goal and prediction dicts to matrices.
    goal_matrix = np.array(list(rb.expand(goal)))
    prediction_matrix = np.array(list(rb.expand(prediction)))
    # Calculate mean absolute error.
    aggregate = 0.0
    for i in range(goal_matrix.shape[0]):
        for j in range(goal_matrix.shape[1]):
            aggregate += abs(prediction_matrix[i, j] - goal_matrix[i, j])
    print 'Base:\t\t', options.base
    print 'Rank:\t\t', options.rank
    print 'Ratio:\t\t', options.ratio
    print 'Size:\t\t', options.size
    print 'Threshold:\t', options.threshold
    print 'MAE:\t\t', aggregate / np.multiply(*goal_matrix.shape)
if __name__ == '__main__':
    try:
        # Command-line interface mirroring the module docstring options.
        parser = optparse.OptionParser(
            formatter=optparse.TitledHelpFormatter(),
            usage=globals()['__doc__'],
            version='0.1'
        )
        parser.add_option(
            '-b',
            '--base',
            default=2500,
            help='size of original user base',
            type='int'
        )
        parser.add_option(
            '-k',
            '--rank',
            default=100,
            help='rank of matrix approximation',
            type='int'
        )
        parser.add_option(
            '-r',
            '--ratio',
            default=0.5,
            help='ratio of testing to verification data',
            type='float'
        )
        parser.add_option(
            '-s',
            '--size',
            default=5000,
            help='size of test user base',
            type='int'
        )
        parser.add_option(
            '-t',
            '--threshold',
            default=5,
            help='minimum observations per test user',
            type='int'
        )
        (options, args) = parser.parse_args()
        main(
            options.base,
            options.rank,
            options.ratio,
            options.size,
            options.threshold
        )
    # NOTE(review): Python 2 'except X, e' syntax throughout this handler.
    except SystemExit, e:
        raise e
    except UserWarning, e:
        print str(e)
        os._exit(1)
    except Exception, e:
        print str(e)
        traceback.print_exc()
        os._exit(1)
| bsd-2-clause | Python | |
b4332ef88f80fecdb80e33c72b8c8ad96b5fbda3 | delete non necessary files | ufabdyop/screenlock,ufabdyop/screenlock,ufabdyop/screenlock | screenlockApp.py | screenlockApp.py | import wx, ConfigParser, win32gui, win32process, win32con, subprocess, time, thread
from threading import *
from screenlockConfig import SLConfig
ID_SUBMIT = wx.NewId()
endFlag = False
class OverlayFrame( wx.Frame ) :
    """Full-screen translucent lock overlay asking for an admin password."""

    def __init__( self ) :
        wx.Frame.__init__( self, None, title="Transparent Window",
            style=wx.DEFAULT_FRAME_STYLE | wx.STAY_ON_TOP )
        self.config = SLConfig()
        self.ShowFullScreen( True )
        # Semi-transparent so the locked desktop stays visible underneath.
        self.alphaValue = 220
        self.SetTransparent( self.alphaValue )
        self.SetBackgroundColour('#CCE8CF')
        font=wx.Font(16,wx.DECORATIVE,wx.NORMAL,wx.BOLD)
        self.label = wx.StaticText(self, label="For Administrator Only:", pos=(10,10))
        self.label.SetFont(font)
        self.inputField = wx.TextCtrl(self, value="", size=(140, 30), pos=(10,45), name="input", style=wx.TE_PASSWORD)
        self.inputField.SetFont(font)
        self.submitbutton = wx.Button(self, ID_SUBMIT, 'Submit', pos=(160,50))
        self.submitbutton.SetFont(font)
        self.Bind(wx.EVT_BUTTON, self.OnSubmit, id=ID_SUBMIT)
        self.Bind(wx.EVT_TEXT_ENTER, self.OnSubmit)
        self.input = None
        self.status = wx.StaticText(self, -1, '', pos=(10,80))
        self.status.SetFont(font)
        try:
            # Background thread that clears the status label periodically.
            thread.start_new_thread(self.deleteLabel, (self.status,))
        except:
            pass

    def OnSubmit(self, event):
        """Validate the entered password; unlock on success, warn otherwise."""
        self.input = self.inputField.GetValue()
        self.inputField.Clear()
        if self.config.passwordCheck(self.input):
            global endFlag
            endFlag = True
            self.Destroy()
        else:
            self.status.SetLabel('You are not authorized.')

    def deleteLabel(self,status):
        """Poll until shutdown, wiping the status label a few seconds after it is set."""
        global endFlag
        while not endFlag:
            time.sleep(5)
            if not endFlag:
                if status.GetLabel() != '' :
                    time.sleep(5)
                    if not endFlag:
                        status.SetLabel('')
        return
#end OverlayFrame class
# a method to be invoked by ControlFrameThread
def makeProgramAtFront():
    """Raise the Warning/Coral windows; launch Coral when it is not running."""
    def callback(hwnd, _):
        # Invoked by EnumWindows for every top-level window handle.
        if win32gui.GetWindowText(hwnd).find("Warning")!= -1:
            win32gui.SetWindowPos(hwnd,win32con.HWND_TOP,0,0,500,500,win32con.SWP_NOMOVE | win32con.SWP_NOSIZE )
        elif win32gui.GetWindowText(hwnd).find("Coral")!= -1:
            global checkCoralOpen
            checkCoralOpen = True
            # Keep the Coral window pinned above everything else.
            win32gui.SetWindowPos(hwnd,win32con.HWND_TOPMOST,0,0,500,500,win32con.SWP_NOMOVE | win32con.SWP_NOSIZE )
        return True
    try:
        win32gui.EnumWindows(callback, None)
        global checkCoralOpen
        if not checkCoralOpen:
            openCoral()
    except:
        # NOTE(review): bare except swallows all win32 errors silently.
        pass
def openCoral ():
    """Launch the configured front-window executable and give it time to start."""
    config = SLConfig()
    path = config.get('front_window')
    print ("opening %s" % path)
    subprocess.Popen(path)
    time.sleep (5)
# a thread class to do the infinite loop to make sure the
# Coral window at the most front
class ControlFrameThread(Thread):
    """Loop keeping the Coral window frontmost until endFlag is set."""

    def __init__(self):
        Thread.__init__(self)
        # Starts itself immediately on construction.
        self.start()

    def run(self):
        global endFlag
        while not endFlag:
            global checkCoralOpen
            checkCoralOpen = False
            makeProgramAtFront()
            time.sleep(1)
        return
#=======================================================
if __name__ == '__main__' :
    # Show the lock overlay and start the window-watchdog thread.
    app = wx.App( False )
    frm = OverlayFrame()
    frm.Show()
    newthread = ControlFrameThread()
    app.MainLoop()
| mit | Python | |
b2d061113112634c34cb230090a97e25ef32c8b0 | Create scribe_level3.py | odorsey/pythonGame | scribe_level3.py | scribe_level3.py | print "Chapter 1: Breakin' Bad Habits"
print "You realize that you’re not cut out for a life on the run,
so you decide to head back to the palace and stand trial. As the judge concludes your charges, you stand
and defiantly plead "Not Guilty"\n"
action = raw_input("What is your argument? Do you claim you were taking notes in such a way to establish a new metadata scheme
and usher in the educational practice of information science OR do you claim your experiences
in the forest will make you a valuable warrior for the kingdom? Type either 'Information science' or 'Valuable warrior'\n")
if 'Information science' in action:
print "The king scoffs at your notion. '98% of our kingdom cannot even read. INFORMATION SCIENCE PAH?! I condemn you
to the guillotine! Ouch, sounds like the king isn't the only one losing his head over this."
guillotine = raw_input("As you approach the guillotine, the neighboring kingdom suddenly attacks the courtyard.
You still have your quill in the seat of your pants. Do you 'stay and defend the kingdom', or do you 'take the
opportunity to run'?\n")
if 'stay and defend the kingdom' in guillotine:
print "You withdraw your quill and line up next to the king's guard. 'FOR HONOR' you yell."
import scribe_level4.py
if 'take the opportunity to run' in guillotine:
print "The king catches you running away in the melee and orders a knight after you. You're no match for the horse's
speed, and the king orders an execution on the spot. Game over. Just kidding, care to try again?"
import scribe_level3.py
if 'Valuable warrior' in action:
print "The king considers your offer thoroughly. 'Hm' he ponders, 'We have been short a few heroes
recently.' Suddenly a loud crash is heard outside. 'MILORD' the king's page screams, 'We are besieged by the
neighboring kingdom!' The king turns to you, 'Alrighty lad, you say you'll be a valuable warrior, now's the time to
prove it.'\n"
courtyard = raw_input("Do you 'rush to meet the enemy head on' or
do you stay back, nervously observing your surroundings and making a break for the 'back gate' while the king
is distracted?\n")
if 'Meet the enemy head on' in courtyard:
print "You cautiously but confidently approach the enemy in the company of the king's guard. Quickly, you draw
your quill (no pun intended), causing the enemy to burst into laughter.\n"
import scribe_level4.py
if 'back gate' in courtyard:
print "The back gate is right behind the dungeon. Unfortunately, the dungeon master is privy to
your plan and halts you. Your quill barely scratches his imposing armor and thick mask
'Back in the cell ya go, laddie,' he says, tossing you in the familiarity of your cell. Dammit, not again\n"
import scribe_level1.py
| bsd-3-clause | Python | |
337e60c3d63b56b1237e3d5b052a96f3824cc6c2 | Add command to migrate SMSLog to SQL | dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq | corehq/apps/sms/management/commands/migrate_sms_to_sql.py | corehq/apps/sms/management/commands/migrate_sms_to_sql.py | from corehq.apps.sms.models import SMSLog, SMS
from custom.fri.models import FRISMSLog
from dimagi.utils.couch.database import iter_docs
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
class Command(BaseCommand):
    """Management command copying couch SMSLog documents to the SQL SMS model."""
    args = ""
    help = ("Migrates SMSLog to SMS")
    option_list = BaseCommand.option_list + (
        make_option("--balance_only",
                    action="store_true",
                    dest="balance_only",
                    default=False,
                    help="Include this option to only run the balancing step."),
    )

    def get_sms_couch_ids(self):
        """Return couch ids of all documents whose doc type is SMSLog."""
        result = SMSLog.view(
            'sms/by_domain',
            include_docs=False,
            reduce=False,
        ).all()
        return [row['id'] for row in result if row['key'][1] == 'SMSLog']

    def run_migration(self):
        """Sync every SMSLog doc to SQL, logging failures and progress."""
        count = 0
        ids = self.get_sms_couch_ids()
        total_count = len(ids)
        for doc in iter_docs(FRISMSLog.get_db(), ids):
            couch_sms = FRISMSLog.wrap(doc)
            try:
                couch_sms._migration_do_sync()
            except Exception as e:
                print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e)
            count += 1
            if (count % 10000) == 0:
                print 'Processed %s / %s documents' % (count, total_count)

    def balance(self):
        """Print document counts on both sides for a manual sanity check."""
        sql_count = SMS.objects.count()
        couch_count = len(self.get_sms_couch_ids())
        print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count)

    def handle(self, *args, **options):
        """Entry point: migrate (unless --balance_only), then report counts."""
        if not options['balance_only']:
            self.run_migration()
        self.balance()
| bsd-3-clause | Python | |
8122cddcc8817829e103462fbf019d1a3f4be9a8 | add xiami.py with login | hailinzeng/zhuaxia,wangjun/zhuaxia,kuyagic/zhuaxia,zhaodelong/zhuaxia,windygu/zhuaxia,yosef-gao/zhuaxia | zhuaxia/xiami.py | zhuaxia/xiami.py | # -*- coding:utf-8 -*-
####################################################################
# taken from https://gist.github.com/lepture/1014329
####################################################################
#
# Copyright (c) 2011, lepture.com
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import urllib
import httplib
from contextlib import closing
from Cookie import SimpleCookie
# Desktop Chrome user-agent string sent with every request.
ua = 'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36'
# Alternative mobile UA, kept for reference:
#ua = 'Mozilla/5.0 (Linux; Android 4.0.4; Galaxy Nexus Build/IMM76B) AppleWebKit/535.19 (KHTML, like Gecko) Chrome/18.0.1025.133 Mobile Safari/535.19'
# Header template for the daily check-in POST (/task/signin).
# NOTE: the original literal listed 'Content-Length' twice; a dict literal
# silently keeps only the last occurrence, so the duplicate is removed here
# with no behavioural change.
checkin_headers = {
    'User-Agent': ua,
    'Content-Length': '0',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'X-Requested-With': 'XMLHttpRequest',
    'Host': 'www.xiami.com',
    'Origin': 'http://www.xiami.com/',
    'Referer': 'http://www.xiami.com/',
}
class xiami_login(object):
    """Minimal xiami.com client: log in with email/password and perform
    the daily check-in ("signin") task.

    The member_auth cookie obtained at login is cached on the instance so
    repeated checkin() calls do not re-authenticate.
    """
    def __init__(self, email, password):
        self.email = email
        self.password = password
        self._auth = None  # member_auth cookie value, set by login()
    def login(self):
        """POST the login form and cache/return the member_auth cookie."""
        print('login ....')
        _form = {
            'email': self.email,
            'password': self.password,
            'LoginButton': '登录',
        }
        data = urllib.urlencode(_form)
        headers = {'User-Agent': ua}
        headers['Referer'] = 'http://www.xiami.com/web/login'
        headers['Content-Type'] = 'application/x-www-form-urlencoded'
        with closing(httplib.HTTPConnection('www.xiami.com')) as conn:
            conn.request('POST', '/web/login', data, headers)
            res = conn.getresponse()
            cookie = res.getheader('Set-Cookie')
            self._auth = SimpleCookie(cookie)['member_auth'].value
        print('login success')
        return self._auth
    def checkin(self):
        """Perform the daily check-in, logging in first when needed.

        Returns the raw response body of the signin endpoint.
        """
        if not self._auth:
            self.login()
        # Bug fix: copy the shared header template instead of aliasing it.
        # The original mutated the module-level checkin_headers dict, so this
        # instance's Cookie header leaked into every other user of it.
        headers = dict(checkin_headers)
        headers['Cookie'] = 'member_auth=%s; t_sign_auth=1' % self._auth
        with closing(httplib.HTTPConnection('www.xiami.com')) as conn:
            conn.request('POST', '/task/signin', None, headers)
            res = conn.getresponse()
            return res.read()
####################################################################
| mit | Python | |
75f81ff20dc3953b3b7c2e064105da34dde2edbf | Add experiment job manager | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon_cli/managers/experiment_job.py | polyaxon_cli/managers/experiment_job.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import sys
from polyaxon_cli.managers.base import BaseConfigManager
from polyaxon_cli.utils.formatting import Printer
from polyaxon_schemas.experiment import ExperimentJobConfig
class ExperimentJobManager(BaseConfigManager):
    """Config manager backed by the local .polyaxonxpjob file."""
    # Manager wiring: project-local (not global) config file.
    IS_GLOBAL = False
    IS_POLYAXON_DIR = True
    CONFIG_FILE_NAME = '.polyaxonxpjob'
    CONFIG = ExperimentJobConfig
    @classmethod
    def get_config_or_raise(cls):
        """Return the stored job config, aborting the CLI when absent."""
        config = cls.get_config()
        if config:
            return config
        Printer.print_error('No job uuid was provided.')
        sys.exit(1)
| apache-2.0 | Python | |
83bda2fbb116d46dc0f4e6eb2d63f3f90a6b9733 | fix bugs in travis ci | ideaworld/BioDesigner,igemsoftware/HFUT-China_2015,igemsoftware/HFUT-China_2015,ideaworld/BioDesigner,igemsoftware/HFUT-China_2015,ideaworld/BioDesigner | BioDesigner/settings.py | BioDesigner/settings.py | """
Django settings for BioDesigner project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control; move it to an
# environment variable (or untracked file) before any production deploy.
SECRET_KEY = 'd(ug9p%lojqv7ik_m_0hqdkk75d&d@1uewycs+77=so($ukf4('
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# NOTE(review): empty ALLOWED_HOSTS only works while DEBUG is True.
ALLOWED_HOSTS = []
# Application definition
# Django contrib apps first, then the project's own apps.
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'accounts',
    'design',
    'version',
    'system',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'BioDesigner.urls'
WSGI_APPLICATION = 'BioDesigner.wsgi.application'
# Outgoing mail: Gmail SMTP over TLS.
# NOTE(review): no EMAIL_HOST_PASSWORD is set here, so sending will fail
# unless it is supplied elsewhere — confirm with deployment config.
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'biodesigner.bio@gmail.com'
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = 'biodesigner@bio.com'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# NOTE(review): local MySQL as root with an empty password — acceptable on
# a dev box / Travis CI, never in production.
DATABASES = {
    'default' : {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'biodesigner',
        'USER': 'root',
        'PASSWORD': '',
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, 'static'),
    os.path.join(BASE_DIR, 'uploads'),
    os.path.join(BASE_DIR, 'downloads'),
)
STATIC_URL = '/static/'
| apache-2.0 | Python | |
d875d24d29b21d431f4ef5f0c95515afc0e78933 | Add Pidgin test | rolisz/Log,rolisz/Log | src/tests/pidgin_test.py | src/tests/pidgin_test.py | import unittest
import io
from src.data import parsers
conv = """<html><head><meta http-equiv="content-type" content="text/html; charset=UTF-8"><title>Conversation with Person 1 at Tue 31 Jul 2013 02:19:13 PM EEST on person2@gmail.com/ (jabber)</title></head><body><h3>Conversation with Person 1 at Tue 31 Jul 2012 02:19:13 PM EEST on person2@gmail.com/ (jabber)</h3>
<font color="#A82F2F"><font size="2">(02:19:32 PM)</font> <b>Person 1:</b></font> <html xmlns='http://jabber.org/protocol/xhtml-im'><body xmlns='http://www.w3.org/1999/xhtml'><span style='font-family: Arial; font-size: 10pt; color: #000000;'>Hello there</span></body></html><br/>
<font color="#16569E"><font size="2">(02:20:44 PM)</font> <b>person2@gmail.com/09AC6EA0:</b></font>...<br/>
<font color="#A82F2F"><font size="2">(02:30:43 PM)</font> <b>Person 1:</b></font> <html xmlns='http://jabber.org/protocol/xhtml-im'><body xmlns='http://www.w3.org/1999/xhtml'><span style='font-family: Arial; font-size: 10pt; color: #000000;'>what'sup</span></body></html><br/>
<font color="#16569E"><font size="2">(02:30:54 PM)</font> <b>person2@gmail.com/09AC6EA0:</b></font>riiight<br/>
<font color="#16569E"><font size="2">(04:48:09 PM)</font> <b>person2@gmail.com/09AC6EA0:</b></font> :>:><br/>
<font color="#16569E"><font size="2">(04:42:11 PM)</font> <b>Person 1:</b></font> <a href="http://www.test.org">http://www.test.org</a><br/>
</body></html>
"""
class PidginTest(unittest.TestCase):
    """Tests for the Pidgin HTML log parser (src.data.parsers.Pidgin)."""
    def setUp(self):
        # Fresh parser instance per test.
        self.pidgin = parsers.Pidgin()
    def testNormal(self):
        """Parse the conv fixture and check contacts + per-line fields."""
        # parse_file() reads a file-like object; its .name supplies the
        # local account address.
        fake = io.StringIO(conv)
        fake.name = "me@gmail.com"
        contacts, lines = self.pidgin.parse_file(fake)
        # Person 2 has address in messages. If ever changed to take contacts
        # from header, this test should change
        self.assertSetEqual(contacts, {"Person 1", "person2@gmail.com/09AC6EA0"})
        self.assertEqual(len(lines), 6)
        # Has space prefix because of inline HTML. If ever looking into doing it
        # more correctly, this can go away too.
        self.assertEqual(lines[0]['message'], " Hello there")
        self.assertEqual(lines[1]['contact'], "person2@gmail.com/09AC6EA0")
        self.assertEqual(lines[0]['protocol'], "Hangouts")
        self.assertEqual(lines[1]['source'], "Pidgin")
        # Timestamp is normalized to ISO-8601 using the log's date.
        self.assertEqual(lines[0]['timestamp'], "2013-07-31T14:19:32")
        self.assertEqual(lines[4]['message'], " :>:>")
        self.assertEqual(lines[5]['message'], " http://www.test.org")
| mit | Python | |
74764e9949de82d4623167e3604d313bc6cf850e | add rejidge | Changron/NTHUOJ_web,nthuoj/NTHUOJ_web,geniusgordon/NTHUOJ_web,nthuoj/NTHUOJ_web,geniusgordon/NTHUOJ_web,Changron/NTHUOJ_web,bruce3557/NTHUOJ_web,geniusgordon/NTHUOJ_web,bbiiggppiigg/NTHUOJ_web,bbiiggppiigg/NTHUOJ_web,bruce3557/NTHUOJ_web,henryyang42/NTHUOJ_web,henryyang42/NTHUOJ_web,henryyang42/NTHUOJ_web,Changron/NTHUOJ_web,bruce3557/NTHUOJ_web,nthuoj/NTHUOJ_web,bbiiggppiigg/NTHUOJ_web | utils/rejudge.py | utils/rejudge.py | """
The MIT License (MIT)
Copyright (c) 2014 NTHUOJ team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from problem.models import Problem
from problem.models import Submission
from problem.models import SubmissionDetail
from contest.models import Contest
'''
rejudge:
    1. problem's all submission
    2. a single submission
    3. submissions during contest
'''
def rejudge(obj):
    """Dispatch a rejudge request based on the object's type.

    Accepts a Problem (rejudge all its submissions), a single Submission,
    or a Contest (rejudge all submissions made during the contest).
    Objects of any other type are silently ignored.
    """
    # Bug fix: the original tested isinstance() against the undefined
    # lowercase names `problem`/`submission`/`contest`, which raised
    # NameError at runtime.  The imported model classes are capitalized.
    if isinstance(obj, Problem):
        rejudge_problem(obj)
    elif isinstance(obj, Submission):
        rejudge_submission(obj)
    elif isinstance(obj, Contest):
        rejudge_contest(obj)
# Rejudge every submission that belongs to the given problem.
def rejudge_problem(problem):
    """Queue all submissions of `problem` for rejudging."""
    for sub in Submission.objects.filter(problem=problem):
        rejudge_submission(sub)
# Reset one submission so the judge picks it up again.
def rejudge_submission(submission):
    """Put `submission` back into the WAIT state and drop old results."""
    submission.status = Submission.WAIT
    submission.save()
    # Discard per-testcase details from the previous judging run.
    SubmissionDetail.objects.filter(sid=submission).delete()
# Rejudge everything submitted while the contest was running.
def rejudge_contest(contest):
    """Rejudge all contest problems' submissions made during `contest`."""
    for contest_problem in contest.problem.all():
        rejudge_contest_problem(contest, contest_problem)
# Rejudge one problem's submissions that fall inside the contest window.
def rejudge_contest_problem(contest, problem):
    """Rejudge submissions to `problem` made between contest start/end."""
    in_window = Submission.objects.filter(
        problem=problem,
        submit_time__gte=contest.start_time,
        submit_time__lte=contest.end_time)
    for sub in in_window:
        rejudge_submission(sub)
| mit | Python | |
d1588bdf0a672de8d7d4f4f9cddcc236f5b9026e | Add plot for color property combinations to examples. | CrazyGuo/bokeh,ericdill/bokeh,saifrahmed/bokeh,stuart-knock/bokeh,schoolie/bokeh,PythonCharmers/bokeh,alan-unravel/bokeh,percyfal/bokeh,mindriot101/bokeh,satishgoda/bokeh,roxyboy/bokeh,daodaoliang/bokeh,ChristosChristofidis/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,ChristosChristofidis/bokeh,Karel-van-de-Plassche/bokeh,CrazyGuo/bokeh,msarahan/bokeh,stonebig/bokeh,mindriot101/bokeh,bokeh/bokeh,rothnic/bokeh,bokeh/bokeh,aiguofer/bokeh,laurent-george/bokeh,xguse/bokeh,timothydmorton/bokeh,stuart-knock/bokeh,mutirri/bokeh,CrazyGuo/bokeh,birdsarah/bokeh,draperjames/bokeh,justacec/bokeh,msarahan/bokeh,tacaswell/bokeh,bsipocz/bokeh,josherick/bokeh,jakirkham/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,laurent-george/bokeh,ericmjl/bokeh,muku42/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,caseyclements/bokeh,clairetang6/bokeh,ChristosChristofidis/bokeh,KasperPRasmussen/bokeh,srinathv/bokeh,jakirkham/bokeh,maxalbert/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,gpfreitas/bokeh,azjps/bokeh,aavanian/bokeh,ChinaQuants/bokeh,josherick/bokeh,tacaswell/bokeh,ericmjl/bokeh,muku42/bokeh,mutirri/bokeh,aiguofer/bokeh,stuart-knock/bokeh,aiguofer/bokeh,tacaswell/bokeh,timsnyder/bokeh,justacec/bokeh,aavanian/bokeh,dennisobrien/bokeh,quasiben/bokeh,stonebig/bokeh,dennisobrien/bokeh,PythonCharmers/bokeh,daodaoliang/bokeh,rhiever/bokeh,rothnic/bokeh,paultcochrane/bokeh,bsipocz/bokeh,mindriot101/bokeh,ptitjano/bokeh,akloster/bokeh,muku42/bokeh,paultcochrane/bokeh,carlvlewis/bokeh,justacec/bokeh,alan-unravel/bokeh,laurent-george/bokeh,phobson/bokeh,maxalbert/bokeh,schoolie/bokeh,alan-unravel/bokeh,xguse/bokeh,carlvlewis/bokeh,eteq/bokeh,aavanian/bokeh,azjps/bokeh,percyfal/bokeh,deeplook/bokeh,ericdill/bokeh,draperjames/bokeh,eteq/bokeh,carlvlewis/bokeh,timsnyder/bokeh,percyfal/bokeh,ptitjano/bokeh,xguse/bokeh,abele/bokeh,mindriot101/
bokeh,matbra/bokeh,phobson/bokeh,stonebig/bokeh,josherick/bokeh,alan-unravel/bokeh,aavanian/bokeh,bsipocz/bokeh,dennisobrien/bokeh,evidation-health/bokeh,rothnic/bokeh,Karel-van-de-Plassche/bokeh,matbra/bokeh,birdsarah/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,rs2/bokeh,htygithub/bokeh,ahmadia/bokeh,quasiben/bokeh,paultcochrane/bokeh,draperjames/bokeh,jakirkham/bokeh,timsnyder/bokeh,caseyclements/bokeh,evidation-health/bokeh,maxalbert/bokeh,timothydmorton/bokeh,azjps/bokeh,laurent-george/bokeh,philippjfr/bokeh,evidation-health/bokeh,saifrahmed/bokeh,ericdill/bokeh,azjps/bokeh,timsnyder/bokeh,satishgoda/bokeh,tacaswell/bokeh,bokeh/bokeh,jplourenco/bokeh,srinathv/bokeh,timothydmorton/bokeh,awanke/bokeh,phobson/bokeh,evidation-health/bokeh,srinathv/bokeh,satishgoda/bokeh,ptitjano/bokeh,mutirri/bokeh,ericdill/bokeh,philippjfr/bokeh,matbra/bokeh,ChinaQuants/bokeh,satishgoda/bokeh,akloster/bokeh,khkaminska/bokeh,rs2/bokeh,ericmjl/bokeh,birdsarah/bokeh,roxyboy/bokeh,xguse/bokeh,deeplook/bokeh,bokeh/bokeh,dennisobrien/bokeh,eteq/bokeh,ptitjano/bokeh,rhiever/bokeh,DuCorey/bokeh,paultcochrane/bokeh,PythonCharmers/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,schoolie/bokeh,bsipocz/bokeh,stonebig/bokeh,jplourenco/bokeh,philippjfr/bokeh,schoolie/bokeh,rs2/bokeh,gpfreitas/bokeh,birdsarah/bokeh,ahmadia/bokeh,timothydmorton/bokeh,maxalbert/bokeh,khkaminska/bokeh,philippjfr/bokeh,aiguofer/bokeh,phobson/bokeh,eteq/bokeh,carlvlewis/bokeh,clairetang6/bokeh,PythonCharmers/bokeh,azjps/bokeh,clairetang6/bokeh,daodaoliang/bokeh,saifrahmed/bokeh,khkaminska/bokeh,clairetang6/bokeh,schoolie/bokeh,awanke/bokeh,ericmjl/bokeh,akloster/bokeh,percyfal/bokeh,abele/bokeh,philippjfr/bokeh,daodaoliang/bokeh,gpfreitas/bokeh,justacec/bokeh,jplourenco/bokeh,jplourenco/bokeh,rhiever/bokeh,caseyclements/bokeh,htygithub/bokeh,deeplook/bokeh,awanke/bokeh,abele/bokeh,josherick/bokeh,caseyclements/bokeh,bokeh/bokeh,srinathv/bokeh,DuCorey/bo
keh,percyfal/bokeh,KasperPRasmussen/bokeh,saifrahmed/bokeh,htygithub/bokeh,ChristosChristofidis/bokeh,rothnic/bokeh,ptitjano/bokeh,ericmjl/bokeh,quasiben/bokeh,awanke/bokeh,rhiever/bokeh,DuCorey/bokeh,ahmadia/bokeh,akloster/bokeh,msarahan/bokeh,deeplook/bokeh,ChinaQuants/bokeh,jakirkham/bokeh,msarahan/bokeh,muku42/bokeh,roxyboy/bokeh,abele/bokeh,ahmadia/bokeh,CrazyGuo/bokeh,stuart-knock/bokeh,matbra/bokeh,khkaminska/bokeh,roxyboy/bokeh,mutirri/bokeh,draperjames/bokeh | examples/plotting/file/properties_alpha.py | examples/plotting/file/properties_alpha.py | import bokeh.plotting as plt
from itertools import product
plt.output_file('properties_alpha.html')
# One axis category per way of specifying colour/alpha; used on both axes.
cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA']
p = plt.figure(x_range=cats, y_range=cats,
               title="Fill and Line Color Property Combinations")
# Base colours (red-ish fill, blue-ish line); the *_alpha variants embed
# the alpha channel inside the RGBA tuple itself.
alpha = 0.5
fill_color = (242, 44, 64)
fill_color_alpha = (242, 44, 64, alpha)
line_color = (64, 126, 231)
line_color_alpha = (64, 126, 231, alpha)
# define fill and line color combinations
# Each entry: (1-based axis position, glyph keyword arguments).
fill = [(1, {'fill_color': fill_color}),
        (2, {'fill_color': fill_color_alpha}),
        (3, {'fill_alpha': alpha, 'fill_color': fill_color}),
        (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})]
line = [(1, {'line_color': line_color}),
        (2, {'line_color': line_color_alpha}),
        (3, {'line_alpha': alpha, 'line_color': line_color}),
        (4, {'line_alpha': alpha, 'line_color': line_color_alpha})]
# plot intersection of fill and line combinations
combinations = product(fill, line)
for comb in combinations:
    x, fill_options = comb[0]
    y, line_options = comb[1]
    # Merge fill and line kwargs into a single option dict for the glyph.
    options = fill_options.copy()
    options.update(line_options)
    p.circle(x, y, line_width=7, size=50, **options)
p.xaxis[0].axis_label = "Fill Options"
p.yaxis[0].axis_label = "Line Options"
plt.show(p)
| bsd-3-clause | Python | |
bbc08cc30837ba1ce505d346b67f5808aed628af | Create OverlapGraphs.py | guylaor/rosalind | strings/OverlapGraphs.py | strings/OverlapGraphs.py | # solution for http://rosalind.info/problems/grph/
import re
Strings = {}
Prefixes = {}
Suffixes = {}
index = ""
f = open('fasta.txt', 'r')
for line in f:
match = re.match(r">", line)
#match = str(line).find(">")
if match:
index = line.strip()
index = index[1:len(index)]
Strings[index] = ""
Prefixes[index] = ""
Suffixes[index] = ""
else:
Strings[index] = Strings[index] + line.strip()
Prefixes[index] = Strings[index][:3]
Suffixes[index] = Strings[index][-3:]
for v, i in enumerate(Strings):
for v2, i2 in enumerate(Strings):
if Prefixes[i] == Suffixes[i2]:
if i != i2:
print i2, i
| mpl-2.0 | Python | |
090c73c20e3a57f5b2710c270b0dfc139633d623 | Add tests for GameNode module | blairck/jaeger | test/test_gamenode.py | test/test_gamenode.py | """ Tests for the GameNode module """
from contextlib import contextmanager
from io import StringIO
import sys
import unittest
from src import gamenode
@contextmanager
def captured_output():
    """Temporarily replace sys.stdout with a StringIO buffer.

    Yields the buffer so the caller can inspect anything printed inside
    the with-block; the previous stdout is restored on exit, even when
    the block raises.
    """
    buffer = StringIO()
    original = sys.stdout
    sys.stdout = buffer
    try:
        yield buffer
    finally:
        sys.stdout = original
class TestGameNode(unittest.TestCase):
    """ Tests for the GameNode module """
    def test_default_instantiation(self):
        """ Test a known default instantiation """
        gn_obj = gamenode.GameNode()
        # A fresh node's board cells default to -1 (empty sentinel).
        result = gn_obj.gameState[0][0]
        self.assertEqual(result, -1)
        # Search-related flags/score start falsy.
        self.assertFalse(gn_obj.leafP)
        self.assertFalse(gn_obj.rootP)
        self.assertFalse(gn_obj.score)
    def test_initialize(self):
        """ Test initialization """
        gn_obj = gamenode.GameNode()
        # initialize() is expected to return a falsy value.
        result = gn_obj.initialize()
        self.assertFalse(result)
    def test_getState_default(self):
        """ Test a known getState value """
        gn_obj = gamenode.GameNode()
        result = gn_obj.getState(0, 0)
        self.assertEqual(result, -1)
    def test_getState_bad_location(self):
        """ Test getState with a bad location """
        gn_obj = gamenode.GameNode()
        # Out-of-range coordinates must raise IndexError.
        self.assertRaises(IndexError, gn_obj.getState, 0, 100)
    def test_setState_good_location(self):
        """ Test setState with good location """
        gn_obj = gamenode.GameNode()
        # A value written with setState must be read back by getState.
        gn_obj.setState(0, 0, 5)
        result = gn_obj.getState(0, 0)
        self.assertEqual(result, 5)
    def test_setState_bad_location(self):
        """ Test setState with bad location """
        gn_obj = gamenode.GameNode()
        self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5)
    def test_print_board(self):
        """Check that print_board works"""
        # Capture stdout via the captured_output() helper and compare the
        # rendered default board (all cells -1) against the expected layout.
        with captured_output() as out:
            gn_obj = gamenode.GameNode()
            gn_obj.print_board()
            actual_print = out.getvalue().strip()
            expected_print = ("-1-1-1 \n"
                              " -1-1-1 \n"
                              "-1-1-1-1-1-1-1\n"
                              "-1-1-1-1-1-1-1\n"
                              "-1-1-1-1-1-1-1\n"
                              " -1-1-1 \n"
                              " -1-1-1")
            self.assertEqual(actual_print, expected_print)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.