commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
dc9c9cfd45726e4f3458fcc0f02c29c61482c0bc | Create gzip_identifier.py | Bindernews/TheHound | identifiers/gzip_identifier.py | identifiers/gzip_identifier.py | from identifier import Result
GZ_PATTERNS = [
'1F 8B 08 08'
]
class GzResolver:
def identify(self, stream):
return Result('GZ')
def load(hound):
hound.add_matches(GZ_PATTERNS, GzResolver())
| mit | Python | |
ffe76ac5d3cd5485dd288415a5845c2d5a247633 | add shipping | douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo,douglassquirrel/combo,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/microservices-hackathon-july-2014,douglassquirrel/combo | components/shipping_reckoner/shipping_reckoner.py | components/shipping_reckoner/shipping_reckoner.py | #! /usr/bin/env python
from json import loads, dumps
from pika import BlockingConnection, ConnectionParameters
RABBIT_MQ_HOST = '54.76.183.35'
RABBIT_MQ_PORT = 5672
def shipping(ch, method, properties, body):
product = loads(body)
sku, price = product['sku'], product['price']
if price < 50:
shipping = 5.0
else:
shipping = 10.0
shipping_fact = {'sku': sku, 'shipping': shipping}
print 'Calculated shipping %s' % (shipping_fact,)
channel.basic_publish(exchange='alex2',
routing_key='shipping',
body=dumps(shipping_fact))
connection = BlockingConnection(ConnectionParameters(host=RABBIT_MQ_HOST,
port=RABBIT_MQ_PORT))
channel = connection.channel()
channel.exchange_declare(exchange='alex2', type='topic')
result = channel.queue_declare(exclusive=True)
queue = result.method.queue
channel.queue_bind(exchange='alex2', queue=queue, routing_key='new_products')
channel.basic_consume(shipping, queue=queue, no_ack=True)
channel.start_consuming()
| mit | Python | |
112fadcee2bcb0f0122916ed248df7c965189c36 | Add pagination tests for permission groups | mociepka/saleor,mociepka/saleor,mociepka/saleor | tests/api/pagination/test_account.py | tests/api/pagination/test_account.py | import pytest
from django.contrib.auth import models as auth_models
from ..utils import get_graphql_content
@pytest.fixture
def permission_groups_for_pagination(db):
return auth_models.Group.objects.bulk_create(
[
auth_models.Group(name="Group1"),
auth_models.Group(name="GroupGroup1"),
auth_models.Group(name="GroupGroup2"),
auth_models.Group(name="Group2"),
auth_models.Group(name="Group3"),
]
)
QUERY_PERMISSION_GROUPS_PAGINATION = """
query (
$first: Int, $last: Int, $after: String, $before: String,
$sortBy: PermissionGroupSortingInput, $filter: PermissionGroupFilterInput
){
permissionGroups (
first: $first, last: $last, after: $after, before: $before,
sortBy: $sortBy, filter: $filter
) {
edges {
node {
name
}
}
pageInfo{
startCursor
endCursor
hasNextPage
hasPreviousPage
}
}
}
"""
@pytest.mark.parametrize(
"sort_by, permission_groups_order",
[
({"field": "NAME", "direction": "ASC"}, ["Group1", "Group2", "Group3"]),
(
{"field": "NAME", "direction": "DESC"},
["GroupGroup2", "GroupGroup1", "Group3"],
),
],
)
def test_permission_groups_pagination_with_sorting(
sort_by,
permission_groups_order,
staff_api_client,
permission_manage_staff,
permission_groups_for_pagination,
):
page_size = 3
variables = {"first": page_size, "after": None, "sortBy": sort_by}
response = staff_api_client.post_graphql(
QUERY_PERMISSION_GROUPS_PAGINATION,
variables,
permissions=[permission_manage_staff],
)
content = get_graphql_content(response)
permission_groups_nodes = content["data"]["permissionGroups"]["edges"]
assert permission_groups_order[0] == permission_groups_nodes[0]["node"]["name"]
assert permission_groups_order[1] == permission_groups_nodes[1]["node"]["name"]
assert permission_groups_order[2] == permission_groups_nodes[2]["node"]["name"]
assert len(permission_groups_nodes) == page_size
@pytest.mark.parametrize(
"filter_by, permission_groups_order",
[
({"search": "GroupGroup"}, ["GroupGroup1", "GroupGroup2"]),
({"search": "Group1"}, ["Group1", "GroupGroup1"]),
],
)
def test_permission_groups_pagination_with_filtering(
filter_by,
permission_groups_order,
staff_api_client,
permission_manage_staff,
permission_groups_for_pagination,
):
page_size = 2
variables = {"first": page_size, "after": None, "filter": filter_by}
response = staff_api_client.post_graphql(
QUERY_PERMISSION_GROUPS_PAGINATION,
variables,
permissions=[permission_manage_staff],
)
content = get_graphql_content(response)
permission_groups_nodes = content["data"]["permissionGroups"]["edges"]
assert permission_groups_order[0] == permission_groups_nodes[0]["node"]["name"]
assert permission_groups_order[1] == permission_groups_nodes[1]["node"]["name"]
assert len(permission_groups_nodes) == page_size
| bsd-3-clause | Python | |
8c85761d37625393cdb70664af33853cbe105a9d | Create binarytree_intree.py | HeyIamJames/CodingInterviewPractice,HeyIamJames/CodingInterviewPractice | binarytree_intree.py | binarytree_intree.py | """"
find if a is a subset of b,
where both a and b are binary trees
"""
| mit | Python | |
cbef1510b8bf0e7d0d9a3bb5a678d2d118822979 | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/d33e1ade02f55fd63e04ccf6e6188d009c9d3b69. | tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_onc
e,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,karllessard/tensorflow,yongtang/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "d33e1ade02f55fd63e04ccf6e6188d009c9d3b69"
TFRT_SHA256 = "ac62839411fe0f0cde68daa8f50a3563dccda1b1623e7c7131d71218ae514bc8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "272fc1c72b798def2a9e2a384c478a6ee4b357c9"
TFRT_SHA256 = "a6173ac76fc7d2361729f6c268b3cf3b39d40c82930b13f69f9f7b8d9a0fbf93"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
aeafc18cee3e8d833160d3bc2df6a971878022fe | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/90ada7c89c5d812ae3fe09d7c2cbd9a77e7273b8. | yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflo
w,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "90ada7c89c5d812ae3fe09d7c2cbd9a77e7273b8"
TFRT_SHA256 = "8f2aab5b834112dd815bac9b04168019914e307826db523c13c3aebfdc127eca"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "e66a3653ca77861cb3d9c34eb6bbe05d4f67dfae"
TFRT_SHA256 = "edabb53b75f8fef59d308310d2bf01d50d6ed783d49da8bd1514aca108b968e8"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
941f89dfb8f4c8bf0a8445d3c57ff9e652ad85d1 | Create server.py | rimpybharot/CMPE273 | lab1/server.py | lab1/server.py | ok
| mit | Python | |
bc46efa788a0e3d0aacab724e16b8da01a671331 | Add p1. | bm5w/5problems | p1.py | p1.py | def p1_for(input_list):
"""Compute some of numbers in list with for loop."""
out = 0
for i in input_list:
out += i
return out
def p1_while(input_list):
"""Compute some of numbers in list with while loop."""
out = 0
count = 0
while count < len(input_list):
out += input_list[count]
return out
def p1_rec(input_list, out=0):
"""Compute some of numbers in list with for loop."""
if len(input_list) > 0:
p1_rec(input_list[1:], out+input_list[0])
else:
return out
| mit | Python | |
798a7e553bd67c5a75c7639b73f3a22c03bf32a2 | Create simple_RSA_implementation.py | IEEE-NITK/Daedalus,IEEE-NITK/Daedalus,chinmaydd/NITK_IEEE_SaS,IEEE-NITK/Daedalus | Prabhanjan/simple_RSA_implementation.py | Prabhanjan/simple_RSA_implementation.py | Implementation of RSA
#This can primarily be divided into 3 steps-
#1. Key Generation
#2. Encryption
#3. Decryption
import math
import random
def create_list_primes(sieve_size):
#Returns a list of primes less than or equal to sieveSize
sieve=[True]*sieve_size
sieve[0]=False
sieve[1]=False
#zero and one are not prime
#creating the sieve
for i in range (2,int(math.sqrt(sieve_size))+1):
pointer=i*2
while pointer<sieve_size:
sieve[pointer]=False
pointer+=i
#compile list of primes
primes=[]
for i in range(sieve_size):
if sieve[i]==True:
primes.append(i)
return primes
def gcd(a, b):
#Returns the greatest common divisor of two numbers a,b
while b:
a,b=b,a%b
return a
def extended_euclid(e,n):
#Returns number d which is the multiplicative inverse of e congruent modulo n.
d,new_d,r,new_r=0,1,n,e
while new_r!=0:
quotient=r/new_r
d,new_d=new_d,d-quotient*new_d
r,new_r=new_r,r-quotient*new_r
if d<0:
d=d+n
return d
def expmod(m,e,n):
#Returns c congruent to m power e modulo n.
c=1
for i in range(0,e):
c=(c*m)%n
return c
def encrypt(m,e,n):
#Encrypts message m with public key exponent e.
#To be combined with expmod as the same function.
return expmod(m,e,n)
def decrypt(c,d,n):
#Decrypts message using private key d
return expmod(c,d,n)
#Key Generation consists of the following steps-
#1. Selecting 2 large distinct prime numbers- p,q at random.
# We use Sieve of Eratosthenes in the create_list_primes function to do this. Can be improved using better Number Field Sieves.
primes=create_list_primes(5000)
p,q=0,0
while p<1000:
p=random.choice(primes)
while (q<1000 or q==p):
q=random.choice(primes)
#2. We calculate n=pq modulus for the public and private
n=p*q
#3. We compute Phi(n)-Number of natural numbers which are less than n and relatively prime to n.
#Since n=pq, Phi(n)=Phi(p)Phi(q)
#If p is any prime, then Phi(p)=p-1
phi_n=(p-1)*(q-1)
#4. We choose an arbitrary integer e such that 1<e<Phi(n) and e,Phi(n) are co prime. e is released as the public key exponent
e=1
while 1:
e=random.choice(range(2,phi_n))
if gcd(e,phi_n)==1:
break
#5. We compute d- multiplicative inverse of e congruent modulo phi_n. We use Extended Euclidean Algorithm to compute d. d is kept as private key exponent
d=extended_euclid(e,phi_n)
print "Public Key pair- %d,%d\nPrivate Key-%d\n"%(e,n,d)
#Encryption
#Each byte of the message is encrypted as c=m^e(mod n), where c is the encrypted byte, m is the message byte.
plaintext=raw_input("Enter message to be encrypted- ")
ciphertext=[]
for i in plaintext:
ciphertext.append(encrypt(ord(i),e,n))
print "Encyrpted Message-",
print ciphertext
#Decryption
#Each byte of the encrypted message is decrypted using the private key exponent
message=""
for i in ciphertext:
message=message+chr(decrypt(i,d,n))
print "Decrypted message-",
print message
| mit | Python | |
1a6818d4829c3da42750f6d0f042df203434595c | Add a little to models | LeeYiFang/Carkinos,LeeYiFang/Carkinos,LeeYiFang/Carkinos,LeeYiFang/Carkinos | Carkinos/probes/migrations/0002_auto_20160106_2307.py | Carkinos/probes/migrations/0002_auto_20160106_2307.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-06 15:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('probes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ProbeID',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Probe_id', models.CharField(max_length=20)),
('Gene_symbol', models.CharField(max_length=20)),
('Entrez_id', models.IntegerField()),
('Gene_name', models.TextField(blank=True, default='')),
],
),
migrations.DeleteModel(
name='ProbeID_GeneSymbol',
),
]
| mit | Python | |
96d89e4398a68aab8df033847a1cf8fa47cef5dc | Create find-duplicate-file-in-system.py | yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,jaredkoontz/leetcode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,jaredkoontz/leetcode,yiwen-luo/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,jaredkoontz/leetcode | Python/find-duplicate-file-in-system.py | Python/find-duplicate-file-in-system.py | # Time: O(n * l), l is the average length of file content
# Space: O(n * l)
# Given a list of directory info including directory path,
# and all the files with contents in this directory,
# you need to find out all the groups of duplicate files
# in the file system in terms of their paths.
#
# A group of duplicate files consists of at least two files that have exactly the same content.
#
# A single directory info string in the input list has the following format:
#
# "root/d1/d2/.../dm f1.txt(f1_content) f2.txt(f2_content) ... fn.txt(fn_content)"
#
# It means there are n files (f1.txt, f2.txt ... fn.txt
# with content f1_content, f2_content ... fn_content, respectively) in
# directory root/d1/d2/.../dm. Note that n >= 1 and m >= 0. If m = 0, it means the directory is just the root directory.
#
# The output is a list of group of duplicate file paths. For each group,
# it contains all the file paths of the files that have the same content.
# A file path is a string that has the following format:
#
# "directory_path/file_name.txt"
#
# Example 1:
# Input:
# ["root/a 1.txt(abcd) 2.txt(efgh)", "root/c 3.txt(abcd)", "root/c/d 4.txt(efgh)", "root 4.txt(efgh)"]
# Output:
# [["root/a/2.txt","root/c/d/4.txt","root/4.txt"],["root/a/1.txt","root/c/3.txt"]]
# Note:
# No order is required for the final output.
# You may assume the directory name, file name and file content only has letters
# and digits, and the length of file content is in the range of [1,50].
#
# The number of files given is in the range of [1,20000].
# You may assume no files or directories share the same name in the same directory.
# You may assume each given directory info represents a unique directory.
# Directory path and file info are separated by a single blank space.
#
# Follow-up beyond contest:
# 1. Imagine you are given a real file system, how will you search files? DFS or BFS?
# 2. If the file content is very large (GB level), how will you modify your solution?
# 3. If you can only read the file by 1kb each time, how will you modify your solution?
# 4. What is the time complexity of your modified solution?
# What is the most time-consuming part and memory consuming part of it? How to optimize?
# 5. How to make sure the duplicated files you find are not false positive?
class Solution(object):
def findDuplicate(self, paths):
"""
:type paths: List[str]
:rtype: List[List[str]]
"""
files = collections.defaultdict(list)
for path in paths:
s = path.split(" ")
for i in xrange(1,len(s)):
file_name = s[0] + "/" + s[i][0:s[i].find("(")]
file_content = s[i][s[i].find("(")+1:s[i].find(")")]
files[file_content].append(file_name)
result = []
for file_content, file_names in files.iteritems():
if len(file_names)>1:
result.append(file_names)
return result
| mit | Python | |
2a6d8bc208463a0f903a2b62021abb913ab510c5 | Add TestSuiteHandler. | joyxu/kernelci-backend,kernelci/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend | app/handlers/test_suite.py | app/handlers/test_suite.py | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The RequestHandler for /test/suite URLs."""
import bson
import datetime
import types
import handlers.base as hbase
import handlers.common as hcommon
import handlers.response as hresponse
import models
import models.test_suite as mtsuite
import utils.db
class TestSuiteHandler(hbase.BaseHandler):
def __init__(self, application, request, **kwargs):
super(TestSuiteHandler, self).__init__(application, request, **kwargs)
@property
def collection(self):
return models.TEST_SUITE_COLLECTION
@staticmethod
def _valid_keys(method):
return hcommon.TEST_SUITE_VALID_KEYS.get(method, None)
@staticmethod
def _token_validation_func():
return hcommon.valid_token_tests
def _post(self, *args, **kwargs):
response = hresponse.HandlerResponse()
suite_id = kwargs.get("id", None)
if suite_id:
response.status_code = 400
response.reason = "To update a test suite, use a PUT request"
else:
test_suite_json = kwargs.get("json_obj", None)
suite_pop = test_suite_json.pop
test_set = suite_pop(models.TEST_SET_KEY, [])
test_case = suite_pop(models.TEST_CASE_KEY, [])
test_suite = mtsuite.TestSuiteDocument.from_json(test_suite_json)
test_suite.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
ret_val, doc_id = utils.db.save(self.db, test_suite)
if ret_val == 201:
response.status_code = ret_val
response.reason = (
"Test suite '%s' created with ID: %s" %
(test_suite.name, doc_id))
if all([test_set, isinstance(test_set, types.ListType)]):
response.status_code = 202
response.messages = (
"Associated test sets will be parsed and imported")
# TODO: import async the test sets.
pass
if all([test_case, isinstance(test_case, types.ListType)]):
response.status_code = 202
response.messages = (
"Associated test cases will be parsed and imported")
# TODO: import async the test cases.
pass
else:
response.status_code = 500
response.reason = (
"Error saving test set '%s'" % test_suite.name)
return response
| agpl-3.0 | Python | |
d1ce76f33bdf93ee631f3ee38b565e79a1f38343 | add a demo of building a custom python wrapper using libgvc | tkelman/graphviz,pixelglow/graphviz,tkelman/graphviz,MjAbuz/graphviz,pixelglow/graphviz,ellson/graphviz,BMJHayward/graphviz,kbrock/graphviz,BMJHayward/graphviz,pixelglow/graphviz,jho1965us/graphviz,jho1965us/graphviz,ellson/graphviz,kbrock/graphviz,BMJHayward/graphviz,tkelman/graphviz,BMJHayward/graphviz,BMJHayward/graphviz,tkelman/graphviz,MjAbuz/graphviz,kbrock/graphviz,kbrock/graphviz,pixelglow/graphviz,tkelman/graphviz,MjAbuz/graphviz,tkelman/graphviz,jho1965us/graphviz,pixelglow/graphviz,MjAbuz/graphviz,pixelglow/graphviz,tkelman/graphviz,ellson/graphviz,BMJHayward/graphviz,jho1965us/graphviz,jho1965us/graphviz,pixelglow/graphviz,MjAbuz/graphviz,jho1965us/graphviz,pixelglow/graphviz,kbrock/graphviz,jho1965us/graphviz,ellson/graphviz,BMJHayward/graphviz,MjAbuz/graphviz,kbrock/graphviz,ellson/graphviz,ellson/graphviz,MjAbuz/graphviz,tkelman/graphviz,ellson/graphviz,jho1965us/graphviz,BMJHayward/graphviz,tkelman/graphviz,tkelman/graphviz,kbrock/graphviz,BMJHayward/graphviz,tkelman/graphviz,kbrock/graphviz,BMJHayward/graphviz,MjAbuz/graphviz,ellson/graphviz,jho1965us/graphviz,ellson/graphviz,pixelglow/graphviz,kbrock/graphviz,kbrock/graphviz,kbrock/graphviz,MjAbuz/graphviz,MjAbuz/graphviz,jho1965us/graphviz,jho1965us/graphviz,BMJHayward/graphviz,ellson/graphviz,pixelglow/graphviz,pixelglow/graphviz,MjAbuz/graphviz,ellson/graphviz | dot.demo/gv_test.py | dot.demo/gv_test.py | #!/usr/bin/python
import gv
g = gv.digraph("G")
n = gv.node(g,"hello")
m = gv.node(g,"world")
e = gv.edge(n,m)
gv.layout(g, "dot")
gv.render(g, "png", "gv_test.png")
gv.rm(g)
| epl-1.0 | Python | |
d9b4607819d985dee086ff4f435972d304e3a45e | update Site admin | zdw/xos,jermowery/xos,wathsalav/xos,xmaruto/mcord,cboling/xos,xmaruto/mcord,xmaruto/mcord,xmaruto/mcord,cboling/xos,wathsalav/xos,opencord/xos,wathsalav/xos,cboling/xos,open-cloud/xos,open-cloud/xos,jermowery/xos,wathsalav/xos,opencord/xos,jermowery/xos,cboling/xos,zdw/xos,open-cloud/xos,cboling/xos,jermowery/xos,zdw/xos,opencord/xos,zdw/xos | plstackapi/core/models/site.py | plstackapi/core/models/site.py | import os
from django.db import models
from plstackapi.core.models import PlCoreBase
from plstackapi.core.models import DeploymentNetwork
from plstackapi.openstack.driver import OpenStackDriver
# Create your models here.
class Site(PlCoreBase):
tenant_id = models.CharField(max_length=200, help_text="Keystone tenant id", blank=True)
name = models.CharField(max_length=200, help_text="Name for this Site")
site_url = models.URLField(null=True, blank=True, max_length=512, help_text="Site's Home URL Page")
enabled = models.BooleanField(default=True, help_text="Status for this Site")
longitude = models.FloatField(null=True, blank=True)
latitude = models.FloatField(null=True, blank=True)
login_base = models.CharField(max_length=50, unique=True, help_text="Prefix for Slices associated with this Site")
is_public = models.BooleanField(default=True, help_text="Indicates the visibility of this site to other members")
abbreviated_name = models.CharField(max_length=80)
deployments = models.ManyToManyField(DeploymentNetwork, blank=True, related_name='sites')
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
driver = OpenStackDriver()
if not self.tenant_id:
tenant = driver.create_tenant(tenant_name=self.login_base,
description=self.name,
enabled=self.enabled)
self.tenant_id = tenant.id
# update the record
if self.id:
driver.update_tenant(self.tenant_id,
name=self.login_base,
description=self.name,
enabled=self.enabled)
super(Site, self).save(*args, **kwds)
def delete(self, *args, **kwds):
driver = OpenStackDriver()
if self.tenant_id:
driver.delete_tenant(self.tenant_id)
super(Site, self).delete(*args, **kwds)
class SitePrivilege(PlCoreBase):
user = models.ForeignKey('User', related_name='site_privileges')
site = models.ForeignKey('Site', related_name='site_privileges')
role = models.ForeignKey('Role')
def __unicode__(self): return u'%s %s %s' % (self.site, self.user, self.role)
| import os
from django.db import models
from plstackapi.core.models import PlCoreBase
from plstackapi.core.models import DeploymentNetwork
from plstackapi.openstack.driver import OpenStackDriver
# Create your models here.
class Site(PlCoreBase):
tenant_id = models.CharField(max_length=200, help_text="Keystone tenant id")
name = models.CharField(max_length=200, help_text="Name for this Site")
site_url = models.URLField(null=True, blank=True, max_length=512, help_text="Site's Home URL Page")
enabled = models.BooleanField(default=True, help_text="Status for this Site")
longitude = models.FloatField(null=True, blank=True)
latitude = models.FloatField(null=True, blank=True)
login_base = models.CharField(max_length=50, unique=True, help_text="Prefix for Slices associated with this Site")
is_public = models.BooleanField(default=True, help_text="Indicates the visibility of this site to other members")
abbreviated_name = models.CharField(max_length=80)
deployments = models.ManyToManyField(DeploymentNetwork, blank=True, related_name='sites')
def __unicode__(self): return u'%s' % (self.name)
class SitePrivilege(PlCoreBase):
user = models.ForeignKey('User', related_name='site_privileges')
site = models.ForeignKey('Site', related_name='site_privileges')
role = models.ForeignKey('Role')
def __unicode__(self): return u'%s %s %s' % (self.site, self.user, self.role)
| apache-2.0 | Python |
42192dad06079a654945a9a53dcdc548aa7085c4 | Create __init__.py | Timothylock/league-carnage-notifier-Raspberry-Pi,Timothylock/league-carnage-notifier-Raspberry-Pi | modules/datasave/__init__.py | modules/datasave/__init__.py | apache-2.0 | Python | ||
ad72e7cdc02d746ec56dfd85ed5a46de59e57684 | add the pocket pla machine laerning algorithm | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | machine_learning/python/pocket_pla.py | machine_learning/python/pocket_pla.py | import random
import numpy as np
FEATURE = 5
def sign(num):
    """Binarise *num*: +1.0 for strictly positive values, -1.0 otherwise.

    Note that zero maps to -1.0; `pla` relies on this convention.
    """
    return 1.0 if np.sign(num) > 0 else -1.0
def pla(X,y,m):
    ''' improved (pocket) perceptron learning algorithm

    Arguments:
        X {list or numpy array} -- training samples; each X[n] is expected to
            broadcast against the 1x(FEATURE+1) weight row (bias included)
            -- TODO confirm expected shape with callers
        y {list or numpy array} -- target labels (+1 / -1)
        m {integer} -- the size of training dataset

    Returns:
        list or numpy array -- updated weight (the 1x(FEATURE+1) pocket row)

    NOTE(review): several points look suspicious and should be confirmed
    against the intended pocket-PLA algorithm:
      * `flag` is cleared on the first pass and never set again, so the
        outer `while` body runs exactly once.
      * `total` and `correct` accumulate `pw*X[i].T` across samples, so the
        sign test is applied to a running sum rather than to each sample's
        individual score.
      * `error` ends up as (#pocket mistakes) - (#candidate mistakes), yet
        the pocket is replaced when `error < 0`, i.e. when the candidate
        made MORE mistakes -- the comparison looks inverted.
    '''
    random.seed()                       # re-seed from system entropy each call
    flag = True
    pw = np.zeros((1,FEATURE+1))        # pocket weights, all-zero start
    random_set = list(range(m))
    random.shuffle(random_set)          # visit samples in random order
    iteration = 0                       # counts pocket replacements (unused)
    while flag:
        flag = False
        for n in random_set:
            # Candidate weights: one perceptron update step from the pocket.
            w = np.copy(pw + X[n]*y[n])
            total = 0
            correct = 0
            error = 0
            for i in range(m):
                total += pw*X[i].T      # running score under pocket weights
                correct += w*X[i].T     # running score under candidate weights
                if y[i] != sign(total.sum()):
                    error += 1
                if y[i] != sign(correct.sum()):
                    error -= 1
            # Swap the candidate into the pocket (see inversion note above).
            if error < 0:
                pw = np.copy(w)
                iteration+=1
    return pw
| cc0-1.0 | Python | |
4fe62ac1211e68f1d9c656453bdf71d6849c3daf | Add organisation values for the Enterprise Europe Network. | alphagov/notifications-api,alphagov/notifications-api | migrations/versions/0101_een_logo.py | migrations/versions/0101_een_logo.py | """empty message
Revision ID: 0101_een_logo
Revises: 0100_notification_created_by
Create Date: 2017-06-26 11:43:30.374723
"""
from alembic import op
revision = '0101_een_logo'
down_revision = '0100_notification_created_by'
ENTERPRISE_EUROPE_NETWORK_ID = '89ce468b-fb29-4d5d-bd3f-d468fb6f7c36'
def upgrade():
    # Seed the organisation table with the Enterprise Europe Network row.
    # NOTE(review): the second value is empty -- presumably a colour/label
    # column; confirm against the organisation table schema.
    op.execute("""INSERT INTO organisation VALUES (
        '{}',
        '',
        'een_x2.png',
        'Enterprise Europe Network'
    )""".format(ENTERPRISE_EUROPE_NETWORK_ID))


def downgrade():
    # Remove exactly the row inserted by upgrade(), keyed on its fixed UUID.
    op.execute("""
        DELETE FROM organisation WHERE "id" = '{}'
    """.format(ENTERPRISE_EUROPE_NETWORK_ID))
| mit | Python | |
eff52ac7e0177cdf79b4fe4f076bf411a4b2b54b | Add migration file | cgwire/zou | migrations/versions/bf1347acdee2_.py | migrations/versions/bf1347acdee2_.py | """empty message
Revision ID: bf1347acdee2
Revises: b4dd0add5f79
Create Date: 2018-03-23 17:08:11.289953
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bf1347acdee2'
down_revision = 'b4dd0add5f79'
branch_labels = None
depends_on = None
def upgrade():
    # Add task_type.for_entity and widen the uniqueness constraint so the
    # same (name, department) may exist once per entity kind.
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('task_type', sa.Column('for_entity', sa.String(length=10), nullable=True))
    op.drop_constraint('task_type_uc', 'task_type', type_='unique')
    op.create_unique_constraint('task_type_uc', 'task_type', ['name', 'for_entity', 'department_id'])
    # ### end Alembic commands ###


def downgrade():
    # Restore the original (name, department_id) uniqueness and drop the column.
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('task_type_uc', 'task_type', type_='unique')
    op.create_unique_constraint('task_type_uc', 'task_type', ['name', 'department_id'])
    op.drop_column('task_type', 'for_entity')
    # ### end Alembic commands ###
| agpl-3.0 | Python | |
845615f2a34c5680ed22a2f4eafa5febe7cd7246 | Add date updated to Owner | CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords,CityOfNewYork/NYCOpenRecords | alembic/versions/20087beff9ea_added_date_updated_t.py | alembic/versions/20087beff9ea_added_date_updated_t.py | """Added date updated to Owner
Revision ID: 20087beff9ea
Revises: 2dc72d16c188
Create Date: 2014-03-09 01:43:00.648013
"""
# revision identifiers, used by Alembic.
revision = '20087beff9ea'
down_revision = '2dc72d16c188'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add the (nullable) last-modified timestamp to Owner.
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('owner', sa.Column('date_updated', sa.DateTime()))
    ### end Alembic commands ###


def downgrade():
    # Drop the column added by upgrade().
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('owner', 'date_updated')
    ### end Alembic commands ###
| apache-2.0 | Python | |
b0480b55ea19bc661091924fb21b1b9b95b54200 | Create NoDuplicateLetters-Hard.py | JLJTECH/TutorialTesting | Edabit/NoDuplicateLetters-Hard.py | Edabit/NoDuplicateLetters-Hard.py | #!/usr/bin/env python3
'''
Given a common phrase, return False if any individual word in the phrase contains duplicate letters.
Return True otherwise.
'''
def no_duplicate_letters(phrase):
    """Return True when no individual word of *phrase* repeats a letter.

    Words are whitespace-separated; the check is case sensitive.
    """
    # A word is duplicate-free exactly when its unique-letter count equals
    # its length.  (The original built ' '.join([phrase]) -- a no-op join --
    # and compared two parallel lists of lengths element by element.)
    return all(len(set(word)) == len(word) for word in phrase.split())
#Alternative solutions
def no_duplicate_letters(phrase):
    """Return True when every letter occurs exactly once within its word."""
    for word in phrase.split():
        for letter in word:
            if word.count(letter) != 1:
                return False
    return True
def no_duplicate_letters(phrase):
    """Return True when no space-separated chunk of *phrase* repeats a character."""
    result = True
    for chunk in phrase.split(' '):
        result = result and len(set(chunk)) == len(chunk)
    return result
| mit | Python | |
d62b0e231177edbd60730470c5213dbff3bfa11c | Implement the PRIVMSG and NOTICE commands | DesertBus/txircd,ElementalAlchemist/txircd,Heufneutje/txircd | txircd/modules/cmd_privmsg_notice.py | txircd/modules/cmd_privmsg_notice.py | from twisted.words.protocols import irc
from txircd.modbase import Command
class MessageCommand(object):
    """Shared implementation behind PRIVMSG and NOTICE.

    `processParams` resolves the comma-separated target list into user and
    channel objects; `onUse` then fans the message text out to the resolved
    channel targets.
    """
    def __init__(self, ircd):
        self.ircd = ircd

    def onUse(self, cmd, user, data):
        """Deliver data["message"] to every resolved channel in `data`.

        TODO(review): data["targetuser"] is validated but never delivered
        to here -- direct user-to-user messages appear unimplemented.
        """
        if ("targetchan" not in data or not data["targetchan"]) and ("targetuser" not in data or not data["targetuser"]):
            return
        if "message" not in data or not data["message"]:
            user.sendMessage(irc.ERR_NOTEXTTOSEND, ":No text to send")
            return
        channelModifiers = data["chanmod"]
        message = data["message"]
        # BUG FIX: lists have no .enumerate() method; use the builtin.
        for index, channel in enumerate(data["targetchan"]):
            if channelModifiers[index]:
                # A status prefix (e.g. "@", "+") restricts delivery to members
                # at or above that rank.  BUG FIX: the prefix tables live on
                # the ircd object (as processParams already assumes), not on
                # this command instance.
                prefixLevel = self.ircd.prefixes[self.ircd.prefix_symbols[channelModifiers[index]]][0]
                # BUG FIX: `channels` was a typo for the loop variable `channel`.
                for u in channel.users.itervalues():
                    if u.channels[channel.name]["status"] and self.ircd.prefixes[u.channels[channel.name]["status"][0]][0] >= prefixLevel:
                        u.sendMessage(cmd, message, to="{}{}".format(channelModifiers[index], channel.name), prefix=user.prefix())
            else:
                for u in channel.users.itervalues():
                    u.sendMessage(cmd, message, to=channel.name, prefix=user.prefix())

    def processParams(self, cmd, user, params):
        """Parse `<targets> <message>` into the dict consumed by onUse.

        Unknown targets get ERR_NOSUCHNICK and are skipped; missing
        parameters return an empty dict after the appropriate numeric.
        """
        if not params:
            user.sendMessage(irc.ERR_NEEDMOREPARAMS, cmd, ":Not enough parameters")
            return {}
        if len(params) < 2:
            user.sendMessage(irc.ERR_NOTEXTTOSEND, ":No text to send")
            return {}
        targetChans = []
        targetUsers = []
        targetChanModifiers = []
        for target in params[0].split(","):
            if target in self.ircd.users:
                targetUsers.append(self.ircd.users[target])
            elif target in self.ircd.channels:
                targetChans.append(self.ircd.channels[target])
                targetChanModifiers.append("")
            elif target[0] in self.ircd.prefix_symbols and target[1:] in self.ircd.channels:
                # Status-prefixed channel target, e.g. "@#chan".
                targetChans.append(self.ircd.channels[target[1:]])
                targetChanModifiers.append(target[0])
            else:
                user.sendMessage(irc.ERR_NOSUCHNICK, target, ":No such nick/channel")
        return {
            "user": user,
            "targetchan": targetChans,
            "chanmod": targetChanModifiers,
            "targetuser": targetUsers,
            "message": params[1]
        }
class PrivMsgCommand(Command):
    # Thin wrapper binding the shared message handler to the PRIVMSG verb.
    def __init__(self, msgHandler):
        self.msg_handler = msgHandler

    def onUse(self, user, data):
        self.msg_handler.onUse("PRIVMSG", user, data)

    def processParams(self, user, params):
        return self.msg_handler.processParams("PRIVMSG", user, params)

class NoticeCommand(Command):
    # Thin wrapper binding the shared message handler to the NOTICE verb.
    def __init__(self, msgHandler):
        self.msg_handler = msgHandler

    def onUse(self, user, data):
        self.msg_handler.onUse("NOTICE", user, data)

    def processParams(self, user, params):
        return self.msg_handler.processParams("NOTICE", user, params)
class Spawner(object):
    """Module entry point: wires the PRIVMSG/NOTICE commands into the ircd."""
    def __init__(self, ircd):
        self.ircd = ircd

    # BUG FIX: spawn and cleanup are invoked as instance methods but were
    # declared without `self`, so every call raised TypeError and `self`
    # was unresolved inside the bodies.
    def spawn(self):
        """Build the command table; both verbs share one MessageCommand."""
        messageHandler = MessageCommand(self.ircd)
        return {
            "commands": {
                "PRIVMSG": PrivMsgCommand(messageHandler),
                "NOTICE": NoticeCommand(messageHandler)
            }
        }

    def cleanup(self):
        """Unregister the commands installed by spawn()."""
        del self.ircd.commands["PRIVMSG"]
        del self.ircd.commands["NOTICE"]
4745a6e80b978d6310ab59c8ac3f0ce562dd7aa8 | Add module to compile with nuitka | NitorCreations/nitor-deploy-tools,NitorCreations/nitor-deploy-tools,NitorCreations/nitor-deploy-tools | n_utils/nitor-dt-load-project-env.py | n_utils/nitor-dt-load-project-env.py | from n_utils.project_util import load_project_env
load_project_env() | apache-2.0 | Python | |
68b865c120e49c8a64cdeb010893ca6e6c0de32f | Create webapp.py | sky-adams/Practice-for-DSW,sky-adams/Practice-for-DSW,sky-adams/Practice-for-DSW | webapp.py | webapp.py | from flask import Flask
app = Flask(__name__) #__name__ = "__main__" if this is the file that was run. Otherwise, it is the name of the file (ex. webapp)
@app.route("/")
def render_main():
    """Serve the site root by returning the URL of the static home page.

    NOTE(review): this sends the URL string itself as the response body,
    not the page content -- confirm whether a redirect or
    app.send_static_file('home.html') was intended.
    """
    # BUG FIX: url_for was never imported at module level (only Flask is),
    # so this view raised NameError on every request.
    from flask import url_for
    return url_for('static', filename='home.html')
if __name__=="__main__":
    # Development server on a fixed non-privileged port; debug disabled.
    app.run(debug=False, port=54321)
| mit | Python | |
a2386d80fbbff2cd3ab3814cd850a8471280dde8 | Check if user.token attribute exists | noironetworks/horizon,NeCTAR-RC/horizon,NeCTAR-RC/horizon,BiznetGIO/horizon,NeCTAR-RC/horizon,NeCTAR-RC/horizon,openstack/horizon,ChameleonCloud/horizon,ChameleonCloud/horizon,yeming233/horizon,BiznetGIO/horizon,BiznetGIO/horizon,openstack/horizon,BiznetGIO/horizon,yeming233/horizon,openstack/horizon,yeming233/horizon,noironetworks/horizon,noironetworks/horizon,ChameleonCloud/horizon,noironetworks/horizon,yeming233/horizon,ChameleonCloud/horizon,openstack/horizon | openstack_dashboard/views.py | openstack_dashboard/views.py | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django import shortcuts
import django.views.decorators.vary
import horizon
from horizon import base
from horizon import exceptions
from horizon import notifications
MESSAGES_PATH = getattr(settings, 'MESSAGES_PATH', None)
def get_user_home(user):
    """Return the absolute URL of the dashboard *user* should land on.

    Raises NotAuthenticated when the session carries no token.  Users with
    no project scoped in the token, and superusers, are sent to the
    identity dashboard; everyone else gets the configured default.
    """
    try:
        token = user.token
    except AttributeError:
        raise exceptions.NotAuthenticated()

    # Domain Admin, Project Admin will default to identity
    if token.project.get('id') is None or user.is_superuser:
        try:
            dashboard = horizon.get_dashboard('identity')
        except base.NotRegistered:
            # BUG FIX: the previous `pass` left `dashboard` unbound, so the
            # return below raised UnboundLocalError whenever the identity
            # dashboard was not registered.  Fall back to the default.
            dashboard = horizon.get_default_dashboard()
    else:
        dashboard = horizon.get_default_dashboard()
    return dashboard.get_absolute_url()
@django.views.decorators.vary.vary_on_cookie
def splash(request):
    """Landing view: redirect an authenticated user to their home dashboard.

    Raises NotAuthenticated for anonymous users, clears the one-shot
    'logout_reason' cookie, and queues any message-of-the-day notifications
    found under MESSAGES_PATH before returning the redirect response.
    """
    if not request.user.is_authenticated():
        raise exceptions.NotAuthenticated()

    response = shortcuts.redirect(horizon.get_user_home(request.user))
    if 'logout_reason' in request.COOKIES:
        response.delete_cookie('logout_reason')
    # Display Message of the Day message from the message files
    # located in MESSAGES_PATH
    if MESSAGES_PATH:
        notifications.process_message_notification(request, MESSAGES_PATH)
    return response
| # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django import shortcuts
import django.views.decorators.vary
import horizon
from horizon import base
from horizon import exceptions
from horizon import notifications
MESSAGES_PATH = getattr(settings, 'MESSAGES_PATH', None)
def get_user_home(user):
    """Pick the landing dashboard URL for *user* (pre-token-check version).

    NOTE(review): `user.token` is accessed unguarded below; a session with
    no token raises AttributeError here rather than NotAuthenticated --
    confirm callers always pass a fully authenticated user.
    """
    dashboard = None
    if user.is_superuser:
        try:
            dashboard = horizon.get_dashboard('admin')
        except base.NotRegistered:
            # No admin dashboard registered; fall through to the default.
            pass
    if dashboard is None:
        dashboard = horizon.get_default_dashboard()

    # Domain Admin, Project Admin will default to identity
    if (user.token.project.get('id') is None or
            (user.is_superuser and user.token.project.get('id'))):
        dashboard = horizon.get_dashboard('identity')

    return dashboard.get_absolute_url()
@django.views.decorators.vary.vary_on_cookie
def splash(request):
    """Landing view: redirect an authenticated user to their home dashboard.

    Clears the one-shot 'logout_reason' cookie and queues message-of-the-day
    notifications from MESSAGES_PATH before returning the redirect.
    """
    if not request.user.is_authenticated():
        raise exceptions.NotAuthenticated()

    response = shortcuts.redirect(horizon.get_user_home(request.user))
    if 'logout_reason' in request.COOKIES:
        response.delete_cookie('logout_reason')
    # Display Message of the Day message from the message files
    # located in MESSAGES_PATH
    if MESSAGES_PATH:
        notifications.process_message_notification(request, MESSAGES_PATH)
    return response
| apache-2.0 | Python |
256bd96aabbb4b12972e070e11bfbd95248c3f29 | add new recipe | tonibagur/kivy-ios,tonibagur/kivy-ios,kivy/kivy-ios,cbenhagen/kivy-ios,kivy/kivy-ios,cbenhagen/kivy-ios,rnixx/kivy-ios,rnixx/kivy-ios,kivy/kivy-ios | recipes/sdl2_mixer/__init__.py | recipes/sdl2_mixer/__init__.py | from toolchain import Recipe, shprint
import sh
class LibSDL2MixerRecipe(Recipe):
    """kivy-ios build recipe for SDL2_mixer 2.0.0 (static library via Xcode)."""
    version = "2.0.0"
    url = "http://www.libsdl.org/projects/SDL_mixer/release/SDL2_mixer-{version}.tar.gz"
    # Artefact path produced per target arch by build_arch (templated on arch.sdk).
    library = "Xcode-iOS/build/Release-{arch.sdk}/libSDL2_mixer.a"
    include_dir = "SDL_mixer.h"
    depends = ["sdl2"]

    def build_arch(self, arch):
        # Build the static library for one architecture with xcodebuild,
        # injecting the toolchain's include dirs into the header search path.
        shprint(sh.xcodebuild,
                "ONLY_ACTIVE_ARCH=NO",
                "ARCHS={}".format(arch.arch),
                "HEADER_SEARCH_PATHS=$HEADER_SEARCH_PATHS {}".format(" ".join(arch.include_dirs)),
                "-sdk", arch.sdk,
                "-project", "Xcode-iOS/SDL_mixer.xcodeproj",
                "-target", "Static Library",
                "-configuration", "Release")

# Module-level instance picked up by the kivy-ios toolchain loader.
recipe = LibSDL2MixerRecipe()
| mit | Python | |
0c9dcb8c23be8685bfe32a465f8e1b3023c95934 | Create manager_D3S.py | cllamb0/dosenet-raspberrypi,cllamb0/dosenet-raspberrypi,yarocoder/dosenet-raspberrypi,tybtab/dosenet-raspberrypi,yarocoder/dosenet-raspberrypi,bearing/dosenet-raspberrypi,tybtab/dosenet-raspberrypi,bearing/dosenet-raspberrypi | manager_D3S.py | manager_D3S.py | #!/usr/bin/env python
import argparse
import kromek
# NOTE: Python 2 source (print statements); keep interpreter accordingly.
def main():
    # Collect readings from discovered Kromek D3S detectors until each has
    # produced `count` readings (0 = run forever).
    parser = argparse.ArgumentParser()
    parser.add_argument('--transport', '-t', dest='transport', default='any')
    parser.add_argument('--interval', '-i', dest='interval', default=1)
    parser.add_argument('--count', '-c', dest='count', default=0)
    parser.add_argument('--device', '-d', dest='device', default='all')
    parser.add_argument('--log-bytes', '-b', dest='log_bytes', default=False, action='store_true')
    args = parser.parse_args()

    interval = int(args.interval)
    count = int(args.count)

    # Discover detectors on the requested transport (or all transports).
    if args.transport == 'any':
        devs = kromek.discover()
    else:
        devs = kromek.discover(args.transport)

    print 'Discovered %s' % devs
    if len(devs) <= 0:
        return

    # Keep only the requested devices.
    # NOTE(review): `dev[0] in args.device` is a substring test against the
    # raw option string -- confirm that is the intended matching rule.
    filtered = []
    for dev in devs:
        if args.device == 'all' or dev[0] in args.device:
            filtered.append(dev)
    devs = filtered
    if len(devs) <= 0:
        return

    done_devices = set()
    with kromek.Controller(devs, interval) as controller:
        for reading in controller.read():
            serial = reading[0]
            dev_count = reading[1]
            if serial not in done_devices:
                print reading
            # Stop collecting from a device once it reaches the quota.
            if dev_count >= count > 0:
                done_devices.add(serial)
                controller.stop_collector(serial)
            if len(done_devices) >= len(devs):
                break

if __name__ == '__main__':
    main()
| mit | Python | |
dc9449768dd93ec29a031b31c3a975c2db797252 | Create test_Deter_as_Server_and_Play_Audio.py | nksheridan/elephantAI,nksheridan/elephantAI | test_Deter_as_Server_and_Play_Audio.py | test_Deter_as_Server_and_Play_Audio.py | # DETER DEVICE
# this is test code for putting the deter device into server mode, and getting a message via bluetooth from the detection device, and
# then going ahead and playing scare sounds. You need to determine your MAC address. It is for the server in this case, so the MAC address
# of the deter device. You also need to pair the deter device with the detection device via Bluetooth prior to using this. You can do
# that from the Bluetooth icon in the Raspian GUI.
import socket
import time
import os
import random
hostMACaddress = 'xxx'  # MAC address of this (deter) device's bluetooth adapter
port = 9
backlog = 1   # accept a single pending connection
size = 1024   # receive buffer size in bytes

# RFCOMM server socket: wait for the paired detection device to connect
# and send its verdict.
s = socket.socket(socket.AF_BLUETOOTH, socket.SOCK_STREAM, socket.BTPROTO_RFCOMM)
s.bind((hostMACaddress, port))
s.listen(backlog)
print("We are waiting for a message from the detection device to arrive via bluetooth!")
try:
    client, address = s.accept()
    data = client.recv(size)
    if data:
        print(data)
        client.send(data)
        #echo back
except Exception:
    # BUG FIX: was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    # NOTE(review): if s.accept() itself failed, `client` here (and `data`
    # below) are unbound and would raise NameError -- confirm whether that
    # path can occur in practice.
    print("closing the socket")
    client.close()
    s.close()
message = str(data)
#convert the data received to a string
print(message)
if message == "b'yes_audio'":
    print("play scare sounds now")
    time.sleep(3)
    # BUG FIX: the third entry was missing its quotes, which made this file
    # fail to parse with a SyntaxError.
    scare_sounds = ['aplay bees1.wav', 'aplay bees2.wav', 'aplay bees3.wav']
    i = 0
    while i <10:
        i = i+1
        to_play = random.choice(scare_sounds)
        print(to_play)
        os.system(to_play)
    print("Finished scare. Now can message detection device, and await another message from it")
| mit | Python | |
7735ea4909d30a6592b3bcbcf2752fb3fba8ebb7 | Add Depth-First Search | XeryusTC/search | dfs.py | dfs.py | # -*- coding: utf-8 -*-
import draw
import grid
import util
def dfs(g, start, goal):
    # Entry point: depth-first search from start to goal; returns the path
    # as a list of nodes, or None if nothing is found within the depth limit.
    return dfs_rec(g, start, goal, [start])
def dfs_rec(g, pos, goal, path, max_depth=50):
    """Recursive depth-first search step.

    Arguments:
        g         -- graph-like object exposing neighbours(*pos)
        pos       -- current node (unpacked into g.neighbours)
        goal      -- target node
        path      -- nodes visited so far, starting node included
        max_depth -- give up once the path exceeds this many nodes
                     (default 50 preserves the original hard-coded limit)

    Returns the complete path list on success, otherwise None.
    """
    if pos == goal:
        return path
    if len(path) > max_depth:
        return None
    for n in g.neighbours(*pos):
        if n in path:
            continue # skip already visited neighbours
        p = dfs_rec(g, n, goal, path + [n], max_depth)
        if p is not None:  # idiom fix: identity test, not `!= None`
            return p
    return None
if __name__ == '__main__':
    # Demo: random 16x16 grid with 20% obstacles; search and visualise.
    g, start, goal = util.generate_problem(16, 16, 0.2)
    print('Start:', start, 'goal:', goal)
    path = dfs(g, start, goal)
    # NOTE(review): dfs may return None, in which case len(path) below
    # raises TypeError -- acceptable for a demo but worth confirming.
    print('Found length vs heuristic:', len(path), grid.dist(start, goal))
    draw.draw_path(draw.draw_grid(g), path).show()
| mit | Python | |
58d1011b2758f537a3539321bea64cf1c77f39fe | Add spider for Au Bon Pain; closes #783 | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/aubonpain.py | locations/spiders/aubonpain.py | import scrapy
import re
import json
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
class AuBonPainSpider(scrapy.Spider):
    """Scrapy spider harvesting Au Bon Pain store locations for all-the-places."""
    name = "aubonpain"
    download_delay = 0.5
    allowed_domains = [
        "www.aubonpain.com",
    ]
    start_urls = (
        'https://www.aubonpain.com/stores/all-stores',
    )

    def parse_hours(self, items):
        """Convert the site's JSON day entries into an opening_hours string.

        Closed days and special-event entries are skipped; "Open 24 Hrs" is
        normalised to a midnight-to-midnight range.
        """
        opening_hours = OpeningHours()

        for day in items:
            open_time = day["Open"]
            close_time = day["Close"]
            if close_time == 'Closed' or open_time == 'Closed':
                continue
            elif close_time == 'Open 24 Hrs' or open_time == 'Open 24 Hrs':
                open_time = '12:00 AM'
                close_time = '12:00 AM'
            elif close_time == 'Open for Special Events':
                continue

            opening_hours.add_range(day=day["Day"][:2],
                                    open_time=open_time,
                                    close_time=close_time,
                                    time_format='%I:%M %p')

        return opening_hours.as_opening_hours()

    def parse_store(self, response):
        """Extract one store's properties from its detail page.

        Coordinates come from an inline Bing Maps call; the hours JSON is
        scraped from an inline `var hours = ...;` script variable.
        """
        # Trailing URL path segment doubles as the store's stable ref.
        ref = re.findall(r"[^(\/)]+$", response.url)[0]
        scripts = "".join(response.xpath('//script/text()').extract())
        lat, lon = re.search(r'.*Microsoft.Maps.Location\(([0-9.-]*),\s+([0-9-.]*)\).*', scripts).groups()
        address1, address2 = response.xpath('//dt[contains(text(), "Address")]/following-sibling::dd/text()').extract()
        # address2 is "City, ST 12345"; split it into its three parts.
        city, state, zipcode = re.search(r'^(.*),\s+([a-z]{2})\s+([0-9]+)$', address2.strip(), re.IGNORECASE).groups()

        properties = {
            'addr_full': address1.strip(', '),
            'phone': response.xpath('//dt[contains(text(), "Phone")]/following-sibling::dd/a/text()').extract_first(),
            'city': city,
            'state': state,
            'postcode': zipcode,
            'ref': ref,
            'website': response.url,
            'lat': float(lat),
            'lon': float(lon),
        }

        hours = json.loads(re.search(r'.*var\shours\s*=\s*(.*?);.*', scripts).groups()[0])
        hours = self.parse_hours(hours)

        if hours:
            properties['opening_hours'] = hours

        yield GeojsonPointItem(**properties)

    def parse(self, response):
        """Follow every store link found on the all-stores index page."""
        urls = response.xpath('//section/div/div//a[contains(@href, "stores")]/@href').extract()
        for url in urls:
            url = url.replace('\r\n', '')
            yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
| mit | Python | |
ab7006cd6a174273c026fda15e3471b9571ed0a6 | Add some simple tests | remik/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,oliciv/django-page-cms,pombredanne/django-page-cms-1,remik/django-page-cms,akaihola/django-page-cms,pombredanne/django-page-cms-1,akaihola/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,batiste/django-page-cms,batiste/django-page-cms,oliciv/django-page-cms | pages/tests.py | pages/tests.py | from django.test import TestCase
from pages.models import *
class PagesTestCase(TestCase):
    """Exercise the published/draft managers, template inheritance and content."""
    fixtures = ['tests']

    def test_01_managers(self):
        """Check the managers"""
        pages = Page.published.all()
        for p in pages:
            self.assertEqual(p.status, 1)
        pages = Page.drafts.all()
        for p in pages:
            self.assertEqual(p.status, 0)

    def test_02_template_inheritance(self):
        """Check if the template inheritance is working right"""
        for p in Page.objects.all():
            if p.template == '' and not p.get_template() == None:
                #there is a parent with a template, go find it
                parent = p.parent
                while True:
                    if parent.template:
                        break
                    # BUG FIX: was `parent = p.parent`, which re-read the same
                    # direct parent forever and looped infinitely whenever the
                    # template lived higher up the ancestor chain.
                    parent = parent.parent
                #the page must have inherited of it's parent template
                self.assertEqual(parent.template, p.get_template())

    def test_03_content(self):
        """Check if there is always one content a least by page"""
        for p in Page.objects.all():
            for l in Language.objects.all():
                if Content.get_content(p, l, 0) == None:
                    # if there is no content in this, there must at least have something
                    # in another one
                    self.assertNotEqual(Content.get_content(p, l, 0, True), None)
414c0a799f7fa1441150d6caae8447a135b9443a | Create sentiment-analysis.py | COMP90024CloudComputing/TwitterHarvest,COMP90024CloudComputing/TwitterHarvest,COMP90024CloudComputing/TwitterHarvest | data-analysis/sentiment-Google-API/sentiment-analysis.py | data-analysis/sentiment-Google-API/sentiment-analysis.py | import couchdb
from google.cloud import language
# NOTE: Python 2 source (print statements); keep interpreter accordingly.
# Walks every document in the local CouchDB database 'ten' and prints the
# Google Cloud Natural Language sentiment score of its 'text' field.
language_client = language.Client()
try:
    couch = couchdb.Server('http://localhost:15984/')
    print("Connect to local db")
except:
    # Connection failure is fatal: report and re-raise the original error.
    print ("Cannot find CouchDB Server ... Exiting\n")
    print ("----_Stack Trace_-----\n")
    raise
db = couch['ten']
print ("Connecting to db: ten")
for each in db:
    try:
        text = db[each].get('text')
        document = language_client.document_from_text(text)
        sentiment = document.analyze_sentiment().sentiment
        score = sentiment.score
        print text +' ------------ '+ str(score)
    except Exception as e:
        # Best-effort: log the failing document's text and keep going.
        # NOTE(review): `Exception.message` reads the class attribute, not
        # the caught instance -- `e` or str(e) was probably intended.
        print '******************'
        print Exception.message
        print text
        print '******************'
| apache-2.0 | Python | |
2859ca788a07338d5bf22a697669358e1f027382 | Add dataset_generation. | ProjetPP/PPP-QuestionParsing-ML-Standalone,ProjetPP/PPP-QuestionParsing-ML-Standalone | dataset_generation.py | dataset_generation.py | import itertools
person = [
"Obama",
"Barack Obama",
"Barack Hussein Obama",
"Lennon",
"John Lennon",
"Saint-Exupéry",
"Antoine de Saint-Exupéry",
"Antoine Marie Jean-Baptiste Roger de Saint-Exupéry"
]
country = [
"United States",
"United States of America",
"USA",
"US",
"U.S.A.",
"U.S.",
"United Kingdom",
"UK",
"U.K.",
"France",
"North Korea",
"Saudi Arabia",
"New Zealand"
]
city = [
"Paris",
"New York",
"San Francisco",
"Los Angeles",
"Ho Chi Minh City",
"New Delhi"
]
location = country+city+[
"South America",
"Middle East",
"Africa",
"Pacific Ocean",
"Mediterranean Sea",
"America",
"Europe",
"Asia",
"Oceania"
]
book = [
"A Tale of Two Cities",
"The Lord of the Rings",
"Le Petit Prince",
"Harry Potter and the Philosopher's Stone",
"And Then There Were None",
"Dream of the Red Chamber",
"The Hobbit",
"She: A History of Adventure"
]
film = [
"Avatar",
"Titanic",
"The Avengers",
"Harry Potter and the Deathly Hallows",
"Frozen",
"Iron Man 3",
"Transformers: Dark of the Moon",
"The Lord of the Rings: The Return of the King"
]
single = [
"White Christmas",
"I Will Always Love You",
"We Are the World",
"Da Da Da",
"Hey Jude",
"Bohemian Rhapsody"
]
art = book+film+single
def print_data(subject, predicate):
    """Print every training sentence variant for one (subject, predicate) pair.

    For each permutation of the predicate words and each insertion point of
    the subject, two lines are emitted: the sentence followed by its target
    triple, and the same text lower-cased.  Each print adds a trailing
    newline of its own, producing the blank-line separation in the output.
    """
    triple = "\n{0} | {1} | _".format(subject, " ".join(predicate))
    for perm in itertools.permutations(predicate):
        for cut in range(len(perm) + 1):
            words = perm[:cut] + (subject,) + perm[cut:]
            line = " ".join(words + (triple,)) + "\n"
            print(line)
            print(line.lower())
def print_person():
    # Birth/death place & date questions for every person alias.
    # NOTE(review): iterating set literals makes the emission order depend
    # on hashing; fine if downstream consumers don't care about order.
    for p in person:
        for ev in {"death","birth"}:
            for obj in {"place","date"}:
                print_data(p,[obj,ev])

def print_country():
    # Head-of-state/government questions for every country alias.
    for c in country:
        print_data(c,["president"])
        print_data(c,["prime", "minister"])

def print_city():
    # Mayor questions for every city.
    for c in city:
        print_data(c,["mayor"])

def print_location():
    # Population questions for countries, cities and regions alike.
    for l in location:
        print_data(l,["population"])

def print_film():
    # Cast and director questions for every film.
    for f in film:
        print_data(f,["cast","member"])
        print_data(f,["director"])

def print_book():
    # Language and authorship questions for every book.
    for b in book:
        print_data(b,["original","language"])
        print_data(b,["author"])

def print_single():
    # Record-label questions for every single.
    for s in single:
        print_data(s,["record","label"])

def print_art():
    # Questions shared by all works (books + films + singles).
    for a in art:
        print_data(a,["official","website"])
        print_data(a,["date","publication"])

def print_all():
    # Emit the full dataset, category by category.
    print_person()
    print_country()
    print_city()
    print_location()
    print_film()
    print_book()
    print_single()
    print_art()

# Generate the dataset on module execution (script-style entry point).
print_all()
| mit | Python | |
0781d105e4182bdd8abf1a8c7185311a48273c28 | Add imgadm beacons for SmartOS | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/beacons/smartos_imgadm.py | salt/beacons/smartos_imgadm.py | # -*- coding: utf-8 -*-
'''
Beacon that fires events on image import/delete.
.. code-block:: yaml
## minimal
# - check for new images every 1 second (salt default)
# - does not send events at startup
beacons:
imgadm: []
## standard
# - check for new images every 60 seconds
# - send import events at startup for all images
beacons:
imgadm:
- interval: 60
- startup_import_event: True
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals
import logging
# Import 3rd-party libs
# pylint: disable=import-error
from salt.ext.six.moves import map
# pylint: enable=import-error
__virtualname__ = 'imgadm'
IMGADM_STATE = {
'first_run': True,
'images': [],
}
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Provides imgadm beacon on SmartOS
    '''
    # __salt__ is injected by the salt loader; the imgadm execution module
    # is only available on SmartOS compute nodes, so its presence gates
    # this beacon.
    if 'imgadm.list' in __salt__:
        return True
    else:
        return (
            False,
            '{0} beacon can only be loaded on SmartOS compute nodes'.format(
                __virtualname__
            )
        )
def validate(config):
    '''
    Validate the beacon configuration: it must be a list of settings.
    Returns a (valid, message) tuple.
    '''
    if isinstance(config, list):
        return True, 'Valid beacon configuration'
    return False, 'Configuration for imgadm beacon must be a list!'
def beacon(config):
    '''
    Poll imgadm and compare available images

    Emits one event per image imported or deleted since the previous poll,
    tagged "imported/<uuid>" or "deleted/<uuid>" with the image's attributes
    attached.  State is kept in the module-level IMGADM_STATE dict; on the
    first run the baseline is pre-seeded with the current images unless
    `startup_import_event` is set, in which case every existing image also
    fires an import event.
    '''
    ret = []
    # NOTE: lookup current images
    current_images = __salt__['imgadm.list'](verbose=True)

    # NOTE: apply configuration
    if IMGADM_STATE['first_run']:
        log.info('Applying configuration for imgadm beacon')

        _config = {}
        list(map(_config.update, config))

        if 'startup_import_event' not in _config or not _config['startup_import_event']:
            IMGADM_STATE['images'] = current_images

    # NOTE: import events
    for uuid in current_images:
        event = {}
        if uuid not in IMGADM_STATE['images']:
            event['tag'] = "imported/{}".format(uuid)
            for label in current_images[uuid]:
                event[label] = current_images[uuid][label]

        if event:
            ret.append(event)

    # NOTE: delete events
    for uuid in IMGADM_STATE['images']:
        event = {}
        if uuid not in current_images:
            event['tag'] = "deleted/{}".format(uuid)
            for label in IMGADM_STATE['images'][uuid]:
                event[label] = IMGADM_STATE['images'][uuid][label]

        if event:
            ret.append(event)

    # NOTE: update stored state
    IMGADM_STATE['images'] = current_images

    # NOTE: disable first_run
    if IMGADM_STATE['first_run']:
        IMGADM_STATE['first_run'] = False

    return ret
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
| apache-2.0 | Python | |
8b63dc73b4e3303d1b86faf42f635f3ce01e9da4 | Create helper script providing multiprocessing support. | kubkon/des-in-python | run.py | run.py | #!/usr/bin/env python
# encoding: utf-8
import argparse
import subprocess as sub
### Parse command line arguments
parser = argparse.ArgumentParser(description="M/M/1 queue simulation -- Helper script")
parser.add_argument('reps', metavar='repetitions',
                    type=int, help='number of repetitions')
parser.add_argument('sim_duration', metavar='simulation_duration',
                    type=int, help='duration of each simulation stage in seconds')
parser.add_argument('int_rate', metavar='interarrival_rate',
                    type=int, help='mean packet interarrival rate in seconds')
parser.add_argument('sr_rate', metavar='service_rate',
                    type=int, help='mean packet service rate in seconds')
parser.add_argument('--batch_size', dest='batch_size', default=4,
                    type=int, help='batch size for multiprocessing')
parser.add_argument('--initial_seed', dest='init_seed', default=0,
                    type=int, help='base for seed values')
args = parser.parse_args()
repetitions = args.reps
sim_duration = args.sim_duration
interarrival_rate = args.int_rate
service_rate = args.sr_rate
batch_size = args.batch_size
init_seed = args.init_seed

### Run simulations
# Each repetition runs mm1_main.py with a distinct --seed; batch mode keeps
# up to `batch_size` subprocesses in flight, refilling as batches complete.
try:
    # One process at a time
    if batch_size == 1:
        for n in range(repetitions):
            sub.call("python mm1_main.py {} {} {} --seed={}".format(sim_duration, interarrival_rate, service_rate, n+init_seed), shell=True)
    # In batches
    else:
        # Split num of repetitions into batches
        quotient = repetitions // batch_size
        remainder = repetitions % batch_size
        # Run the simulations in parallel as subprocesses
        num_proc = batch_size if batch_size <= repetitions else remainder
        procs = [sub.Popen("python mm1_main.py {} {} {} --seed={}".format(sim_duration, interarrival_rate, service_rate, n+init_seed), shell=True) for n in range(num_proc)]
        while True:
            # Poll all children; True means that child has exited.
            procs_poll = list(map(lambda x: x.poll() != None, procs))
            if not all(procs_poll):
                # Block on the first still-running child before re-polling.
                procs[procs_poll.index(False)].wait()
            elif num_proc < repetitions:
                # Current batch done: launch the next (possibly partial) batch.
                temp_num = batch_size if num_proc + batch_size <= repetitions else remainder
                for n in range(num_proc, num_proc + temp_num):
                    procs += [sub.Popen("python mm1_main.py {} {} {} --seed={}".format(sim_duration, interarrival_rate, service_rate, n+init_seed), shell=True)]
                num_proc += temp_num
            else:
                break
except OSError as e:
    print("Execution failed: ", e)
| mit | Python | |
401b98d0f3834eebc71342746beb8ce28a73d75f | Add check for AddToHueAndSaturation | aleju/imgaug,aleju/ImageAugmenter,aleju/imgaug | checks/check_add_to_hue_and_saturation.py | checks/check_add_to_hue_and_saturation.py | from __future__ import print_function, division
import imgaug as ia
from imgaug import augmenters as iaa
import numpy as np
from skimage import data
import cv2
from itertools import cycle
VAL_PER_STEP = 1
TIME_PER_STEP = 10
def main():
    # Interactive preview of iaa.AddToHueAndSaturation: show the astronaut
    # test image augmented with every integer value in [-255, 255), each
    # frame captioned with the value, then an 8x8 grid of random
    # per-channel augmentations.
    image = data.astronaut()
    cv2.namedWindow("aug", cv2.WINDOW_NORMAL)
    cv2.imshow("aug", image)
    cv2.waitKey(TIME_PER_STEP)

    # for value in cycle(np.arange(-255, 255, VAL_PER_STEP)):
    for value in np.arange(-255, 255, VAL_PER_STEP):
        aug = iaa.AddToHueAndSaturation(value=value)
        img_aug = aug.augment_image(image)
        # Pad the bottom so the caption has room below the image.
        img_aug = ia.pad(img_aug, bottom=40)
        img_aug = ia.draw_text(img_aug, x=0, y=img_aug.shape[0]-38, text="value=%d" % (value,), size=30)
        cv2.imshow("aug", img_aug)
        cv2.waitKey(TIME_PER_STEP)

    images_aug = iaa.AddToHueAndSaturation(value=(-255, 255), per_channel=True).augment_images([image] * 64)
    ia.imshow(ia.draw_grid(images_aug))


if __name__ == "__main__":
    main()
| mit | Python | |
e0c82bec30568eb845c71fb0335d6ac5edef18e9 | Add migration that had conflict from merge with master | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/translations/migrations/0002_transifexblacklist.py | corehq/apps/translations/migrations/0002_transifexblacklist.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-01-09 19:35
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the TransifexBlacklist table
    # used to exclude individual case list/detail strings from Transifex
    # translation uploads.

    dependencies = [
        ('translations', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='TransifexBlacklist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('domain', models.CharField(max_length=255)),
                ('app_id', models.CharField(max_length=32)),
                ('module_id', models.CharField(max_length=32)),
                ('field_type', models.CharField(choices=[('detail', 'Case Detail'), ('list', 'Case List')], max_length=100)),
                ('field_name', models.TextField(help_text="\nThis is the same string that appears in the bulk translations download.\nUsually the string in either case list or detail under 'property'.\nThis could be an xpath or case property name.\nIf it is an ID Mapping then the property should be '<property> (ID Mapping Text)'.\nFor the values each value should be '<id mapping value> (ID Mapping Value)'.\nExample: case detail for tasks_type would have entries:\n    tasks_type (ID Mapping Text)\n    child (ID Mapping Value)\n    pregnancy (ID Mapping Value)\n")),
                ('display_text', models.TextField(help_text="The default language's translation for this detail/list. If display_text is not filled out then all translations that match the field_type and field_name will be blacklisted")),
            ],
        ),
    ]
| bsd-3-clause | Python | |
09a2a2aac94c4ab2a47161d32d77218d18eb6c75 | add queue for data pre_processing using multi-thread | xiaotaw/chembl | dnn_model/pk_queue.py | dnn_model/pk_queue.py | import time
import Queue
import threading
import pk_input as pki
target_list = ["cdk2", "egfr_erbB1", "gsk3b", "hgfr", "map_k_p38a", "tpk_lck", "tpk_src", "vegfr2"]
target = target_list[0]
d = pki.Datasets(target_list)
# using queue
# producer thread
class Producer(threading.Thread):
    """Worker thread that generates training batches and feeds a shared queue."""

    def __init__(self, t_name, d, queue):
        # t_name: thread name; d: the module-level pki.Datasets instance;
        # queue: shared Queue.Queue the finished batches are delivered on.
        threading.Thread.__init__(self, name=t_name)
        self.queue = queue
        self.d = d

    def run(self):
        # Produce a fixed number (10) of batches, timing each generation.
        for i in range(10):
            t0 = time.time()
            # `target` is the module-level global (target_list[0]).
            # NOTE(review): 256 / 256*25 presumably means 256 positives and a
            # 25x negative pool -- confirm against pk_input.next_train_batch.
            batch = self.d.next_train_batch(target, 256, 256*25)
            t1 = time.time()
            #print("%s: %s generate batch with neg_begin=%d %5.3f" % (time.ctime(), self.getName(), self.d.neg.train_begin, t1-t0))
            # Blocks when the queue is full (maxsize=50 in __main__), which
            # throttles producers to the consumer's pace.
            self.queue.put(batch, block=True, timeout=None)
        #print("%s: %s finished!" % (time.ctime(), self.getName()))
# consumer thread
class Consumer(threading.Thread):
    """Worker thread that drains pre-processed batches from a shared queue.

    Emulates the training loop consuming the batches produced by the
    `Producer` threads.
    """

    def __init__(self, t_name, queue):
        # t_name: thread name; queue: shared Queue.Queue of ready batches.
        threading.Thread.__init__(self, name=t_name)
        self.queue = queue

    def run(self):
        """Consume batches until none arrives for 5 seconds, then exit."""
        while True:
            try:
                # Block up to 5 s waiting for the next batch.
                self.queue.get(block=True, timeout=5)
            except Queue.Empty:
                # Queue stayed empty past the timeout: producers are done.
                # (Was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit and any real bug in the body.)
                break
            time.sleep(0.5)  # simulate the time spent training on one batch
if __name__ == "__main__":
queue = Queue.Queue(50)
pro_list = []
for i in range(10):
pro_list.append(Producer("Pro%d" % i, d, queue))
con = Consumer("Con", queue)
for pro in pro_list:
pro.start()
con.start()
for pro in pro_list:
pro.join()
con.join()
"""
# normal
for i in range(0, 10000):
t0 = time.time()
compds_batch, labels_batch = d.next_train_batch(target, 256, 256*25)
t1 = float(time.time())
if i % 10 == 0 or (i+1) == 10000:
print("%d %5.3f" % (i, t1-t0))
0 0.851
10 0.815
20 0.762
30 0.787
40 0.811
50 0.805
60 0.795
70 0.812
80 0.766
90 0.755
100 0.842
110 0.831
120 0.819
130 0.765
140 0.817
150 0.746
160 0.810
170 0.790
180 0.775
190 0.812
200 0.648
210 0.685
220 0.794
230 0.686
240 0.573
250 0.691
260 0.698
270 0.778
280 0.767
290 0.841
"""
| apache-2.0 | Python | |
6aeab650a43235a463cf9ff8c55b4cd22e6866c7 | Add mapping code | miyyer/qb,miyyer/qb,Pinafore/qb,miyyer/qb,Pinafore/qb,miyyer/qb | qanta/ingestion/annotated_mapping.py | qanta/ingestion/annotated_mapping.py | from unidecode import unidecode
from collections import defaultdict, Counter
import string
import re
PUNCTUATION = string.punctuation
# Pre-compiled patterns for stripping bracketed / marked-up answer segments.
PAREN = re.compile(r'\([^)]*\)')
# NOTE: fixed character class. The original pattern r'\[[^)]*\]' excluded ')'
# instead of ']', so ']' was allowed inside the match and a single greedy
# match could span from the first '[' to the last ']', deleting everything
# in between (e.g. "x [a] y [b] z" -> "x  z").
BRACKET = re.compile(r'\[[^\]]*\]')
MULT_SPACE = re.compile(r'\s+')
ANGLE = re.compile(r'<[^>]*>')


def split_and_remove_punc(text):
    """Yield the lower-cased, punctuation-free tokens of *text*."""
    for i in text.split():
        word = "".join(x for x in i.lower() if x not in PUNCTUATION)
        if word:
            yield word


def normalize_answer(answer):
    """Canonicalize an answer string for matching.

    Lower-cases, drops underscores, removes (...)/[...]/<...> segments and
    {braces}, collapses whitespace, and strips punctuation from each token.
    """
    answer = answer.lower().replace("_ ", " ").replace(" _", " ").replace("_", "")
    answer = answer.replace("{", "").replace("}", "")
    answer = PAREN.sub('', answer)
    answer = BRACKET.sub('', answer)
    answer = ANGLE.sub('', answer)
    answer = MULT_SPACE.sub(' ', answer)
    answer = " ".join(split_and_remove_punc(answer))
    return answer


def parse_unambiguous_mappings(path):
    """Read tab-separated ``source<TAB>target`` pairs from *path*.

    Malformed lines (not exactly two tab-separated fields) are skipped.
    Returns a dict mapping each source string to its target.
    """
    with open(path) as f:
        mappings = {}
        for line in f:
            splits = line.strip().split('\t')
            if len(splits) != 2:
                continue
            else:
                source, target = splits
                mappings[source] = target
    return mappings
| mit | Python | |
28ccfa6f1a2cd58907105afd4844c865191d423d | Add a `more_tearDown` method to MpiTestCase. | RaoUmer/distarray,enthought/distarray,enthought/distarray,RaoUmer/distarray | distarray/testing.py | distarray/testing.py | import unittest
import importlib
import tempfile
import os
from uuid import uuid4
from functools import wraps
from distarray.error import InvalidCommSizeError
from distarray.mpiutils import MPI, create_comm_of_size
def temp_filepath(extension=''):
    """Return a path in the system temp dir with a random 8-character name.

    The optional *extension* (e.g. ``'.hdf5'``) is appended verbatim to the
    generated basename.
    """
    basename = '{0}{1}'.format(str(uuid4())[:8], extension)
    return os.path.join(tempfile.gettempdir(), basename)
def import_or_skip(name):
    """Import and return module *name*, or skip the current test.

    Parameters
    ----------
    name : str
        Module name to try to import.

    Returns
    -------
    module
        The module object imported via :func:`importlib.import_module`.

    Raises
    ------
    unittest.SkipTest
        If importing *name* raises an ImportError, so the calling test is
        skipped rather than failed on a missing optional dependency.
    """
    try:
        module = importlib.import_module(name)
    except ImportError:
        raise unittest.SkipTest('%s not found... skipping.' % name)
    return module
def comm_null_passes(fn):
    """Decorator that turns *fn* into a no-op when ``self.comm`` is COMM_NULL.

    Ranks that are not part of the communicator hold COMM_NULL; for them the
    wrapped method silently returns None instead of running the body.
    """
    @wraps(fn)
    def wrapper(self, *args, **kwargs):
        if self.comm != MPI.COMM_NULL:
            return fn(self, *args, **kwargs)
    return wrapper
class MpiTestCase(unittest.TestCase):
    """Base test class for MPI test cases.

    Acquires a communicator of ``get_comm_size()`` processes in `setUp` and
    frees it in `tearDown`.

    Overload `get_comm_size` to change the default comm size (default is 4).
    Overload `more_setUp` to add more to the default `setUp`.
    Overload `more_tearDown` to add more to the default `tearDown`.
    """

    def get_comm_size(self):
        # Subclasses override this to request a different communicator size.
        return 4

    def more_setUp(self):
        # Subclass hook; runs only after the communicator was created.
        pass

    def setUp(self):
        try:
            self.comm = create_comm_of_size(self.get_comm_size())
        except InvalidCommSizeError:
            # Not enough MPI processes available: skip instead of failing.
            msg = "Must run with comm size >= {}."
            raise unittest.SkipTest(msg.format(self.get_comm_size()))
        else:
            self.more_setUp()

    def more_tearDown(self):
        # Subclass hook; runs before the communicator is freed.
        pass

    def tearDown(self):
        self.more_tearDown()
        # Ranks outside the communicator hold COMM_NULL, which must not be
        # freed.
        if self.comm != MPI.COMM_NULL:
            self.comm.Free()
| import unittest
import importlib
import tempfile
import os
from uuid import uuid4
from functools import wraps
from distarray.error import InvalidCommSizeError
from distarray.mpiutils import MPI, create_comm_of_size
def temp_filepath(extension=''):
"""Return a random 8-character filename."""
tempdir = tempfile.gettempdir()
filename = str(uuid4())[:8] + extension
return os.path.join(tempdir, filename)
def import_or_skip(name):
"""Try importing `name`, raise SkipTest on failure.
Parameters
----------
name : str
Module name to try to import.
Returns
-------
module : module object
Module object imported by importlib.
Raises
------
unittest.SkipTest
If the attempted import raises an ImportError.
Examples
--------
>>> h5py = import_or_skip('h5py')
>>> h5py.get_config()
<h5py.h5.H5PYConfig at 0x103dd5a78>
"""
try:
return importlib.import_module(name)
except ImportError:
errmsg = '%s not found... skipping.' % name
raise unittest.SkipTest(errmsg)
def comm_null_passes(fn):
"""Decorator. If `self.comm` is COMM_NULL, pass."""
@wraps(fn)
def wrapper(self, *args, **kwargs):
if self.comm == MPI.COMM_NULL:
pass
else:
return fn(self, *args, **kwargs)
return wrapper
class MpiTestCase(unittest.TestCase):
"""Base test class for MPI test cases.
Overload `get_comm_size` to change the default comm size (default is
4). Overload `more_setUp` to add more to the default `setUp`.
"""
def get_comm_size(self):
return 4
def more_setUp(self):
pass
def setUp(self):
try:
self.comm = create_comm_of_size(self.get_comm_size())
except InvalidCommSizeError:
msg = "Must run with comm size >= {}."
raise unittest.SkipTest(msg.format(self.get_comm_size()))
else:
self.more_setUp()
def tearDown(self):
if self.comm != MPI.COMM_NULL:
self.comm.Free()
| bsd-3-clause | Python |
b3df89c93a5d924b83d0dba6d83dd92fe2331f7c | Implementa a classe Agendamento conforme diagrama de classes | ESEGroup/Paraguai,ESEGroup/Paraguai,ESEGroup/Paraguai | domain/Agendamento.py | domain/Agendamento.py | from domain import IntervaloDeTempo
class Agendamento():
    """A booking of a time slot, optionally owned by a responsible user.

    `intervalo` is the booked time interval (an IntervaloDeTempo);
    `IDUsuario` identifies the responsible user, or None while unassigned.
    """

    def __init__(self, intervalo, IDUsuario = None):
        self.responsavel = IDUsuario
        self.intervalo = intervalo
| apache-2.0 | Python | |
b41b6998dc14920193f22399804552209f7adcab | add migration script | binoculars/osf.io,baylee-d/osf.io,felliott/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,sloria/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,aaxelb/osf.io,sloria/osf.io,mattclark/osf.io,baylee-d/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,icereval/osf.io,saradbowman/osf.io,aaxelb/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,cslzchen/osf.io,mfraezz/osf.io,chennan47/osf.io,erinspace/osf.io,adlius/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,caseyrollins/osf.io,icereval/osf.io,Johnetordoff/osf.io,icereval/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,mfraezz/osf.io,pattisdr/osf.io,sloria/osf.io,aaxelb/osf.io,felliott/osf.io,caseyrollins/osf.io,leb2dg/osf.io,mattclark/osf.io,cslzchen/osf.io,binoculars/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,adlius/osf.io,adlius/osf.io,mattclark/osf.io,pattisdr/osf.io,erinspace/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io | osf/migrations/0078_ensure_schemas.py | osf/migrations/0078_ensure_schemas.py | from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.utils.migrations import ensure_schemas, remove_schemas
logger = logging.getLogger(__file__)
class Migration(migrations.Migration):
    # Data migration: load registration schemas into the database.
    # Forward: ensure_schemas; reverse: remove_schemas.

    dependencies = [
        ('osf', '0077_add_maintenance_permissions'),
    ]

    operations = [
        migrations.RunPython(ensure_schemas, remove_schemas),
    ]
3037d356552634bc5d67568e97a9d7aedceb0fdf | Add description to email for non pyvideo events. | xfxf/veyepar,EricSchles/veyepar,CarlFK/veyepar,xfxf/veyepar,yoe/veyepar,CarlFK/veyepar,EricSchles/veyepar,EricSchles/veyepar,yoe/veyepar,yoe/veyepar,CarlFK/veyepar,CarlFK/veyepar,xfxf/veyepar,xfxf/veyepar,yoe/veyepar,yoe/veyepar,EricSchles/veyepar,CarlFK/veyepar,xfxf/veyepar,EricSchles/veyepar | dj/scripts/email_title.py | dj/scripts/email_title.py | #!/usr/bin/python
# email_url.py
# emails the video URL to the presenters
from django.core.mail import get_connection, EmailMessage
from django.template import Context, Template
from process import process
from email_ab import email_ab
# from django.conf import settings
class email_title(email_ab):
    """Notification step that emails speakers their talk's metadata for review.

    Inherits the sending machinery from email_ab; this class only supplies
    the subject/body templates and extra template context.
    """

    # No episode-state requirement: this email can be sent at any stage.
    ready_state = None

    subject_template = '[{{ep.show.name}}] Video metadata for "{{ep.name}}"'

    # Django-template body rendered with the episode (`ep`) plus the extra
    # context returned by more_context() below.
    body_template = """
Hi,
This is Veyepar, the automated video processing system.
Please review the following meta data about your talk so that mistakes can be corrected now and not after the video has gone live.
Released: {{ep.released}}
{% if ep.released %}Permission has been given to record your talk and post it online.
{% if not ep.location.active %}However, we are not planning on recording any of the talks in {{ ep.location.name }}. {% endif %}
{% else %} "None" means it may get recorded and processed, but it will not be made public.
"False" means you have requested for the video not to be released. However the a video may be made anyway and available for review in case you change your mind. {% endif %}
{% if pyvideo %}
The video will be titled with the following image:
{{MEDIA_URL}}{{ep.show.client.slug}}/{{ep.show.slug}}/titles/{{ep.slug}}.png
{% endif %}
{% if ep.public_url%}The main page for the video will be here:
{{ep.public_url}}
{% else %}and the Description:
=== begin ===
{{ep.description}}
=== end description ===
{% endif %}
{% if ep.show.schedule_url %}
Problems with the text will need to be fixed in the event database that drives: {{ep.conf_url}}
{{ep.show.schedule_url}}
Except for odd word wrap on the title image. If it bothers you, let us know how you would like it and we will try to accommodate.
{% endif %}
If everything looks good, you don't need to do anything. Good luck with your talk; expect another email when the video is posted.
Your talk is scheduled for {{ep.start}} in the room called {{ep.location.name}} and you have been alloted {{ep.get_minutes}} minutes. The event organizers will give you instructions on how to check in before your talk.
Please bring what is needed to hook your laptop up to good old 15 pin VGA. We may have an adaptor, but don't count on it, someone may have taken it.
"""

    py_name = "email_title.py"

    def more_context(self, ep):
        """Extra template context: whether this episode is hosted on pyvideo."""
        # If there is a Richard (pyvideo) url, use that;
        # else use the youtube url.
        # The None-guard protects episodes that have no public_url yet.
        pyvideo = ep.public_url is not None and "pyvideo" in ep.public_url
        return {'pyvideo':pyvideo}
if __name__ == '__main__':
p=email_title()
p.main()
| #!/usr/bin/python
# email_url.py
# emails the video URL to the presenters
from django.core.mail import get_connection, EmailMessage
from django.template import Context, Template
from process import process
from email_ab import email_ab
# from django.conf import settings
class email_title(email_ab):
ready_state = None
subject_template = '[{{ep.show.name}}] Video metadata for "{{ep.name}}"'
body_template = """
Hi,
This is Veyepar, the automated video processing system.
Please review the following meta data about your talk so that mistakes can be corrected now and not after the video has gone live.
Released: {{ep.released}}
{% if ep.released %}Permission has been given to record your talk and post it online.
{% if not ep.location.active %}However, we are not planning on recording any of the talks in {{ ep.location.name }}. {% endif %}
{% else %} "None" means it may get recorded and processed, but it will not be made public.
"False" means you have requested for the video not to be released. However the a video may be made anyway and available for review in case you change your mind. {% endif %}
{% if pyvideo %}
The video will be titled with the following image:
{{MEDIA_URL}}{{ep.show.client.slug}}/{{ep.show.slug}}/titles/{{ep.slug}}.png
{% endif %}
{% if ep.public_url%}The main page for the video will be here:
{{ep.public_url}} {% endif %}
{% if 0 %}
Problems with the text will need to be fixed in the event database that drives: {{ep.conf_url}} {{ep.show.schedule_url}}
Except for odd word wrap on the title image. If it bothers you, let us know how you would like it and we will try to accommodate.
{% endif %}
If everything looks good, you don't need to do anything. Good luck with your talk; expect another email when the video is posted.
Your talk is scheduled for {{ep.start}} in the room called {{ep.location.name}} and you have been alloted {{ep.get_minutes}} minutes. The event organizers will give you instructions on how to check in before your talk.
Please bring what is needed to hook your laptop up to good old 15 pin VGA. We may have an adaptor, but don't count on it, someone may have taken it.
"""
py_name = "email_title.py"
def more_context(self, ep):
# If there is a Richard (pyvideo) url, use that;
# else use the youtube url.
pyvideo = "pyvideo" in ep.public_url
return {'pyvideo':pyvideo}
if __name__ == '__main__':
p=email_title()
p.main()
| mit | Python |
c98ff54d2888a0fdaa6634d065876d4cf5514b65 | Add compat file | scaramallion/pynetdicom3,scaramallion/pynetdicom3,scaramallion/pynetdicom,scaramallion/pynetdicom | pynetdicom3/compatibility.py | pynetdicom3/compatibility.py | """Compatibility module for python2.7/python3."""
import sys

# True when the running interpreter is Python 2.x.
IN_PYTHON2 = sys.version_info.major == 2

if IN_PYTHON2:
    # Python 2 names the module `Queue`; alias it to the Python 3 name so the
    # rest of the package can always `from .compatibility import queue`.
    import Queue as queue
else:
    import queue
| mit | Python | |
5be91f4e7b3607090e94fbf221628a359063823d | Use indexed text with sqlite | PDOK/data.labs.pdok.nl,PDOK/data.labs.pdok.nl,PDOK/data.labs.pdok.nl,PDOK/data.labs.pdok.nl,PDOK/data.labs.pdok.nl | data/bag-brk/create_db.py | data/bag-brk/create_db.py | import csv
import sqlite3

# One-off loader: bulk-import processed-lines.csv into the local SQLite db.
conn = sqlite3.connect('processed-lines.db')
c = conn.cursor()
# Schema/index creation is commented out; uncomment for a fresh database.
# c.execute('CREATE TABLE processed (cadastral_designation text, bag_pand_id text, match_type text, parcel_uri text, '
# 'dummy text, mother_parcel_match text, parcel_error text, timestamp timestamp default CURRENT_TIMESTAMP)')
# c.execute('create index cadastral_designation_idx on processed (cadastral_designation)')

# Read the whole CSV into memory first, then insert in one executemany call.
# NOTE(review): for a very large file, feeding the csv.reader directly to
# executemany would avoid holding every row in memory.
processed_lines = []
print('Opening file...')
with open('processed-lines.csv', 'r') as pr:
    processed = csv.reader(pr)
    for line in processed:
        processed_lines.append(line)
print('Inserting rows into database...')
c.executemany('INSERT INTO processed (cadastral_designation, bag_pand_id, match_type, parcel_uri, '
'dummy, mother_parcel_match, parcel_error) VALUES (?, ?, ?, ?, ?, ?, ?)', processed_lines)
print('Committing records...')
# Single commit at the end keeps the whole import in one transaction.
conn.commit()
conn.close()
print('Done!')
8279485c7c29453bea744ad137bb38552ac83bdb | Create master_script.py | muhdamrullah/air-auth,muhdamrullah/air-auth,muhdamrullah/air-auth | scripts/hello_again/master_script.py | scripts/hello_again/master_script.py | import subprocess
import os

# Launch the four pipeline stages as detached child processes, discarding
# their stdout/stderr via /dev/null.
FNULL = open(os.devnull, 'w')
# FNULL is intentionally left open: the children inherit the handle for the
# whole of their lifetime. None of the Popen handles are kept/waited on.
subprocess.Popen('python live_stream.py', shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
subprocess.Popen('python processed_stream.py', shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
subprocess.Popen('python database_lookup.py', shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
subprocess.Popen('python trigger.py', shell=True, stdout=FNULL, stderr=subprocess.STDOUT)
| mit | Python | |
149631d1ad2733792feedf24d67575159289025c | set MOCK_FORENSIC_AUTH default to False | TeamHG-Memex/Datawake,Sotera/datawake-prefetch,Sotera/datawake-prefetch,Sotera/Datawake-Legacy,diffeo/Datawake,Sotera/Datawake-Legacy,Sotera/datawake-prefetch,TeamHG-Memex/Datawake,Sotera/Datawake-Legacy,Sotera/Datawake-Legacy,Sotera/datawake-prefetch,Sotera/Datawake-Legacy,TeamHG-Memex/Datawake,diffeo/Datawake,diffeo/Datawake | forensic/session.py | forensic/session.py | """
Copyright 2014 Sotera Defense Solutions, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import tangelo
import cherrypy
from datawaketools import googleauth
from datawaketools import datawake_db
from datawaketools import datawakeconfig
"""
To use sessions you must set the cherrypy configuration. Currently this is done manually / hard coded.
working with kitware to improve for tangelo
"""
# When True, googleauth.getUserFromToken returns a mock identity instead of
# validating the token (local development only).
MOCK_FORENSIC_AUTH = False
# Organization assigned to users when auth is mocked.
MOCK_USER_ORG = 'MEMEXDEMO'


@tangelo.restful
def get():
    # Report whether the caller already has an authenticated session.
    if 'user' in cherrypy.session:
        return json.dumps(dict(user=cherrypy.session['user'], hasSession=True))
    return json.dumps(dict(hasSession=False))


@tangelo.restful
def post(token=u''):
    # Establish (or reuse) a session from a Google OAuth token.
    user = None
    if 'user' in cherrypy.session and 'token' in cherrypy.session and cherrypy.session['token'] == token:
        # Same token as the existing session: reuse it without re-verifying.
        tangelo.log('plugin-sever.session tokens matched using existing session.')
        user = cherrypy.session['user']
    else:
        user = googleauth.getUserFromToken(token,mock = MOCK_FORENSIC_AUTH)
        tangelo.log('session.post verified user: '+str(user))
        if not datawakeconfig.MOCK_AUTH and not MOCK_FORENSIC_AUTH:
            # Real auth: the user must belong to exactly one organization.
            orgs = datawake_db.getOrgLinks(user['email'])
            assert(len(orgs) == 1)
            user['org'] = orgs[0]
        else:
            user['org'] = MOCK_USER_ORG
        # Cache identity and token so subsequent posts can short-circuit.
        cherrypy.session['user'] = user
        cherrypy.session['token'] = token
    return json.dumps(user)


@tangelo.restful
def delete():
    # Log out: drop cached identity/token and expire the cookie session.
    if 'user' in cherrypy.session:
        del cherrypy.session['user']
    if 'token' in cherrypy.session:
        del cherrypy.session['token']
    cherrypy.lib.sessions.expire()
    tangelo.log('manually expired session')
    return json.dumps(dict(removedSession=True))
| """
Copyright 2014 Sotera Defense Solutions, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import tangelo
import cherrypy
from datawaketools import googleauth
from datawaketools import datawake_db
from datawaketools import datawakeconfig
"""
To use sessions you must set the cherrypy configuration. Currently this is done manually / hard coded.
working with kitware to improve for tangelo
"""
MOCK_FORENSIC_AUTH = True
MOCK_USER_ORG = 'MEMEXDEMO'
@tangelo.restful
def get():
if 'user' in cherrypy.session:
return json.dumps(dict(user=cherrypy.session['user'], hasSession=True))
return json.dumps(dict(hasSession=False))
@tangelo.restful
def post(token=u''):
user = None
if 'user' in cherrypy.session and 'token' in cherrypy.session and cherrypy.session['token'] == token:
tangelo.log('plugin-sever.session tokens matched using existing session.')
user = cherrypy.session['user']
else:
user = googleauth.getUserFromToken(token,mock = MOCK_FORENSIC_AUTH)
tangelo.log('session.post verified user: '+str(user))
if not datawakeconfig.MOCK_AUTH and not MOCK_FORENSIC_AUTH:
orgs = datawake_db.getOrgLinks(user['email'])
assert(len(orgs) == 1)
user['org'] = orgs[0]
else:
user['org'] = MOCK_USER_ORG
cherrypy.session['user'] = user
cherrypy.session['token'] = token
return json.dumps(user)
@tangelo.restful
def delete():
if 'user' in cherrypy.session:
del cherrypy.session['user']
if 'token' in cherrypy.session:
del cherrypy.session['token']
cherrypy.lib.sessions.expire()
tangelo.log('manually expired session')
return json.dumps(dict(removedSession=True))
| apache-2.0 | Python |
7e5800d46d8dd49b421b3cdcd12701f64da994c3 | Add script for testing performance. | tkelman/utf8rewind,tkelman/utf8rewind,tkelman/utf8rewind,tkelman/utf8rewind | performance.py | performance.py | import argparse
import os.path
import re
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Runs combinations of performance tests.')
    parser.add_argument(
        '--config',
        dest = 'config',
        default = '',
        help = 'Override configuration to <Platform>_<Configuration>, i.e. x64_Debug.'
    )
    parser.add_argument(
        '--casefolding',
        dest = 'casefolding',
        action = 'store_true',
        help = 'Compare casefolding performance against lowercasing.'
    )
    args = parser.parse_args()

    # Probe for the per-OS build output root.
    path = None
    if os.path.exists('output/windows'):
        path = 'output/windows'
    elif os.path.exists('output/linux'):
        path = 'output/linux'
    if not path:
        print('Failed to find executable path.')
        exit(-1)

    if len(args.config) > 0:
        # Explicit <Platform>_<Configuration> override, e.g. x64_Debug.
        # NOTE(review): non-raw pattern string; r'(\w+)_(\w+)' would avoid
        # invalid-escape warnings on newer Pythons.
        matches = re.match('(\w+)_(\w+)', args.config)
        path = path + '/' + matches.group(1) + '/' + matches.group(2) + '/'
    else:
        # Auto-detect the platform directory...
        if os.path.exists(path + '/x64'):
            path += '/x64'
        elif os.path.exists(path + '/Win32'):
            path += '/Win32'
        else:
            print('Failed to find configuration path at ' + path + '.')
            exit(-1)
        # ...then the configuration directory, preferring Release builds.
        if os.path.exists(path + '/Release'):
            path += '/Release'
        elif os.path.exists(path + '/Debug'):
            path += '/Debug'
        else:
            print('Failed to find release configuration path at ' + path + '.')
            exit(-1)
    executable_path = path + '/tests-rewind.exe'
    if not os.path.exists(executable_path):
        print('Failed to find path at ' + path + '.')
        exit(-1)
    print('Running executable at \"' + executable_path + '\".')
1440023d9a46b5abc0fe9e3a713d437185478102 | Create chucknorris.py | THEMVFFINMAN/PyttleShip,THEMVFFINMAN/Python-Games | Codingame.com/chucknorris.py | Codingame.com/chucknorris.py | import sys
import math


def chuck_encode(message):
    """Encode *message* in Chuck Norris unary code (Codingame puzzle).

    Each character is expanded to its 7-bit binary form; each run of equal
    bits becomes two space-separated blocks: a header ("0" for a run of 1s,
    "00" for a run of 0s) followed by one "0" per bit in the run.
    E.g. "C" -> "0 0 00 0000 0 00".
    """
    from itertools import groupby

    bits = "".join(bin(ord(ch))[2:].zfill(7) for ch in message)
    blocks = []
    for bit, run in groupby(bits):
        blocks.append("0" if bit == "1" else "00")
        blocks.append("0" * sum(1 for _ in run))
    return " ".join(blocks)


if __name__ == '__main__':
    # Codingame harness: read the message from stdin, print the encoding.
    # (Moved under a main guard so importing this module has no side effects.)
    message = input()
    print(chuck_encode(message))
| mit | Python | |
2edb50db002e773b66eb1f824894ee362846f86e | add example using 2 env vars | AlanCoding/Ansible-inventory-file-examples,AlanCoding/Ansible-inventory-file-examples | scripts/environment/dual_env_vars.py | scripts/environment/dual_env_vars.py | #!/usr/bin/env python
from argparse import ArgumentParser
from datetime import datetime
import os
# Static inventory: one local host whose hostvars echo two environment
# variables (TEST_ENV / TEST_ENV2) and the script invocation time.
inventory = {
    'all': {'vars': {'ansible_connection': 'local'}},
    'ungrouped': {'hosts': ['localhost']},
    '_meta': {'hostvars': {'localhost': {
        'test_env': os.environ.get('TEST_ENV', False),
        'test_env2': os.environ.get('TEST_ENV2', False),
        'current_time': str(datetime.now())
    }}}
}


def parse_args():
    """Parse the standard dynamic-inventory CLI (--list / --host)."""
    parser = ArgumentParser()
    parser.add_argument('--list', dest='list_instances', action='store_true', default=True,
                        help='List instances (default: True)')
    parser.add_argument('--host', dest='requested_host', help='Get all the variables about a specific instance')
    return parser.parse_args()


def load_inventory():
    """Print the inventory for Ansible's ``--list`` invocation.

    The output must be valid JSON: Ansible parses this script's stdout, and
    the former ``print(inventory)`` emitted a Python repr (single quotes),
    which a JSON parser rejects.
    """
    import json

    args = parse_args()
    if args.list_instances:
        print(json.dumps(inventory))


if __name__ == '__main__':
    load_inventory()
| mit | Python | |
20cc61dd008fa4c1ced0d4726baaeba78d0d1f05 | Update get_posts example script for Python 3. | Aloomaio/facebook-sdk,mobolic/facebook-sdk | examples/get_posts.py | examples/get_posts.py | """
A simple example script to get all posts on a user's timeline.
Originally created by Mitchell Stewart.
<https://gist.github.com/mylsb/10294040>
"""
import facebook
import requests
def some_action(post):
    """Handle a single Graph API post dict.

    This demo implementation just prints the post's ``created_time``; swap
    in whatever per-post handling you need (e.g. grab ``post['message']``
    or ``post['picture']``).
    """
    created = post['created_time']
    print(created)
# You'll need an access token here to do anything. You can get a temporary one
# here: https://developers.facebook.com/tools/explorer/
access_token = ''
# Look at Bill Gates's profile for this example by using his Facebook id.
user = 'BillGates'

graph = facebook.GraphAPI(access_token)
profile = graph.get_object(user)
# First page of the user's posts; later pages are fetched manually below.
posts = graph.get_connections(profile['id'], 'posts')

# Wrap this block in a while loop so we can keep paginating requests until
# finished.
while True:
    try:
        # Perform some action on each post in the collection we receive from
        # Facebook.
        [some_action(post=post) for post in posts['data']]
        # Attempt to make a request to the next page of data, if it exists.
        posts = requests.get(posts['paging']['next']).json()
    except KeyError:
        # When there are no more pages (['paging']['next']), break from the
        # loop and end the script.
        break
| """
A simple example script to get all posts on a user's timeline.
Originally created by Mitchell Stewart.
<https://gist.github.com/mylsb/10294040>
"""
import facebook
import requests
def some_action(post):
""" Here you might want to do something with each post. E.g. grab the
post's message (post['message']) or the post's picture (post['picture']).
In this implementation we just print the post's created time.
"""
print post['created_time']
# You'll need an access token here to do anything. You can get a temporary one
# here: https://developers.facebook.com/tools/explorer/
access_token = ''
# Look at Bill Gates's profile for this example by using his Facebook id.
user = 'BillGates'
graph = facebook.GraphAPI(access_token)
profile = graph.get_object(user)
posts = graph.get_connections(profile['id'], 'posts')
# Wrap this block in a while loop so we can keep paginating requests until
# finished.
while True:
try:
# Perform some action on each post in the collection we receive from
# Facebook.
[some_action(post=post) for post in posts['data']]
# Attempt to make a request to the next page of data, if it exists.
posts = requests.get(posts['paging']['next']).json()
except KeyError:
# When there are no more pages (['paging']['next']), break from the
# loop and end the script.
break
| apache-2.0 | Python |
6a968d47a3605a4ce0af486b7777497749b4fac6 | Add tests for deleted notification objects | j0gurt/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,vladan-m/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,hyperNURb/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core | src/tests/ggrc_workflows/notifications/test_deleted_objects.py | src/tests/ggrc_workflows/notifications/test_deleted_objects.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
import random
from tests.ggrc import TestCase
from freezegun import freeze_time
from datetime import datetime
from mock import patch
import os
from ggrc import notification
from ggrc.models import Notification, Person
from ggrc_workflows.models import Workflow
from tests.ggrc_workflows.generator import WorkflowsGenerator
from tests.ggrc.api_helper import Api
from tests.ggrc.generator import GgrcGenerator
if os.environ.get('TRAVIS', False):
random.seed(1) # so we can reproduce the tests if needed
class TestNotificationsForDeletedObjects(TestCase):
    """ This class contains simple one time workflow tests that are not
    in the gsheet test grid
    """

    def setUp(self):
        TestCase.setUp(self)
        self.api = Api()
        self.wf_generator = WorkflowsGenerator()
        self.ggrc_generator = GgrcGenerator()
        # Start every test from a clean notification table.
        Notification.query.delete()
        self.random_objects = self.ggrc_generator.generate_random_objects(2)
        _, self.user = self.ggrc_generator.generate_person(user_role="gGRC Admin")
        self.create_test_cases()

    # Class-body-level monkeypatch: wrap Notification.__init__ so that every
    # Notification created during these tests gets created_at stamped with
    # the (possibly frozen) current time. Runs once, at class definition.
    def init_decorator(init):
        def new_init(self, *args, **kwargs):
            init(self, *args, **kwargs)
            if hasattr(self, "created_at"):
                self.created_at = datetime.now()
        return new_init

    Notification.__init__ = init_decorator(Notification.__init__)

    @patch("ggrc.notification.email.send_email")
    def test_delete_activated_workflow(self, mock_mail):
        # Create and activate a quarterly workflow at a fixed "current" date.
        with freeze_time("2015-02-01 13:39:20"):
            _, wf = self.wf_generator.generate_workflow(self.quarterly_wf_1)
            response, wf = self.wf_generator.activate_workflow(wf)
            self.assert200(response)
            user = Person.query.get(self.user.id)
        # Long before the cycle start: no notification expected yet.
        with freeze_time("2015-01-01 13:39:20"):
            _, notif_data = notification.get_todays_notifications()
            self.assertNotIn(user.email, notif_data)
        # Close to the cycle start: the reminder exists -- until the workflow
        # is deleted, after which it must disappear.
        with freeze_time("2015-01-29 13:39:20"):
            _, notif_data = notification.get_todays_notifications()
            self.assertIn(user.email, notif_data)
            self.assertIn("cycle_starts_in", notif_data[user.email])
            workflow = Workflow.query.get(wf.id)
            response = self.wf_generator.api.delete(workflow, workflow.id)
            self.assert200(response)
            _, notif_data = notification.get_todays_notifications()
            user = Person.query.get(self.user.id)
            self.assertNotIn(user.email, notif_data)

    def create_test_cases(self):
        # Build the API payloads used by the tests above.
        def person_dict(person_id):
            return {
                "href": "/api/people/%d" % person_id,
                "id": person_id,
                "type": "Person"
            }

        self.quarterly_wf_1 = {
            "title": "quarterly wf 1",
            "description": "",
            "owners": [person_dict(self.user.id)],
            "frequency": "quarterly",
            "task_groups": [{
                "title": "tg_1",
                "contact": person_dict(self.user.id),
                "task_group_tasks": [{
                    "contact": person_dict(self.user.id),
                    "description": self.wf_generator.random_str(100),
                    "relative_start_day": 5,
                    "relative_start_month": 2,
                    "relative_end_day": 25,
                    "relative_end_month": 2,
                },
                ],
            },
            ]
        }
| apache-2.0 | Python | |
134dbd68cc4630442f1dddb9426207de93c1498b | Add a missing migration for Course.solution_visibility description | matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo | web/courses/migrations/0005_update_solution_visibility_text.py | web/courses/migrations/0005_update_solution_visibility_text.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('courses', '0004_course_institution'),
]
operations = [
migrations.AlterField(
model_name='problemset',
name='solution_visibility',
field=models.CharField(default=b'S', max_length=20, verbose_name='Solution visibility', choices=[(b'H', 'Official solutions are hidden'), (b'S', 'Official solutions are visible when solved'), (b'V', 'Official solutions are visible')]),
),
]
| agpl-3.0 | Python | |
fe5fa8bcde6c98e8dd66cf6190544d4ccf86175e | Add settings file for demo deployments | vladan-m/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,vladan-m/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core | src/ggrc/settings/app_engine_demo.py | src/ggrc/settings/app_engine_demo.py | from app_engine import *
COMPANY = "Reciprocity, Inc."
COMPANY_LOGO_TEXT = "Demo gGRC Implementation"
| apache-2.0 | Python | |
8e54894906f8a67607deb1d81d42eb2a92fafe2e | add rabbit test code | jasonrbriggs/stomp.py,jasonrbriggs/stomp.py | rabbit-test.py | rabbit-test.py | import time
import sys
import stomp
class MyListener(object):
def on_error(self, headers, message):
print 'received an error %s' % message
def on_connecting(self, host_and_port):
print host_and_port
def on_message(self, headers, message):
print 'received a message %s' % message
conn = stomp.Connection([('0.0.0.0', 61613), ('127.0.0.1', 61613)], 'guest', 'guest')
conn.add_listener(MyListener())
conn.start()
conn.connect()
conn.subscribe(destination='/queue/test', ack='auto')
conn.send(' '.join(sys.argv[1:]), destination='/queue/test')
time.sleep(2)
conn.disconnect()
| apache-2.0 | Python | |
880d797a6e1f4e3a9182c647c0530976c1a65fc2 | Add FIWARE Doc Style | Fiware/apps.WMarket,conwetlab/WMarket,Fiware/apps.WMarket,conwetlab/WMarket,Fiware/apps.WMarket,conwetlab/WMarket,Fiware/apps.WMarket,conwetlab/WMarket | doc/conf.py | doc/conf.py | # -*- coding: utf-8 -*-
import os
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
extensions = []
templates_path = ['/home/docs/checkouts/readthedocs.org/readthedocs/templates/sphinx', 'templates', '_templates', '.templates']
source_suffix = ['.rst', '.md']
master_doc = 'index'
project = u'WMarket'
copyright = u'2016'
version = 'develop'
release = 'develop'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
htmlhelp_basename = 'wmarket'
file_insertion_enabled = False
latex_documents = [
('index', 'wmarket.tex', u'WMarket Documentation',
u'', 'manual'),
]
# Only import and set the theme if we're building docs locally
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_style = 'https://www.fiware.org/style/fiware_readthedocs.css'
else:
html_context = {
'css_files': [
'https://media.readthedocs.org/css/sphinx_rtd_theme.css',
'https://media.readthedocs.org/css/readthedocs-doc-embed.css',
'https://www.fiware.org/style/fiware_readthedocs.css',
],
}
| bsd-3-clause | Python | |
1b5a6458c526a9789fab5d08fbbc5cece60f3b62 | add bootstrap file | thomvil/elm-init-scripts,NoRedInk/elm-init-scripts | bootstrap.py | bootstrap.py |
def main():
parser = argparse.ArgumentParser(description='Initialize an Elm page')
parser.add_argument('module_name')
parser.add_argument('destination')
args = parser.parse_args()
bootstrap(args.module_name, args.destination)
if __name__ == '__main__':
main()
| bsd-3-clause | Python | |
8054aa12d4d38b6600750527ed8552058ac216cd | Add 'choose/' from commit '7515ded12265f841375b584cb5da6601cf822a4f' | eddieantonio/big-practice-repo,eddieantonio/big-practice-repo,eddieantonio/big-practice-repo,eddieantonio/big-practice-repo,eddieantonio/big-practice-repo,eddieantonio/big-practice-repo,eddieantonio/big-practice-repo | choose/choose.py | choose/choose.py | #!/usr/bin/env python
# Copyright (C) 2016 Eddie Antonio Santos <easantos@ualberta.ca>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import random
USAGE_TEXT = """
Usage:
{0} args...
Where args are completely arbitrary arguments.
""".format(sys.argv[0])
def invalid_usage(msg):
sys.stderr.write(msg)
sys.stderr.write(USAGE_TEXT)
exit(-1)
if __name__ == '__main__':
choices = sys.argv[1:]
if not choices:
invalid_usage('Need at least one arugment!')
print(random.choice(choices))
| agpl-3.0 | Python | |
bf6e9fd75b4d7d7d7f4c225ba6684e895b79160e | add sign up script | JamisHoo/Yagra,JamisHoo/Yagra | cgi-bin/signup.py | cgi-bin/signup.py | #!/usr/bin/env python
from __future__ import print_function
from common import populate_html
import os
import hashlib
import cgi
import MySQLdb
import smtplib
def process_input():
# Load email and password
form = cgi.FieldStorage()
email = form.getfirst("email")
password = form.getfirst("password")
generate_output(email, password)
def generate_output(email, password):
# Email is not provided
if not email:
print("Content-type: text/html")
print()
print(populate_html("signup.html", {}))
return
# Password is not provided
if not password:
print("Content-type: text/html")
print()
print(populate_html("signup.html", {}))
return
# Insert new user info into database
db_connection = MySQLdb.connect(host="localhost", user="root",
passwd="1234", db="yagra")
db_cursor = db_connection.cursor()
email_hash = hashlib.md5(email).digest()
salt = os.urandom(32)
password_hash = hashlib.sha256(salt + password).digest()
activate_token = os.urandom(32)
# TODO: check activate token field when signing in and resetting password
# TODO: Handle primary key duplicate
try:
db_cursor.execute(
"""INSERT INTO users
(email, email_hash, activate_token, salt, passwd_hash)
VALUES (%s, %s, %s, %s, %s)""",
(email, email_hash, activate_token, salt, password_hash))
db_connection.commit()
except MySQLdb.IntegrityError:
print("Content-type: text/html")
print()
print("Email already used")
return
# Send an activate email
from_addr = "jamis@test.jamis.xyz"
activate_link = "http://121.42.28.81/cgi-bin/activate.py?token=%s" % activate_token.encode("hex").upper()
email_content = populate_html("activate.email", dict(link = activate_link))
smtp_server = smtplib.SMTP("localhost")
smtp_server.sendmail(from_addr, email, email_content);
smtp_server.quit()
print("Content-type: text/html")
print()
print("Activate email sent")
try:
process_input()
except:
cgi.print_exception()
| mit | Python | |
64fce7c67849f44492d55ccf8a745b252bf1368b | Add some tests for polynomial printing. | jakirkham/numpy,bertrand-l/numpy,githubmlai/numpy,SiccarPoint/numpy,jorisvandenbossche/numpy,ChristopherHogan/numpy,GrimDerp/numpy,simongibbons/numpy,pizzathief/numpy,ahaldane/numpy,argriffing/numpy,sigma-random/numpy,kirillzhuravlev/numpy,MSeifert04/numpy,NextThought/pypy-numpy,matthew-brett/numpy,astrofrog/numpy,ogrisel/numpy,dch312/numpy,sonnyhu/numpy,behzadnouri/numpy,ssanderson/numpy,mattip/numpy,dwf/numpy,anntzer/numpy,pelson/numpy,dwf/numpy,Dapid/numpy,madphysicist/numpy,tdsmith/numpy,ChanderG/numpy,stefanv/numpy,nguyentu1602/numpy,skymanaditya1/numpy,tynn/numpy,SunghanKim/numpy,andsor/numpy,Yusa95/numpy,Anwesh43/numpy,jonathanunderwood/numpy,felipebetancur/numpy,tdsmith/numpy,chiffa/numpy,ajdawson/numpy,brandon-rhodes/numpy,mingwpy/numpy,b-carter/numpy,sigma-random/numpy,pdebuyl/numpy,maniteja123/numpy,rhythmsosad/numpy,ContinuumIO/numpy,CMartelLML/numpy,MichaelAquilina/numpy,Linkid/numpy,ESSS/numpy,dch312/numpy,matthew-brett/numpy,BabeNovelty/numpy,MSeifert04/numpy,kirillzhuravlev/numpy,GrimDerp/numpy,drasmuss/numpy,MaPePeR/numpy,WarrenWeckesser/numpy,yiakwy/numpy,ViralLeadership/numpy,jorisvandenbossche/numpy,chiffa/numpy,mathdd/numpy,sinhrks/numpy,CMartelLML/numpy,ahaldane/numpy,numpy/numpy,matthew-brett/numpy,dimasad/numpy,brandon-rhodes/numpy,jankoslavic/numpy,AustereCuriosity/numpy,mhvk/numpy,jakirkham/numpy,endolith/numpy,MichaelAquilina/numpy,jakirkham/numpy,larsmans/numpy,has2k1/numpy,bmorris3/numpy,grlee77/numpy,KaelChen/numpy,gfyoung/numpy,rherault-insa/numpy,rgommers/numpy,nbeaver/numpy,ddasilva/numpy,naritta/numpy,AustereCuriosity/numpy,seberg/numpy,shoyer/numpy,skwbc/numpy,endolith/numpy,ewmoore/numpy,abalkin/numpy,cowlicks/numpy,ViralLeadership/numpy,kiwifb/numpy,groutr/numpy,Yusa95/numpy,nbeaver/numpy,Srisai85/numpy,pyparallel/numpy,njase/numpy,Yusa95/numpy,WarrenWeckesser/numpy,seberg/numpy,rmcgibbo/numpy,musically-ut/numpy,mathdd/numpy,naritta/numpy,bertran
d-l/numpy,cjermain/numpy,dato-code/numpy,matthew-brett/numpy,jschueller/numpy,mathdd/numpy,bmorris3/numpy,SunghanKim/numpy,stuarteberg/numpy,embray/numpy,astrofrog/numpy,andsor/numpy,b-carter/numpy,skwbc/numpy,rajathkumarmp/numpy,matthew-brett/numpy,tynn/numpy,tynn/numpy,skymanaditya1/numpy,moreati/numpy,b-carter/numpy,argriffing/numpy,embray/numpy,rherault-insa/numpy,jorisvandenbossche/numpy,felipebetancur/numpy,njase/numpy,ddasilva/numpy,BabeNovelty/numpy,ekalosak/numpy,pizzathief/numpy,kirillzhuravlev/numpy,NextThought/pypy-numpy,joferkington/numpy,WillieMaddox/numpy,rgommers/numpy,grlee77/numpy,MaPePeR/numpy,githubmlai/numpy,tacaswell/numpy,larsmans/numpy,WarrenWeckesser/numpy,dwf/numpy,madphysicist/numpy,BabeNovelty/numpy,rgommers/numpy,rudimeier/numpy,githubmlai/numpy,charris/numpy,drasmuss/numpy,empeeu/numpy,ChanderG/numpy,maniteja123/numpy,utke1/numpy,jankoslavic/numpy,stefanv/numpy,simongibbons/numpy,cowlicks/numpy,skwbc/numpy,gfyoung/numpy,dwillmer/numpy,Linkid/numpy,stuarteberg/numpy,charris/numpy,yiakwy/numpy,gfyoung/numpy,yiakwy/numpy,mattip/numpy,ContinuumIO/numpy,ChristopherHogan/numpy,embray/numpy,BMJHayward/numpy,pizzathief/numpy,rhythmsosad/numpy,empeeu/numpy,leifdenby/numpy,mortada/numpy,MichaelAquilina/numpy,grlee77/numpy,ssanderson/numpy,trankmichael/numpy,ssanderson/numpy,moreati/numpy,larsmans/numpy,chatcannon/numpy,naritta/numpy,sinhrks/numpy,astrofrog/numpy,shoyer/numpy,ESSS/numpy,skymanaditya1/numpy,mortada/numpy,NextThought/pypy-numpy,pbrod/numpy,pyparallel/numpy,ekalosak/numpy,hainm/numpy,has2k1/numpy,has2k1/numpy,Anwesh43/numpy,mindw/numpy,ogrisel/numpy,abalkin/numpy,mwiebe/numpy,kiwifb/numpy,trankmichael/numpy,MSeifert04/numpy,sinhrks/numpy,utke1/numpy,SiccarPoint/numpy,Eric89GXL/numpy,dch312/numpy,mingwpy/numpy,SiccarPoint/numpy,Yusa95/numpy,CMartelLML/numpy,rhythmsosad/numpy,chatcannon/numpy,SiccarPoint/numpy,jonathanunderwood/numpy,anntzer/numpy,Eric89GXL/numpy,ESSS/numpy,chiffa/numpy,MaPePeR/numpy,Srisai85/numpy,ekalosak/numpy,ahald
ane/numpy,moreati/numpy,numpy/numpy,jschueller/numpy,nguyentu1602/numpy,jakirkham/numpy,gmcastil/numpy,embray/numpy,mwiebe/numpy,pbrod/numpy,shoyer/numpy,solarjoe/numpy,ajdawson/numpy,pbrod/numpy,jorisvandenbossche/numpy,nbeaver/numpy,mhvk/numpy,pyparallel/numpy,SunghanKim/numpy,MaPePeR/numpy,ajdawson/numpy,mingwpy/numpy,ahaldane/numpy,jakirkham/numpy,MSeifert04/numpy,ekalosak/numpy,behzadnouri/numpy,WillieMaddox/numpy,anntzer/numpy,ChristopherHogan/numpy,Anwesh43/numpy,rherault-insa/numpy,mattip/numpy,musically-ut/numpy,ewmoore/numpy,stuarteberg/numpy,solarjoe/numpy,tdsmith/numpy,drasmuss/numpy,madphysicist/numpy,ewmoore/numpy,abalkin/numpy,mingwpy/numpy,BabeNovelty/numpy,mattip/numpy,bmorris3/numpy,jschueller/numpy,musically-ut/numpy,jorisvandenbossche/numpy,hainm/numpy,mhvk/numpy,Srisai85/numpy,joferkington/numpy,ahaldane/numpy,BMJHayward/numpy,rmcgibbo/numpy,astrofrog/numpy,pbrod/numpy,sonnyhu/numpy,charris/numpy,gmcastil/numpy,grlee77/numpy,bringingheavendown/numpy,dwillmer/numpy,BMJHayward/numpy,behzadnouri/numpy,argriffing/numpy,ContinuumIO/numpy,rmcgibbo/numpy,ChanderG/numpy,leifdenby/numpy,numpy/numpy,cjermain/numpy,GrimDerp/numpy,groutr/numpy,gmcastil/numpy,cowlicks/numpy,ewmoore/numpy,sonnyhu/numpy,leifdenby/numpy,madphysicist/numpy,dato-code/numpy,brandon-rhodes/numpy,KaelChen/numpy,charris/numpy,dwf/numpy,nguyentu1602/numpy,MSeifert04/numpy,felipebetancur/numpy,has2k1/numpy,astrofrog/numpy,rudimeier/numpy,WillieMaddox/numpy,sonnyhu/numpy,GaZ3ll3/numpy,GaZ3ll3/numpy,dwf/numpy,mortada/numpy,shoyer/numpy,grlee77/numpy,dimasad/numpy,immerrr/numpy,sinhrks/numpy,BMJHayward/numpy,maniteja123/numpy,seberg/numpy,numpy/numpy,jschueller/numpy,pelson/numpy,pizzathief/numpy,mindw/numpy,felipebetancur/numpy,trankmichael/numpy,GaZ3ll3/numpy,rhythmsosad/numpy,Srisai85/numpy,ogrisel/numpy,chatcannon/numpy,immerrr/numpy,pelson/numpy,pelson/numpy,tdsmith/numpy,SunghanKim/numpy,ViralLeadership/numpy,Linkid/numpy,mortada/numpy,seberg/numpy,tacaswell/numpy,groutr/numpy,pdebu
yl/numpy,sigma-random/numpy,shoyer/numpy,andsor/numpy,dwillmer/numpy,AustereCuriosity/numpy,GrimDerp/numpy,bmorris3/numpy,ChanderG/numpy,GaZ3ll3/numpy,ajdawson/numpy,rgommers/numpy,stefanv/numpy,brandon-rhodes/numpy,simongibbons/numpy,dato-code/numpy,hainm/numpy,Dapid/numpy,mwiebe/numpy,madphysicist/numpy,stefanv/numpy,jonathanunderwood/numpy,njase/numpy,cjermain/numpy,naritta/numpy,githubmlai/numpy,andsor/numpy,kirillzhuravlev/numpy,ChristopherHogan/numpy,stuarteberg/numpy,KaelChen/numpy,bringingheavendown/numpy,skymanaditya1/numpy,empeeu/numpy,dwillmer/numpy,joferkington/numpy,Anwesh43/numpy,rajathkumarmp/numpy,pizzathief/numpy,empeeu/numpy,ddasilva/numpy,bertrand-l/numpy,utke1/numpy,KaelChen/numpy,sigma-random/numpy,rajathkumarmp/numpy,mhvk/numpy,tacaswell/numpy,Eric89GXL/numpy,jankoslavic/numpy,endolith/numpy,ogrisel/numpy,Dapid/numpy,cowlicks/numpy,yiakwy/numpy,ewmoore/numpy,hainm/numpy,endolith/numpy,ogrisel/numpy,simongibbons/numpy,mathdd/numpy,mindw/numpy,jankoslavic/numpy,bringingheavendown/numpy,rajathkumarmp/numpy,Linkid/numpy,trankmichael/numpy,pbrod/numpy,pdebuyl/numpy,immerrr/numpy,Eric89GXL/numpy,MichaelAquilina/numpy,rudimeier/numpy,nguyentu1602/numpy,mhvk/numpy,immerrr/numpy,CMartelLML/numpy,solarjoe/numpy,musically-ut/numpy,anntzer/numpy,rudimeier/numpy,dimasad/numpy,larsmans/numpy,rmcgibbo/numpy,dimasad/numpy,WarrenWeckesser/numpy,pdebuyl/numpy,simongibbons/numpy,cjermain/numpy,mindw/numpy,embray/numpy,stefanv/numpy,kiwifb/numpy,dch312/numpy,pelson/numpy,NextThought/pypy-numpy,joferkington/numpy,WarrenWeckesser/numpy,dato-code/numpy | numpy/polynomial/tests/test_printing.py | numpy/polynomial/tests/test_printing.py | import numpy.polynomial as poly
from numpy.testing import TestCase, run_module_suite, assert_
class test_str(TestCase):
def test_polynomial_str(self):
res = str(poly.Polynomial([0,1]))
tgt = 'poly([0., 1.])'
assert_(res, tgt)
def test_chebyshev_str(self):
res = str(poly.Chebyshev([0,1]))
tgt = 'leg([0., 1.])'
assert_(res, tgt)
def test_legendre_str(self):
res = str(poly.Legendre([0,1]))
tgt = 'leg([0., 1.])'
assert_(res, tgt)
def test_hermite_str(self):
res = str(poly.Hermite([0,1]))
tgt = 'herm([0., 1.])'
assert_(res, tgt)
def test_hermiteE_str(self):
res = str(poly.HermiteE([0,1]))
tgt = 'herme([0., 1.])'
assert_(res, tgt)
def test_laguerre_str(self):
res = str(poly.Laguerre([0,1]))
tgt = 'lag([0., 1.])'
assert_(res, tgt)
class test_repr(TestCase):
def test_polynomial_str(self):
res = repr(poly.Polynomial([0,1]))
tgt = 'Polynomial([0., 1.])'
assert_(res, tgt)
def test_chebyshev_str(self):
res = repr(poly.Chebyshev([0,1]))
tgt = 'Chebyshev([0., 1.], [-1., 1.], [-1., 1.])'
assert_(res, tgt)
def test_legendre_repr(self):
res = repr(poly.Legendre([0,1]))
tgt = 'Legendre([0., 1.], [-1., 1.], [-1., 1.])'
assert_(res, tgt)
def test_hermite_repr(self):
res = repr(poly.Hermite([0,1]))
tgt = 'Hermite([0., 1.], [-1., 1.], [-1., 1.])'
assert_(res, tgt)
def test_hermiteE_repr(self):
res = repr(poly.HermiteE([0,1]))
tgt = 'HermiteE([0., 1.], [-1., 1.], [-1., 1.])'
assert_(res, tgt)
def test_laguerre_repr(self):
res = repr(poly.Laguerre([0,1]))
tgt = 'Laguerre([0., 1.], [0., 1.], [0., 1.])'
assert_(res, tgt)
#
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | Python | |
96fa9dd03ec6a97f6b99c1556f84b0b50824ee53 | Create keyexpansion.py | deekshadangwal/PyRTL,nvandervoort/PyRTL,UCSBarchlab/PyRTL,UCSBarchlab/PyRTL,nvandervoort/PyRTL,deekshadangwal/PyRTL | research/aes/keyexpansion.py | research/aes/keyexpansion.py | import sys
sys.path.append("../..")
import pyrtl
from pyrtl import *
import func_g
from func_g import *
def KeyExpansion(in_vector):
""" KeyExpansion round of AES.
Input: 16-byte key.
Output: 176-byte expanded key.
"""
w0 = in_vector[96:128]
w1 = in_vector[64:96]
w2 = in_vector[32:64]
w3 = in_vector[0:32]
w4 = w0 ^ g_w3(w3)
w5 = w4 ^ w1
w6 = w5 ^ w2
w7 = w6 ^ w3
w8 = w4 ^ g_w7(w7)
w9 = w8 ^ w5
w10 = w9 ^ w6
w11 = w10 ^ w7
w12 = w8 ^ g_w11(w11)
w13 = w12 ^ w9
w14 = w13 ^ w10
w15 = w14 ^ w11
w16 = w12 ^ g_w15(w15)
w17 = w16 ^ w13
w18 = w17 ^ w14
w19 = w18 ^ w15
w20 = w16 ^ g_w19(w19)
w21 = w20 ^ w17
w22 = w21 ^ w18
w23 = w22 ^ w19
w24 = w20 ^ g_w23(w23)
w25 = w24 ^ w21
w26 = w25 ^ w22
w27 = w26 ^ w23
w28 = w24 ^ g_w27(w27)
w29 = w28 ^ w25
w30 = w29 ^ w26
w31 = w30 ^ w27
w32 = w28 ^ g_w31(w31)
w33 = w32 ^ w29
w34 = w33 ^ w30
w35 = w34 ^ w31
w36 = w32 ^ g_w35(w35)
w37 = w36 ^ w33
w38 = w37 ^ w34
w39 = w38 ^ w35
w40 = w36 ^ g_w39(w39)
w41 = w40 ^ w37
w42 = w41 ^ w38
w43 = w42 ^ w39
out_vector = pyrtl.concat(w0, w1, w2, w3,
w4, w5, w6, w7,
w8, w9, w10, w11,
w12, w13, w14, w15,
w16, w17, w18, w19,
w20, w21, w22, w23,
w24, w25, w26, w27,
w28, w29, w30, w31,
w32, w33, w34, w35,
w36, w37, w38, w39,
w40, w41, w42, w43)
return out_vector
# Hardware build.
aes_input = pyrtl.Input(bitwidth=128, name='aes_input')
aes_output = pyrtl.Output(bitwidth=1408, name='aes_output')
aes_output <<= KeyExpansion(aes_input)
print pyrtl.working_block()
print
sim_trace = pyrtl.SimulationTrace()
sim = pyrtl.Simulation(tracer=sim_trace)
# 000102030405060708090a0b0c0d0e0f
# 0f1571c947d9e8590cb7adaf7f6798
# ffffffffffffffffffffffffffffffff
for cycle in range(1):
sim.step({aes_input: 0x00000000000000000000000000000000})
sim_trace.render_trace(symbol_len=40, segment_size=1)
| bsd-3-clause | Python | |
d251a2b2cd449ed5078b41b09f50003786f3bbde | Create script to pad and trim fastq reads to one length | alliemacleay/misc | seq_pad.py | seq_pad.py | #!/usr/bin/python
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
__author__= 'Allison MacLeay'
import sys
import os
import argparse
import glob
import gzip
#-----------------------------------------
# MAIN
# run umitag.py for all files in a directory
# that have the same prefix
#-----------------------------------------
if __name__ == '__main__':
parser=argparse.ArgumentParser(description="Run umitag utility in batches of similarly prefixed names.")
parser.add_argument('--dir', default='.', help='directory containing fastq output')
parser.add_argument('--out', default='seq_pad_out', help='directory for output')
parser.add_argument('--len', default=147, help='length to trim and pad to')
args=parser.parse_args()
l=int(args.len)
os.system("mkdir -p " + args.out)
files = glob.glob(os.path.join(args.dir,"*.fastq.gz"))
for f in files:
pfx = f.split('.')[0].split('/')[-1]
fh = gzip.open(f,'r')
out = gzip.open(os.path.join(args.out, pfx + "_padded.fastq.gz"),'wb')
ct=0
for line in fh:
line = line.strip()
ct+=1
if ct%4 == 2:
#sequence
if len(line) < l:
line = line + ('N'* (l-len(line)))
print line[:l]
out.write(line[:l])
if ct%4 == 0:
#quality
if len(line) < l:
line = line + ('#'* (l-len(line)))
print line[:l]
out.write(line[:l])
fh.close()
out.close()
| mit | Python | |
19e3cd7256d5eb25eee8597f82bb2dd3fc019f03 | add a low-level random kick agent | LARG/HFO,mhauskn/HFO,mhauskn/HFO,mhauskn/HFO,LARG/HFO,LARG/HFO | example/low_level_random_kick_agent.py | example/low_level_random_kick_agent.py | #!/usr/bin/env python3
# encoding: utf-8
from hfo import *
import argparse
import numpy as np
import math as m
import sys, os
import itertools
def rad_to_deg(rad):
return rad/m.pi*180
def sign(x):
return (int(x>=0)-0.5)*2
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--port', type=int, default=6000, help="Server port")
args=parser.parse_args()
hfo = HFOEnvironment()
hfo.connectToServer(LOW_LEVEL_FEATURE_SET,
'/bin/teams/base/config/formations-dt',
args.port,'localhost','base_left',False)
States, Actions, Statuses = [], [], []
for episode in itertools.count():
status=IN_GAME
while status==IN_GAME:
state = hfo.getState()
if int(state[12]) == 1: # Kickable = 1
goal_center_angle = rad_to_deg(m.acos(state[14])) * sign(m.asin(state[13]))
# turn to goal center
if abs(goal_center_angle) > 45:
hfo.act(1, goal_center_angle)
# kick
else:
power = np.random.uniform(0,100)
hfo.act(3, power, goal_center_angle)
else: # Kickable = -1
ball_angle = rad_to_deg(m.acos(state[52])) * sign(m.asin(state[51]))
# turn to ball
if abs(ball_angle) > 10:
hfo.act(1, ball_angle)
# go to ball
else:
power = np.random.uniform(0,100)
hfo.act(0, power, ball_angle)
status = hfo.step()
#--------------- end of while loop ------------------------------------------------------
# Quit if the server goes down
if status == SERVER_DOWN:
hfo.act(QUIT)
break
| mit | Python | |
5eae0b790fb84ec77bc1f7a28706eecb9f25ef1f | Add another specific script | moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE,moschlar/SAUCE | sauce/bin/add_dummy_users.py | sauce/bin/add_dummy_users.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Add new users by CSV file
@author: moschlar
"""
#
## SAUCE - System for AUtomated Code Evaluation
## Copyright (C) 2013 Moritz Schlarb
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os, sys, csv, time
from argparse import ArgumentParser
from sqlalchemy.exc import SQLAlchemyError
from paste.deploy import appconfig
from tg import config
from sauce.config.environment import load_environment
from sauce import model
from sauce.lib.mail import sendmail
import transaction
from sqlalchemy.orm.exc import NoResultFound
def load_config(filename):
conf = appconfig('config:' + os.path.abspath(filename))
load_environment(conf.global_conf, conf.local_conf)
def parse_args():
parser = ArgumentParser(description=__doc__)
parser.add_argument("conf_file", help="configuration to use")
parser.add_argument("csv_file", help="csv file to parse")
parser.add_argument("csv_fields", default='firstrow', nargs='?',
help="csv field names, comma separated - field names that match a database field get used")
return parser.parse_args()
def main():
args = parse_args()
load_config(args.conf_file)
#event = model.Event.by_url(args.event_url)
if args.csv_fields == 'firstrow':
fields = None
else:
fields = args.csv_fields
with open(args.csv_file) as f:
reader = csv.DictReader(f, fieldnames=fields, dialect=csv.excel_tab)
dicts = list(reader)
errors = []
for d in dicts:
print d
try:
s = model.User.query.filter_by(user_name=d['user_name']).one()
except NoResultFound:
s = model.User(user_name=d['user_name'])
model.DBSession.add(s)
s._last_name = d['last_name'].decode('utf-8')
s._first_name = d['first_name'].decode('utf-8')
s.email_address = d['email_address']
try:
#model.DBSession.flush()
transaction.commit()
except SQLAlchemyError as e:
#model.DBSession.rollback()
transaction.abort()
#print e.message
errors.append((e, s))
#raise e
# try:
# transaction.commit()
# except SQLAlchemyError as e:
# transaction.abort()
# raise e
print errors
if __name__ == '__main__':
print >>sys.stderr, 'Do not use this program unmodified.'
sys.exit(1)
sys.exit(main())
| agpl-3.0 | Python | |
0095e75ce94b3181d56f7783110cadd3d9730577 | add version 0.0.1 for initialize | ray-g/FormulaCooker | __init__.py | __init__.py | from __future__ import absolute_import
__version__ = '0.0.1' | mit | Python | |
d25af87006ac21f55706c3a5579aec3c961b88e8 | Add script to download data from MDCS | wd15/sem-image-stats | download_mdcs_data.py | download_mdcs_data.py | """Fetch images from MDCS.
"""
import json
import requests
import xmltodict
def download_mdcs_data():
user = "dwheeler"
password = "12345"
mdcs_url = "http://129.6.153.123:8000"
schema_title = 'SemImage'
url = mdcs_url + "/rest/templates/select/all"
allSchemas = json.loads(requests.get(url, auth=(user, password)).text)
schemaIDs = [schema['id'] for schema in allSchemas if schema['title'] == schema_title]
url = mdcs_url + "/rest/explore/query-by-example"
query = {"schema" : schemaIDs[0]}
req_data = {"query" : json.dumps(query)}
qres = json.loads(requests.post(url, req_data, auth=(user, password)).text)
imgfile = [data['title'] for data in qres]
img_urls = [xmltodict.parse(data['content'])['semImage']['imageFile'] for data in qres]
# for i in range(len(qres)):
# imgfile.append(qres[i]['title'])
# content = qres[i]['content']
# # qdata = DMD.DataModelDict(content)
# content_dict = xmltodict.parse(content)
# # img_urls.append(qdata.find('imageFile'))
# img_urls.append(content_dict['semImage']['imageFile'])
print("no_images: ",len(img_urls))
print()
print(imgfile)
print()
print(img_urls)
if __name__ == '__main__':
download_mdcs_data()
| mit | Python | |
3e73bbb51c7b5b214dc2c79e5a1cb8b00f243855 | Fix tests | qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/locations/tests/test_location_fixtures.py | corehq/apps/locations/tests/test_location_fixtures.py | from django.test import SimpleTestCase
from corehq.apps.locations.models import SQLLocation, LocationType
from ..fixtures import _location_to_fixture, _location_footprint
class LocationFixturesTest(SimpleTestCase):
def test_metadata(self):
state = LocationType(
domain="test-domain",
name="state",
code="state",
)
location = SQLLocation(
location_id="unique-id",
domain="test-domain",
name="Braavos",
location_type=state,
metadata={'best_swordsman': "Sylvio Forel",
'in_westeros': "false"},
)
location_db = _location_footprint([location])
fixture = _location_to_fixture(location_db, location, id)
location_data = {
e.tag: e.text for e in fixture.find('location_data')
}
self.assertEquals(location_data, location.metadata)
| from django.test import SimpleTestCase
from corehq.apps.locations.models import Location
from ..fixtures import _location_to_fixture, _location_footprint
class LocationFixturesTest(SimpleTestCase):
def test_metadata(self):
location = Location(
_id="unique-id",
domain="test-domain",
name="Braavos",
location_type="state",
metadata={'best_swordsman': "Sylvio Forel",
'in_westeros': "false"},
)
location_db = _location_footprint([location])
fixture = _location_to_fixture(location_db, location, id)
location_data = {
e.tag: e.text for e in fixture.find('location_data')
}
self.assertEquals(location_data, location.metadata)
| bsd-3-clause | Python |
ae0ef3a709e22774cf98f4c3010b66fb1be172bc | add class Datapoint | Isotop7/pyrtemonnaie | Datapoint.py | Datapoint.py | import re
from datetime import date
class Datapoint:
def __init__(self, recipient, s_date="01.01.1970", value=0.0, comment=""):
self.__Recipient = recipient
self.__Date = s_date
self.__Value = value
self.__Comment = comment
def get_recipient(self):
return self.__Recipient
def set_recipient(self, recipient):
self.__Recipient = recipient
def get_date(self):
return self.__Date
def set_date(self, s_date):
try:
regex = re.compile(r'\d{1,2}\.\d{1,2}\.\d{4}')
m = regex.match(date)
if m:
date_split = s_date.split(".")
self.__Date = date(date_split[0], date_split[1], date_split[2])
else:
raise ValueError
except ValueError:
now = date.today()
self.__Date = now.strftime("%d.%m.%Y")
def get_value(self):
return self.__Value
def set_value(self, value):
if value < 0:
raise ValueError
else:
self.__Value = value
def get_comment(self):
return self.__Comment
def set_comment(self, comment):
if ";" in comment:
raise ValueError
else:
self.__Comment = comment
    def to_String(self):
        # Serialize as 'recipient;date;value;comment'; the ';' separator is
        # why set_comment() rejects comments containing ';'.
        # NOTE(review): the trailing "| mit | Python |" below appears to be
        # dataset/license metadata fused onto this line, not Python code --
        # confirm before running this file.
        return self.__Recipient + ";" + self.__Date + ";" + str(self.__Value) + ";" + self.__Comment | mit | Python |
f6637b7fe03e9d72b8053972c444840921b84d6e | Add a registry that keeps track of tag->renderer/input mappings | motion2015/edx-platform,BehavioralInsightsTeam/edx-platform,hkawasaki/kawasaki-aio8-2,ferabra/edx-platform,halvertoluke/edx-platform,Stanford-Online/edx-platform,tanmaykm/edx-platform,cecep-edu/edx-platform,atsolakid/edx-platform,leansoft/edx-platform,ahmadiga/min_edx,edx/edx-platform,jazkarta/edx-platform-for-isc,jruiperezv/ANALYSE,EduPepperPDTesting/pepper2013-testing,zubair-arbi/edx-platform,eestay/edx-platform,jolyonb/edx-platform,tiagochiavericosta/edx-platform,solashirai/edx-platform,motion2015/edx-platform,deepsrijit1105/edx-platform,rue89-tech/edx-platform,raccoongang/edx-platform,SivilTaram/edx-platform,alexthered/kienhoc-platform,a-parhom/edx-platform,wwj718/ANALYSE,pku9104038/edx-platform,halvertoluke/edx-platform,itsjeyd/edx-platform,bdero/edx-platform,y12uc231/edx-platform,xuxiao19910803/edx,xingyepei/edx-platform,procangroup/edx-platform,shubhdev/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,zubair-arbi/edx-platform,cecep-edu/edx-platform,B-MOOC/edx-platform,JioEducation/edx-platform,jamiefolsom/edx-platform,nttks/edx-platform,vismartltd/edx-platform,Unow/edx-platform,valtech-mooc/edx-platform,nikolas/edx-platform,shurihell/testasia,Ayub-Khan/edx-platform,marcore/edx-platform,dsajkl/123,jbzdak/edx-platform,valtech-mooc/edx-platform,edry/edx-platform,raccoongang/edx-platform,louyihua/edx-platform,ferabra/edx-platform,MakeHer/edx-platform,jbassen/edx-platform,mcgachey/edx-platform,SivilTaram/edx-platform,shashank971/edx-platform,xuxiao19910803/edx,abdoosh00/edraak,mitocw/edx-platform,eemirtekin/edx-platform,hastexo/edx-platform,JCBarahona/edX,vasyarv/edx-platform,torchingloom/edx-platform,martynovp/edx-platform,jswope00/GAI,shubhdev/edxOnBaadal,franosincic/edx-platform,nagyistoce/edx-platform,halvertoluke/edx-platform,IndonesiaX/edx-platform,kamalx/edx-platform,devs1991/test_edx_docmode,Kalyzee/edx-platform,chauhanhardik/populo_2,vi
smartltd/edx-platform,motion2015/edx-platform,shubhdev/openedx,vikas1885/test1,ampax/edx-platform-backup,lduarte1991/edx-platform,cselis86/edx-platform,cognitiveclass/edx-platform,leansoft/edx-platform,cselis86/edx-platform,jruiperezv/ANALYSE,antoviaque/edx-platform,alexthered/kienhoc-platform,zofuthan/edx-platform,itsjeyd/edx-platform,10clouds/edx-platform,iivic/BoiseStateX,TsinghuaX/edx-platform,kmoocdev/edx-platform,sudheerchintala/LearnEraPlatForm,abdoosh00/edx-rtl-final,prarthitm/edxplatform,apigee/edx-platform,bitifirefly/edx-platform,doismellburning/edx-platform,xuxiao19910803/edx,eestay/edx-platform,PepperPD/edx-pepper-platform,antonve/s4-project-mooc,shubhdev/openedx,chauhanhardik/populo_2,abdoosh00/edraak,praveen-pal/edx-platform,halvertoluke/edx-platform,cyanna/edx-platform,inares/edx-platform,chauhanhardik/populo_2,cognitiveclass/edx-platform,tanmaykm/edx-platform,CourseTalk/edx-platform,unicri/edx-platform,dcosentino/edx-platform,auferack08/edx-platform,nanolearningllc/edx-platform-cypress,J861449197/edx-platform,fintech-circle/edx-platform,xingyepei/edx-platform,raccoongang/edx-platform,rismalrv/edx-platform,nttks/edx-platform,appliedx/edx-platform,eduNEXT/edx-platform,10clouds/edx-platform,lduarte1991/edx-platform,ovnicraft/edx-platform,Edraak/edx-platform,vasyarv/edx-platform,ubc/edx-platform,pomegranited/edx-platform,wwj718/ANALYSE,gymnasium/edx-platform,ovnicraft/edx-platform,benpatterson/edx-platform,knehez/edx-platform,Stanford-Online/edx-platform,msegado/edx-platform,edx-solutions/edx-platform,ampax/edx-platform-backup,Softmotions/edx-platform,J861449197/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearning/edx-platform,zhenzhai/edx-platform,naresh21/synergetics-edx-platform,fintech-circle/edx-platform,B-MOOC/edx-platform,jbassen/edx-platform,kursitet/edx-platform,morpheby/levelup-by,JioEducation/edx-platform,vikas1885/test1,caesar2164/edx-platform,teltek/edx-platform,jamesblunt/edx-platform,ahmadiga/min_edx,jolyonb/edx-platform,synerget
icsedx/deployment-wipro,syjeon/new_edx,jamesblunt/edx-platform,RPI-OPENEDX/edx-platform,eemirtekin/edx-platform,TeachAtTUM/edx-platform,miptliot/edx-platform,tanmaykm/edx-platform,shurihell/testasia,valtech-mooc/edx-platform,shubhdev/edx-platform,antoviaque/edx-platform,praveen-pal/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,SravanthiSinha/edx-platform,hkawasaki/kawasaki-aio8-0,chudaol/edx-platform,shubhdev/edxOnBaadal,fly19890211/edx-platform,jelugbo/tundex,EduPepperPDTesting/pepper2013-testing,don-github/edx-platform,mbareta/edx-platform-ft,fintech-circle/edx-platform,unicri/edx-platform,jelugbo/tundex,jamiefolsom/edx-platform,zerobatu/edx-platform,Endika/edx-platform,zadgroup/edx-platform,a-parhom/edx-platform,Semi-global/edx-platform,jamiefolsom/edx-platform,jazkarta/edx-platform-for-isc,IONISx/edx-platform,mtlchun/edx,Edraak/circleci-edx-platform,benpatterson/edx-platform,jswope00/griffinx,nagyistoce/edx-platform,CredoReference/edx-platform,beni55/edx-platform,arbrandes/edx-platform,TsinghuaX/edx-platform,IONISx/edx-platform,B-MOOC/edx-platform,kalebhartje/schoolboost,nanolearningllc/edx-platform-cypress,Livit/Livit.Learn.EdX,chudaol/edx-platform,antonve/s4-project-mooc,chudaol/edx-platform,xinjiguaike/edx-platform,torchingloom/edx-platform,MSOpenTech/edx-platform,EduPepperPDTesting/pepper2013-testing,eduNEXT/edx-platform,pepeportela/edx-platform,ZLLab-Mooc/edx-platform,cpennington/edx-platform,jbzdak/edx-platform,xinjiguaike/edx-platform,kmoocdev/edx-platform,Shrhawk/edx-platform,defance/edx-platform,synergeticsedx/deployment-wipro,prarthitm/edxplatform,iivic/BoiseStateX,nanolearningllc/edx-platform-cypress-2,olexiim/edx-platform,edx-solutions/edx-platform,utecuy/edx-platform,rue89-tech/edx-platform,bdero/edx-platform,SravanthiSinha/edx-platform,amir-qayyum-khan/edx-platform,ZLLab-Mooc/edx-platform,IONISx/edx-platform,mtlchun/edx,pabloborrego93/edx-platform,stvstnfrd/edx-platform,longmen21/edx-platform,zadgroup/edx-platform,deepsrijit1105/edx-platform
,nanolearning/edx-platform,adoosii/edx-platform,DefyVentures/edx-platform,arifsetiawan/edx-platform,knehez/edx-platform,naresh21/synergetics-edx-platform,alexthered/kienhoc-platform,IndonesiaX/edx-platform,TeachAtTUM/edx-platform,chauhanhardik/populo,arbrandes/edx-platform,solashirai/edx-platform,pabloborrego93/edx-platform,WatanabeYasumasa/edx-platform,Stanford-Online/edx-platform,gymnasium/edx-platform,eduNEXT/edunext-platform,edry/edx-platform,ovnicraft/edx-platform,playm2mboy/edx-platform,ubc/edx-platform,Edraak/edx-platform,deepsrijit1105/edx-platform,yokose-ks/edx-platform,PepperPD/edx-pepper-platform,rationalAgent/edx-platform-custom,jbzdak/edx-platform,hkawasaki/kawasaki-aio8-1,abdoosh00/edraak,solashirai/edx-platform,mushtaqak/edx-platform,CourseTalk/edx-platform,chand3040/cloud_that,JioEducation/edx-platform,procangroup/edx-platform,BehavioralInsightsTeam/edx-platform,hkawasaki/kawasaki-aio8-2,pabloborrego93/edx-platform,stvstnfrd/edx-platform,alu042/edx-platform,romain-li/edx-platform,nanolearningllc/edx-platform-cypress,dsajkl/reqiop,jazztpt/edx-platform,mtlchun/edx,ak2703/edx-platform,knehez/edx-platform,Endika/edx-platform,Edraak/circleci-edx-platform,jolyonb/edx-platform,EduPepperPD/pepper2013,UOMx/edx-platform,rationalAgent/edx-platform-custom,hkawasaki/kawasaki-aio8-2,kmoocdev/edx-platform,Kalyzee/edx-platform,gsehub/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,shabab12/edx-platform,alu042/edx-platform,JCBarahona/edX,chand3040/cloud_that,miptliot/edx-platform,JioEducation/edx-platform,MSOpenTech/edx-platform,prarthitm/edxplatform,morenopc/edx-platform,polimediaupv/edx-platform,edx-solutions/edx-platform,atsolakid/edx-platform,ubc/edx-platform,nagyistoce/edx-platform,shubhdev/openedx,wwj718/ANALYSE,edry/edx-platform,arbrandes/edx-platform,EduPepperPD/pepper2013,jswope00/GAI,tanmaykm/edx-platform,Kalyzee/edx-platform,iivic/BoiseStateX,xuxiao19910803/edx,rismalrv/edx-platform,dcosentino/edx-platform,doismellburning/edx-platform,dkarakats/edx-
platform,chrisndodge/edx-platform,EduPepperPD/pepper2013,franosincic/edx-platform,ahmedaljazzar/edx-platform,motion2015/edx-platform,DNFcode/edx-platform,itsjeyd/edx-platform,benpatterson/edx-platform,shubhdev/openedx,analyseuc3m/ANALYSE-v1,kamalx/edx-platform,romain-li/edx-platform,devs1991/test_edx_docmode,devs1991/test_edx_docmode,kxliugang/edx-platform,bitifirefly/edx-platform,AkA84/edx-platform,ESOedX/edx-platform,knehez/edx-platform,kmoocdev/edx-platform,mjirayu/sit_academy,nanolearning/edx-platform,jjmiranda/edx-platform,don-github/edx-platform,pdehaye/theming-edx-platform,Ayub-Khan/edx-platform,zhenzhai/edx-platform,auferack08/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,TeachAtTUM/edx-platform,y12uc231/edx-platform,IONISx/edx-platform,cognitiveclass/edx-platform,4eek/edx-platform,itsjeyd/edx-platform,CourseTalk/edx-platform,waheedahmed/edx-platform,LearnEra/LearnEraPlaftform,RPI-OPENEDX/edx-platform,xingyepei/edx-platform,polimediaupv/edx-platform,pelikanchik/edx-platform,yokose-ks/edx-platform,franosincic/edx-platform,jonathan-beard/edx-platform,wwj718/edx-platform,teltek/edx-platform,DefyVentures/edx-platform,shubhdev/edxOnBaadal,kxliugang/edx-platform,jazztpt/edx-platform,playm2mboy/edx-platform,etzhou/edx-platform,Lektorium-LLC/edx-platform,mushtaqak/edx-platform,xuxiao19910803/edx-platform,chrisndodge/edx-platform,zerobatu/edx-platform,mahendra-r/edx-platform,mjg2203/edx-platform-seas,carsongee/edx-platform,xuxiao19910803/edx,martynovp/edx-platform,SivilTaram/edx-platform,arifsetiawan/edx-platform,defance/edx-platform,angelapper/edx-platform,vikas1885/test1,BehavioralInsightsTeam/edx-platform,Shrhawk/edx-platform,mjirayu/sit_academy,philanthropy-u/edx-platform,dcosentino/edx-platform,alexthered/kienhoc-platform,zubair-arbi/edx-platform,kalebhartje/schoolboost,mushtaqak/edx-platform,leansoft/edx-platform,ahmadio/edx-platform,jbzdak/edx-platform,longmen21/edx-platform,martynovp/edx-platform,beacloudgenius/edx-platform,syjeon/new_edx,waheedahme
d/edx-platform,hamzehd/edx-platform,B-MOOC/edx-platform,romain-li/edx-platform,torchingloom/edx-platform,ahmadiga/min_edx,analyseuc3m/ANALYSE-v1,longmen21/edx-platform,Kalyzee/edx-platform,hamzehd/edx-platform,nttks/jenkins-test,jazkarta/edx-platform,msegado/edx-platform,doganov/edx-platform,openfun/edx-platform,MSOpenTech/edx-platform,cselis86/edx-platform,mbareta/edx-platform-ft,jzoldak/edx-platform,waheedahmed/edx-platform,shashank971/edx-platform,kalebhartje/schoolboost,UXE/local-edx,B-MOOC/edx-platform,jelugbo/tundex,arbrandes/edx-platform,EDUlib/edx-platform,simbs/edx-platform,eemirtekin/edx-platform,wwj718/edx-platform,jazztpt/edx-platform,chauhanhardik/populo_2,ahmadiga/min_edx,SravanthiSinha/edx-platform,xuxiao19910803/edx-platform,pdehaye/theming-edx-platform,iivic/BoiseStateX,xuxiao19910803/edx-platform,CourseTalk/edx-platform,cecep-edu/edx-platform,adoosii/edx-platform,pelikanchik/edx-platform,eduNEXT/edunext-platform,halvertoluke/edx-platform,ESOedX/edx-platform,pepeportela/edx-platform,LICEF/edx-platform,bdero/edx-platform,zubair-arbi/edx-platform,wwj718/ANALYSE,Endika/edx-platform,DNFcode/edx-platform,olexiim/edx-platform,inares/edx-platform,arifsetiawan/edx-platform,ahmadio/edx-platform,eduNEXT/edunext-platform,romain-li/edx-platform,polimediaupv/edx-platform,IndonesiaX/edx-platform,CredoReference/edx-platform,nikolas/edx-platform,valtech-mooc/edx-platform,shashank971/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,etzhou/edx-platform,openfun/edx-platform,proversity-org/edx-platform,Unow/edx-platform,chauhanhardik/populo,EduPepperPDTesting/pepper2013-testing,xinjiguaike/edx-platform,simbs/edx-platform,jonathan-beard/edx-platform,AkA84/edx-platform,cselis86/edx-platform,ampax/edx-platform-backup,nanolearning/edx-platform,franosincic/edx-platform,knehez/edx-platform,devs1991/test_edx_docmode,prarthitm/edxplatform,cognitiveclass/edx-platform,waheedahmed/edx-platform,antonve/s4-project-mooc,nttks/edx-platform,gsehub/edx-platform,v
asyarv/edx-platform,LearnEra/LearnEraPlaftform,dcosentino/edx-platform,openfun/edx-platform,ak2703/edx-platform,jjmiranda/edx-platform,morenopc/edx-platform,chrisndodge/edx-platform,kmoocdev2/edx-platform,alu042/edx-platform,CredoReference/edx-platform,msegado/edx-platform,jazkarta/edx-platform-for-isc,nttks/jenkins-test,zhenzhai/edx-platform,pku9104038/edx-platform,zofuthan/edx-platform,andyzsf/edx,jazkarta/edx-platform-for-isc,DNFcode/edx-platform,don-github/edx-platform,MakeHer/edx-platform,Softmotions/edx-platform,pomegranited/edx-platform,4eek/edx-platform,4eek/edx-platform,Shrhawk/edx-platform,marcore/edx-platform,xuxiao19910803/edx-platform,miptliot/edx-platform,nanolearningllc/edx-platform-cypress,shurihell/testasia,MakeHer/edx-platform,mahendra-r/edx-platform,rismalrv/edx-platform,polimediaupv/edx-platform,mahendra-r/edx-platform,fintech-circle/edx-platform,philanthropy-u/edx-platform,unicri/edx-platform,rationalAgent/edx-platform-custom,apigee/edx-platform,ampax/edx-platform,Shrhawk/edx-platform,shabab12/edx-platform,10clouds/edx-platform,nanolearningllc/edx-platform-cypress-2,dsajkl/reqiop,doismellburning/edx-platform,don-github/edx-platform,DefyVentures/edx-platform,nikolas/edx-platform,eestay/edx-platform,zofuthan/edx-platform,RPI-OPENEDX/edx-platform,UXE/local-edx,jazkarta/edx-platform,marcore/edx-platform,beni55/edx-platform,utecuy/edx-platform,morenopc/edx-platform,Semi-global/edx-platform,pku9104038/edx-platform,louyihua/edx-platform,praveen-pal/edx-platform,devs1991/test_edx_docmode,nttks/edx-platform,shashank971/edx-platform,syjeon/new_edx,analyseuc3m/ANALYSE-v1,pomegranited/edx-platform,appliedx/edx-platform,IITBinterns13/edx-platform-dev,ampax/edx-platform,pelikanchik/edx-platform,mushtaqak/edx-platform,kursitet/edx-platform,AkA84/edx-platform,4eek/edx-platform,EDUlib/edx-platform,vasyarv/edx-platform,shurihell/testasia,xinjiguaike/edx-platform,mtlchun/edx,abdoosh00/edraak,edry/edx-platform,hkawasaki/kawasaki-aio8-1,MakeHer/edx-platform,andyzsf/
edx,atsolakid/edx-platform,Edraak/edx-platform,morpheby/levelup-by,don-github/edx-platform,playm2mboy/edx-platform,UXE/local-edx,solashirai/edx-platform,playm2mboy/edx-platform,jazkarta/edx-platform,peterm-itr/edx-platform,jonathan-beard/edx-platform,IndonesiaX/edx-platform,mushtaqak/edx-platform,ahmadiga/min_edx,PepperPD/edx-pepper-platform,Ayub-Khan/edx-platform,chudaol/edx-platform,kxliugang/edx-platform,beacloudgenius/edx-platform,caesar2164/edx-platform,olexiim/edx-platform,tiagochiavericosta/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,RPI-OPENEDX/edx-platform,Stanford-Online/edx-platform,hmcmooc/muddx-platform,hastexo/edx-platform,torchingloom/edx-platform,gsehub/edx-platform,openfun/edx-platform,playm2mboy/edx-platform,jswope00/griffinx,jswope00/GAI,mjg2203/edx-platform-seas,mitocw/edx-platform,xingyepei/edx-platform,EduPepperPDTesting/pepper2013-testing,doismellburning/edx-platform,pdehaye/theming-edx-platform,mjirayu/sit_academy,jazkarta/edx-platform,tiagochiavericosta/edx-platform,nttks/jenkins-test,EduPepperPD/pepper2013,jazkarta/edx-platform,kursitet/edx-platform,IITBinterns13/edx-platform-dev,jamesblunt/edx-platform,rhndg/openedx,beacloudgenius/edx-platform,abdoosh00/edx-rtl-final,Lektorium-LLC/edx-platform,SravanthiSinha/edx-platform,JCBarahona/edX,beacloudgenius/edx-platform,shashank971/edx-platform,waheedahmed/edx-platform,edx-solutions/edx-platform,pabloborrego93/edx-platform,dsajkl/reqiop,shabab12/edx-platform,kmoocdev2/edx-platform,UOMx/edx-platform,nanolearningllc/edx-platform-cypress-2,jzoldak/edx-platform,jzoldak/edx-platform,AkA84/edx-platform,arifsetiawan/edx-platform,ak2703/edx-platform,dsajkl/reqiop,longmen21/edx-platform,mahendra-r/edx-platform,wwj718/ANALYSE,chand3040/cloud_that,a-parhom/edx-platform,apigee/edx-platform,OmarIthawi/edx-platform,cselis86/edx-platform,MSOpenTech/edx-platform,torchingloom/edx-platform,rue89-tech/edx-platform,MakeHer/edx-platform,jelugbo/tundex,Lektorium-LLC/edx-platform,motion2015/a3,ZLLab
-Mooc/edx-platform,deepsrijit1105/edx-platform,nikolas/edx-platform,procangroup/edx-platform,zerobatu/edx-platform,rationalAgent/edx-platform-custom,mtlchun/edx,SravanthiSinha/edx-platform,simbs/edx-platform,UOMx/edx-platform,atsolakid/edx-platform,morenopc/edx-platform,hkawasaki/kawasaki-aio8-2,EDUlib/edx-platform,WatanabeYasumasa/edx-platform,jswope00/griffinx,motion2015/a3,LearnEra/LearnEraPlaftform,naresh21/synergetics-edx-platform,ak2703/edx-platform,shubhdev/edxOnBaadal,ZLLab-Mooc/edx-platform,xingyepei/edx-platform,ESOedX/edx-platform,appliedx/edx-platform,romain-li/edx-platform,motion2015/a3,vikas1885/test1,alexthered/kienhoc-platform,jolyonb/edx-platform,angelapper/edx-platform,IONISx/edx-platform,WatanabeYasumasa/edx-platform,beni55/edx-platform,sudheerchintala/LearnEraPlatForm,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-0,Edraak/edraak-platform,Edraak/edx-platform,nanolearning/edx-platform,LICEF/edx-platform,pdehaye/theming-edx-platform,msegado/edx-platform,mjg2203/edx-platform-seas,jruiperezv/ANALYSE,rue89-tech/edx-platform,DefyVentures/edx-platform,MSOpenTech/edx-platform,chand3040/cloud_that,kamalx/edx-platform,kalebhartje/schoolboost,appliedx/edx-platform,peterm-itr/edx-platform,Semi-global/edx-platform,shurihell/testasia,dsajkl/123,morpheby/levelup-by,carsongee/edx-platform,martynovp/edx-platform,dkarakats/edx-platform,jjmiranda/edx-platform,jonathan-beard/edx-platform,bigdatauniversity/edx-platform,longmen21/edx-platform,kamalx/edx-platform,etzhou/edx-platform,lduarte1991/edx-platform,jbassen/edx-platform,TeachAtTUM/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/circleci-edx-platform,inares/edx-platform,cpennington/edx-platform,beacloudgenius/edx-platform,appsembler/edx-platform,xinjiguaike/edx-platform,inares/edx-platform,dsajkl/123,antoviaque/edx-platform,atsolakid/edx-platform,J861449197/edx-platform,analyseuc3m/ANALYSE-v1,fly19890211/edx-platform,CredoReference/edx-platform,beni55/edx-platform,benpatterson/edx-platform,appliedx/edx
-platform,philanthropy-u/edx-platform,zerobatu/edx-platform,ampax/edx-platform-backup,sudheerchintala/LearnEraPlatForm,Edraak/edraak-platform,TsinghuaX/edx-platform,kxliugang/edx-platform,hmcmooc/muddx-platform,TsinghuaX/edx-platform,fly19890211/edx-platform,ahmadio/edx-platform,kamalx/edx-platform,jjmiranda/edx-platform,Unow/edx-platform,zerobatu/edx-platform,cognitiveclass/edx-platform,mcgachey/edx-platform,tiagochiavericosta/edx-platform,mjirayu/sit_academy,morenopc/edx-platform,LearnEra/LearnEraPlaftform,rismalrv/edx-platform,carsongee/edx-platform,hmcmooc/muddx-platform,adoosii/edx-platform,praveen-pal/edx-platform,abdoosh00/edx-rtl-final,olexiim/edx-platform,lduarte1991/edx-platform,AkA84/edx-platform,Edraak/circleci-edx-platform,nttks/jenkins-test,PepperPD/edx-pepper-platform,defance/edx-platform,Livit/Livit.Learn.EdX,DefyVentures/edx-platform,nagyistoce/edx-platform,jruiperezv/ANALYSE,pomegranited/edx-platform,syjeon/new_edx,solashirai/edx-platform,jamesblunt/edx-platform,ubc/edx-platform,ahmadio/edx-platform,ahmedaljazzar/edx-platform,dkarakats/edx-platform,a-parhom/edx-platform,devs1991/test_edx_docmode,pepeportela/edx-platform,J861449197/edx-platform,jonathan-beard/edx-platform,chand3040/cloud_that,iivic/BoiseStateX,franosincic/edx-platform,motion2015/a3,hamzehd/edx-platform,Softmotions/edx-platform,morpheby/levelup-by,auferack08/edx-platform,ovnicraft/edx-platform,mitocw/edx-platform,Edraak/edraak-platform,tiagochiavericosta/edx-platform,kmoocdev/edx-platform,devs1991/test_edx_docmode,amir-qayyum-khan/edx-platform,J861449197/edx-platform,IndonesiaX/edx-platform,Edraak/edx-platform,4eek/edx-platform,antonve/s4-project-mooc,Endika/edx-platform,mcgachey/edx-platform,chrisndodge/edx-platform,jswope00/griffinx,shubhdev/edx-platform,chauhanhardik/populo,dkarakats/edx-platform,motion2015/edx-platform,rhndg/openedx,sameetb-cuelogic/edx-platform-test,Unow/edx-platform,yokose-ks/edx-platform,OmarIthawi/edx-platform,nanolearningllc/edx-platform-cypress-2,yokose-ks/
edx-platform,appsembler/edx-platform,JCBarahona/edX,PepperPD/edx-pepper-platform,gymnasium/edx-platform,Softmotions/edx-platform,dsajkl/123,rue89-tech/edx-platform,Edraak/circleci-edx-platform,y12uc231/edx-platform,alu042/edx-platform,cecep-edu/edx-platform,ampax/edx-platform,sameetb-cuelogic/edx-platform-test,Ayub-Khan/edx-platform,Semi-global/edx-platform,jamiefolsom/edx-platform,vismartltd/edx-platform,EduPepperPD/pepper2013,shubhdev/edx-platform,ahmedaljazzar/edx-platform,antoviaque/edx-platform,shabab12/edx-platform,wwj718/edx-platform,unicri/edx-platform,cecep-edu/edx-platform,UXE/local-edx,rhndg/openedx,doganov/edx-platform,jamiefolsom/edx-platform,simbs/edx-platform,auferack08/edx-platform,carsongee/edx-platform,ampax/edx-platform-backup,leansoft/edx-platform,inares/edx-platform,mahendra-r/edx-platform,bigdatauniversity/edx-platform,zhenzhai/edx-platform,zadgroup/edx-platform,leansoft/edx-platform,cyanna/edx-platform,marcore/edx-platform,procangroup/edx-platform,ahmedaljazzar/edx-platform,jelugbo/tundex,nagyistoce/edx-platform,IITBinterns13/edx-platform-dev,zofuthan/edx-platform,zubair-arbi/edx-platform,gsehub/edx-platform,LICEF/edx-platform,jazztpt/edx-platform,jruiperezv/ANALYSE,Shrhawk/edx-platform,ak2703/edx-platform,pku9104038/edx-platform,polimediaupv/edx-platform,hastexo/edx-platform,simbs/edx-platform,SivilTaram/edx-platform,doganov/edx-platform,hkawasaki/kawasaki-aio8-1,hkawasaki/kawasaki-aio8-0,andyzsf/edx,cpennington/edx-platform,cpennington/edx-platform,eemirtekin/edx-platform,stvstnfrd/edx-platform,caesar2164/edx-platform,hkawasaki/kawasaki-aio8-0,jazztpt/edx-platform,ESOedX/edx-platform,jswope00/GAI,ubc/edx-platform,antonve/s4-project-mooc,kalebhartje/schoolboost,vismartltd/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,kxliugang/edx-platform,vasyarv/edx-platform,jbzdak/edx-platform,hamzehd/edx-platform,utecuy/edx-platform,ferabra/edx-platform,bitifirefly/edx-platform,sameetb-cuelogic/edx-platform-test,adoosii/edx-platform,OmarIt
hawi/edx-platform,OmarIthawi/edx-platform,ovnicraft/edx-platform,nttks/edx-platform,teltek/edx-platform,stvstnfrd/edx-platform,ferabra/edx-platform,benpatterson/edx-platform,WatanabeYasumasa/edx-platform,fly19890211/edx-platform,jbassen/edx-platform,eestay/edx-platform,LICEF/edx-platform,Softmotions/edx-platform,10clouds/edx-platform,jamesblunt/edx-platform,teltek/edx-platform,louyihua/edx-platform,vismartltd/edx-platform,bigdatauniversity/edx-platform,eduNEXT/edx-platform,olexiim/edx-platform,andyzsf/edx,kmoocdev2/edx-platform,sudheerchintala/LearnEraPlatForm,Lektorium-LLC/edx-platform,ahmadio/edx-platform,LICEF/edx-platform,martynovp/edx-platform,appsembler/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,dcosentino/edx-platform,motion2015/a3,apigee/edx-platform,etzhou/edx-platform,proversity-org/edx-platform,fly19890211/edx-platform,angelapper/edx-platform,hastexo/edx-platform,zadgroup/edx-platform,eduNEXT/edx-platform,abdoosh00/edx-rtl-final,hkawasaki/kawasaki-aio8-1,ampax/edx-platform,chauhanhardik/populo,msegado/edx-platform,zhenzhai/edx-platform,Semi-global/edx-platform,zofuthan/edx-platform,cyanna/edx-platform,dkarakats/edx-platform,dsajkl/123,synergeticsedx/deployment-wipro,philanthropy-u/edx-platform,defance/edx-platform,JCBarahona/edX,yokose-ks/edx-platform,mjg2203/edx-platform-seas,wwj718/edx-platform,jbassen/edx-platform,pepeportela/edx-platform,utecuy/edx-platform,raccoongang/edx-platform,bigdatauniversity/edx-platform,doganov/edx-platform,DNFcode/edx-platform,hmcmooc/muddx-platform,bdero/edx-platform,eemirtekin/edx-platform,Livit/Livit.Learn.EdX,cyanna/edx-platform,arifsetiawan/edx-platform,IITBinterns13/edx-platform-dev,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,mbareta/edx-platform-ft,bigdatauniversity/edx-platform,RPI-OPENEDX/edx-platform,hamzehd/edx-platform,kursitet/edx-platform,ZLLab-Mooc/edx-platform,jswope00/griffinx,eestay/edx-platform,chudaol/edx-platform,pomegranited/edx-platform,wwj718/edx-platform,beni55/edx-
platform,gymnasium/edx-platform,synergeticsedx/deployment-wipro,openfun/edx-platform,nikolas/edx-platform,louyihua/edx-platform,peterm-itr/edx-platform,shubhdev/openedx,nttks/jenkins-test,Ayub-Khan/edx-platform,peterm-itr/edx-platform,edry/edx-platform,unicri/edx-platform,shubhdev/edxOnBaadal,bitifirefly/edx-platform,rhndg/openedx,bitifirefly/edx-platform,mjirayu/sit_academy,kmoocdev2/edx-platform,adoosii/edx-platform,pelikanchik/edx-platform,chauhanhardik/populo_2,EduPepperPDTesting/pepper2013-testing,cyanna/edx-platform,valtech-mooc/edx-platform,UOMx/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress-2,rhndg/openedx,amir-qayyum-khan/edx-platform,mbareta/edx-platform-ft,ferabra/edx-platform,kursitet/edx-platform,utecuy/edx-platform,appsembler/edx-platform,proversity-org/edx-platform,mcgachey/edx-platform,y12uc231/edx-platform,mcgachey/edx-platform,vikas1885/test1,Edraak/edraak-platform,jazkarta/edx-platform-for-isc,y12uc231/edx-platform,Kalyzee/edx-platform,shubhdev/edx-platform,jzoldak/edx-platform,etzhou/edx-platform,Livit/Livit.Learn.EdX,kmoocdev2/edx-platform,doganov/edx-platform,amir-qayyum-khan/edx-platform,rationalAgent/edx-platform-custom,edx/edx-platform | common/lib/capa/capa/registry.py | common/lib/capa/capa/registry.py | class TagRegistry(object):
"""
A registry mapping tags to handlers.
(A dictionary with some extra error checking.)
"""
def __init__(self):
self._mapping = {}
def register(self, cls):
"""
Register cls as a supported tag type. It is expected to define cls.tags as a list of tags
that it implements.
If an already-registered type has registered one of those tags, will raise ValueError.
If there are no tags in cls.tags, will also raise ValueError.
"""
# Do all checks and complain before changing any state.
if len(cls.tags) == 0:
raise ValueError("No tags specified for class {0}".format(cls.__name__))
for t in cls.tags:
if t in self._mapping:
other_cls = self._mapping[t]
if cls == other_cls:
# registering the same class multiple times seems silly, but ok
continue
raise ValueError("Tag {0} already registered by class {1}."
" Can't register for class {2}"
.format(t, other_cls.__name__, cls.__name__))
# Ok, should be good to change state now.
for t in cls.tags:
self._mapping[t] = cls
    def registered_tags(self):
        """
        Get a list of all the tags that have been registered.

        (Returns the underlying dict's keys() directly; on Python 3 this is
        a live view rather than a list -- callers should not mutate it.)
        """
        return self._mapping.keys()
    def get_class_for_tag(self, tag):
        """
        Return the handler class registered for ``tag``.

        Raises KeyError for any tag not in registered_tags().
        """
        return self._mapping[tag]
| agpl-3.0 | Python | |
6b6adc4dd4441f47f8d4e45a5f8473dbdfec275d | Move get_packs_base_path and get_pack_base_path to utils in st2common. | pinterb/st2,emedvedev/st2,Plexxi/st2,tonybaloney/st2,pixelrebel/st2,tonybaloney/st2,pixelrebel/st2,punalpatel/st2,pinterb/st2,emedvedev/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,armab/st2,lakshmi-kannan/st2,Plexxi/st2,jtopjian/st2,Plexxi/st2,punalpatel/st2,pixelrebel/st2,alfasin/st2,dennybaa/st2,emedvedev/st2,nzlosh/st2,StackStorm/st2,armab/st2,peak6/st2,StackStorm/st2,lakshmi-kannan/st2,alfasin/st2,peak6/st2,grengojbo/st2,punalpatel/st2,tonybaloney/st2,nzlosh/st2,Itxaka/st2,grengojbo/st2,peak6/st2,alfasin/st2,jtopjian/st2,dennybaa/st2,Itxaka/st2,grengojbo/st2,nzlosh/st2,Itxaka/st2,dennybaa/st2,jtopjian/st2,pinterb/st2,Plexxi/st2,lakshmi-kannan/st2,armab/st2 | st2common/st2common/content/utils.py | st2common/st2common/content/utils.py | import os
import pipes
from oslo.config import cfg
# Explicit public API: star-imports and doc tools pick up only these names.
__all__ = [
    'get_packs_base_path',
    'get_pack_base_path'
]
def get_packs_base_path():
    """
    Return the configured base directory under which all content packs live.

    :rtype: ``str``
    """
    packs_base_path = cfg.CONF.content.packs_base_path
    return packs_base_path
def get_pack_base_path(pack_name):
    """
    Return full absolute base path to the content pack directory.

    :param pack_name: Content pack name.
    :type pack_name: ``str``

    :rtype: ``str``, or ``None`` when ``pack_name`` is empty/None.
    """
    if not pack_name:
        return None

    # NOTE(review): pipes.quote shell-escapes the name -- presumably a guard
    # against hostile pack names, but it will wrap names containing spaces in
    # quotes and thereby change the resulting path; confirm this is intended.
    quoted_name = pipes.quote(pack_name)
    return os.path.abspath(os.path.join(get_packs_base_path(), quoted_name))
| apache-2.0 | Python | |
effc03fbc0646b875e7cd586b04024dbdd12f806 | Create b.py | y-sira/atcoder,y-sira/atcoder | agc015/b.py | agc015/b.py | def main():
floors = input()
count = 0
for i in range(len(floors)):
if floors[i] == 'U':
count += (len(floors) - (i + 1)) + 2 * i
else:
count += 2 * (len(floors) - (i + 1)) + i
print(count)
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
| mit | Python | |
b35e780364ca2d06902302b165ce2261ec6795a1 | Add tests for getting all toilets | praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js,praekelt/go-imali-yethu-js | ona_migration_script/test_migrate_toilet_codes.py | ona_migration_script/test_migrate_toilet_codes.py | import json
import requests
from requests_testadapter import TestAdapter
import unittest
import migrate_toilet_codes
class TestCreateSession(unittest.TestCase):
    """Unit tests for migrate_toilet_codes.create_session."""

    def test_create_session(self):
        """create_session returns a requests.Session preconfigured with a
        JSON Content-type header and HTTP basic-auth credentials."""
        user = 'testuser'
        pwd = 'testpass'
        session = migrate_toilet_codes.create_session(user, pwd)
        self.assertTrue(isinstance(session, requests.Session))
        self.assertEqual(
            session.headers['Content-type'], "application/json; charset=utf-8")
        self.assertEqual(session.auth, (user, pwd))
class TestGetAllToilets(unittest.TestCase):
    """Unit tests for migrate_toilet_codes.get_all_toilets."""

    def test_get_list_of_toilets(self):
        """The JSON payload served by the endpoint comes back as Python data."""
        session = requests.Session()
        endpoint = 'http://www.example.org/toilet_codes/'
        fixture = [
            {
                "id": 94,
                "code": "RR007094FT",
                "lat": -34.01691,
                "lon": 18.66339,
                "section": "RR",
                "section_number": "94",
                "cluster": "7",
                "toilet_type": "FT"
            },
            {
                "id": 1,
                "code": "RR001001FT",
                "lat": -34.01667,
                "lon": 18.66404,
                "section": "RR",
                "section_number": "1",
                "cluster": "1",
                "toilet_type": "FT"
            }
        ]
        # Serve the canned payload for this URL instead of hitting the network.
        session.mount(endpoint, TestAdapter(json.dumps(fixture)))
        toilets = migrate_toilet_codes.get_all_toilets(session, endpoint)
        self.assertEqual(fixture, toilets)

    def test_http_errors_raised(self):
        """A non-2xx response must surface as requests.HTTPError carrying the
        original status code."""
        session = requests.Session()
        endpoint = 'http://www.example.org/toilet_codes/'
        session.mount(endpoint, TestAdapter('', status=404))
        with self.assertRaises(requests.HTTPError) as caught:
            migrate_toilet_codes.get_all_toilets(session, endpoint)
        self.assertEqual(caught.exception.response.status_code, 404)
| bsd-3-clause | Python | |
9826ccce540a945a8cb8bb788a8f993aa5aae553 | update stock reco permission | saurabh6790/medapp,susuchina/ERPNEXT,treejames/erpnext,hatwar/Das_erpnext,saurabh6790/medapp,tmimori/erpnext,gangadhar-kadam/latestchurcherp,mbauskar/omnitech-erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve-erp,geekroot/erpnext,saurabh6790/test-med-app,Tejal011089/fbd_erpnext,indictranstech/tele-erpnext,indictranstech/osmosis-erpnext,hanselke/erpnext-1,ShashaQin/erpnext,rohitwaghchaure/New_Theme_Erp,mahabuber/erpnext,suyashphadtare/gd-erp,gangadhar-kadam/hrerp,rohitwaghchaure/GenieManager-erpnext,Suninus/erpnext,Suninus/erpnext,rohitwaghchaure/erpnext-receipher,suyashphadtare/sajil-erp,gangadhar-kadam/mic-erpnext,indictranstech/phrerp,suyashphadtare/gd-erp,saurabh6790/test-erp,gangadhar-kadam/verve_erp,gangadharkadam/saloon_erp,Drooids/erpnext,indictranstech/trufil-erpnext,sheafferusa/erpnext,netfirms/erpnext,saurabh6790/omnit-app,BhupeshGupta/erpnext,mbauskar/omnitech-demo-erpnext,hanselke/erpnext-1,suyashphadtare/vestasi-erp-jan-end,SPKian/Testing,fuhongliang/erpnext,mbauskar/Das_Erpnext,gangadhar-kadam/adb-erp,gangadharkadam/contributionerp,suyashphadtare/sajil-final-erp,rohitwaghchaure/digitales_erpnext,fuhongliang/erpnext,pombredanne/erpnext,saurabh6790/medsynaptic-app,njmube/erpnext,gangadharkadam/sher,rohitwaghchaure/New_Theme_Erp,indictranstech/buyback-erp,BhupeshGupta/erpnext,mbauskar/internal-hr,gsnbng/erpnext,indictranstech/biggift-erpnext,indictranstech/erpnext,sagar30051991/ozsmart-erp,Tejal011089/Medsyn2_app,sagar30051991/ozsmart-erp,hatwar/buyback-erpnext,mbauskar/Das_Erpnext,gsnbng/erpnext,pombredanne/erpnext,shitolepriya/test-erp,indictranstech/tele-erpnext,treejames/erpnext,saurabh6790/med_new_app,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/powapp,gangadhar-kadam/helpdesk-erpnext,indictranstech/tele-erpnext,Tejal011089/huntercamp_erpnext,meisterkleister/erpnext,Tejal011089/med2-app,suyashphadtare/sajil-erp,saurabh6790/medsynaptic-app,saurabh6790/aimo
bilize,suyashphadtare/vestasi-erp-final,rohitwaghchaure/erpnext_smart,netfirms/erpnext,gangadharkadam/v5_erp,gangadharkadam/v6_erp,aruizramon/alec_erpnext,susuchina/ERPNEXT,indictranstech/focal-erpnext,indictranstech/internal-erpnext,gangadharkadam/saloon_erp_install,mbauskar/sapphire-erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/saloon_erp_install,fuhongliang/erpnext,BhupeshGupta/erpnext,mahabuber/erpnext,saurabh6790/OFF-RISAPP,BhupeshGupta/erpnext,indictranstech/erpnext,indictranstech/buyback-erp,gangadhar-kadam/mtn-erpnext,rohitwaghchaure/New_Theme_Erp,gangadharkadam/v6_erp,indictranstech/trufil-erpnext,saurabh6790/medsyn-app1,fuhongliang/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/letzerp,indictranstech/internal-erpnext,Tejal011089/digitales_erpnext,tmimori/erpnext,mbauskar/sapphire-erpnext,hatwar/focal-erpnext,shitolepriya/test-erp,shft117/SteckerApp,gangadharkadam/vlinkerp,indictranstech/focal-erpnext,saurabh6790/pow-app,indictranstech/phrerp,gangadhar-kadam/verve_test_erp,treejames/erpnext,ThiagoGarciaAlves/erpnext,hatwar/buyback-erpnext,mbauskar/Das_Erpnext,aruizramon/alec_erpnext,saurabh6790/omni-apps,indictranstech/osmosis-erpnext,anandpdoshi/erpnext,gangadhar-kadam/helpdesk-erpnext,indictranstech/osmosis-erpnext,gangadharkadam/v5_erp,gangadhar-kadam/mic-erpnext,indictranstech/Das_Erpnext,meisterkleister/erpnext,aruizramon/alec_erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/verve_live_erp,gangadharkadam/tailorerp,mbauskar/phrerp,gangadhar-kadam/laganerp,saurabh6790/pow-app,suyashphadtare/vestasi-update-erp,saurabh6790/aimobilize,gangadharkadam/letzerp,saurabh6790/aimobilize-app-backup,gangadhar-kadam/latestchurcherp,indictranstech/fbd_erpnext,indictranstech/internal-erpnext,Tejal011089/huntercamp_erpnext,gangadhar-kadam/powapp,saurabh6790/medsynaptic1-app,indictranstech/reciphergroup-erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp_install,gangadhar-kadam/verve_erp,gangadhar-kadam/verve_test_erp,suyashphadtare/vestasi-erp-final,Suninu
s/erpnext,gangadharkadam/v5_erp,gangadhar-kadam/nassimapp,MartinEnder/erpnext-de,saurabh6790/omnit-app,saurabh6790/med_app_rels,Drooids/erpnext,saurabh6790/test-med-app,gangadharkadam/saloon_erp,SPKian/Testing2,pombredanne/erpnext,mbauskar/helpdesk-erpnext,gangadhar-kadam/verve-erp,netfirms/erpnext,gangadharkadam/letzerp,meisterkleister/erpnext,indictranstech/reciphergroup-erpnext,gangadhar-kadam/powapp,njmube/erpnext,saurabh6790/omnisys-app,suyashphadtare/gd-erp,mbauskar/alec_frappe5_erpnext,gangadhar-kadam/verve_erp,mbauskar/internal-hr,indictranstech/Das_Erpnext,tmimori/erpnext,gangadhar-kadam/verve_live_erp,gangadhar-kadam/sapphire_app,netfirms/erpnext,gsnbng/erpnext,rohitwaghchaure/GenieManager-erpnext,Aptitudetech/ERPNext,suyashphadtare/vestasi-erp-1,gangadhar-kadam/mtn-erpnext,indictranstech/trufil-erpnext,geekroot/erpnext,Drooids/erpnext,gangadhar-kadam/church-erpnext,rohitwaghchaure/digitales_erpnext,Tejal011089/paypal_erpnext,suyashphadtare/test,indictranstech/focal-erpnext,SPKian/Testing,gangadhar-kadam/latestchurcherp,geekroot/erpnext,gangadhar-kadam/sms-erpnext,mbauskar/omnitech-erpnext,gangadharkadam/v6_erp,rohitwaghchaure/GenieManager-erpnext,hatwar/buyback-erpnext,hatwar/focal-erpnext,mbauskar/Das_Erpnext,gangadharkadam/v4_erp,saurabh6790/omnitech-apps,gangadhar-kadam/helpdesk-erpnext,meisterkleister/erpnext,indictranstech/biggift-erpnext,mbauskar/phrerp,indictranstech/buyback-erp,mbauskar/alec_frappe5_erpnext,4commerce-technologies-AG/erpnext,gangadharkadam/office_erp,saurabh6790/omni-apps,indictranstech/fbd_erpnext,indictranstech/biggift-erpnext,susuchina/ERPNEXT,sheafferusa/erpnext,indictranstech/erpnext,gangadharkadam/verveerp,gmarke/erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/v4_erp,gangadharkadam/sher,suyashphadtare/test,saurabh6790/medsynaptic1-app,suyashphadtare/vestasi-update-erp,gangadhar-kadam/laganerp,gangadharkadam/v5_erp,mbauskar/helpdesk-erpnext,gangadhar-kadam/sapphire_app,Tejal011089/digitales_erpnext,gangadharkadam/verveerp,sag
ar30051991/ozsmart-erp,gangadhar-kadam/smrterp,gangadhar-kadam/verve_test_erp,Tejal011089/med2-app,mbauskar/helpdesk-erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/huntercamp_erpnext,hatwar/Das_erpnext,SPKian/Testing,gangadharkadam/vlinkerp,hatwar/Das_erpnext,saurabh6790/ON-RISAPP,Tejal011089/osmosis_erpnext,Tejal011089/paypal_erpnext,saurabh6790/trufil_app,rohitwaghchaure/erpnext_smart,njmube/erpnext,Tejal011089/trufil-erpnext,mbauskar/omnitech-erpnext,gangadharkadam/saloon_erp_install,gmarke/erpnext,mahabuber/erpnext,saurabh6790/ON-RISAPP,saurabh6790/med_new_app,indictranstech/internal-erpnext,saurabh6790/medsyn-app,Suninus/erpnext,saurabh6790/OFF-RISAPP,gangadhar-kadam/verve_test_erp,ShashaQin/erpnext,dieface/erpnext,indictranstech/phrerp,treejames/erpnext,indictranstech/Das_Erpnext,saurabh6790/test-erp,ShashaQin/erpnext,4commerce-technologies-AG/erpnext,indictranstech/phrerp,indictranstech/focal-erpnext,indictranstech/trufil-erpnext,saurabh6790/omn-app,saurabh6790/medsyn-app,hatwar/focal-erpnext,dieface/erpnext,gangadhar-kadam/hrerp,gangadhar-kadam/verve_erp,Tejal011089/trufil-erpnext,indictranstech/vestasi-erpnext,gangadharkadam/contributionerp,Tejal011089/digitales_erpnext,tmimori/erpnext,MartinEnder/erpnext-de,gangadhar-kadam/helpdesk-erpnext,sheafferusa/erpnext,gangadharkadam/johnerp,Tejal011089/osmosis_erpnext,gmarke/erpnext,rohitwaghchaure/New_Theme_Erp,gangadhar-kadam/sapphire_app,gangadharkadam/verveerp,mbauskar/alec_frappe5_erpnext,Yellowen/Owrang,saurabh6790/tru_app_back,gangadharkadam/v4_erp,gmarke/erpnext,gangadharkadam/vlinkerp,mbauskar/internal-hr,saurabh6790/omnitech-apps,gangadharkadam/vlinkerp,shitolepriya/test-erp,gangadharkadam/smrterp,gangadhar-kadam/verve-erp,gangadharkadam/sterp,MartinEnder/erpnext-de,anandpdoshi/erpnext,suyashphadtare/vestasi-erp-jan-end,Tejal011089/trufil-erpnext,indictranstech/osmosis-erpnext,MartinEnder/erpnext-de,indictranstech/vestasi-erpnext,indictranstech/biggift-erpnext,gangadharkadam/v6_erp,hatwar/Das_erpnext,
saurabh6790/med_app_rels,gangadhar-kadam/church-erpnext,indictranstech/buyback-erp,indictranstech/reciphergroup-erpnext,Tejal011089/fbd_erpnext,mbauskar/omnitech-demo-erpnext,suyashphadtare/vestasi-erp-1,dieface/erpnext,sagar30051991/ozsmart-erp,Tejal011089/digitales_erpnext,pawaranand/phrerp,mbauskar/omnitech-demo-erpnext,gangadharkadam/letzerp,sheafferusa/erpnext,pombredanne/erpnext,hanselke/erpnext-1,ShashaQin/erpnext,SPKian/Testing2,suyashphadtare/vestasi-erp-jan-end,gangadharkadam/office_erp,gangadhar-kadam/smrterp,mbauskar/omnitech-erpnext,gsnbng/erpnext,pawaranand/phrerp,shft117/SteckerApp,gangadharkadam/sterp,suyashphadtare/test,mbauskar/omnitech-demo-erpnext,pawaranand/phrerp,Tejal011089/trufil-erpnext,gangadharkadam/verveerp,ThiagoGarciaAlves/erpnext,anandpdoshi/erpnext,gangadharkadam/contributionerp,mbauskar/phrerp,gangadhar-kadam/adb-erp,mbauskar/sapphire-erpnext,Tejal011089/Medsyn2_app,4commerce-technologies-AG/erpnext,saurabh6790/trufil_app,pawaranand/phrerp,gangadharkadam/tailorerp,gangadharkadam/johnerp,hanselke/erpnext-1,rohitwaghchaure/erpnext-receipher,rohitwaghchaure/erpnext-receipher,gangadhar-kadam/latestchurcherp,saurabh6790/aimobilize-app-backup,indictranstech/vestasi-erpnext,saurabh6790/medsyn-app1,gangadharkadam/saloon_erp,suyashphadtare/vestasi-erp-final,Tejal011089/osmosis_erpnext,hernad/erpnext,njmube/erpnext,hernad/erpnext,mbauskar/phrerp,saurabh6790/test_final_med_app,hernad/erpnext,shft117/SteckerApp,shitolepriya/test-erp,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-update-erp,Tejal011089/osmosis_erpnext,suyashphadtare/gd-erp,aruizramon/alec_erpnext,mahabuber/erpnext,saurabh6790/test_final_med_app,suyashphadtare/sajil-final-erp,SPKian/Testing2,anandpdoshi/erpnext,suyashphadtare/sajil-erp,gangadharkadam/v4_erp,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/sms-erpnext,gangadharkadam/saloon_erp,saurabh6790/omnisys-app,indictranstech/vestasi-erpnext,dieface/erpnext,gangadhar-kadam/verve_live_erp,gangadharkadam/contribut
ionerp,hatwar/buyback-erpnext,gangadharkadam/smrterp,indictranstech/tele-erpnext,hernad/erpnext,Tejal011089/paypal_erpnext,geekroot/erpnext,Drooids/erpnext,gangadhar-kadam/prjapp,indictranstech/fbd_erpnext,susuchina/ERPNEXT,shft117/SteckerApp,saurabh6790/test-erp,mbauskar/sapphire-erpnext,rohitwaghchaure/digitales_erpnext,Tejal011089/paypal_erpnext,saurabh6790/tru_app_back,gangadhar-kadam/prjapp,Tejal011089/fbd_erpnext,SPKian/Testing2,rohitwaghchaure/erpnext_smart,Tejal011089/fbd_erpnext,hatwar/focal-erpnext,saurabh6790/alert-med-app,gangadhar-kadam/nassimapp,indictranstech/fbd_erpnext,gangadharkadam/office_erp,saurabh6790/alert-med-app,Yellowen/Owrang,SPKian/Testing,mbauskar/helpdesk-erpnext,gangadhar-kadam/laganerp,suyashphadtare/sajil-final-erp,rohitwaghchaure/erpnext-receipher,suyashphadtare/vestasi-erp-1,indictranstech/erpnext,saurabh6790/omn-app,gangadhar-kadam/verve_live_erp | erpnext/patches/jan_mar_2012/update_stockreco_perm.py | erpnext/patches/jan_mar_2012/update_stockreco_perm.py | def execute():
import webnotes
webnotes.conn.sql("update `tabDocPerm` set cancel = 1 where parent = 'Stock Reconciliation' and ifnull(submit, 0) = 1")
| agpl-3.0 | Python | |
95ff3a585af98df894e5032da9e0e38622a3d9a9 | Add template field | Net-ng/kansha,bcroq/kansha,Net-ng/kansha,bcroq/kansha,Net-ng/kansha,bcroq/kansha,bcroq/kansha,Net-ng/kansha | kansha/alembic/versions/2b0edcfa57b4_add_templates.py | kansha/alembic/versions/2b0edcfa57b4_add_templates.py | """Add templates
Revision ID: 2b0edcfa57b4
Revises: 24be36b8c67
Create Date: 2015-11-24 17:50:13.280722
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2b0edcfa57b4'
down_revision = '24be36b8c67'
def upgrade():
    # Add a flag marking a board as a reusable template (defaults to False).
    op.add_column('board', sa.Column('is_template', sa.Boolean, default=False))
def downgrade():
    # Reverse of upgrade(): remove the template flag again.
    op.drop_column('board', 'is_template')
| bsd-3-clause | Python | |
b07f482c0cdc827b0b8d73bd920d9d302d006a91 | Test case for contact references (#15077) | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext | erpnext/tests/test_search.py | erpnext/tests/test_search.py | from __future__ import unicode_literals
import unittest
import frappe
from frappe.contacts.address_and_contact import filter_dynamic_link_doctypes
class TestSearch(unittest.TestCase):
#Search for the word "clie", part of the word "client" (customer) in french.
def test_contact_search_in_foreign_language(self):
frappe.local.lang = 'fr'
output = filter_dynamic_link_doctypes("DocType", "clie", "name", 0, 20, {'fieldtype': 'HTML', 'fieldname': 'contact_html'})
result = [['found' for x in y if x=="Customer"] for y in output]
self.assertTrue(['found'] in result)
def tearDown(self):
frappe.local.lang = 'en' | agpl-3.0 | Python | |
2539e398eccf6134ccdb0c608513c3c5812654b5 | Create File2Txt.py | HeyItsJono/Pythonista | File2Txt.py | File2Txt.py | from console import clear
from console import hud_alert
from os import remove
def drop_extension(filename):
    """Return *filename* truncated at its first '.', dropping the extension.

    If the name contains no dot it is returned unchanged, so 'notes' -> 'notes'
    and 'a.b.c' -> 'a'.
    """
    # The original built a {position: char} dict and relied on dict iteration
    # order to find the dot -- order is not guaranteed in Python 2 and the
    # dict is needless work.  str.partition does the same job in one call and
    # returns the whole string when no separator is present.
    return filename.partition('.')[0]
if __name__ == '__main__':
    # Pythonista (iOS) / Python 2 script: copy an arbitrary file's bytes
    # into a sibling .txt file so it can be viewed as plain text.
    clear()
    filename = raw_input('Enter filename (Path Optional) \n')
    with open(drop_extension(filename) + '.txt', 'w') as out_file:
        try:
            with open(filename, 'r') as in_file:
                out_file.write(in_file.read())
                in_file.close()  # redundant: the with-block already closes it
        except IOError:
            # Source file missing: alert the user and delete the empty
            # .txt file that was created above.
            hud_alert('File not found', icon = 'error')
            remove(drop_extension(filename) + '.txt')
    out_file.close()
| mit | Python | |
74c89b9fd6ac8f9d32192c0c0fbd5851230538e0 | add a script to run sandman. sandman can be used to edit te sqlite database through a browser. | berz/lossebladjes,berz/lossebladjes,berz/lossebladjes | sandman_run.py | sandman_run.py | #!venv/bin/python
from sandman import app
from sandman.model import activate
from losseblaadjes import app as lossebladjes_app
# Point sandman's admin app at the same database URI as the main
# application, so the SQLite database can be edited through a browser.
app.config['SQLALCHEMY_DATABASE_URI'] = lossebladjes_app.config['SQLALCHEMY_DATABASE_URI']
activate()
app.run()
| bsd-3-clause | Python | |
0df8ce131c5ed07b032060d9f460a328324c9974 | add simplistic google docs exporter | zeha/assorted,zeha/assorted,zeha/assorted | google-export/exporter.py | google-export/exporter.py | import gdata.docs.service
import gdata.spreadsheet.service
app_name = 'at.zeha.google-export-v0-github'
# NOTE(review): credentials are hard-coded placeholders -- edit before use;
# consider reading them from the environment instead of source.
login_u = 'username@example.org'
login_p = 'password'
export_folder = '/home/username/google-export/data/'
# Two separate clients: spreadsheets require a Spreadsheets-API token
# for export, everything else uses the Docs-API token.
gd_client = gdata.docs.service.DocsService(source=app_name)
gd_client.ClientLogin(login_u, login_p)
gs_client = gdata.spreadsheet.service.SpreadsheetsService(source=app_name)
gs_client.ClientLogin(login_u, login_p)
documents_feed = gd_client.GetDocumentListFeed()
for document_entry in documents_feed.entry:
    file_path = export_folder + document_entry.title.text
    # Remember the Docs token so it can be restored after a spreadsheet export.
    docs_token = gd_client.GetClientLoginToken()
    # resourceId looks like "<type>:<id>", e.g. "spreadsheet:abc123".
    resource_id = document_entry.resourceId.text
    res_type = resource_id[:resource_id.find(':')]
    if res_type == 'spreadsheet':
        file_path = file_path + '.ods'
        gd_client.SetClientLoginToken(gs_client.GetClientLoginToken())
    else:
        file_path = file_path + '.odt'
    gd_client.Export(resource_id, file_path)
    gd_client.SetClientLoginToken(docs_token)
| mit | Python | |
7320e6fd38babe5b7c2d5fbb5121d5845c57bb05 | Add problem75.py | mjwestcott/projecteuler,mjwestcott/projecteuler,mjwestcott/projecteuler | euler_python/problem75.py | euler_python/problem75.py | """
problem75.py
It turns out that 12 cm is the smallest length of wire that can be bent to form
an integer sided right angle triangle in exactly one way, but there are many
more examples.
12 cm: (3,4,5)
24 cm: (6,8,10)
30 cm: (5,12,13)
36 cm: (9,12,15)
40 cm: (8,15,17)
48 cm: (12,16,20)
In contrast, some lengths of wire, like 20 cm, cannot be bent to form an integer
sided right angle triangle, and other lengths allow more than one solution to be
found; for example, using 120 cm it is possible to form exactly three different
integer sided right angle triangles.
120 cm: (30,40,50), (20,48,52), (24,45,51)
Given that L is the length of the wire, for how many values of L ≤ 1,500,000 can
exactly one integer sided right angle triangle be formed?
"""
from collections import Counter
from itertools import count, takewhile
def children(triple):
    """Return the three children of a primitive Pythagorean *triple*.

    Uses Berggren's ternary tree, which generates every primitive triple
    exactly once when started from (3, 4, 5).
    """
    a, b, c = triple
    return ((-a + 2*b + 2*c, -2*a + b + 2*c, -2*a + 2*b + 3*c),
            (+a + 2*b + 2*c, +2*a + b + 2*c, +2*a + 2*b + 3*c),
            (+a - 2*b + 2*c, +2*a - b + 2*c, +2*a - 2*b + 3*c))

def problem75(limit=1500000):
    """Count perimeters L <= *limit* that admit exactly one integer-sided
    right-angled triangle.

    Generalized: *limit* is now a parameter (default preserves the original
    hard-coded 1,500,000).
    """
    # Maps a perimeter L to the number of right triangles with that perimeter.
    triangles = Counter()
    # Depth-first search over Berggren's tree of primitive triples.
    frontier = [(3, 4, 5)]
    while frontier:
        triple = frontier.pop()
        L = sum(triple)
        if L > limit:
            # Children only grow the perimeter, so prune this whole subtree.
            continue
        triangles[L] += 1
        a, b, c = triple
        # Count the non-primitive multiples of this triple as well.
        # BUG FIX: the bound is now inclusive (<= limit) to match the
        # primitive case above, which accepts L == limit; previously a
        # multiple with perimeter exactly `limit` was wrongly dropped.
        multiples = takewhile(lambda m: sum(m) <= limit,
                              ((i*a, i*b, i*c) for i in count(2)))
        for m in multiples:
            triangles[sum(m)] += 1
        frontier.extend(children(triple))
    return sum(triangles[L] == 1 for L in triangles)
| mit | Python | |
c7efac00035589f271b2c591489f3b32cabcc6e5 | Add very basic example. | craig552uk/flask-json | examples/example1.py | examples/example1.py | from datetime import datetime
from flask import Flask
from flask_json import FlaskJSON, JsonErrorResponse, json_response
# Create the application and install the Flask-JSON extension on it.
app = Flask(__name__)
FlaskJSON(app)
@app.route('/get_time')
def get_time():
    """Return the current UTC time as a JSON response."""
    return json_response(time=datetime.utcnow())
@app.route('/raise_error')
def raise_error():
    """Always fail, to demonstrate Flask-JSON's error responses."""
    raise JsonErrorResponse(description='Example text.', code=123)
if __name__ == '__main__':
    app.run()
| bsd-3-clause | Python | |
ff9ebfb2698c40da5af383b0e620191f4a893e7b | add skitai_pingpong_handler | hansroh/skitai,hansroh/skitai,hansroh/skitai | skitai/tools/benchmark/skitai_pingpong_handler.py | skitai/tools/benchmark/skitai_pingpong_handler.py | """
This module is copy of skitai.handler.pingpong_handler.py.
It's not actually used by skitai, but for just your reference
"""
from . import ssgi_handler
class Handler (ssgi_handler.Handler):
	"""Minimal /ping handler that answers "pong", for benchmarking."""
	def __init__(self, wasc):
		self.wasc = wasc
	def match (self, request):
		# Claim only requests whose path component is exactly /ping.
		return request.split_uri() [0] == "/ping"
	def handle_request (self, request):
		# WSGI Server Payload Emulating
		env = self.build_environ (request)
		start_response = request.response.start_response
		start_response ("200 OK", [("Content-Type", "text/plain"), ("Content-Length", "4")])
		request.response.push (b"pong")
		request.response.done ()
| mit | Python | |
5cae22b665c250935d7a0c3fdd0e583beff08500 | add nofollow to Link object | Partoo/scrapy,liyy7/scrapy,cyberplant/scrapy,kmike/scrapy,nfunato/scrapy,Digenis/scrapy,Allianzcortex/scrapy,zhangtao11/scrapy,JacobStevenR/scrapy,rahulsharma1991/scrapy,amboxer21/scrapy,github-account-because-they-want-it/scrapy,Timeship/scrapy,elijah513/scrapy,ylcolala/scrapy,jdemaeyer/scrapy,amboxer21/scrapy,xiao26/scrapy,arush0311/scrapy,fafaman/scrapy,KublaikhanGeek/scrapy,pranjalpatil/scrapy,IvanGavran/scrapy,lacrazyboy/scrapy,nikgr95/scrapy,devGregA/scrapy,nguyenhongson03/scrapy,hyrole/scrapy,Bourneer/scrapy,CodeJuan/scrapy,rolando/scrapy,dangra/scrapy,kazitanvirahsan/scrapy,tagatac/scrapy,heamon7/scrapy,scorphus/scrapy,hwsyy/scrapy,smaty1/scrapy,raphaelfruneaux/scrapy,taito/scrapy,coderabhishek/scrapy,ENjOyAbLE1991/scrapy,JacobStevenR/scrapy,ndemir/scrapy,AaronTao1990/scrapy,jamesblunt/scrapy,tliber/scrapy,gbirke/scrapy,snowdream1314/scrapy,nikgr95/scrapy,pawelmhm/scrapy,wujuguang/scrapy,TarasRudnyk/scrapy,Slater-Victoroff/scrapy,jdemaeyer/scrapy,foromer4/scrapy,WilliamKinaan/scrapy,kimimj/scrapy,Partoo/scrapy,cyrixhero/scrapy,olafdietsche/scrapy,agusc/scrapy,rootAvish/scrapy,ENjOyAbLE1991/scrapy,URXtech/scrapy,olafdietsche/scrapy,sardok/scrapy,hyrole/scrapy,mgedmin/scrapy,kimimj/scrapy,rahul-c1/scrapy,aivarsk/scrapy,JacobStevenR/scrapy,avtoritet/scrapy,rootAvish/scrapy,Slater-Victoroff/scrapy,yarikoptic/scrapy,AaronTao1990/scrapy,fqul/scrapy,kashyap32/scrapy,Chenmxs/scrapy,pfctdayelise/scrapy,raphaelfruneaux/scrapy,joshlk/scrapy,Lucifer-Kim/scrapy,emschorsch/scrapy,pablohoffman/scrapy,jeffreyjinfeng/scrapy,zorojean/scrapy,starrify/scrapy,pablohoffman/scrapy,devGregA/scrapy,cursesun/scrapy,WilliamKinaan/scrapy,nowopen/scrapy,lacrazyboy/scrapy,wzyuliyang/scrapy,curita/scrapy,zhangtao11/scrapy,rahulsharma1991/scrapy,cleydson/scrapy,ndemir/scrapy,songfj/scrapy,tagatac/scrapy,raphaelfruneaux/scrapy,cyrixhero/scrapy,carlosp420/scrapy,moraesnicol/scrapy,Digenis/scrapy,github-account-because-t
hey-want-it/scrapy,zorojean/scrapy,ssh-odoo/scrapy,rdowinton/scrapy,scrapy/scrapy,agreen/scrapy,cyberplant/scrapy,Lucifer-Kim/scrapy,dacjames/scrapy,bmess/scrapy,w495/scrapy,ssteo/scrapy,nett55/scrapy,tntC4stl3/scrapy,aivarsk/scrapy,fontenele/scrapy,nfunato/scrapy,yarikoptic/scrapy,arush0311/scrapy,kashyap32/scrapy,snowdream1314/scrapy,Parlin-Galanodel/scrapy,kalessin/scrapy,jiezhu2007/scrapy,tliber/scrapy,coderabhishek/scrapy,tntC4stl3/scrapy,scrapy/scrapy,rahulsharma1991/scrapy,starrify/scrapy,yidongliu/scrapy,agusc/scrapy,1yvT0s/scrapy,darkrho/scrapy-scrapy,jdemaeyer/scrapy,elijah513/scrapy,farhan0581/scrapy,ashishnerkar1/scrapy,Partoo/scrapy,OpenWhere/scrapy,devGregA/scrapy,legendtkl/scrapy,pawelmhm/scrapy,pfctdayelise/scrapy,tagatac/scrapy,dracony/scrapy,aivarsk/scrapy,Allianzcortex/scrapy,csalazar/scrapy,umrashrf/scrapy,arush0311/scrapy,hectoruelo/scrapy,mlyundin/scrapy,z-fork/scrapy,Ryezhang/scrapy,CENDARI/scrapy,Preetwinder/scrapy,URXtech/scrapy,eLRuLL/scrapy,amboxer21/scrapy,johnardavies/scrapy,cyrixhero/scrapy,z-fork/scrapy,CENDARI/scrapy,sardok/scrapy,Cnfc19932/scrapy,nfunato/scrapy,chekunkov/scrapy,TarasRudnyk/scrapy,darkrho/scrapy-scrapy,fpy171/scrapy,mgedmin/scrapy,eliasdorneles/scrapy,IvanGavran/scrapy,curita/scrapy,hbwzhsh/scrapy,Zephor5/scrapy,irwinlove/scrapy,Parlin-Galanodel/scrapy,elacuesta/scrapy,emschorsch/scrapy,Timeship/scrapy,liyy7/scrapy,hbwzhsh/scrapy,foromer4/scrapy,johnardavies/scrapy,elacuesta/scrapy,jorik041/scrapy,zackslash/scrapy,heamon7/scrapy,carlosp420/scrapy,Adai0808/scrapy-1,farhan0581/scrapy,jorik041/scrapy,dracony/scrapy,wenyu1001/scrapy,livepy/scrapy,cursesun/scrapy,huoxudong125/scrapy,mouadino/scrapy,nett55/scrapy,dgillis/scrapy,nguyenhongson03/scrapy,rahul-c1/scrapy,KublaikhanGeek/scrapy,zackslash/scrapy,OpenWhere/scrapy,crasker/scrapy,Ryezhang/scrapy,wenyu1001/scrapy,fpy171/scrapy,stenskjaer/scrapy,zjuwangg/scrapy,rootAvish/scrapy,eliasdorneles/scrapy,TarasRudnyk/scrapy,crasker/scrapy,GregoryVigoTorres/scrapy,wzyuliyang/sc
rapy,stenskjaer/scrapy,pranjalpatil/scrapy,moraesnicol/scrapy,haiiiiiyun/scrapy,profjrr/scrapy,nett55/scrapy,fqul/scrapy,elacuesta/scrapy,kmike/scrapy,agreen/scrapy,csalazar/scrapy,nowopen/scrapy,jiezhu2007/scrapy,xiao26/scrapy,umrashrf/scrapy,jc0n/scrapy,barraponto/scrapy,yidongliu/scrapy,gbirke/scrapy,URXtech/scrapy,CodeJuan/scrapy,livepy/scrapy,olorz/scrapy,haiiiiiyun/scrapy,wangjun/scrapy,elijah513/scrapy,emschorsch/scrapy,fafaman/scrapy,hwsyy/scrapy,finfish/scrapy,yusofm/scrapy,scorphus/scrapy,Bourneer/scrapy,ArturGaspar/scrapy,shaform/scrapy,redapple/scrapy,finfish/scrapy,rklabs/scrapy,hyrole/scrapy,darkrho/scrapy-scrapy,CENDARI/scrapy,pranjalpatil/scrapy,sigma-random/scrapy,pombredanne/scrapy,pombredanne/scrapy,redapple/scrapy,wujuguang/scrapy,olorz/scrapy,haiiiiiyun/scrapy,codebhendi/scrapy,rklabs/scrapy,Chenmxs/scrapy,GregoryVigoTorres/scrapy,stenskjaer/scrapy,fafaman/scrapy,cleydson/scrapy,avtoritet/scrapy,fpy171/scrapy,sigma-random/scrapy,Slater-Victoroff/scrapy,finfish/scrapy,YeelerG/scrapy,csalazar/scrapy,ArturGaspar/scrapy,wenyu1001/scrapy,Zephor5/scrapy,tntC4stl3/scrapy,pfctdayelise/scrapy,hectoruelo/scrapy,yarikoptic/scrapy,starrify/scrapy,webmakin/scrapy,wangjun/scrapy,WilliamKinaan/scrapy,ramiro/scrapy,GregoryVigoTorres/scrapy,rolando-contrib/scrapy,chekunkov/scrapy,shaform/scrapy,redapple/scrapy,Chenmxs/scrapy,famorted/scrapy,wangjun/scrapy,bmess/scrapy,cleydson/scrapy,cursesun/scrapy,Preetwinder/scrapy,Allianzcortex/scrapy,moraesnicol/scrapy,zhangtao11/scrapy,rdowinton/scrapy,godfreyy/scrapy,joshlk/scrapy,profjrr/scrapy,farhan0581/scrapy,rolando/scrapy,OpenWhere/scrapy,mlyundin/scrapy,Bourneer/scrapy,cyberplant/scrapy,CodeJuan/scrapy,Djlavoy/scrapy,rolando-contrib/scrapy,pawelmhm/scrapy,rklabs/scrapy,jamesblunt/scrapy,scrapy/scrapy,rahul-c1/scrapy,wujuguang/scrapy,taito/scrapy,Digenis/scrapy,legendtkl/scrapy,hwsyy/scrapy,joshlk/scrapy,scorphus/scrapy,kazitanvirahsan/scrapy,Djlavoy/scrapy,ssh-odoo/scrapy,crasker/scrapy,w495/scrapy,AaronTao1990/scr
apy,yidongliu/scrapy,mouadino/scrapy,mlyundin/scrapy,yusofm/scrapy,z-fork/scrapy,shaform/scrapy,zorojean/scrapy,rolando/scrapy,rdowinton/scrapy,kazitanvirahsan/scrapy,yusofm/scrapy,Timeship/scrapy,godfreyy/scrapy,barraponto/scrapy,avtoritet/scrapy,dacjames/scrapy,Lucifer-Kim/scrapy,beni55/scrapy,jeffreyjinfeng/scrapy,smaty1/scrapy,dhenyjarasandy/scrapy,jc0n/scrapy,wzyuliyang/scrapy,gnemoug/scrapy,Geeglee/scrapy,ssteo/scrapy,dgillis/scrapy,dgillis/scrapy,carlosp420/scrapy,codebhendi/scrapy,beni55/scrapy,eLRuLL/scrapy,kalessin/scrapy,Cnfc19932/scrapy,w495/scrapy,hectoruelo/scrapy,eLRuLL/scrapy,umrashrf/scrapy,fontenele/scrapy,legendtkl/scrapy,ssh-odoo/scrapy,jc0n/scrapy,xiao26/scrapy,liyy7/scrapy,jiezhu2007/scrapy,taito/scrapy,jorik041/scrapy,hansenDise/scrapy,tliber/scrapy,foromer4/scrapy,hbwzhsh/scrapy,jeffreyjinfeng/scrapy,gnemoug/scrapy,webmakin/scrapy,huoxudong125/scrapy,zjuwangg/scrapy,Geeglee/scrapy,profjrr/scrapy,IvanGavran/scrapy,ramiro/scrapy,pablohoffman/scrapy,ylcolala/scrapy,ramiro/scrapy,dracony/scrapy,irwinlove/scrapy,agreen/scrapy,songfj/scrapy,Cnfc19932/scrapy,Adai0808/scrapy-1,1yvT0s/scrapy,zackslash/scrapy,dhenyjarasandy/scrapy,livepy/scrapy,eliasdorneles/scrapy,bmess/scrapy,ashishnerkar1/scrapy,coderabhishek/scrapy,curita/scrapy,snowdream1314/scrapy,famorted/scrapy,huoxudong125/scrapy,zjuwangg/scrapy,ylcolala/scrapy,Ryezhang/scrapy,github-account-because-they-want-it/scrapy,kimimj/scrapy,famorted/scrapy,fontenele/scrapy,beni55/scrapy,heamon7/scrapy,ArturGaspar/scrapy,olafdietsche/scrapy,hansenDise/scrapy,Preetwinder/scrapy,irwinlove/scrapy,KublaikhanGeek/scrapy,pombredanne/scrapy,Zephor5/scrapy,Djlavoy/scrapy,smaty1/scrapy,godfreyy/scrapy,nowopen/scrapy,Parlin-Galanodel/scrapy,dangra/scrapy,agusc/scrapy,1yvT0s/scrapy,YeelerG/scrapy,olorz/scrapy,ssteo/scrapy,hansenDise/scrapy,songfj/scrapy,kashyap32/scrapy,kmike/scrapy,fqul/scrapy,webmakin/scrapy,dacjames/scrapy,johnardavies/scrapy,mgedmin/scrapy,Geeglee/scrapy,YeelerG/scrapy,nguyenhongson03/scrapy,
kalessin/scrapy,rolando-contrib/scrapy,dangra/scrapy,nikgr95/scrapy,Adai0808/scrapy-1,lacrazyboy/scrapy,chekunkov/scrapy,barraponto/scrapy,dhenyjarasandy/scrapy,ENjOyAbLE1991/scrapy,codebhendi/scrapy | scrapy/link.py | scrapy/link.py | """
This module defines the Link object used in Link extractors.
For actual link extractors implementation see scrapy.contrib.linkextractor, or
its documentation in: docs/topics/link-extractors.rst
"""
class Link(object):
    """An extracted link produced by a LinkExtractor.

    Carries the target ``url``, the anchor ``text`` and whether the anchor
    was marked ``rel="nofollow"``.
    """
    __slots__ = ['url', 'text', 'nofollow']
    def __init__(self, url, text='', nofollow=False):
        self.url = url
        self.text = text
        self.nofollow = nofollow
    def __eq__(self, other):
        # Robustness fix: comparing against a non-Link used to raise
        # AttributeError; returning NotImplemented lets Python fall back to
        # its default handling (unequal) instead.
        if not isinstance(other, Link):
            return NotImplemented
        return (self.url == other.url and self.text == other.text
                and self.nofollow == other.nofollow)
    def __hash__(self):
        return hash(self.url) ^ hash(self.text) ^ hash(self.nofollow)
    def __repr__(self):
        return 'Link(url=%r, text=%r, nofollow=%r)' % (self.url, self.text, self.nofollow)
| """
This module defines the Link object used in Link extractors.
For actual link extractors implementation see scrapy.contrib.linkextractor, or
its documentation in: docs/topics/link-extractors.rst
"""
class Link(object):
    """An extracted link produced by the LinkExtractor.

    Holds just the target ``url`` and the anchor ``text``.
    """
    __slots__ = ['url', 'text']
    def __init__(self, url, text=''):
        self.url = url
        self.text = text
    def __eq__(self, other):
        # Two links are equal when both fields match.
        return (self.url, self.text) == (other.url, other.text)
    def __hash__(self):
        return hash(self.url) ^ hash(self.text)
    def __repr__(self):
        return '<Link url=%r text=%r >' % (self.url, self.text)
| bsd-3-clause | Python |
2f180a089450da5241836a1c6dc77f5116a5418d | Add c4_check_move | davidrobles/mlnd-capstone-code | examples/c4_check_move.py | examples/c4_check_move.py | from keras.models import load_model
from capstone.game.games import Connect4 as C4
from capstone.game.players import AlphaBeta, GreedyQ, RandPlayer
from capstone.rl.value_functions import QNetwork
from capstone.game.utils import play_match
# Mid-game position: X threatens b1-c1-d1 completed left or right,
# O threatens e1-f1-g1 -- a tactical test for the evaluation function.
board = [[' ', ' ', ' ', ' ', ' ', ' ', ' '], # 6
         [' ', ' ', ' ', ' ', ' ', ' ', ' '], # 5
         [' ', ' ', ' ', ' ', ' ', ' ', ' '], # 4
         [' ', ' ', ' ', ' ', ' ', ' ', ' '], # 3
         [' ', ' ', ' ', ' ', ' ', ' ', ' '], # 2
         [' ', 'X', 'X', 'X', 'O', 'O', 'O']] # 1
# Columns a-g mapped to 0-based move indices.
move_mapper = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': 5, 'g': 6}
qnetwork = QNetwork(move_mapper, None, None, None)
# Pre-trained Keras model (loaded but unused below -- TODO(review): wire it
# into qnetwork or drop the load).
model = load_model('models/episode-52500-winpct-0.938')
c4 = C4(board)
# results = play_series(
#     game=c4,
#     players=[GreedyQ(mlp), RandPlayer()],
#     n_matches=100,
#     verbose=True
# )
def yeah(game, player):
    """Evaluation function passed to AlphaBeta: exact utility for finished
    games, otherwise the Q-network's learned estimate of the position.

    ``player`` selects whose outcome is scored at terminal positions; the
    network lookup below is position-based and ignores it.
    """
    if game.is_over():
        # Terminal positions get their true game-theoretic value.
        utilities = {'W': 1.0, 'L': -1.0, 'D': 0.0}
        return utilities[game.outcome(player)]
    # Dead code removed: `best_func = max if player == 0 else min` was
    # computed but never used (leftover of the commented-out experiment).
    return qnetwork[game]
# Alpha-beta (depth 2, guided by `yeah`) versus a random opponent.
players=[AlphaBeta(eval_func=yeah, max_depth=2), RandPlayer()]
play_match(c4, players)
| mit | Python | |
bfbec3452bb518f9bd674a3782a56e45ee2e18da | Add quotes example | tjguk/networkzero,tjguk/networkzero,tjguk/networkzero | examples/quotes/quotes.py | examples/quotes/quotes.py | import sys
print(sys.version_info)  # leftover debug output -- TODO(review): remove?
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old sould",
]
# Advertise this peer under a user-chosen name so others can discover it.
my_name = input("Name: ")
nw0.advertise(my_name)
while True:
    # Every other advertised peer on the network (excluding ourselves).
    services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
    for name, address in services:
        # Non-blocking poll (wait_for_s=0): topic is falsy when nothing queued.
        topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
        if topic:
            print("%s says: %s" % (name, message))
        # Send a random quote back regardless of whether we received one.
        quote = random.choice(quotes)
        nw0.send_notification(address, "quote", quote)
    time.sleep(0.5)
| mit | Python | |
9018954189a6556e6038d01e0766a336d62a91fd | Add __init__.py | lord63/choosealicense-cli | choosealicense/__init__.py | choosealicense/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
choosealicense-cli
~~~~~
Bring http://choosealicense.com to your terminal.
:copyright: (c) 2015 by lord63.
:license: MIT, see LICENSE for more details.
"""
# Package metadata constants.
__title__ = "choosealicense-cli"
__version__ = '0.1.0'
__author__ = "lord63"
__license__ = "MIT"
__copyright__ = "Copyright 2015 lord63"
| mit | Python | |
8d45b85a5726db2cc3038adae2fc2f609c35ed63 | Create analyzer.py | zbigniewz/jenkins-build-failure-analyzer,ZbigniewZabost/jenkins-build-failure-analyzer,zbigniewz/jenkins-build-failure-analyzer,ZbigniewZabost/jenkins-build-failure-analyzer | analyzer.py | analyzer.py | #!/usr/bin/python
import pprint
import re
import argparse
from datetime import datetime
from statsd import StatsClient
from utils import failureReasons, JenkinsClient
def is_build_failed(job):
    """Return True when the job's most recent build finished with FAILURE."""
    last_build = job['lastBuild']
    # Jobs that have never been built have a falsy 'lastBuild'.
    return bool(last_build) and last_build['result'] == 'FAILURE'
def was_built_in_last_24h(job):
    """Return True if the job's last build started less than 24 hours ago.

    Jobs that have never been built return False.
    """
    if job['lastBuild']:
        # Jenkins timestamps are epoch milliseconds; convert to a UTC datetime.
        build_time = datetime.utcfromtimestamp(job['lastBuild']['timestamp'] / 1e3)
        # BUG FIX: compare against utcnow(), not now() -- the build time above
        # is UTC, so using local time skewed the age by the timezone offset.
        age_in_hours = (datetime.utcnow() - build_time).total_seconds() / 60 / 60
        if age_in_hours < 24:
            return True
    return False
def filter_jobs(all_jobs):
    """Keep only jobs whose latest build failed within the last 24 hours."""
    return [job for job in all_jobs
            if is_build_failed(job) and was_built_in_last_24h(job)]
def find_failure_reason(console_output):
    """Match *console_output* against the known failure patterns.

    Returns the first reason whose regex list has a hit, or the catch-all
    unknown_reason when nothing matches.
    """
    for reason in failureReasons.possible_reasons:
        if any(re.search(pattern, console_output) for pattern in reason['regex']):
            return reason
    return failureReasons.unknown_reason
def update_results(results, reason, job):
    """Record one failed *job* under its failure *reason* in *results*.

    *results* maps reason names to {'count', 'jobs', 'description', 'regex',
    'graphite key'} dicts; the (mutated) mapping is also returned.
    """
    # Build the per-job record once -- the original duplicated this literal
    # in both branches, which invited the two copies drifting apart.
    job_entry = {
        'job name': job['name'],
        'build url': '{job_url}{build_number}/console'.format(job_url=job['url'],
                                                              build_number=job['lastBuild']['number'])
    }
    entry = results.get(reason['name'])
    if entry is None:
        # First failure seen for this reason: create its bucket.
        results[reason['name']] = {
            'count': 1,
            'jobs': [job_entry],
            'description': reason['description'],
            'regex': reason['regex'],
            'graphite key': reason['graphite key']
        }
    else:
        entry['count'] += 1
        entry['jobs'].append(job_entry)
    return results
def analyze_jobs(filtered_jobs, jenkins_server):
results = {}
counter = 0
for job in filtered_jobs:
counter += 1
print "Analyzing job {id} / {all}".format(id=counter, all=len(filtered_jobs))
console_output = jenkins_server.get_job_console_output(job)
failure_reason = find_failure_reason(console_output)
results = update_results(results, failure_reason, job)
return results
def print_results(results):
print '\n Full results:\n'
pp = pprint.PrettyPrinter()
pp.pprint(results)
print '\n\n\n Quick summary:\n'
for key, value in results.iteritems():
print '{reason} : {count}'.format(reason=key, count=value['count'])
def report_to_graphite(host, port, prefix, results):
    """Push one gauge per failure reason to the statsd/graphite server."""
    client = StatsClient(host=host, port=port, prefix=prefix, maxudpsize=512)
    for details in results.itervalues():
        client.gauge(details['graphite key'], details['count'])
def create_arg_parser():
    """Build the command-line parser for the analyzer.

    ``statsd_port`` is now parsed as an int so it can be handed straight
    to ``StatsClient`` (a numeric port) instead of a raw string.
    """
    parser = argparse.ArgumentParser(description='Analyze jenkins failures and report them to graphite server')
    parser.add_argument('jenkins_host', help='Jenkins base URL or hostname')
    parser.add_argument('jenkins_user', help='Jenkins API username')
    parser.add_argument('jenkins_pass', help='Jenkins API password or token')
    parser.add_argument('statsd_host', help='statsd host to report metrics to')
    parser.add_argument('statsd_port', type=int, help='statsd UDP port')
    parser.add_argument('graphite_key', help='metric prefix for reported counts')
    return parser
def main():
    # Wire everything together: parse CLI args, pull all jobs from Jenkins,
    # keep only recent failures, classify them, report gauges, print summary.
    parser = create_arg_parser()
    args = parser.parse_args()
    jenkins_server = JenkinsClient.JenkinsClient(args.jenkins_host, args.jenkins_user, args.jenkins_pass)
    all_jobs = jenkins_server.get_all_jobs()
    filtered_jobs = filter_jobs(all_jobs)
    results = analyze_jobs(filtered_jobs, jenkins_server)
    report_to_graphite(args.statsd_host, args.statsd_port, args.graphite_key, results)
    print_results(results)
if __name__ == '__main__':
main()
| apache-2.0 | Python | |
954301b5f3290a85d58d9d16a30c8c50c97e7283 | Add fuzzy match | TheBrane/sodi-data-acquisition | form_matching/fuzzy_match.py | form_matching/fuzzy_match.py | '''
Tashlin Reddy
August 2020
Fuzzy Match strings in Columns of CSV file
'''
#read in dependencies
import pandas as pd
import numpy as np
from fuzzywuzzy import fuzz
# read in csv form
form = pd.read_csv('form.csv')
# form = pd.read_csv('https...')


# fuzzy match function -- must be defined BEFORE the column loop below.
# (The original script called it before its definition, raising NameError.)
def fuzzy_match_col(col_num):
    """Fuzzy-normalise the comma-separated keywords in column *col_num*.

    Each cell is split on commas and the words are cleaned (stripped,
    spaces -> underscores, capitalised).  Earlier rows act as the reference
    vocabulary: any later keyword whose fuzz.ratio against an earlier
    keyword exceeds 70 (but is not an exact match) is replaced by that
    earlier spelling.  Returns one cleaned, comma-joined string per row.
    """
    keyword_col = form.iloc[:, col_num]
    new_lst = []
    for keyword_lst in keyword_col:
        new_wrds = []
        try:
            keywords = keyword_lst.split(',')
            for word in keywords:
                word = word.strip()
                word = word.replace(" ", "_")
                word = word.lower().capitalize()
                if word != '':
                    new_wrds.append(word)
        except:
            # non-string cells (e.g. NaN) cannot be split
            new_wrds.append('NAN')
        new_lst.append(new_wrds)

    # unify later rows against the vocabulary of all earlier rows
    for i in range(1, len(new_lst)):
        for word_list in new_lst[:i]:
            for word in word_list:
                for match in new_lst[i]:
                    score = fuzz.ratio(word, match)
                    if score > 70 and score != 100:
                        new_lst[i] = [w.replace(match, word) for w in new_lst[i]]

    matched_words = [', '.join(element) for element in new_lst]
    cleaned_words = [w.replace('_', ' ') for w in matched_words]
    return cleaned_words


# iterate over each column; fuzzy-match any column holding comma-separated
# keyword lists, copy the others through unchanged
df_new = pd.DataFrame()
for col_num in range(0, form.shape[1]):
    title = form.columns[col_num]
    try:
        if form.iloc[:, col_num].str.contains(',').any() == True:
            matched_series = fuzzy_match_col(col_num)
            df_new[title] = matched_series
        else:
            df_new[title] = form.iloc[:, col_num]
    except:
        # non-string columns have no .str accessor -- copy through
        df_new[title] = form.iloc[:, col_num]

# export new matched csv file
df_new.to_csv("fuzz_matched.csv")
| mit | Python | |
b7406691eed7308d7dd4336326c896e22d16d768 | add coins model with save fig in the right spot | probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml | book/coins_model_sel_demo.py | book/coins_model_sel_demo.py | import numpy as np
from pyprobml_utils import save_fig
import matplotlib.pyplot as plt
from scipy.special import betaln
theta = 0.7
N = 5
alpha = 1
alphaH = alpha
alphaT = alpha
# instantiate a number of datastructures: one row/entry per possible
# sequence of N coin flips (2**N sequences in total)
flips = np.zeros((2**N, N))
Nh = np.zeros(2**N)
Nt = np.zeros(2**N)
marginal_lik = np.zeros(2**N)
log_lik = np.zeros(2**N)
log_BF = np.zeros(2**N)
for i in range(0,2**N):
    # decode sequence index i into an N-vector of outcomes (1=heads, 2=tails)
    flips[i] = np.array(np.unravel_index(i, [2]*N, 'F')) + 1
    Nh[i] = len(np.where(flips[i] == 1)[0])
    Nt[i] = len(np.where(flips[i] == 2)[0])
    # Beta-Bernoulli marginal likelihood: B(aH+Nh, aT+Nt) / B(aH, aT)
    marginal_lik[i] = np.exp(betaln(alphaH+Nh[i], alphaT+Nt[i]) - betaln(alphaH, alphaT))
    mle = Nh[i] / N
    # 10e-8 is a small epsilon that avoids log10(0) when the MLE is 0 or 1
    log_lik[i] = Nh[i]*np.log10(mle + 10e-8) + Nt[i]*np.log10(1 - mle + 10e-8)
    # NOTE(review): this uses betaln(alphaH, alphaH) while the marginal
    # likelihood above uses betaln(alphaH, alphaT) -- numerically identical
    # while alphaH == alphaT, but confirm which was intended.
    log_BF[i] = betaln(alphaH+Nh[i], alphaT+Nt[i]) - betaln(alphaH, alphaH) - N*np.log(0.5)
#sort in order of number of heads
ndx = np.argsort(Nh)
Nh = Nh[ndx]
marginal_lik = marginal_lik[ndx]
log_lik = log_lik[ndx]
log_BF = log_BF[ndx]
# probability of any fixed sequence under the fair-coin model M0
p0 = (1/2)**N
plt.plot(marginal_lik, 'o-', linewidth=2)
plt.plot((0,2**N), (p0,p0), c='k', linewidth=2)
plt.xticks(list(range(len(Nh))), Nh.astype(int))
plt.xlabel('num heads')
plt.title(r"Marginal likelihood for Beta-Bernoulli model $\int p(D|\theta) Be(\theta | 1, 1,) d\Theta$")
plt.xlim((-0.6,2**N))
save_fig("joshCoins4.pdf")
plt.show()
plt.plot(np.exp(log_BF), 'o-', linewidth=2)
plt.title("BF(1,0)")
plt.xticks(list(range(len(Nh))), Nh.astype(int))
plt.xlim((-0.6,2**N))
save_fig("joshCoins4BF.pdf")
plt.show()
BIC1 = log_lik - 1
plt.plot(BIC1, 'o-', linewidth=2)
plt.title(r"BIC approximation to $log_{10} p(D|M1)$")
plt.xticks(list(range(len(Nh))), Nh.astype(int))
plt.xlim((-0.6,2**N))
save_fig("joshCoins4BIC.pdf")
plt.show()
plt.plot(np.log10(marginal_lik), 'o-', linewidth=2)
plt.title(r"$log_{10} p(D | M1)$")
plt.xticks(list(range(len(Nh))), Nh.astype(int))
plt.xlim((-0.6,2**N))
save_fig("joshCoins4LML.pdf")
plt.show()
| mit | Python | |
8347613940104b77224c48e104f898ac29f0c34d | Add script to generate synthetic CPT documents | NLeSC/cptm,NLeSC/cptm | generateCPTCorpus.py | generateCPTCorpus.py | """Script that generates a (synthetic) corpus to test the CPT model.
The corpus consists of 5 documents containing fixed topics and opinions.
The generation process is described in the CPT paper.
A text document contains the topic words on the first line and the opion words
on the second line.
Usage: python generateCPTCorpus.py <out dir>
"""
import argparse
import numpy as np
from collections import Counter
import codecs
import os
parser = argparse.ArgumentParser()
#parser.add_argument('num_doc', help='the number of documents to be generated')
#parser.add_argument('num_topic_words', help='the number of topic words per '
# 'document')
#parser.add_argument('num_opinion_words', help='the number of opinion words '
# 'per document')
parser.add_argument('out_dir', help='the directory where the generated '
'documents should be saved.')
args = parser.parse_args()
if not os.path.exists(args.out_dir):
os.makedirs(args.out_dir)
topic_vocabulary = np.array(['zon',
'ijs',
'strand',
'vanille',
'chocola',
'broccoli',
'wortel'])
opinion_vocabulary = np.array(['warm',
'zwemmen',
'zonnig',
'bewolkt',
'vies',
'lekker',
'koud'])
real_theta_topic = np.array([[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0],
[0.5, 0.5, 0.0],
[0.0, 0.5, 0.5]])
real_phi_topic = np.array([[0.4, 0.2, 0.4, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.3, 0.0, 0.35, 0.35, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.5]])
real_phi_opinion = np.array([[1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0]])
num_topics = real_theta_topic.shape[1]
length_topic = 50
length_opinion = 20
# Generate one document per topic mixture: line 1 holds the topic words,
# line 2 the opinion words (see module docstring).
# NOTE: the bare `print` statement makes this script Python 2 only.
for m, tm in enumerate(real_theta_topic):
    out_file = os.path.join(args.out_dir, 'document{}.txt'.format(m+1))
    print out_file
    with codecs.open(out_file, 'wb', 'utf8') as f:
        topic_words = []
        topic_counter = Counter()
        for i in range(length_topic):
            # topic words: sample a topic from the document mixture, then a
            # word from that topic's distribution
            topic = np.random.multinomial(1, tm).argmax()
            topic_counter[topic] += 1
            word = np.random.multinomial(1, real_phi_topic[topic]).argmax()
            topic_words.append(topic_vocabulary[word])
        #print topic_counter
        f.write('{}\n'.format(' '.join(topic_words)))
        opinion_words = []
        # select opinion (index) based on topic occurrence
        om = np.array([float(topic_counter[i]) for i in range(num_topics)])
        #print om
        # normalize
        om /= sum(om)
        for i in range(length_opinion):
            # opinion words: sampled from the empirical topic proportions
            topic = np.random.multinomial(1, om).argmax()
            word = np.random.multinomial(1, real_phi_opinion[topic]).argmax()
            opinion_words.append(opinion_vocabulary[word])
        f.write(' '.join(opinion_words))
| apache-2.0 | Python | |
32858d93d8fcb0adf3b0da54f607fddbc29926ad | refactor customerRegister | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | usermanage/views/customerRegister.py | usermanage/views/customerRegister.py | from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.models import User, Group
from django.contrib.auth.decorators import login_required, user_passes_test, permission_required
from django.contrib.auth.forms import UserCreationForm
from customermanage.models import Coupon, Wallet
from storemanage.models import Ticket
# Create your views here.
from usermanage import models
def customerRegister(request):
    """Register a new customer account, then redirect to the index page."""
    if request.user.is_authenticated:
        return redirect('index:index')
    if request.method == 'GET':
        return render(request, 'usermanage/register-customer.html')
    data = request.POST
    # Refuse duplicate usernames and re-render the form with an error flag.
    if User.objects.filter(username=data['username']).exists():
        return render(request, 'usermanage/register-customer.html', {'error': True})
    new_user = User.objects.create_user(
        username=data['username'],
        password=data['password'],
        email=data['email'],
    )
    # Put the new account into the "customer" group.
    customer_group = Group.objects.get(name='customer')
    customer_group.user_set.add(new_user)
    new_user.save()
    customer_group.save()
    # Attach the customer profile holding the personal details.
    profile = models.Customer(
        user=new_user,
        first_name=data['first_name'],
        last_name=data['last_name'],
    )
    profile.save()
    return redirect('index:index')
| mit | Python | |
8f6dd4e4825175b4be37bca85c466b51619c3b89 | Create list-iod-web-index.py | hpautonomy/iod-example-python-scripts,hpe-idol/iod-example-python-scripts | list-iod-web-index.py | list-iod-web-index.py | #!/usr/bin/env python
import os
import unirest
import time
import json
import pprint
import logging
import argparse
unirest.timeout(120)
IODAPIKEY = os.environ.get('IODAPIKEY')
parser = argparse.ArgumentParser(description='List IOD connectors associated with the API key')
parser.add_argument('--apikey', default=IODAPIKEY)
args = parser.parse_args()
apikey = args.apikey
if apikey:
logging.info("Using apikey: %s" % apikey)
else:
logging.critical("No apikey supplied. Exiting.")
exit(1)
print "CALLING LISTRESOURCES FOR WEB_CLOUD CONNECTORS"
response = unirest.post("https://api.idolondemand.com/1/api/sync/listresources/v1", headers={"Accept": "application/json"}, params={"apikey": apikey, "type": "connector", "flavor": "web_cloud"})
reply = json.loads(response.raw_body)
pprint.pprint(reply)
print "CALLING LISTRESOURCES FOR FILESYSTEM_ONSITE CONNECTORS"
response = unirest.post("https://api.idolondemand.com/1/api/sync/listresources/v1", headers={"Accept": "application/json"}, params={"apikey": apikey, "type": "connector", "flavor": "filesystem_onsite"})
reply = json.loads(response.raw_body)
pprint.pprint(reply)
| mit | Python | |
deac3ae3b0f19adcbe612cce7fe9cfbbc2c08c5f | Update to current year | gschizas/praw,praw-dev/praw,praw-dev/praw,gschizas/praw | docs/conf.py | docs/conf.py | import os
import sys
sys.path.insert(0, "..")
from praw import __version__
copyright = "2020, Bryce Boe"
exclude_patterns = ["_build"]
extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"]
html_static_path = ["_static"]
html_theme = "sphinx_rtd_theme"
html_theme_options = {"collapse_navigation": True}
htmlhelp_basename = "PRAW"
intersphinx_mapping = {"python": ("https://docs.python.org/3.6", None)}
master_doc = "index"
nitpicky = True
project = "PRAW"
pygments_style = "sphinx"
release = __version__
source_suffix = ".rst"
suppress_warnings = ["image.nonlocal_uri"]
version = ".".join(__version__.split(".", 2)[:2])
# Use RTD theme locally
if not os.environ.get("READTHEDOCS"):
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def skip(app, what, name, obj, skip, options):
    """autodoc-skip-member hook: force-document selected dunder methods."""
    documented_dunders = (
        "__call__",
        "__contains__",
        "__getitem__",
        "__init__",
        "__iter__",
        "__len__",
    )
    return False if name in documented_dunders else skip
def setup(app):
    # Sphinx extension entry point: register the skip hook and load our CSS
    # override on top of the RTD theme.
    app.connect("autodoc-skip-member", skip)
    app.add_stylesheet("theme_override.css")
| import os
import sys
sys.path.insert(0, "..")
from praw import __version__
copyright = "2017, Bryce Boe"
exclude_patterns = ["_build"]
extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"]
html_static_path = ["_static"]
html_theme = "sphinx_rtd_theme"
html_theme_options = {"collapse_navigation": True}
htmlhelp_basename = "PRAW"
intersphinx_mapping = {"python": ("https://docs.python.org/3.6", None)}
master_doc = "index"
nitpicky = True
project = "PRAW"
pygments_style = "sphinx"
release = __version__
source_suffix = ".rst"
suppress_warnings = ["image.nonlocal_uri"]
version = ".".join(__version__.split(".", 2)[:2])
# Use RTD theme locally
if not os.environ.get("READTHEDOCS"):
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def skip(app, what, name, obj, skip, options):
    # autodoc-skip-member hook: keep these dunder methods in the docs.
    if name in {
        "__call__",
        "__contains__",
        "__getitem__",
        "__init__",
        "__iter__",
        "__len__",
    }:
        return False
    return skip
def setup(app):
    # Sphinx entry point: register the skip hook and the theme CSS override.
    app.connect("autodoc-skip-member", skip)
    app.add_stylesheet("theme_override.css")
| bsd-2-clause | Python |
2ea14af13a80b98609108f8824ff1d995b0fecfb | Create img2html.py | gauntletm/img2html | img2html.py | img2html.py | #!/usr/bin/env python
# img to html v0.2
# will convert a non-svg image to html
#
# Gauntlet O. Manatee
# spukspital@openmailbox.org
import os, sys
import Image
imgname = raw_input("Enter the path to the .png file you want to convert.\n(I recommend .png, though it is not mandatory.\n \
Other image files work, too. However, my best results have been .png pics in the RGB color space.)\n")
image = Image.open(imgname)
pix = image.load()
width, height = image.size
wbody = width*10
# creating the output file
outfile = open("i2h-out.html",'a+')
outfile.write(str('<!DOCTYPE html>\n \
<html>\n \
<head>\n \
<title>img2html</title>\n \
<style>.square {display: block; width: 10px; height: 10px; float: left;}\n \
.clear {clear: both;}\n \
p {margin-top: 0; margin-bottom: 0; padding-top: 0; padding-bottom: 0;}\n \
div {width: ' + str(wbody) + 'px;} \n \
</style>\n \
</head>\n \
<body>\n \
<div><p>\n'))
# pixel coordinates
x = 0
y = 0
# Walk the image row by row; each pixel becomes a 10x10 coloured <span>,
# and each row is closed with </p><p class="clear"> to break the floats.
# NOTE(review): `r, g, b = pix[x,y]` assumes an RGB image; RGBA or palette
# modes would raise a ValueError here -- confirm inputs are RGB.
while y < height:
    while x < width:
        r, g, b = pix[x,y]
        outfile.write(str('<span class="square" style="background-color: rgb(' + str(r) + str(',') + str(g) + str(',') + str(b) + ')"> </span>\n'))
        x = x+1
    outfile.write(str('</p>\n'))
    outfile.write(str('<p class="clear">\n\n'))
    y = y+1
    x = 0
# writing the rest of the html document and closing the file
outfile.write(str('</p></div>\n \
</body>\n \
</html>'))
outfile.close()
print("Done.")
quit()
| apache-2.0 | Python | |
cff78bad619c7fb1e8c9067dedbf6afc38de90d5 | Create 'Python.py'. | toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava,toturkmen/baklava | P/Python.py | P/Python.py | for i in range (0, 10, 1):
print ((" " * (10 - i)) + ("*" * (i * 2 + 1)))
for i in range (10, -1, -1):
print ((" " * (10 - i)) + ("*" * (i * 2 + 1)))
raw_input () | mit | Python | |
16525e1e249d8506f605569c5d6d87b1988e890d | Create PushOver.py | KronosKoderS/py_pushover,KronosKoderS/pypushover | PushOver.py | PushOver.py | import sys
#Making compatible for Python 3 and 2.
if sys.version_info < (3, 0, 0):
import urllib
else:
import urllib2.parse as urllib
import urllib2
class Sounds(object):
    # Notification sound identifiers accepted by the Pushover API, grouped
    # by attribute prefix into short tones, long tones, and silent.
    Short_Pushover = 'pushover'
    Short_Bike = 'bike'
    Short_Bugle = 'bugle'
    Short_Cash_Register = 'cashregister'
    Short_Classical = 'classical'
    Short_Cosmic = 'cosmic'
    Short_Gamelan = 'gamelan'
    Short_Incoming = 'incoming'
    Short_Intermission = 'intermission'
    Short_Magic = 'magic'
    Short_Mechanical = 'mechanical'
    Short_Piano_Bar = 'pianobar'
    Short_Siren = 'siren'
    Short_Space_Alarm = 'spacealarm'
    Short_Tug_Boat = 'tugboat'
    Long_Alien_Alarm = 'alien'
    Long_Climb = 'climb'
    Long_Persistent = 'persistent'
    Long_Pushover_Echo = 'echo'
    Long_Up_Down = 'updown'
    Silent = 'none'
class Priority(object):
    # Numeric message priority values passed through to the Pushover API
    # (from lowest, -2, to emergency, 2).
    Lowest = -2
    Low = -1
    Normal = 0
    High = 1
    Emergency = 2
class PushOverManager(object):
    """Sends notifications through the Pushover REST API.

    NOTE(review): with urllib2, 4xx/5xx responses raise HTTPError from
    urlopen() instead of returning a response object, so the non-200
    branches of _response_check only run when a response-like object is
    passed in directly -- confirm the intended error handling.
    """

    _url = "https://api.pushover.net/1/messages.json"
    # Optional Pushover parameters forwarded verbatim when supplied.
    _OPTIONAL_PARAMS = ('device', 'url', 'url_title', 'priority',
                        'timestamp', 'sound')

    def __init__(self, app_token, group_key):
        self._app_token = app_token
        self._group_key = group_key

    def push_notification(self, title, message, **kwargs):
        """POST one notification; raise UserWarning if it did not succeed."""
        json_out = {
            'token': self._app_token,
            'user': self._group_key,
            'title': title,
            'message': message
        }
        # Forward recognised optional parameters in one loop instead of six
        # hand-written `if 'x' in kwargs` blocks.
        for param in self._OPTIONAL_PARAMS:
            if param in kwargs:
                json_out[param] = kwargs[param]

        data = urllib.urlencode(json_out)
        req = urllib2.Request(self._url, data)
        if self._response_check(urllib2.urlopen(req)) < 1:
            raise UserWarning("Notification did not succeed")

    @staticmethod
    def _response_check(response):
        """Classify a response: 1 = success, -1 = client error, 0 = other."""
        if response.code == 200:
            return 1
        elif 400 <= response.code < 500:
            print("Invalid input! Potential issues are max quota reached, token invalid, user no longer active, etc,.")
            if 'errors' in response.headers.dict:
                print(response.headers.dict['errors'])
            return -1
        else:
            print("Unable to connect to API or not reply. Please try again in 5 seconds")
            return 0
| mit | Python | |
d782809746cfb403358bdfb10215b70c96498264 | Introduce a live camera viewer with Qt. | microy/PyStereoVisionToolkit,microy/VisionToolkit,microy/VisionToolkit,microy/StereoVision,microy/StereoVision,microy/PyStereoVisionToolkit | QtViewer.py | QtViewer.py | #! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Qt interface to display AVT cameras
#
#
# External dependencies
#
from PyQt4 import QtGui, QtCore
#
# Window to display a camera
#
class QtViewer(QtGui.QWidget):
    """Window that displays images coming from a camera."""

    def __init__(self, camera):
        """Build the widget: a single stretched QLabel holding the frame."""
        QtGui.QWidget.__init__(self)
        # Keep a reference to the camera so frames can be pulled later.
        self.camera = camera
        self.setWindowTitle('Camera')
        # Label that will display the camera images, scaled to fit.
        self.image_label = QtGui.QLabel(self)
        self.image_label.setScaledContents(True)
        # Fill the label with a placeholder image until real frames arrive.
        # (The original file contained C++-style `QImage dummy(...)` and
        # `imagelabel->setPixmap(...)` lines, which are SyntaxErrors in
        # Python; this is the PyQt4 equivalent.)
        dummy = QtGui.QImage(100, 100, QtGui.QImage.Format_RGB32)
        self.image_label.setPixmap(QtGui.QPixmap.fromImage(dummy))
        # Lay the label out vertically and show the window.
        layout = QtGui.QVBoxLayout(self)
        layout.addWidget(self.image_label)
        self.setLayout(layout)
        self.setGeometry(100, 100, 200, 200)
        self.show()
| mit | Python | |
70ed3c4044b425f2b679da3f0a3f33a9f7e6381a | introduce data gatherer for finding new versions of various libraries | cuckoobox/cuckoo,cuckoobox/cuckoo,cuckoobox/cuckoo,cuckoobox/cuckoo,cuckoobox/cuckoo | data/gatherer.py | data/gatherer.py | """Gatherer of software components for further identification by the Cuckoo
Sandbox team.
This file uploads as many relevant software components as possible so that the
Cuckoo Sandbox team may be able to add support for more versions of certain
software packages. E.g., currently we have special support for Office 2007,
Adobe PDF Reader 9, and Internet Explorer 8 - with the feedback from our users
we may be able to add special support for different versions of the same
software (think Office 2016, Internet Explorer Edge, etc).
This command should be ran with the "pipe=cuckoo" option. You will then find
yourself with an analysis with all of the related dropped files. This analysis
should be "exported" (a feature from the Cuckoo Web Interface) and shared with
the Cuckoo Developers team in order to process the new files.
"""
import ctypes
import glob
import os
progfiles = [
"Internet Explorer\\iexplore.exe",
"Common Files\\Microsoft Shared\\VBA\\VBA*\\VBE*.dll",
"Adobe\\Reader *\\Reader\\plug_ins\\escript.api",
]
system32 = [
"mshtml.dll",
"ncrypt.dll",
"jscript.dll",
"kernel32.dll",
"kernelbase.dll",
"ntdll.dll",
"advapi32.dll",
]
def report(filepath):
    # Notify the Cuckoo analyzer over its named pipe that a file of interest
    # exists (see module docstring: run with the "pipe=cuckoo" option so the
    # analyzer collects these as dropped files).  The 512-byte buffer holds
    # the pipe's reply; the call times out after 1000 ms.
    buf = "FILE_NEW:%s" % filepath
    out = ctypes.create_string_buffer(512)
    value = ctypes.c_uint()
    ctypes.windll.kernel32.CallNamedPipeA(
        "\\\\.\\PIPE\\cuckoo", buf, len(buf),
        out, len(out), ctypes.byref(value), 1000
    )
if __name__ == "__main__":
    # Expand each template path under both Program Files roots and both
    # System32 views, then report every existing file.  Templates containing
    # "*" are expanded with glob before reporting.
    filepaths = []
    for filepath in progfiles:
        filepaths.append(os.path.join("C:\\Program Files", filepath))
        filepaths.append(os.path.join("C:\\Program Files (x86)", filepath))
    for filepath in system32:
        filepaths.append(os.path.join("C:\\Windows\\System32", filepath))
        filepaths.append(os.path.join("C:\\Windows\\Sysnative", filepath))
    for filepath in filepaths:
        if os.path.exists(filepath):
            report(filepath)
            continue
        if "*" in filepath:
            for filepath in glob.iglob(filepath):
                report(filepath)
            continue
| mit | Python | |
3bf112fcd3f42716bcf7dad0561280f52fc6a31f | add django-cron suppo | DjangoAdminHackers/django-link-report,DjangoAdminHackers/django-link-report | link_report/cron.py | link_report/cron.py | # This file works with our fork of django-cron.
# It's use is optional
# Use any means you like to run scheduled jobs.
from django_cron import cronScheduler
from django_cron import Job
from django_cron import DAY
from link_report.utils import update_sentry_404s
class RunUpdateSentry404s(Job):
    # django-cron job: refresh the link report's Sentry 404 data once a day.
    run_every = DAY
    def job(self):
        update_sentry_404s()
# Register the job with the scheduler at import time.
cronScheduler.register(RunUpdateSentry404s)
| mit | Python | |
02d7e423416ab90bdc4db6428c51efaf6f33a4c6 | Create templatetag for put a settings var into context | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/dbaas/templatetags/settings_tags.py | dbaas/dbaas/templatetags/settings_tags.py | from django import template
from django.conf import settings
register = template.Library()
@register.assignment_tag()
def setting(var_name):
    """
    Template tag: expose the named Django settings value to a template
    variable, e.g. ``{% setting "DEBUG" as debug %}``.  Raises
    AttributeError if the setting does not exist.
    """
    return getattr(settings, var_name)
| bsd-3-clause | Python | |
0c329e33d9b2c0a4101791f4a5597631c61b2255 | Create __init__.py | scienceopen/glowaurora,scienceopen/glowaurora | __init__.py | __init__.py | agpl-3.0 | Python | ||
220086dd3404ad00f113751a78c5abb219d68819 | Introduce resource provider. | Met48/League-of-Legends-DB | loldb/v2/resources.py | loldb/v2/resources.py | import collections
import os
import re
import sqlite3
import raf
def _get_highest_version(versions):
versions = [(v, v.split('.')) for v in versions]
def version_converter(version):
try:
parts = map(int, version[1])
except ValueError:
return None
else:
return [parts, version[0]]
versions = map(version_converter, versions)
versions = filter(lambda x: x is not None, versions)
versions = sorted(versions)
if not versions:
raise RuntimeError("No valid version.")
return versions[-1][1]
def _make_re_pattern(token_str, flags=None):
"""Converts spacing in token_str to variable-length, compiles."""
return re.compile(r'\s*'.join(token_str.split()), flags)
def _build_path(
base_path,
project="lol_air_client",
subdir='releases',
version=None):
"""Generate path for most recent release of a project."""
subdir = subdir.lower()
path = [base_path, "RADS/projects", project, subdir]
if subdir != 'filearchives':
if version is None:
current_base = os.path.join(*path)
versions = os.listdir(current_base)
versions = [v for v in versions if
os.path.isdir(os.path.join(current_base, v))]
version = _get_highest_version(versions)
path.append(version)
return os.path.join(*path)
class ResourceProvider(object):
    """Lazy access to a League of Legends install's data resources.

    Caches the sqlite connection, the RAF archive, and the parsed font
    config after first use (the `db`, `raf`, `font_config` attributes).
    """
    def __init__(self, lol_path, language='en_US'):
        # lol_path: install root containing the RADS directory.
        # language: locale suffix used to pick the gameStats database.
        self.base_path = lol_path
        self.language = language
        self.db = None
        self.raf = None
        self.font_config = None
    def _get_db_path(self):
        # Path of the localized gameStats sqlite file inside the newest
        # lol_air_client release.
        return os.path.join(
            _build_path(self.base_path),
            # TODO: Is /bin used on Windows?
            'deploy/bin/assets/data/gameStats',
            'gameStats_%s.sqlite' % self.language,
        )
    def _get_raf_path(self):
        # Path of the game client's RAF file archives.
        return _build_path(
            self.base_path,
            'lol_game_client',
            'filearchives'
        )
    def get_db(self):
        """Get connection to gameStats database."""
        if self.db is None:
            self.db = sqlite3.connect(self._get_db_path())
        return self.db
    def get_db_rows(self, table):
        """Get the rows from a gameStats database table, as namedtuples."""
        connection = self.get_db()
        cursor = connection.cursor()
        # execute doesn't accept a parametrized table name
        rows = cursor.execute("SELECT * FROM `%s`" % table)
        # Get column names from cursor
        columns = [c[0] for c in cursor.description]
        row_class = collections.namedtuple('Row', columns)
        for row in rows:
            row = row_class(*row)
            yield row
    def get_raf_master(self):
        """Get RAFMaster instance for game client."""
        if self.raf is None:
            self.raf = raf.RAFMaster(self._get_raf_path())
        return self.raf
    def get_font_config(self):
        """Get font_config dictionary (translation key -> display string)."""
        if self.font_config is None:
            archive = self.get_raf_master()
            font_config = {}
            font_config_text = archive.find(name='fontconfig_en_US.txt').read()
            # Lines look like: tr "KEY" = "VALUE"
            font_config_re = _make_re_pattern('^ tr "([^"]+)" = "(.+)" $', re.M)
            for match in font_config_re.finditer(font_config_text):
                font_config[match.group(1)] = match.group(2)
            self.font_config = font_config
        return self.font_config
class MacResourceProvider(ResourceProvider):
    # Defaults base_path to the standard OS X install location.
    def __init__(self, lol_path=None, **kwargs):
        if lol_path is None:
            lol_path = "/Applications/League of Legends.app/Contents/LOL"
        super(MacResourceProvider, self).__init__(lol_path, **kwargs)
class WindowsResourceProvider(ResourceProvider):
    # Placeholder: no Windows-specific default install path implemented yet.
    pass
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.