Dataset schema (one record per commit):

- commit: string, 40 chars (the commit SHA)
- subject: string, 1 to 3.25k chars (commit message subject line)
- old_file: string, 4 to 311 chars (file path before the change)
- new_file: string, 4 to 311 chars (file path after the change)
- old_contents: string, 0 to 26.3k chars (file contents before the commit)
- lang: string, 3 classes
- proba: float64, 0 to 1
- diff: string, 0 to 7.82k chars (unified diff, percent-encoded)
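The diff cells in the records below are percent-encoded: newlines appear as %0A, carriage returns as %0D, double quotes as %22, and a literal percent sign as %25. Here is a minimal sketch of recovering readable text from one of these cells, assuming only the Python standard library; the helper name decode_diff_cell is illustrative, and the sample string is the start of the first record's diff:

```python
from urllib.parse import unquote

def decode_diff_cell(cell: str) -> str:
    """Decode a percent-encoded diff cell back into plain unified-diff text."""
    # unquote (not unquote_plus) is deliberate: '+' is a diff line marker,
    # not an encoded space, so it must pass through unchanged.
    return unquote(cell)

# Start of the diff from the first record below:
encoded = "+import pytest%0D%0A%0D%0A%0D%0A@pytest.mark.offline%0D%0Adef test_request_fixture(request):%0D%0A"
print(decode_diff_cell(encoded))
```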
a5c99fe8e37079a2663fe90644d3925d6dc7a5d0
|
Add another example that works offline
|
examples/offline_examples/test_request_fixture.py
|
examples/offline_examples/test_request_fixture.py
|
Python
| 0.000001
|
@@ -0,0 +1,325 @@
+import pytest%0D%0A%0D%0A%0D%0A@pytest.mark.offline%0D%0Adef test_request_fixture(request):%0D%0A sb = request.getfixturevalue('sb')%0D%0A sb.open(%22data:text/html,%3Cp%3EHello%3Cbr%3E%3Cinput%3E%3C/p%3E%22)%0D%0A sb.assert_element(%22html %3E body%22)%0D%0A sb.assert_text(%22Hello%22, %22body p%22)%0D%0A sb.type(%22input%22, %22Goodbye%22)%0D%0A sb.click(%22body p%22)%0D%0A sb.tearDown()%0D%0A
|
|
1566f5f18bc42df93a73ecd2e502b93ed3c7b24b
|
handle for missing metadata
|
edit-value-similarity.py
|
edit-value-similarity.py
|
#!/usr/bin/env python2.7
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
from tika import parser
import os, editdistance, itertools, argparse, csv
def stringify(attribute_value):
    if isinstance(attribute_value, list):
        return str((", ".join(attribute_value)).encode('utf-8').strip())
    else:
        return str(attribute_value.encode('utf-8').strip())


def computeScores(inputDir, outCSV, acceptTypes, allKeys):
    na_metadata = ["resourceName"]
    with open(outCSV, "wb") as outF:
        a = csv.writer(outF, delimiter=',')
        a.writerow(["x-coordinate","y-coordinate","Similarity_score"])

        filename_list = []
        for root, dirnames, files in os.walk(inputDir):
            dirnames[:] = [d for d in dirnames if not d.startswith('.')]
            for filename in files:
                if not filename.startswith('.'):
                    filename_list.append(os.path.join(root, filename))

        filename_list = [filename for filename in filename_list if parser.from_file(filename)]
        if acceptTypes:
            filename_list = [filename for filename in filename_list if str(parser.from_file(filename)['metadata']['Content-Type'].encode('utf-8')).split('/')[-1] in acceptTypes]
        else:
            print "Accepting all MIME Types....."

        files_tuple = itertools.combinations(filename_list, 2)
        for file1, file2 in files_tuple:
            row_edit_distance = [file1, file2]
            file1_parsedData = parser.from_file(file1)
            file2_parsedData = parser.from_file(file2)

            intersect_features = set(file1_parsedData["metadata"].keys()) & set(file2_parsedData["metadata"].keys())
            intersect_features = [feature for feature in intersect_features if feature not in na_metadata]

            file_edit_distance = 0.0
            for feature in intersect_features:
                file1_feature_value = stringify(file1_parsedData["metadata"][feature])
                file2_feature_value = stringify(file2_parsedData["metadata"][feature])
                if len(file1_feature_value) == 0 and len(file2_feature_value) == 0:
                    feature_distance = 0.0
                else:
                    feature_distance = float(editdistance.eval(file1_feature_value, file2_feature_value))/(len(file1_feature_value) if len(file1_feature_value) > len(file2_feature_value) else len(file2_feature_value))
                file_edit_distance += feature_distance

            if allKeys:
                file1_only_features = set(file1_parsedData["metadata"].keys()) - set(intersect_features)
                file1_only_features = [feature for feature in file1_only_features if feature not in na_metadata]
                file2_only_features = set(file2_parsedData["metadata"].keys()) - set(intersect_features)
                file2_only_features = [feature for feature in file2_only_features if feature not in na_metadata]
                file_edit_distance += len(file1_only_features) + len(file2_only_features)  # increment by 1 for each disjunct feature in (A-B) & (B-A), file1_disjunct_feature_value/file1_disjunct_feature_value = 1
                file_edit_distance /= float(len(intersect_features) + len(file1_only_features) + len(file2_only_features))
            else:
                file_edit_distance /= float(len(intersect_features))  # average edit distance

            row_edit_distance.append(1-file_edit_distance)
            a.writerow(row_edit_distance)


if __name__ == "__main__":
    argParser = argparse.ArgumentParser('Edit Distance Similarity based on Metadata values')
    argParser.add_argument('--inputDir', required=True, help='path to directory containing files')
    argParser.add_argument('--outCSV', required=True, help='path to directory for storing the output CSV File, containing pair-wise Similarity Scores based on edit distance')
    argParser.add_argument('--accept', nargs='+', type=str, help='Optional: compute similarity only on specified IANA MIME Type(s)')
    argParser.add_argument('--allKeys', action='store_true', help='compute edit distance across all keys')
    args = argParser.parse_args()
    if args.inputDir and args.outCSV:
        computeScores(args.inputDir, args.outCSV, args.accept, args.allKeys)
|
Python
| 0.000024
|
@@ -2297,25 +2297,41 @@
(file2)%0A
-%0A
+ try:%0A
@@ -2436,16 +2436,70 @@
eys())
+%0A except KeyError:%0A continue
|
f31fb6a06c9f0126f43e7b1208502f67f7605d33
|
Add the-love-letter-mistery
|
the-love-letter-mystery/solution.py
|
the-love-letter-mystery/solution.py
|
Python
| 0.999996
|
@@ -0,0 +1,455 @@
+from math import fabs%0A%0A%0Adef solve(string):%0A %22%22%22%0A abc -%3E abb -%3E aba (2)%0A abcba (0)%0A abcd -%3E abcc -%3E abcb -%3E abca -%3E abba (4)%0A cba -%3E bba -%3E aba (2)%0A %22%22%22%0A if len(string) == 1:%0A return True%0A%0A ords = %5Bord(each) for each in string%5D%0A%0A length = len(ords)%0A%0A diffs = sum(%5Bfabs(ords%5Bi%5D - ords%5Blength-1-i%5D) for i in xrange(length/2)%5D)%0A%0A return int(diffs)%0A%0At = raw_input()%0Afor _ in xrange(int(t)):%0A print solve(raw_input())%0A
|
|
4ebcc5980813207fa12108d7c5b9b7971e432502
|
remove unused code
|
corehq/apps/userreports/reports/view.py
|
corehq/apps/userreports/reports/view.py
|
import json
import os
import tempfile
from StringIO import StringIO
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from braces.views import JSONResponseMixin
from corehq.apps.reports.dispatcher import cls_to_view_login_and_domain
from corehq.apps.reports.models import ReportConfig
from corehq.apps.userreports.exceptions import UserReportsError
from corehq.apps.userreports.models import ReportConfiguration
from corehq.apps.userreports.reports.factory import ReportFactory
from corehq.util.couch import get_document_or_404
from couchexport.export import export_from_tables
from couchexport.models import Format
from dimagi.utils.couch.pagination import DatatablesParams
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.web import json_request
from no_exceptions.exceptions import Http403
from corehq.apps.reports.datatables import DataTablesHeader
class ConfigurableReport(JSONResponseMixin, TemplateView):
    template_name = 'userreports/configurable_report.html'
    slug = "configurable"
    prefix = slug
    emailable = True

    @property
    @memoized
    def spec(self):
        return get_document_or_404(ReportConfiguration, self.domain, self.report_config_id)

    @property
    def title(self):
        return self.spec.title

    @property
    @memoized
    def data_source(self):
        return ReportFactory.from_spec(self.spec)

    @property
    @memoized
    def request_dict(self):
        request_dict = json_request(self.request.GET)
        request_dict['domain'] = self.domain
        return request_dict

    @property
    @memoized
    def filter_values(self):
        return {
            filter.css_id: filter.get_value(self.request_dict)
            for filter in self.filters
        }

    @property
    @memoized
    def filter_context(self):
        return {
            filter.css_id: filter.context(self.filter_values[filter.css_id])
            for filter in self.filters
        }

    @property
    @memoized
    def filters(self):
        return self.spec.ui_filters

    @cls_to_view_login_and_domain
    def dispatch(self, request, report_config_id, **kwargs):
        self.request = request
        self.domain = request.domain
        self.report_config_id = report_config_id
        user = request.couch_user
        if self.has_permissions(self.domain, user):
            if kwargs.get('render_as') == 'email':
                return self.email_response
            elif kwargs.get('render_as') == 'excel':
                return self.excel_response
            elif request.is_ajax() or request.GET.get('format', None) == 'json':
                return self.get_ajax(request, **kwargs)
            self.content_type = None
            return super(ConfigurableReport, self).dispatch(request, self.domain, **kwargs)
        else:
            raise Http403()

    def has_permissions(self, domain, user):
        return True

    def get_context_data(self, **kwargs):
        context = {
            'domain': self.domain,
            'report': self,
            'filter_context': self.filter_context,
            'url': self.url,
            'headers': self.headers
        }
        context.update(self.saved_report_context_data)
        return context

    @property
    def saved_report_context_data(self):
        current_config_id = self.request.GET.get('config_id')
        return {
            'report_configs': ReportConfig.by_domain_and_owner(
                self.domain, self.request.couch_user._id, report_slug=self.slug
            ),
            'default_config': (
                ReportConfig.get(current_config_id)
                if current_config_id
                else ReportConfig.default()
            ),
        }

    @property
    def headers(self):
        return DataTablesHeader(*[col.data_tables_column for col in self.data_source.columns])

    def get_ajax(self, request, domain=None, **kwargs):
        try:
            data = self.data_source
            data.set_filter_values(self.filter_values)
            total_records = data.get_total_records()
        except UserReportsError as e:
            return self.render_json_response({
                'error': e.message,
            })
        # todo: this is ghetto pagination - still doing a lot of work in the database
        datatables_params = DatatablesParams.from_request_dict(request.GET)
        end = min(datatables_params.start + datatables_params.count, total_records)
        page = list(data.get_data())[datatables_params.start:end]
        return self.render_json_response({
            'aaData': page,
            "sEcho": self.request_dict.get('sEcho', 0),
            "iTotalRecords": total_records,
            "iTotalDisplayRecords": total_records,
        })

    def _get_initial(self, request, **kwargs):
        pass

    @classmethod
    def url_pattern(cls):
        from django.conf.urls import url
        pattern = r'^{slug}/(?P<report_config_id>[\w\-:]+)/$'.format(slug=cls.slug)
        return url(pattern, cls.as_view(), name=cls.slug)

    @property
    def type(self):
        return self.prefix

    @property
    def sub_slug(self):
        return self.report_config_id

    @classmethod
    def get_report(cls, domain, slug, report_config_id):
        report = cls()
        report.domain = domain
        report.report_config_id = report_config_id
        report.name = report.title
        return report

    @property
    def report_type(self):
        return self.type

    @property
    def url(self):
        return reverse(self.slug, args=[self.domain, self.report_config_id])

    @property
    @memoized
    def export_table(self):
        try:
            data = self.data_source
            data.set_filter_values(self.filter_values)
        except UserReportsError as e:
            return self.render_json_response({
                'error': e.message,
            })
        report_config = ReportConfiguration.get(self.report_config_id)
        raw_rows = list(data.get_data())
        headers = [column['display'] for column in report_config.columns]
        columns = [column['field'] for column in report_config.columns]
        rows = [[raw_row[column] for column in columns] for raw_row in raw_rows]
        return [
            [
                self.title,
                [headers] + rows
            ]
        ]

    @property
    @memoized
    def email_response(self):
        fd, path = tempfile.mkstemp()
        with os.fdopen(fd, 'wb') as temp:
            export_from_tables(self.export_table, temp, Format.HTML)
        with open(path) as f:
            return HttpResponse(json.dumps({
                'report': f.read(),
            }))

    @property
    @memoized
    def excel_response(self):
        file = StringIO()
        export_from_tables(self.export_table, file, Format.XLS_2007)
        return file
|
Python
| 0.000017
|
@@ -5488,75 +5488,8 @@
rt%0A%0A
- @property%0A def report_type(self):%0A return self.type%0A%0A
|
01589a78cbe3bcabd116b9943f23ab3e8bc6a158
|
Create irrigate.py
|
device/src/irrigate.py
|
device/src/irrigate.py
|
Python
| 0.000001
|
@@ -0,0 +1,289 @@
+#!/usr/bin/env python%0A#In this project, I use a servo to simulate the water tap.%0A#Roating to 90 angle suggest that the water tap is open, and 0 angle means close.%0Afrom pyb import Servo%0Aservo=Servo(1) # X1%0Adef irrigate_start():%0A servo.angle(90)%0A %0Adef irrigate_stop():%0A servo.angle(0)%0A %0A
|
|
a2296ae2165b60ba182d540f729a099183169c92
|
Add problem 40, decimal fraction digits
|
problem_40.py
|
problem_40.py
|
Python
| 0.999999
|
@@ -0,0 +1,388 @@
+from time import time%0A%0A%0Adef main():%0A fractional_part = ''%0A%0A i = 1%0A while len(fractional_part) %3C 1000000:%0A fractional_part += str(i)%0A i += 1%0A%0A prod = 1%0A for i in %5B1, 10, 100, 1000, 10000, 100000, 1000000%5D:%0A prod *= int(fractional_part%5Bi-1%5D)%0A print 'Product:', prod%0A%0A%0Aif __name__ == '__main__':%0A t = time()%0A main()%0A print 'Time:', time() - t%0A
|
|
e9b6a27a423e765033d04801762f9f0356cd992a
|
Add urls.py ,placeholder for urls mappings in the plots app
|
plots/urls.py
|
plots/urls.py
|
Python
| 0.000001
|
@@ -0,0 +1,65 @@
+__author__ = 'ankesh'%0Afrom django.conf.urls import patterns, url%0A
|
|
1545c195c65e96e55bf5432538ff3141c60f5149
|
bits required to convert A to B
|
bitsToConvert.py
|
bitsToConvert.py
|
Python
| 0.999491
|
@@ -0,0 +1,239 @@
+def bit_req(A,B):%0A%09%22%22%22%0A%09Bits required to convert int A to int B%0A%09%22%22%22%0A%09c=A%5EB%0A%09return countOnes(c)%0A%0Adef countOnes(c):%0A%09count=0%0A%09if c == 1:%0A%09%09return 1%0A%09while(c%3E=1):%0A%09%09b=c%252%0A%09%09if b == 1:%0A%09%09%09count+=1%0A%09%09c=c//2%0A%09return count%0A%0Aprint bit_req(4,7)%09%0A
|
|
dc1bcdfed7439e1e00fdcad058fd9acbc1fac466
|
add initadmin to management base commands
|
fiduswriter/base/management/commands/initadmin.py
|
fiduswriter/base/management/commands/initadmin.py
|
Python
| 0.000001
|
@@ -0,0 +1,1145 @@
+# code adapted by github.com/jobdiogenes from https://github.com/dkarchmer/aws-eb-docker-django/blob/master/authentication/manage%0A# used to help automation install like in docker.%0A# Create admins accounts if no users exists.%0A# Password 'admin' is used unless defined by ADMIN_PASSWORD %0Afrom django.conf import settings%0Afrom django.core.management.base import BaseCommand%0Afrom django.contrib.auth.models import User%0Afrom os import getenv%0A%0Aclass Command(BaseCommand):%0A%0A def handle(self, *args, **options):%0A if User.objects.count() == 0:%0A for user in settings.ADMINS:%0A username = user%5B0%5D.replace(' ', '')%0A email = user%5B1%5D%0A password = getenv('ADMIN_PASSWORD') if getenv('ADMIN_PASSWORD')!='' else 'admin'%0A print('Creating account for %25s (%25s)' %25 (username, email))%0A admin = User.objects.create_superuser(username=username, email=email, password=password%0A admin.is_active = True%0A admin.is_admin = True%0A admin.save()%0A else:%0A print('Admin accounts can only be initialized if no Accounts exist')
|
|
dba312802cbf73f54c7cc347d45430ac0d8f016c
|
add TicketFactory
|
tickets/tests/factories.py
|
tickets/tests/factories.py
|
Python
| 0
|
@@ -0,0 +1,285 @@
+from django.contrib.auth.models import User%0Aimport factory%0A%0Afrom tickets.models import Ticket%0A%0A%0Aclass UserFactory(factory.Factory):%0A FACTORY_FOR = User%0A%0Aclass TicketFactory(factory.Factory):%0A FACTORY_FOR = Ticket%0A %0A creator = factory.LazyAttribute(lambda a: UserFactory())%0A
|
|
9d77092729e534b19d75b38dd700df25a009fa49
|
Add script to convexify the energies of a conservation tracking JSON model
|
toolbox/convexify_costs.py
|
toolbox/convexify_costs.py
|
Python
| 0.000002
|
@@ -0,0 +1,2146 @@
+import sys%0Aimport commentjson as json%0Aimport os%0Aimport argparse%0Aimport numpy as np%0A%0Adef listify(l):%0A return %5B%5Be%5D for e in l%5D%0A%0Adef convexify(l):%0A%09features = np.array(l)%0A%09if features.shape%5B1%5D != 1:%0A%09%09raise InvalidArgumentException('This script can only convexify feature vectors with one feature per state!')%0A%0A%09bestState = np.argmin(features)%0A%0A%09for direction in %5B-1, 1%5D:%0A%09%09pos = bestState + direction%0A%09%09previousGradient = 0%0A%09%09while pos %3E= 0 and pos %3C features.shape%5B0%5D:%0A%09%09%09newGradient = features%5Bpos%5D - features%5Bpos-direction%5D%0A%09%09%09if abs(newGradient) %3C abs(previousGradient):%0A%09%09%09%09# cost function got too flat, set feature value to match old slope%0A%09%09%09%09features%5Bpos%5D = features%5Bpos-direction%5D + previousGradient%0A%09%09%09else:%0A%09%09%09%09# all good, continue with new slope%0A%09%09%09%09previousGradient = newGradient%0A%09%09%09%09%0A%09%09%09pos += direction%0A%09return listify(features.flatten())%0A%0Aif __name__ == %22__main__%22:%0A parser = argparse.ArgumentParser(description='Take a json file containing a result to a set of HDF5 events files')%0A parser.add_argument('--model', required=True, type=str, dest='model_filename',%0A help='Filename of the json model description')%0A parser.add_argument('--output', required=True, type=str, dest='result_filename',%0A help='Filename of the json file containing the model with convexified costs')%0A %0A args = parser.parse_args()%0A%0A with open(args.model_filename, 'r') as f:%0A model = json.load(f)%0A%0A if not model%5B'settings'%5D%5B'statesShareWeights'%5D:%0A %09raise InvalidArgumentException('This script can only convexify feature vectors with shared weights!')%0A%0A segmentationHypotheses = model%5B'segmentationHypotheses'%5D%0A for seg in segmentationHypotheses:%0A %09for f in %5B'features', 'appearanceFeatures', 'disappearanceFeatures'%5D:%0A %09%09if f in seg:%0A %09%09%09seg%5Bf%5D = convexify(seg%5Bf%5D)%0A %09# division features are always convex (is just a line)%0A%0A linkingHypotheses = model%5B'linkingHypotheses'%5D%0A for link in linkingHypotheses:%0A%09%09link%5B'features'%5D = convexify(link%5B'features'%5D)%0A%0A with open(args.result_filename, 'w') as f:%0A %09json.dump(model, f, indent=4, separators=(',', ': '))
|
|
6588940418d34a6ac83ed620186319c9c1e30e3c
|
Correct import
|
scripts/runsizes.py
|
scripts/runsizes.py
|
#!/usr/bin/env python
"""Gets filesize of a first level of directories and sends it to a CouchDB instance.
Not using os.path.getsize neither os.stat.st_size since they report
inaccurate filesizes:
http://stackoverflow.com/questions/1392413/calculating-a-directory-size-using-python
"""
# TODO: Manage depth of root (how many dir levels): http://stackoverflow.com/questions/229186/os-walk-without-digging-into-directories-below
# TODO: Filter out by .bcl files and/or include other ones
import os
import argparse
import subprocess
import datetime
import couchdb
import re
from scilifelab.utils import load_config
def get_dirsizes(path="."):
"""Gets directory size.
TODO: Be replaced with a more pythonic way which reports the size correctly.
"""
path = path.strip().split('\t')
out = subprocess.check_output(["du", "-sb", path[0]], stderr=subprocess.STDOUT)
return out.split('\t')[0]
def parse_dirsizes(path, dirsizes={"errors": []}):
"""Parse directory sizes that have been saved to a file
"""
date_regexp = r'(?:\d{2})?(?:\d{2}\-?){3}[\sT]\d{2}(?:\:\d{2}){1,2}'
try:
with open(path) as fh:
for line in fh:
# Parse a timestamp
m = re.search(date_regexp, line)
if m:
try:
timestamp = datetime.datetime.strptime(m.group(0), "%Y-%m-%d %H:%M")
dirsizes["time"] = timestamp.isoformat()
except:
pass
continue
# Assume directories are listed as [size] [path] on one line each
try:
splits = line.split()
if len(splits) < 2:
continue
size, path = int(splits[0]), splits[1]
dirsizes[path] = size
except ValueError:
continue
except Exception as e:
dirsizes["errors"].append(str(e))
return dirsizes
def send_db(server, db, credentials, data):
""" Submits provided data to database on server
"""
couch = couchdb.Server(server)
couch.resource.credentials = credentials
db = couch[db]
db.save(data)
#with open("runsizes.log", "w") as fh:
# print "Saving data to %s" % fh
# fh.write(str(_to_unicode(data)))
# print "Sending data to couchdb"
def main():
dirsizes = {"time": datetime.datetime.now().isoformat(),
"unit": "bytes",
"errors": []}
parser = argparse.ArgumentParser(description="Compute directory size(s) and report them to a CouchDB database")
parser.add_argument('--dir', dest='root', action='append',
help="the directory to calculate dirsizes from")
parser.add_argument("--server", dest='server', action='store', default="localhost:5984",
help="CouchDB instance to connect to, defaults to localhost:5984")
parser.add_argument("--db", dest='db', action='store', default="tests",
help="CouchDB database name, defaults to 'tests'")
parser.add_argument("--dry-run", dest='dry_run', action='store_true', default=False,
help="Do not submit the resulting hash to CouchDB")
args = parser.parse_args()
#Import DB credentials from pm.conf
c = config.load_config()
try:
user = c.get('db', 'user')
password = c.get('db', 'password')
credentials = (user, password)
except:
raise KeyError('Please specify DB credentials in your pm.conf file')
for r in args.root: # multiple --dir args provided
if os.path.exists(r) and os.path.isdir(r):
for d in os.listdir(r):
path = os.path.join(r, d)
try:
dirsizes[path] = int(get_dirsizes(path))
except subprocess.CalledProcessError as pe:
dirsizes['errors'].append(pe.output)
else:
dirsizes = parse_dirsizes(r, dirsizes)
if args.dry_run:
print(dirsizes)
else:
send_db(args.server, args.db, credentials, dirsizes)
if __name__ == "__main__":
main()
|
Python
| 0.000031
|
@@ -597,21 +597,16 @@
import
-load_
config%0A%0A
|
9090f48b5abb5c60c8629613724ff7309dee07f5
|
Fix restructured text rendering in simple_osmesa.py
|
examples/offscreen/simple_osmesa.py
|
examples/offscreen/simple_osmesa.py
|
# -*- coding: utf-8 -*-
# vispy: testskip
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
This is a simple osmesa example that produce an image of a cube
If you have both osmesa and normal (X) OpenGL installed, execute with
something like the following to pickup the OSMesa libraries :
VISPY_GL_LIB=/opt/osmesa_llvmpipe/lib/libGLESv2.so \
LD_LIBRARY_PATH=/opt/osmesa_llvmpipe/lib/ \
OSMESA_LIBRARY=/opt/osmesa_llvmpipe/lib/libOSMesa.so \
python examples/offscreen/simple_osmesa.py
"""
import vispy
vispy.use(app='osmesa') # noqa
import numpy as np
import vispy.plot as vp
import vispy.io as io
# Check the application correctly picked up osmesa
assert vispy.app.use_app().backend_name == 'osmesa', 'Not using OSMesa'
data = np.load(io.load_data_file('electrophys/iv_curve.npz'))['arr_0']
time = np.arange(0, data.shape[1], 1e-4)
fig = vp.Fig(size=(800, 800), show=False)
x = np.linspace(0, 10, 20)
y = np.cos(x)
line = fig[0, 0].plot((x, y), symbol='o', width=3, title='I/V Curve',
                      xlabel='Current (pA)', ylabel='Membrane Potential (mV)')
grid = vp.visuals.GridLines(color=(0, 0, 0, 0.5))
grid.set_gl_state('translucent')
fig[0, 0].view.add(grid)
fig.show()
img = fig.render()
io.write_png("osmesa.png", img)
|
Python
| 0.000424
|
@@ -357,11 +357,12 @@
ries
-
+:
:%0A
+%0A
@@ -410,28 +410,24 @@
GLESv2.so %5C%0A
-
LD_LIBRA
@@ -462,20 +462,16 @@
/lib/ %5C%0A
-
OSME
@@ -521,20 +521,16 @@
sa.so %5C%0A
-
pyth
|
a951a29062f1fb7946b4d227f6fa0b3b3d5b9a04
|
Add a bindings.gyp file for use with node-gyp.
|
bindings.gyp
|
bindings.gyp
|
Python
| 0
|
@@ -0,0 +1,139 @@
+%7B%0A 'targets': %5B%0A %7B%0A 'target_name': 'serialport_native',%0A 'sources': %5B 'serialport_native/serialport_native.cc' %5D%0A %7D%0A %5D%0A%7D%0A
|
|
34f7d76cb1f56280b636f4b98968c17a8b9a2c14
|
Create TestRSS.py
|
TestRSS.py
|
TestRSS.py
|
Python
| 0
|
@@ -0,0 +1,351 @@
+'''%0ACreated on Jul 17, 2014%0A%0A@author: ALLWINLEOPRAKASH%0A'''%0A%0Aimport RssFeedCollector as rs%0Aimport datetime %0A%0A%0Ars.OPFileCheck()%0A%0Avar = 1%0A%0A%0A# Continuous active loop to retrieve real time data%0Awhile var == 1:%0A sec = datetime.datetime.now().second%0A # Check and append the new entries every 20 seconds%0A if sec %25 20 == 0:%0A rs.FeedCollector()%0A
|
|
3024ff0fe1343dac11adba82ec28d3a27f4e0d70
|
add TXT
|
gallery/file_modules/txt.py
|
gallery/file_modules/txt.py
|
Python
| 0.00004
|
@@ -0,0 +1,237 @@
+import os%0A%0Afrom gallery.file_modules import FileModule%0Afrom gallery.util import hash_file%0A%0Aclass TXTFile(FileModule):%0A%0A def __init__(self, file_path):%0A FileModule.__init__(self, file_path)%0A self.mime_type = %22text/plain%22%0A
|
|
35da1d5dd86fd597f31c2fb816b2b7e3f89ab021
|
Revert "removing settings.py, since it's ignored by .gitignore"
|
csc_new/csc_new/settings.py
|
csc_new/csc_new/settings.py
|
Python
| 0
|
@@ -0,0 +1,2453 @@
+%22%22%22%0ADjango settings for csc_new project.%0A%0AFor more information on this file, see%0Ahttps://docs.djangoproject.com/en/1.6/topics/settings/%0A%0AFor the full list of settings and their values, see%0Ahttps://docs.djangoproject.com/en/1.6/ref/settings/%0A%22%22%22%0A%0A# Build paths inside the project like this: os.path.join(BASE_DIR, ...)%0Aimport os%0A%0ABASE_DIR = os.path.dirname(os.path.dirname(__file__))%0A%0A# Template Directories%0ATEMPLATE_DIRS = (%0A 'csc_new/templates',%0A)%0A%0A# Reference our custom Member model as the default user model%0A#AUTH_USER_MODEL = 'member.Member'%0A%0A# Quick-start development settings - unsuitable for production%0A# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/%0A%0A# SECURITY WARNING: keep the secret key used in production secret!%0ASECRET_KEY = 'pp0_w0sbde9&ye%25!*i&!)76nq7-y22fbfpvb9heze*&)8j7dpi'%0A%0A# SECURITY WARNING: don't run with debug turned on in production!%0ADEBUG = False%0A%0ATEMPLATE_DEBUG = False%0A%0AALLOWED_HOSTS = %5B'.cs.rit.edu'%5D%0A%0A%0A# Application definition%0A%0AINSTALLED_APPS = (%0A 'django.contrib.admin',%0A 'django.contrib.auth',%0A 'django.contrib.contenttypes',%0A 'django.contrib.sessions',%0A 'django.contrib.messages',%0A 'django.contrib.staticfiles',%0A%09'pages',%0A#%09'member',%0A%09'django.contrib.webdesign',%0A)%0A%0AMIDDLEWARE_CLASSES = (%0A 'django.contrib.sessions.middleware.SessionMiddleware',%0A 'django.middleware.common.CommonMiddleware',%0A 'django.middleware.csrf.CsrfViewMiddleware',%0A 'django.contrib.auth.middleware.AuthenticationMiddleware',%0A 'django.contrib.messages.middleware.MessageMiddleware',%0A 'django.middleware.clickjacking.XFrameOptionsMiddleware',%0A 'csc_new.middleware.TemplateDoesNotExistMiddleware',%0A)%0A%0AROOT_URLCONF = 'csc_new.urls'%0A%0AWSGI_APPLICATION = 'csc_new.wsgi.application'%0A%0A%0A# Database%0A# https://docs.djangoproject.com/en/1.6/ref/settings/#databases%0A%0ADATABASES = %7B%0A 'default': %7B%0A 'ENGINE': 'django.db.backends.sqlite3',%0A 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),%0A %7D%0A%7D%0A%0A# Internationalization%0A# https://docs.djangoproject.com/en/1.6/topics/i18n/%0A%0ALANGUAGE_CODE = 'en-us'%0A%0ATIME_ZONE = %22US/Eastern%22%0A%0AUSE_I18N = True%0A%0AUSE_L10N = True%0A%0AUSE_TZ = True%0A%0A%0A# Static files (CSS, JavaScript, Images)%0A# https://docs.djangoproject.com/en/1.6/howto/static-files/%0A%0AMEDIA_ROOT = os.path.join(BASE_DIR, 'media')%0A%0AMEDIA_URL = '/media/'%0A%0ASTATIC_ROOT = os.path.join(BASE_DIR, 'static')%0A%0ASTATIC_URL = '/static/'%0A%0ASTATICFILES_DIRS = (%0A%09os.path.join(BASE_DIR, 'staticfiles'),%0A)%0A
|
|
d4c30f4e70dabe18c73eeb0feaa49ee4dcead2ff
|
Create groceries.py
|
groceries.py
|
groceries.py
|
Python
| 0.002157
|
@@ -0,0 +1,410 @@
+groceries = %5B%22banana%22, %22orange%22, %22apple%22%5D%0A%0Astock = %7B %22banana%22: 6,%0A %22apple%22: 0,%0A %22orange%22: 32,%0A %22pear%22: 15%0A%7D%0A %0Aprices = %7B %22banana%22: 4,%0A %22apple%22: 2,%0A %22orange%22: 1.5,%0A %22pear%22: 3%0A%7D%0A%0A# Write your code below!%0Adef compute_bill(food):%0A total = 0%0A for item in food:%0A if stock%5Bitem%5D %3E 0:%0A total = total + prices%5Bitem%5D%0A stock%5Bitem%5D = stock%5Bitem%5D - 1%0A return total%0A
|
|
a8dc3e1143290495ab56b30660e7fbe58fcaa36c
|
add analysis script
|
v01/analyse_data.py
|
v01/analyse_data.py
|
Python
| 0.000001
|
@@ -0,0 +1,1273 @@
+# this analysis script finds the photons with the highest energy for the crab nebula from the 2FHL event list%0Afrom numpy import *%0A%0A%0Afrom astropy.io import fits%0Ahdulist=fits.open('gll_psch_v08.fit.gz')%0Aprint hdulist.info()%0A%0Adatalist=hdulist%5B1%5D #hdu=1 is the source catalog, found using %22ftlist%22 or %22hdulist.info()%22%0A%0AN=len(datalist.data)%0A%0Aprint N%0A%0A#loop over fermi 2FHL catalog %0Afor i in range(N+1):%0A data= datalist.data%5Bi-1%5D # -1 otherwise it raises an error%0A string =data%5B'Source_Name'%5D%0A if (string=='2FHL J0534.5+2201'):%0A x=data%5B'RAJ2000'%5D%0A y=data%5B'DEJ2000'%5D%0A print (x, y) %0A%0A %0A%0A%0Afrom astropy.io import fits %0Ahdulist2=fits.open('2fhl_events.fits.gz')%0Aprint hdulist2.info()%0A%0Adatalist2=hdulist2%5B1%5D #hdu=1 is the event list %0Adata2=datalist2.data%0A%0AN=len(datalist2.data)%0A%0A%0A# prepare data%0Adata2= datalist2.data%0Adata2_x=data2%5B'RA'%5D%0Adata2_y=data2%5B'DEC'%5D%0Adata2_energy=data2%5B'ENERGY'%5D%0Ar=sqrt(pow(data2_x-x,2)+pow(data2_y-y,2))%0A%0A# initialize empty list for events%0Alist = %5B%5D%0A#loop over radii to find all events in a circle of 3 deg%0Afor i in range(len(r)):%0A if r%5Bi%5D %3C 3:%0A list.append(data2_energy%5Bi%5D)%0A%0Aprint max(list)%0A%0Aa = sorted(list, reverse=True)%0A%0A#finally print events with highest energies%0Aprint (a%5B1%5D, a%5B2%5D, a%5B3%5D)%0A
|
|
52a83fa5fc6ca029c87b50c64e0e3d08bdf1d081
|
Create pyton_test.py
|
pyton_test.py
|
pyton_test.py
|
Python
| 0.000002
|
@@ -0,0 +1,691 @@
+import RPi.GPIO as GPIO%0AGPIO.setmode(GPIO.BCM)%0A%0AGPIO.setup(2, GPIO.IN, pull_up_down=GPIO.PUD_UP)%0AGPIO.setup(7, GPIO.IN, pull_up_down=GPIO.PUD_UP)%0A%0AGPIO.setup(17, GPIO.OUT)%0AGPIO.setup(22, GPIO.OUT)%0A%0A%0Adef verantavalo(channel):%0A time.sleep(0.1)%0A if GPIO.input(2) != GPIO.HIGH:%0A return%0A if(GPIO.input(17) == 0):%0A GPIO.output(17,1)%0A else:%0A GPIO.output(17,0)%0A%0Adef ulkovalo(channel):%0A time.sleep(0.1)%0A if GPIO.input(7) != GPIO.HIGH:%0A return%0A if(GPIO.input(22) == 0):%0A GPIO.output(22,1)%0A else:%0A GPIO.output(22,0)%0A%0AGPIO.add_event_detect(2, GPIO.RISING, callback=verantavalo)%0A%0AGPIO.add_event_detect(7, GPIO.RISING, callback=ulkovalo)%0A%0A
|
|
338470581269d645c7bdd908ea6e17f2246bad12
|
Move the backend path in the deploy script
|
fabfile.py
|
fabfile.py
|
# fabricfile to deploy build
#
# depends on installation of fabric - pip install fabric virtualenv
#
# example invocation
# $ fab -H jenkins@uf04.seedscientific.com deploy
# $ fab -H ubuntu@52.0.138.67 deploy
# $ fab -H ubuntu@uf04.seedscientific.com deploy
from fabric.api import local, run, cd, put
## global variables
##
local_venv_path = '/tmp/venv'
# remote_venv_path = '/tmp/venv'
# /var/www/clients.seedscientific.com/uf/UF04
remote_work_path = '~/deploy/polio-work'
remote_backend_path = '/var/www/polio/'
remote_frontend_path = '/var/www/polio/static/'
# test build
#
# test-machine dependencies - python, pip, postgres
#
def test():
    local("echo TODO: do tests here")


# deploy build
#
# build-machine dependencies - node, gulp, bower, sass, compass, ruby, virtualenv, fabric-virtualenv
def deploy():
    ###
    ### on build machine...
    ###

    # set up dependencies
    print ("TODO: confirm build machine has dependencies. i.e. node, gulp.")
    # e.g.
    # sudo gem install sass
    # sudo gem install compass

    # make virtual env
    local('virtualenv %s' % local_venv_path)

    # enter virtual environment
    activate_this_file = "%s/bin/activate_this.py" % local_venv_path
    execfile(activate_this_file, dict(__file__=activate_this_file))

    # update/install dependencies
    local ("npm install")
    local ("pip install -r requirements.txt")

    # make dist
    local("./node_modules/.bin/bower install")
    local("./node_modules/.bin/gulp dist")

    ###
    ### on target machine...
    ###

    # make folder if it doesn't exist
    run ("mkdir -p %s" % remote_work_path)

    # push to remote server
    put ('dist/uf04-frontend.zip', remote_work_path)
    put ('dist/uf04-backend.zip', remote_work_path)

    # unzip stuff
    with cd(remote_work_path):
        run("rm -rf %s/*" % remote_frontend_path)
        run("rm -rf %s/*" % remote_backend_path)
        run("unzip -o uf04-frontend.zip -d %s" % remote_frontend_path)  # -o is overwrite
        run("unzip -o uf04-backend.zip -d %s" % remote_backend_path)

    # in server path -
    with cd(remote_backend_path):
        run("chgrp -R www-data *")
        run("chmod -R g+w *")
        run("pip install -r requirements.txt")

        # echo "== SYNCDB / MIGRATE =="
        run("python manage.py syncdb --noinput")
        run("python manage.py migrate --noinput")

        # echo "== BUILDING DATABASE =="
        run("bash bin/build_db.sh")

    # bounce apache??
    # customize any other configuration?
    #
    # echo "== BUILDING DOCUMENTATION ==" # maybe...
    # make clean -C docs
    # make html -C docs
    #
    # echo "== RUNNING TESTS =="
    # python manage.py test datapoints.tests.test_cache --settings=polio.settings_test


# def prepare_deploy():
#     local("pip install -r requirements.txt")

# from shell script
# git pull origin development
# pip install -r requirements.txt
# python manage.py syncdb --settings=polio.prod_settings
# python manage.py migrate --settings=polio.prod_settings
# bash bin/build_db.sh
|
Python
| 0
|
@@ -498,24 +498,29 @@
= '/var/www/
+apps/
polio/'%0Aremo
@@ -1813,34 +1813,32 @@
run(%22rm -rf %25s
-/*
%22 %25 remote_front
@@ -1847,24 +1847,25 @@
d_path)%0A
+%0A
run(%22rm
@@ -1860,24 +1860,484 @@
-run(%22rm -rf %25s/*
+# Delete all Python, HTML, and SQL files. We don't delete the entire%0A # directory because that will catch the media/ directory which will%0A # probably have files we want to keep in it. This way we ensure that we%0A # clean out old scripts before deploying. Set mindepth to 2 so that we%0A # can keep the server's settings.py file in the application folder%0A run(%22find %25s -mindepth 2 -regextype 'posix-extended' -regex '.*%5C.(pyc?%7Csql%7Chtml) -delete'
%22 %25
@@ -2513,24 +2513,125 @@
kend_path)%0A%0A
+ with cd(remote_frontend_path):%0A run('chgrp -R www-data *')%0A run('chmod -R g+w *')%0A%0A
# in ser
|
096ea11231668e0fd03c1628c255cf0b08c0bfc3
|
Create HouseCupBot.py
|
HouseCupBot.py
|
HouseCupBot.py
|
Python
| 0
|
@@ -0,0 +1,1624 @@
+import praw, time, sqlite3, operator, re%0A%0A#Bot setup%0Ausername = 'HouseCupBot'%0Apassword = ''%0AuserAgent = 'HouseCupBot. Keeps a running score for Hogwarts houses. Author: u/d_web'%0Ahouses = %5B'gryffindor','hufflepuff','ravenclaw','slytherin'%5D%0AtagLine = 'HouseCupBot by u/D_Web. Type %22HouseCupBot !help%22 for more info.'%0Areplies = %5B'%25s points awarded to %25s%5Cn%5Cn', 'Current Standings:%5Cn%5Cn', 'Winners:%5Cn%5Cn', 'Need Help?'%5D%0A%0A#Set up SQL database. Create tables if they dont exist.%0Aprint 'Setting up SQL Database...',%0Asql = sqlite3.connect(housecupbot.db)%0Acur = sql.cursor()%0Acur.execute('CREATE TABLE IF NOT EXISTS oldposts(ID TEXT)')%0Acur.execute('CREATE TABLE IF NOT EXISTS scores(NAME TEXT, POINTS REAL))%0Acur.execute('CREATE TABLE IF NOT EXISTS winners(NAME TEXT, TIME_PER TEXT, POINTS REAL))%0Asql.commit()%0Aprint 'DONE'%0A%0A#Log in to reddit%0Aprint 'Logging in to Reddit...',%0Ar = praw.Reddit(userAgent)%0Ar.login(username, password)%0Aprint 'DONE'%0A%0Adef sortedDict(dict):%0A s_dict = sorted(dict.iteritems(), key=operator.itemgetter(1))%0A return s_dict%5Blen(s_dict)-1%5D%0A %0Adef subScan():%0A sub = r.get_subreddit('all')%0A posts = sub.get_comments(limit=100)%0A for post in posts:%0A pid = post.id%0A try:%0A p_auth = post.author.name%0A except:%0A p_auth = '%5BDELETED%5D'%0A cur.execute('SELECT * FROM oldposts WHERE ID=?', pid)%0A if not cur.fetchone():%0A cur.execute('INSERT INTO oldposts VALUES(?)', pid)%0A p_body = post.body.lower()%0A for house in houses:%0A re_result = re.match('%5CA%5Cd%7B1,3%7D%5Cspoints for %25s$' %25 house, p_body)%0A if re_result:%0A pass%0A %0A %0A %0A %0A %0A %0A %0A
|
|
5411224e9683c9ee6a8b06ff9b666a93948e6a69
|
Create example.py
|
example.py
|
example.py
|
Python
| 0.000001
|
@@ -0,0 +1,454 @@
+#TABLE LOAD%0A%0Aself.table_data = QtGui.QTableView()%0Acols=%5B'rowid','data'%5D%0Adata = %5B(1,'data1'),(2,'data2'),%5D%0Atable.load(self.table_data,data,cols,order=0,col=0)%0A%0A#TABLE SORT%0A%0Adef context(self,pos):%0A%09%09mainmenu = QtGui.QMenu(%22Menu%22, self)%0A%09%09mainmenu.addAction(%22Sort%22)%0A%09%09%0A%09%09C = self.mapFromGlobal(QCursor.pos())%0A%09%09pos.setY(C.y()); pos.setX(C.x())%0A%09%09action = mainmenu.exec_(self.mapToGlobal(pos))%0A%09%09%0A%09%09if action.text() == 'Sort':%0A%09%09%09table.sort(self.sender())%0A
|
|
8e1e905f5dbdaccc396ec74fb7c05a93d79c35ff
|
Add example to show failure for #62.
|
examples/example_blueprint.py
|
examples/example_blueprint.py
|
Python
| 0
|
@@ -0,0 +1,517 @@
+from flask import Blueprint, Flask, jsonify%0A%0Afrom flasgger import Swagger%0Afrom flasgger.utils import swag_from%0A%0A%0Aapp = Flask(__name__)%0A%0Aexample_blueprint = Blueprint(%22example_blueprint%22, __name__)%0A%0A%0A@example_blueprint.route('/usernames', methods=%5B'GET', 'POST'%5D)%0A@swag_from('username_specs.yml', methods=%5B'GET'%5D)%0A@swag_from('username_specs.yml', methods=%5B'POST'%5D)%0Adef usernames(username):%0A return jsonify(%7B'username': username%7D)%0A%0A%0Aswag = Swagger(app, config=%7B%7D)%0A%0Aif __name__ == %22__main__%22:%0A app.run(debug=True)%0A
|
|
158d3c6478f4d9d83d166504febc2ba1ba4e58f7
|
Add example.
|
example.py
|
example.py
|
Python
| 0.000001
|
@@ -0,0 +1,1177 @@
+# Licensed to Tomaz Muraus under one or more%0A# contributor license agreements. See the NOTICE file distributed with%0A# this work for additional information regarding copyright ownership.%0A# Tomaz muraus licenses this file to You under the Apache License, Version 2.0%0A# (the %22License%22); you may not use this file except in compliance with%0A# the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Afrom libcloud.dns.types import Provider%0Afrom libcloud.dns.providers import get_driver%0A%0Afrom libcloud_to_bind import libcloud_zone_to_bind_zone_file%0A%0A%0ADOMAIN_TO_EXPORT = 'example.com'%0A%0AZerigo = get_driver(Provider.ZERIGO)%0Adriver = Zerigo('email', 'api key')%0A%0Azones = driver.list_zones()%0Azone = %5Bz for z in zones if z.domain == DOMAIN_TO_EXPORT%5D%5B0%5D%0A%0Aresult = libcloud_zone_to_bind_zone_file(zone=zone)%0Aprint(result)%0A
|
|
7c87974c862184df8df40595ba26f5ff7082c4a6
|
Add a CIB routing fuzzer
|
fuzzers/LIFCL/002-cib-routing/fuzzer.py
|
fuzzers/LIFCL/002-cib-routing/fuzzer.py
|
Python
| 0
|
@@ -0,0 +1,1879 @@
+from fuzzconfig import FuzzConfig%0Afrom interconnect import fuzz_interconnect%0Aimport re%0A%0Aconfigs = %5B%0A ((1, 18), FuzzConfig(job=%22CIBTROUTE%22, device=%22LIFCL-40%22, sv=%22../shared/route_40.v%22, tiles=%5B%22CIB_R1C18:CIB_T%22%5D), set(%5B%22TAP_CIBT_R1C14:TAP_CIBT%22%5D)),%0A ((18, 1), FuzzConfig(job=%22CIBLRROUTE%22, device=%22LIFCL-40%22, sv=%22../shared/route_40.v%22, tiles=%5B%22CIB_R18C1:CIB_LR%22%5D), set(%5B%22TAP_PLC_R18C14:TAP_PLC%22%5D)),%0A ((28, 17), FuzzConfig(job=%22CIBROUTE%22, device=%22LIFCL-40%22, sv=%22../shared/route_40.v%22, tiles=%5B%22CIB_R28C17:CIB%22%5D), set(%5B%22TAP_PLC_R28C14:TAP_PLC%22%5D))%0A%5D%0A%0Adef main():%0A for rc, cfg, ignore in configs:%0A cfg.setup()%0A r, c = rc%0A nodes = %5B%22R%7B%7DC%7B%7D_J*%22.format(r, c)%5D%0A extra_sources = %5B%5D%0A extra_sources += %5B%22R%7B%7DC%7B%7D_H02E%7B:02%7D01%22.format(r, c+1, i) for i in range(8)%5D%0A extra_sources += %5B%22R%7B%7DC%7B%7D_H06E%7B:02%7D03%22.format(r, c+3, i) for i in range(4)%5D%0A extra_sources += %5B%22R%7B%7DC%7B%7D_V02N%7B:02%7D01%22.format(r-1, c, i) for i in range(8)%5D%09%0A extra_sources += %5B%22R%7B%7DC%7B%7D_V06N%7B:02%7D03%22.format(r-3, c, i) for i in range(4)%5D%09%0A extra_sources += %5B%22R%7B%7DC%7B%7D_V02S%7B:02%7D01%22.format(r+1, c, i) for i in range(8)%5D%09%0A extra_sources += %5B%22R%7B%7DC%7B%7D_V06S%7B:02%7D03%22.format(r+3, c, i) for i in range(4)%5D%09%0A extra_sources += %5B%22R%7B%7DC%7B%7D_H02W%7B:02%7D01%22.format(r, c-1, i) for i in range(8)%5D%0A extra_sources += %5B%22R%7B%7DC%7B%7D_H06W%7B:02%7D03%22.format(r, c-3, i) for i in range(4)%5D%0A def pip_filter(pip, nodes):%0A from_wire, to_wire = pip%0A return not (%22_CORE%22 in from_wire or %22_CORE%22 in to_wire or %22JCIBMUXOUT%22 in to_wire)%0A fuzz_interconnect(config=cfg, nodenames=nodes, regex=True, bidir=True, ignore_tiles=ignore,%0A pip_predicate=pip_filter)%0A fuzz_interconnect(config=cfg, nodenames=extra_sources, regex=False, bidir=False, ignore_tiles=ignore,%0A pip_predicate=pip_filter)%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
bbbdaed24390b7c5808cc7233b6ad0566c09f188
|
add python C wrapper; mostly empty for now
|
galpy/orbit_src/integratePlanarOrbit.py
|
galpy/orbit_src/integratePlanarOrbit.py
|
Python
| 0
|
@@ -0,0 +1,593 @@
+def integratePlanarOrbit_leapfrog(pot,yo,t,rtol=None,atol=None):%0A %22%22%22%0A NAME:%0A integratePlanarOrbit_leapfrog%0A PURPOSE:%0A leapfrog integrate an ode for a planarOrbit%0A INPUT:%0A pot - Potential or list of such instances%0A yo - initial condition %5Bq,p%5D%0A t - set of times at which one wants the result%0A rtol, atol%0A OUTPUT:%0A y : array, shape (len(y0), len(t))%0A Array containing the value of y for each desired time in t, %5C%0A with the initial value y0 in the first row.%0A HISTORY:%0A 2011-10-03 - Written - Bovy (NYU)%0A %22%22%22%0A %0A
|
|
7b560ea31ad4e308d01926f1e73cb6deb6b24a6a
|
Clarify location of settings/local.py-dist
|
airmozilla/settings/__init__.py
|
airmozilla/settings/__init__.py
|
from .base import *
try:
    from .local import *
except ImportError, exc:
    exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
    raise exc
|
Python
| 0.000287
|
@@ -110,16 +110,27 @@
rename
+airmozilla/
settings
|
70b21201df3c1b6e476f8dbfee53490bd16a6d00
|
Add Fabric fabfile for project management
|
fabfile.py
|
fabfile.py
|
Python
| 0
|
@@ -0,0 +1,1587 @@
+%22%22%22%0AFabric fabfile for Davies cave survey package.%0A%0ARun %60pip install fabric%60 to install, then %60fab --list%60 to see available commands.%0A%22%22%22%0A%0Afrom fabric.api import local, lcd, with_settings%0A%0A%0Adef test():%0A %22%22%22Run project unit tests.%22%22%22%0A local('python -m unittest discover -v -s tests')%0Aunittest = test%0A%0A%0A@with_settings(warn_only=True)%0Adef pep8():%0A %22%22%22Check source for PEP8 conformance.%22%22%22%0A local('pep8 --max-line-length=120 davies')%0A%0A%0Adef precommit():%0A %22%22%22Run pre-commit unit tests and lint checks.%22%22%22%0A pep8()%0A local('pylint -f colorized --errors-only davies')%0A test()%0A%0A%0Adef lint(fmt='colorized'):%0A %22%22%22Run verbose PyLint on source. Optionally specify fmt=html for HTML output.%22%22%22%0A if fmt == 'html':%0A outfile = 'pylint_report.html'%0A local('pylint -f %25s davies %3E %25s %7C%7C true' %25 (fmt, outfile))%0A local('open %25s' %25 outfile)%0A else:%0A local('pylint -f %25s davies %7C%7C true' %25 fmt)%0Apylint = lint%0A%0A%0Adef clean():%0A %22%22%22Clean up generated files.%22%22%22%0A local('rm -rf dist')%0A local('rm -f pylint_report.html')%0A local('find . -name %22*.pyc%22 %7C xargs rm')%0A with lcd('docs'):%0A local('make clean')%0A%0A%0Adef release(version):%0A %22%22%22Perform git-flow release merging and PyPI upload.%22%22%22%0A clean()%0A local('git co master')%0A local('git merge --no-ff dev')%0A local('git tag %25s' %25 version)%0A local('python setup.py sdist upload')%0A%0A%0Adef doc(fmt='html'):%0A %22%22%22Build Sphinx HTML documentation.%22%22%22%0A with lcd('docs'):%0A local('make %25s' %25 fmt)%0A if fmt == 'html':%0A local('open docs/_build/html/index.html')%0Adocs = doc%0A
|
|
609784dc106e01800eed0a7ccf88f82d6977d408
|
Add missed language update migrations
|
babybuddy/migrations/0008_auto_20200120_0622.py
|
babybuddy/migrations/0008_auto_20200120_0622.py
|
Python
| 0
|
@@ -0,0 +1,554 @@
+# Generated by Django 3.0.2 on 2020-01-20 14:22%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('babybuddy', '0007_auto_20190607_1422'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='settings',%0A name='language',%0A field=models.CharField(choices=%5B('en', 'English'), ('fr', 'French'), ('de', 'German'), ('es', 'Spanish'), ('sv', 'Swedish'), ('tr', 'Turkish')%5D, default='en', max_length=255, verbose_name='Language'),%0A ),%0A %5D%0A
|
|
13a45b0b1ab811d6e0ba131380961fba59e8963c
|
Create w3_1.py
|
w3_1.py
|
w3_1.py
|
Python
| 0.000482
|
@@ -0,0 +1,14 @@
+print(%22test%22)%0A
|
|
ce344f340682f81837ae5b71e7c9e17e276c953d
|
Create nxn.py
|
nxn/nxn.py
|
nxn/nxn.py
|
Python
| 0.000005
|
@@ -0,0 +1,273 @@
+N = int(input())%0A%0Aliste = %5B%5D%0A%0Afor i in range(0,N):%0A liste.append(list(map(int, input().split(%22 %22))))%0A%0Aprisum = 0%0Asecsum = 0%0A%0Afor i in range(0,N):%0A prisum += liste%5Bi%5D%5Bi%5D%0A%0Aj = 0%0Afor i in range(N-1,-1,-1):%0A secsum += liste%5Bi%5D%5Bj%5D%0A j += 1%0A%0Aprint(abs(prisum-secsum))%0A
|
|
257bc9e6538d8320603b29465a02000646833805
|
Add a script to choose randomly from a list. I needed it to choose a random desktop background.
|
choose_random.py
|
choose_random.py
|
Python
| 0
|
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3%0A%0Aimport random%0Aimport sys%0A%0Aif __name__ == %22__main__%22:%0A options = list(sys.stdin) # list of lines of text%0A print(random.choice(options), end='')%0A
|
|
271999dae2cd7f736b66c68f5e2454aac995a10d
|
Call `process()` from Python
|
embed.py
|
embed.py
|
Python
| 0.000004
|
@@ -0,0 +1,96 @@
+from ctypes import cdll%0A%0Alib = cdll.LoadLibrary(%22target/release/libembed.dylib%22)%0A%0Alib.process()%0A
|
|
9416747193dfd597bf15d855d4673cb5b16ce76e
|
Add python methods to handle api end-points
|
api/api.py
|
api/api.py
|
Python
| 0.000001
|
@@ -0,0 +1,867 @@
+from connexion.resolver import RestyResolver%0Afrom flask import current_app, request, abort, jsonify, g, url_for%0Afrom flask_httpauth import HTTPAuth%0A%0A__all__ = %5B%22login%22, %22register%22, %22add_bucket_list%22, %22get_bucket_lists%22,%0A %22get_bucket_list%22,%22put_bucket_list%22,%22delete_bucket_list%22,%0A %22create_item_in_bucket_list%22, %22get_items_in_bucket_list%22,%0A %22update_bucket_list_item%22, %22delete_bucket_list_item%22%5D%0A%0A%0Adef login():%0A pass%0A%0A%0Adef register():%0A pass%0A%0A%0Adef add_bucket_list():%0A pass%0A%0A%0Adef get_bucket_lists():%0A pass%0A%0A%0Adef get_bucket_list():%0A pass%0A%0A%0Adef put_bucket_list():%0A pass%0A%0A%0Adef delete_bucket_list():%0A pass%0A%0A%0Adef create_item_in_bucket_list():%0A pass%0A%0A%0Adef get_items_in_bucket_list():%0A pass%0A%0A%0Adef update_bucket_list_item():%0A pass%0A%0A%0Adef delete_bucket_list_item():%0A pass%0A%0Afrom api.models import User, BucketList, Item%0A
|
|
b0dfbb63a306255bc08eae2e7dd9360ca56a366f
|
Add default value of access requests enabled to exsisting projects made before model added
|
osf/migrations/0100_set_access_request_enabled.py
|
osf/migrations/0100_set_access_request_enabled.py
|
Python
| 0
|
@@ -0,0 +1,1824 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.11 on 2018-04-30 18:34%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models ,connection%0A%0Afrom osf.models import AbstractNode%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0099_merge_20180427_1109'),%0A %5D%0A%0A def add_default_access_requests_enabled(self, *args, **kwargs):%0A # Get the date the original noderequest migration was applied%0A sql = %22SELECT applied from django_migrations WHERE app = 'osf' AND name = '0077_add_noderequest_model';%22%0A with connection.cursor() as cursor:%0A cursor.execute(sql)%0A date_noderequest_migration = cursor.fetchall()%5B0%5D%5B0%5D%0A%0A # Get all projects created before that%0A AbstractNode.objects.filter(created__lte=date_noderequest_migration).update(access_requests_enabled=True)%0A%0A def remove_default_access_requests_enabled(self, *args, **kwargs):%0A # Get the date the original noderequest migration was applied%0A sql = %22SELECT applied from django_migrations WHERE app = 'osf' AND name = '0077_add_noderequest_model';%22%0A with connection.cursor() as cursor:%0A cursor.execute(sql)%0A date_noderequest_migration = cursor.fetchall()%5B0%5D%5B0%5D%0A%0A # Get all projects created before that%0A AbstractNode.objects.filter(created__lte=date_noderequest_migration).update(access_requests_enabled=None)%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='noderequestaction',%0A name='permissions',%0A field=models.CharField(choices=%5B(b'read', b'Read'), (b'write', b'Write'), (b'admin', b'Admin')%5D, default=b'read', max_length=5),%0A ),%0A migrations.RunPython(add_default_access_requests_enabled, remove_default_access_requests_enabled),%0A %5D%0A
|
|
1c511dcc4156d68f84b97067433ca151f549df1b
|
Add test for protocol.
|
flowirc/tests/test_IRCClientProtocol.py
|
flowirc/tests/test_IRCClientProtocol.py
|
Python
| 0.000002
|
@@ -0,0 +1,2744 @@
+from unittest import TestCase%0Afrom unittest.mock import Mock, patch, call, MagicMock%0Afrom flowirc.client import IRCClientProtocol%0A%0A%0A__author__ = 'olle.lundberg'%0A%0A%0Aclass TestIRCClientProtocol(TestCase):%0A def setUp(self):%0A self.proto = IRCClientProtocol()%0A self.transport = Mock()%0A%0A def tearDown(self):%0A self.proto = None%0A self.transport = None%0A%0A def test_connection_made(self):%0A self.proto.after_connection_made = Mock()%0A self.proto.connection_made(self.transport)%0A self.assertEqual(self.proto._transport, self.transport)%0A self.assertEqual(1, self.proto.after_connection_made.call_count)%0A self.assertEqual((), self.proto.after_connection_made.call_args)%0A%0A def test_send(self):%0A self.proto._transport = Mock()%0A self.proto.send('foo')%0A self.proto._transport.write.assert_called_once_with(b'foo')%0A%0A self.proto._transport.reset_mock()%0A%0A calls = %5Bcall(b'foo'), call(b'bar'), call(b'baz')%5D%0A self.proto.send('foo', 'bar', 'baz')%0A self.assertEqual(3, self.proto._transport.write.call_count)%0A self.proto._transport.write.assert_has_calls(calls)%0A%0A self.proto._transport.reset_mock()%0A data = Mock()%0A data.encode = Mock(side_effect=AttributeError(%0A %22'NoneType' object has no attribute 'encode'%22))%0A self.assertRaises(AttributeError, self.proto.send, data)%0A%0A @patch('asyncio.Task')%0A @patch('flowirc.client.MessageBase')%0A def test_data_received(self, messagebase, task):%0A self.proto.message_received = Mock()%0A self.proto.data_received(b'')%0A self.proto.data_received(b'f')%0A self.assertEqual(0, task.call_count)%0A%0A self.proto.data_received(b'foo')%0A self.assertEqual(1, messagebase.from_str.call_count)%0A task.called_once_with(self.proto.message_received)%0A self.assertEqual(1, self.proto.message_received.call_count)%0A%0A messagebase.reset_mock()%0A task.reset_mock()%0A self.proto.message_received.reset_mock()%0A%0A ping = %22PING irc.example.net%5Cr%5Cn%22%0A mock = MagicMock(return_value=ping)%0A messagebase.from_str = mock%0A self.proto.data_received(b' %5Cr%5CnPING :irc.example.net%5Cr%5Cn')%0A self.assertEqual(1, messagebase.from_str.call_count)%0A self.proto.message_received.called_once_with(ping)%0A%0A messagebase.reset_mock()%0A task.reset_mock()%0A self.proto.message_received.reset_mock()%0A%0A mock = MagicMock(return_value=None)%0A messagebase.from_str = mock%0A self.proto.data_received(b' %5Cr%5CnNOT_A_CMD :irc.example.net%5Cr%5Cn')%0A self.assertEqual(1, messagebase.from_str.call_count)%0A self.assertEqual(0, self.proto.message_received.call_count)%0A%0A
|
|
2af3b158f1bc4f528f3d4aa7efb8cd595caca0a5
|
Add dump/html add-on #69 (dump/html)
|
jumeaux/addons/dump/html.py
|
jumeaux/addons/dump/html.py
|
Python
| 0
|
@@ -0,0 +1,1671 @@
+# -*- coding:utf-8 -*-%0A%0Afrom bs4 import BeautifulSoup%0Afrom owlmixin import OwlMixin, TList%0A%0Afrom jumeaux.addons.dump import DumpExecutor%0Afrom jumeaux.logger import Logger%0Afrom jumeaux.models import DumpAddOnPayload%0A%0Alogger: Logger = Logger(__name__)%0ALOG_PREFIX = %22%5Bdump/html%5D%22%0A%0A%0Aclass Config(OwlMixin):%0A default_encoding: str = 'utf8'%0A force: bool = False%0A mime_types: TList%5Bstr%5D = %5B%0A 'text/html'%0A %5D%0A%0A%0Adef pretty(html: str) -%3E str:%0A return BeautifulSoup(html, %22lxml%22).html.prettify()%0A%0A%0Aclass Executor(DumpExecutor):%0A def __init__(self, config: dict):%0A self.config: Config = Config.from_dict(config or %7B%7D)%0A%0A def exec(self, payload: DumpAddOnPayload) -%3E DumpAddOnPayload:%0A mime_type: str = payload.response.mime_type.get()%0A encoding: str = payload.encoding.get_or(self.config.default_encoding)%0A%0A if self.config.force:%0A logger.debug(f%22%7BLOG_PREFIX%7D Forced to html -- mime_type: %7Bmime_type%7D -- encoding: %7Bencoding%7D%22)%0A body = pretty(payload.body.decode(encoding, errors='replace')).encode(encoding, errors='replace')%0A elif mime_type in self.config.mime_types:%0A logger.debug(f%22%7BLOG_PREFIX%7D Parse as html -- mime_type: %7Bmime_type%7D -- encoding: %7Bencoding%7D%22)%0A body = pretty(payload.body.decode(encoding, errors='replace')).encode(encoding, errors='replace')%0A else:%0A logger.debug(f%22%7BLOG_PREFIX%7D Don't Parse as html -- mime_type: %7Bmime_type%7D -- encoding: %7Bencoding%7D%22)%0A body = payload.body%0A%0A return DumpAddOnPayload.from_dict(%7B%0A %22response%22: payload.response,%0A %22body%22: body,%0A %22encoding%22: encoding%0A %7D)%0A%0A
|
|
575fd05ace28ed392591228bfdb01f6e739eeff4
|
Create RobotMemory.py
|
RobotMemory.py
|
RobotMemory.py
|
Python
| 0
|
@@ -0,0 +1,1791 @@
+#-------------------------------------------------------------------------------%0A# Name: Robot Memory%0A# Purpose: Stores memory about where robot has been%0A#%0A# Author: Liam McInory%0A#%0A# Created: 06/03/2014%0A# Copyright: (c) Liam 2014%0A# Licence: GNU%0A#-------------------------------------------------------------------------------%0Afrom Myro import *%0Afrom math import *%0A%0Aclass RobotMemory:%0A Plot = %5B%5B%5D%5D%0A Speed = 0.0%0A MidpointX = 0%0A MidpointY = 0%0A Robot = 0%0A X = 0%0A Y = 0%0A TowardsX = 0%0A TowardsY = 0%0A Scale = 0.0%0A def __init__ (robot, length, height, speed = 0.5, scale = 0.5, lookX = 0, lookY = 1):%0A Plot = %5B%5B0 for x in xrange(length)%5D for x in xrange(height)%5D%0A Speed = speed%0A Robot = robot%0A Scale = scale%0A X = MidpointX%0A Y = MidpointY%0A TowardsX = lookX%0A TowardsY = lookY%0A%0A def Start(x, y):%0A MidpointX = x%0A MidpointY = y%0A X = MidpointX%0A Y = MidpointY%0A%0A def Turn(degrees, left):%0A time90 = 3 * abs(Speed)%0A time = Time90 / abs(degrees)%0A if (left == 1):%0A Robot.turnLeft(time, abs(Speed))%0A else:%0A Robot.turnRight(time, abs(Speed))%0A TowardsX = TowardsX * cos(degrees) + TowardsY * sin(degrees)%0A TowardsY = TowardsX * -sin(degrees) + TowardsY * sin(degrees)%0A%0A def GoForward(duration):%0A slope = (TowardsY - Y) / (TowardsX - X)%0A TowardsX += duration%0A TowardsY += duration%0A Robot.motors(Speed, Speed)%0A wait(duration)%0A divisible = duration / Scale%0A for x in xrange(X, divisible):%0A for y in xrange(Y, divisible):%0A if (Plot%5Bx%5D%5By%5D == 0):%0A Plot%5Bx%5D%5By%5D = 1%0A X += divisible%0A Y += divisible%0A%0A
|
|
5579100489031b941617a93baef398212db23d6e
|
Update openerp
|
__openerp__.py
|
__openerp__.py
|
{
'name': "Gantt Improvement",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Gantt Improvement
=================
""",
'version': '0.3',
'depends': ['web', 'web_gantt'],
'js': [
'static/src/js/gantt.js',
'static/dhtmlxGantt/sources/dhtmlxgantt.js',
],
'css': [
'static/src/css/gantt.css',
'static/dhtmlxGantt/dhtmlxgantt.css',
],
'qweb': ['static/src/xml/gantt.xml'],
'data': [
#'views/web_gantt.xml', #Odoo V8.0
],
}
|
Python
| 0.000001
|
@@ -69,17 +69,17 @@
@ TeMPO-
-c
+C
onsultin
|
64184fa97e9bc55dc50ed492b0b03896a7f5328d
|
Add degree_size
|
problem/pop_map/grid/degree_size.py
|
problem/pop_map/grid/degree_size.py
|
Python
| 0.999462
|
@@ -0,0 +1,1627 @@
+#! /usr/bin/env python%0A%0A# Copyright 2020 John Hanley.%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a%0A# copy of this software and associated documentation files (the %22Software%22),%0A# to deal in the Software without restriction, including without limitation%0A# the rights to use, copy, modify, merge, publish, distribute, sublicense,%0A# and/or sell copies of the Software, and to permit persons to whom the%0A# Software is furnished to do so, subject to the following conditions:%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A# The software is provided %22AS IS%22, without warranty of any kind, express or%0A# implied, including but not limited to the warranties of merchantability,%0A# fitness for a particular purpose and noninfringement. In no event shall%0A# the authors or copyright holders be liable for any claim, damages or%0A# other liability, whether in an action of contract, tort or otherwise,%0A# arising from, out of or in connection with the software or the use or%0A# other dealings in the software.%0A%0Afrom geopy.distance import distance%0Aimport geopy%0A%0A%0Adef main():%0A # https://en.wikipedia.org/wiki/St._Louis_Lambert_International_Airport%0A stl = geopy.Point(38.747222, -90.361389)%0A one_grid = distance(miles=64)%0A north = one_grid.destination(stl, bearing=0)%0A east = one_grid.destination(stl, bearing=90)%0A print(stl.format_decimal())%0A lat_step = north.latitude - stl.latitude%0A lng_step = east.longitude - stl.longitude%0A print(lat_step)%0A print(lng_step)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
aa1fbbaca3e26904855a33014c5077867df54342
|
Add Vetinari example
|
examples/vetinari/vetinari.py
|
examples/vetinari/vetinari.py
|
Python
| 0
|
@@ -0,0 +1,1473 @@
+#! /usr/bin/env python%0A# -*- coding: utf-8 -*-%0A'''A Lord Vetinari clock API.'''%0A%0Afrom time import strftime, localtime, time%0Afrom random import randint%0Afrom uwsgi import async_sleep as sleep%0A%0Afrom swaggery.keywords import *%0A%0A%0Aclass TickStream(Model):%0A%0A '''A stream of clock ticks.'''%0A%0A schema = %7B%0A 'type': 'array',%0A 'items': %7B'type': 'string'%7D%0A %7D%0A%0A%0Aclass LordVetinari(Api):%0A%0A '''The API of Lord Vetinari.'''%0A%0A version = '1.0.0'%0A path = 'vetinari'%0A%0A%0Aclass Clock(Resource):%0A%0A '''The world-famous irregular and yet accurate clock.'''%0A%0A api = LordVetinari%0A subpath = 'ticks/%3Clength%3E/%3Cstyle%3E'%0A _styles = %7B'compact': '%25H:%25M:%25S', 'extended': '%25a, %25d %25b %25Y %25H:%25M:%25S'%7D%0A%0A @operations('GET')%0A def ticks(%0A cls, request,%0A length: (Ptypes.path, Integer('Duration of the stream, in seconds.')),%0A style: (Ptypes.path, String('Tick style.', enum=%5B'compact', 'extended'%5D))%0A ) -%3E %5B%0A (200, 'Ok', TickStream),%0A (400, 'Invalid parameters')%0A %5D:%0A '''A streaming Lord Vetinari clock...'''%0A try:%0A length = int(length)%0A style = cls._styles%5Bstyle%5D%0A except (ValueError, KeyError):%0A Respond(400)%0A def venturi_clock():%0A start = time()%0A while time() - start %3C= length:%0A sleep(randint(25, 400) / 100)%0A yield strftime(style, localtime())%0A Respond(200, venturi_clock())%0A
|
|
ef14a21d5bbd2b0c98ac20eb455bd13402749463
|
fix bug
|
app/api_1_0/activities.py
|
app/api_1_0/activities.py
|
# -*- coding:utf8 -*-
# Author: shizhenyu96@gamil.com
# github: https://github.com/imndszy
import time
from flask import request, jsonify, session
from flask_login import login_required
from app.api_1_0 import api
from app.admin.functions import admin_login_required
from app import db
from app.models import Activity
@api.route('/activities', methods=['GET', 'POST'])
@admin_login_required
def activities():
if request.method == 'GET':
activity = db.session.query(Activity).all()
if activity is None:
return jsonify(status='empty', stuid=session.get('stuid'))
a_list = [i.return_dict() for i in activity]
return jsonify(status='ok', result=a_list, stuid=session.get('stuid'))
elif request.method == 'POST':
data = request.values
if data:
if data.get('acid'):
acid = data.get('acid')
activity = Activity.query.filter_by(acid=acid).first()
activity.actype = data.get('actype')
activity.vol_time = data.get('vol_time')
activity.ac_place = data.get('ac_place')
activity.subject = data.get('title')
finish_time = data.get('finish_time').encode('utf8')
start_time = data.get('start_time').encode('utf8')
activity.introduce = data.get('introduce')
activity.required_stus = data.get('required_stus')
activity.finish_time = ' '.join(finish_time.split('T')) + ':00'
activity.start_time = ' '.join(start_time.split('T')) + ':00'
else:
acid = int(time.time())
actype = data.get('actype')
vol_time = data.get('vol_time')
ac_place = data.get('ac_place')
subject = data.get('title')
finish_time = data.get('finish_time').encode('utf8')
start_time = data.get('start_time').encode('utf8')
introduce = data.get('introduce')
required_stus = data.get('required_stus')
finish_time = ' '.join(finish_time.split('T'))+':00'
start_time = ' '.join(start_time.split('T'))+':00'
activity = Activity(acid=acid, actype=actype, vol_time=vol_time,
ac_place=ac_place, subject=subject,
finish_time=finish_time, start_time=start_time,
introduce=introduce, required_stus=required_stus)
db.session.add(activity)
db.session.commit()
return jsonify(status='ok', stuid=session.get('stuid'))
return jsonify(status='fail', stuid=session.get('stuid'))
@api.route('/activity', methods=['GET'])
@login_required
def activity():
activity = db.session.query(Activity).all()
if activity is None:
return jsonify(status='empty', stuid=session.get('stuid'))
a_list = [i.return_dict() for i in activity]
return jsonify(status='ok', result=a_list, stuid=session.get('stuid'))
|
Python
| 0.000001
|
@@ -178,16 +178,30 @@
required
+, current_user
%0A%0Afrom a
@@ -577,36 +577,8 @@
pty'
-, stuid=session.get('stuid')
)%0A
@@ -681,36 +681,8 @@
list
-, stuid=session.get('stuid')
)%0A
@@ -2602,36 +2602,8 @@
'ok'
-, stuid=session.get('stuid')
)%0A
@@ -2640,36 +2640,8 @@
ail'
-, stuid=session.get('stuid')
)%0A%0A%0A
@@ -2831,36 +2831,34 @@
, stuid=
-session.get('
+current_user.
stuid
-')
)%0A a_
@@ -2957,26 +2957,24 @@
uid=
-session.get('
+current_user.
stuid
-')
)%0A
|
eb8f749b2094d61737af496fb6e6c90bad423761
|
add disk_usage.py example script
|
examples/disk_usage.py
|
examples/disk_usage.py
|
Python
| 0.000001
|
@@ -0,0 +1,1172 @@
+#!/usr/bin/env python%0A%0A%22%22%22%0AList all mounted disk partitions a-la %22df%22 command.%0A%22%22%22%0A%0Aimport sys%0Aimport psutil%0A%0Adef convert_bytes(n):%0A if n == 0:%0A return %220B%22%0A symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')%0A prefix = %7B%7D%0A for i, s in enumerate(symbols):%0A prefix%5Bs%5D = 1 %3C%3C (i+1)*10%0A for s in reversed(symbols):%0A if n %3E= prefix%5Bs%5D:%0A value = float(n) / prefix%5Bs%5D%0A return '%25.1f%25s' %25 (value, s)%0A%0Adef main():%0A print %22Device Total Used Free Use %25 Type Mount%22%0A for part in psutil.disk_partitions(0):%0A usage = psutil.disk_usage(part.mountpoint)%0A print %22%25-9s %258s %258s %258s %255s%25%25 %258s %25s%22 %25 (part.device,%0A convert_bytes(usage.total),%0A convert_bytes(usage.used),%0A convert_bytes(usage.free),%0A int(usage.percent),%0A part.fstype,%0A part.mountpoint)%0A%0Aif __name__ == '__main__':%0A sys.exit(main())%0A
|
|
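A minimal Python 3 sketch of the bytes-to-human-readable helper used in the disk_usage.py commit above (the original targets Python 2; the sample value is illustrative):

def convert_bytes(n):
    # binary prefixes: k = 2**10, M = 2**20, ...
    symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    prefix = {s: 1 << (i + 1) * 10 for i, s in enumerate(symbols)}
    for s in reversed(symbols):
        if n >= prefix[s]:
            return '%.1f%s' % (n / prefix[s], s)
    return '%dB' % n

print(convert_bytes(0))          # 0B
print(convert_bytes(123456789))  # 117.7M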
5721ec07b9a40d2f8f5e04bd2c37c1e015fb99df
|
add an example client, nsq_to_nsq.py
|
examples/nsq_to_nsq.py
|
examples/nsq_to_nsq.py
|
Python
| 0.000001
|
@@ -0,0 +1,2448 @@
+# nsq_to_nsq.py%0A# Written by Ryder Moody and Jehiah Czebotar.%0A# Slower than the golang nsq_to_nsq included with nsqd, but useful as a%0A# starting point for a message transforming client written in python.%0A%0Aimport tornado.options%0Afrom nsq import Reader, run%0Afrom nsq import Writer, Error%0Aimport functools%0Aimport logging%0Afrom host_pool import HostPool%0A%0Aclass NSQProxy:%0A def __init__(self, topic, nsqds):%0A self.topic = topic%0A self.writer_pool = HostPool(%5BWriter(%5Bnsqd%5D) for nsqd in nsqds%5D)%0A%0A def relay(self, nsq_message):%0A nsq_message.enable_async()%0A writer = self.writer_pool.get()%0A callback = functools.partial(self._on_message_response, nsq_message=nsq_message, writer=writer)%0A writer.pub(self.topic, nsq_message.body, callback)%0A%0A def _on_message_response(self, conn, data, nsq_message, writer):%0A if isinstance(data, Error):%0A logging.warning(%22requeuing message: %25s%22, nsq_message.body)%0A self.writer_pool.failed(writer)%0A nsq_message.requeue()%0A else:%0A self.writer_pool.success(writer)%0A nsq_message.finish()%0A%0Aif __name__ == %22__main__%22:%0A tornado.options.define('destination_topic', type=str)%0A tornado.options.define('topic', type=str)%0A tornado.options.define('nsqd_tcp_address', type=str, multiple=True)%0A tornado.options.define('destination_nsqd_tcp_address', type=str, multiple=True)%0A tornado.options.define('lookupd_http_address', type=str, multiple=True)%0A tornado.options.define('channel', type=str)%0A tornado.options.define('max_in_flight', type=int, default=500)%0A%0A tornado.options.parse_command_line()%0A%0A assert tornado.options.options.topic%0A assert tornado.options.options.destination_nsqd_tcp_address%0A assert tornado.options.options.channel%0A%0A destination_topic = str(tornado.options.options.destination_topic or tornado.options.options.topic)%0A lookupd_http_addresses = map(lambda addr: 'http://' + addr, tornado.options.options.lookupd_http_address)%0A%0A proxy = NSQProxy(destination_topic, tornado.options.options.destination_nsqd_tcp_address)%0A%0A Reader(%0A topic=tornado.options.options.topic,%0A channel=tornado.options.options.channel,%0A message_handler=proxy.relay,%0A max_in_flight=tornado.options.options.max_in_flight,%0A lookupd_http_addresses=lookupd_http_addresses,%0A nsqd_tcp_addresses=tornado.options.options.nsqd_tcp_address,%0A )%0A run()%0A
|
|
46d6a2a92b79816592af5d3af1fafd218aa3533f
|
Clarify that changelog entry indicates Trellis version (#987)
|
lib/trellis/utils/output.py
|
lib/trellis/utils/output.py
|
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
import platform
import re
import textwrap
from ansible import __version__
from ansible.module_utils._text import to_text
def system(vagrant_version=None):
# Get most recent Trellis CHANGELOG entry
changelog_msg = ''
ansible_path = os.getenv('ANSIBLE_CONFIG', os.getcwd())
changelog = os.path.join(ansible_path, 'CHANGELOG.md')
if os.path.isfile(changelog):
with open(changelog) as f:
str = f.read(200)
# Retrieve release number if it is most recent entry
release = re.search(r'^###\s((?!HEAD).*)', str)
if release is not None:
changelog_msg = '\n Trellis {0}'.format(release.group(1))
# Retrieve most recent changelog entry
else:
change = re.search(r'^\*\s?(\[BREAKING\])?([^\(\n\[]+)', str, re.M|re.I)
if change is not None:
changelog_msg = '\n Trellis at "{0}"'.format(change.group(2).strip())
# Vagrant info, if available
vagrant = ' Vagrant {0};'.format(vagrant_version) if vagrant_version else ''
# Assemble components and return
return 'System info:\n Ansible {0};{1} {2}{3}'.format(__version__, vagrant, platform.system(), changelog_msg)
def reset_task_info(obj, task=None):
obj.action = None if task is None else task._get_parent_attribute('action')
obj.first_host = True
obj.first_item = True
obj.task_failed = False
# Display dict key only, instead of full json dump
def replace_item_with_key(obj, result):
item = '_ansible_item_label' if '_ansible_item_label' in result._result else 'item'
should_replace = (
not obj._display.verbosity
and 'label' not in result._task._ds.get('loop_control', {})
and item in result._result
)
if should_replace:
if 'key' in result._result[item]:
result._result[item] = result._result[item]['key']
elif type(result._result[item]) is dict:
subitem = '_ansible_item_label' if '_ansible_item_label' in result._result[item] else 'item'
if 'key' in result._result[item].get(subitem, {}):
result._result[item] = result._result[item][subitem]['key']
elif '_ansible_item_label' in result._result[item]:
result._result[item] = result._result[item]['_ansible_item_label']
def display(obj, result):
msg = ''
result = result._result
display = obj._display.display
wrap_width = 77
first = obj.first_host and obj.first_item
# Only display msg if debug module or if failed (some modules have undesired 'msg' on 'ok')
if 'msg' in result and (obj.task_failed or obj.action == 'debug'):
msg = result.pop('msg', '')
# Disable Ansible's verbose setting for debug module to avoid the CallbackBase._dump_results()
if '_ansible_verbose_always' in result:
del result['_ansible_verbose_always']
# Display additional info when failed
if obj.task_failed:
items = (item for item in ['reason', 'module_stderr', 'module_stdout', 'stderr'] if item in result and to_text(result[item]) != '')
for item in items:
msg = result[item] if msg == '' else '\n'.join([msg, result.pop(item, '')])
# Add blank line between this fail message and the json dump Ansible displays next
msg = '\n'.join([msg, ''])
# Must pass unicode strings to Display.display() to prevent UnicodeError tracebacks
if isinstance(msg, list):
msg = '\n'.join([to_text(x) for x in msg])
elif not isinstance(msg, unicode):
msg = to_text(msg)
# Wrap text
msg = '\n'.join([textwrap.fill(line, wrap_width, replace_whitespace=False)
for line in msg.splitlines()])
# Display system info and msg, with horizontal rule between hosts/items
hr = '-' * int(wrap_width*.67)
if obj.task_failed and first:
display(system(obj.vagrant_version), 'bright gray')
display(hr, 'bright gray')
if msg == '':
if obj.task_failed and not first:
display(hr, 'bright gray')
else:
return
else:
if not first:
display(hr, 'bright gray')
display(msg, 'red' if obj.task_failed else 'bright purple')
def display_host(obj, result):
if 'results' not in result._result:
display(obj, result)
obj.first_host = False
def display_item(obj, result):
display(obj, result)
obj.first_item = False
|
Python
| 0.000042
|
@@ -1021,18 +1021,40 @@
Trellis
-at
+version (per changelog):
%22%7B0%7D%22'.
|
f2a824715216ca637251a19648f52c030a8abb30
|
Update handler.py
|
tendrl/node_agent/message/handler.py
|
tendrl/node_agent/message/handler.py
|
import os
from io import BlockingIOError
import sys
import traceback
import gevent.event
import gevent.greenlet
from gevent.server import StreamServer
from gevent import socket
from gevent.socket import error as socket_error
from gevent.socket import timeout as socket_timeout
from tendrl.commons.message import Message
from tendrl.commons.logger import Logger
RECEIVE_DATA_SIZE = 4096
MESSAGE_SOCK_PATH = "/var/run/tendrl/message.sock"
class MessageHandler(gevent.greenlet.Greenlet):
def __init__(self):
super(MessageHandler, self).__init__()
self.server = StreamServer(
self.bind_unix_listener(),
self.read_socket
)
def read_socket(self, sock, *args):
try:
self.data = sock.recv(RECEIVE_DATA_SIZE)
message = Message.from_json(self.data)
Logger(message)
except (socket_error, socket_timeout):
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(
exc_type, exc_value, exc_tb, file=sys.stderr)
except (TypeError, ValueError, KeyError, AttributeError):
sys.stderr.write(
"Unable to log the message.%s\n" % self.data)
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(
exc_type, exc_value, exc_tb, file=sys.stderr)
def _run(self):
try:
self.server.serve_forever()
except (TypeError, BlockingIOError, socket_error, ValueError):
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(
exc_type, exc_value, exc_tb, file=sys.stderr)
def stop(self):
pass
def bind_unix_listener(self):
# http://0pointer.de/blog/projects/systemd.html (search "file
# descriptor 3")
try:
socket_fd = 3
self.sock = socket.fromfd(socket_fd, socket.AF_UNIX,
socket.SOCK_STREAM)
self.sock.setblocking(0)
self.sock.listen(50)
except (TypeError, BlockingIOError, socket_error, ValueError):
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_tb,
file=sys.stderr)
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if os.path.exists(MESSAGE_SOCK_PATH):
os.remove(socket_path)
self.sock.setblocking(0)
self.sock.bind(MESSAGE_SOCK_PATH)
self.sock.listen(50)
except:
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_tb,
file=sys.stderr)
finally:
return self.sock
|
Python
| 0.000001
|
@@ -2510,19 +2510,25 @@
ove(
-socket_path
+MESSAGE_SOCK_PATH
)%0A
|
39c50fe7d4713b9d0a8e4618a829d94b4fe7456c
|
Add code to test Van der Pol model
|
van_der_pol_sync.py
|
van_der_pol_sync.py
|
Python
| 0
|
@@ -0,0 +1,909 @@
+%0Afrom __future__ import division%0A%0Aimport sys%0Aimport numpy as np%0Asys.path.append('/media/ixaxaar/Steam/src/nest/local/lib/python2.7/site-packages/')%0Aimport nest%0Aimport nest.raster_plot%0Aimport nest.voltage_trace%0Aimport uuid%0Aimport pylab%0A%0Anest.SetKernelStatus(%7B%22resolution%22: .001%7D)%0Au = uuid.uuid4()%0A%0Anest.CopyModel('ac_generator', u, %7B'amplitude': 1., 'frequency': 20.%7D)%0Aac = nest.Create(u)%0A%0A%0An = ()%0Afor i in xrange(1,10):%0A r = np.random.uniform(1000)%0A print r%0A n += nest.Create(%22relaxos_van_der_pol%22, 1, %7B%22epsilon%22: r/1000, %22input_currents_ex%22: r/1000%7D)%0A%0Ad = nest.Create(%22spike_detector%22)%0A%0Av = nest.Create('voltmeter', 1, %7B%22withgid%22: True, %22withtime%22: True%7D)%0A%0A# nest.Connect(ac, n, 'all_to_all', %7B'weight': .05, 'model': 'static_synapse'%7D)%0Anest.Connect(n, n, 'all_to_all', %7B'weight': .1, 'model': 'static_synapse'%7D)%0Anest.Connect(v, n)%0A%0Anest.Simulate(%221000%22)%0A%0Anest.voltage_trace.from_device(v)%0Apylab.show()%0A%0A
|
|
161802f87065a6b724c8c02357edf8cbb5b38f1a
|
Add a Rosenbrock example.
|
examples/rosenbrock.py
|
examples/rosenbrock.py
|
Python
| 0
|
@@ -0,0 +1,1840 @@
+import climate%0Aimport downhill%0Aimport matplotlib.pyplot as plt%0Aimport matplotlib.animation as anim%0Aimport mpl_toolkits.mplot3d.axes3d%0Aimport numpy as np%0Aimport theano%0Aimport theano.tensor as TT%0A%0Aclimate.enable_default_logging()%0A%0A%0A_, ax = plt.subplots(1, 1)%0A%0A# run several optimizers for comparison.%0Afor i, (algo, label, kw) in enumerate((%0A ('sgd', 'SGD - Momentum 0', %7B%7D),%0A ('sgd', 'SGD - Momentum 0.5', dict(momentum=0.5, nesterov=False)),%0A ('rmsprop', 'RMSProp - Momentum 0', %7B%7D),%0A ('rmsprop', 'RMSProp - Momentum 0.5', dict(momentum=0.5, nesterov=False)),%0A ('adam', 'Adam - Momentum 0', %7B%7D),%0A #('esgd', 'ESGD - Momentum 0', %7B%7D),%0A ('rprop', 'RProp - Momentum 0', %7B%7D),%0A ('adadelta', 'ADADELTA - Momentum 0', %7B%7D),%0A )):%0A print(label)%0A x = theano.shared(np.array(%5B-1.1, -0.4%5D, 'f'), name='x')%0A opt = downhill.build(%0A algo,%0A loss=(100 * (x%5B1:%5D - x%5B:-1%5D ** 2) ** 2 + (1 - x%5B:-1%5D) ** 2).sum(),%0A params=%5Bx%5D,%0A inputs=%5B%5D,%0A monitors=%5B('x', x%5B:-1%5D.sum()), ('y', x%5B1:%5D.sum())%5D)%0A xs, ys = %5B%5D, %5B%5D%0A for tm, _ in opt.iteropt(%5B%5B%5D%5D,%0A max_gradient_clip=1,%0A min_improvement=0,%0A learning_rate=0.01,%0A patience=0,%0A **kw):%0A xs.append(tm%5B'x'%5D)%0A ys.append(tm%5B'y'%5D)%0A if len(xs) == 100:%0A break%0A ax.plot(np.array(xs), np.array(ys) + 0.05 * i,%0A 'o-', label=label, alpha=0.3)%0A%0A# make a contour plot of the rosenbrock function surface.%0Aa = b = np.arange(-1.2, 1.2, 0.05)%0AX, Y = np.meshgrid(a, b)%0AZ = 100 * (Y - X ** 2) ** 2 + (1 - X) ** 2%0A%0Aax.plot(%5B1%5D, %5B1%5D, 'x', mew=2, color='#111111')%0Aax.contourf(X, Y, Z, np.logspace(0, 3, 10))%0A%0Aplt.legend(loc='upper left')%0Aplt.show()%0A
|
|
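As a quick sanity check on the surface optimized in the commit above: the Rosenbrock function has its global minimum at (1, 1). A small plain-Python sketch, no Theano required:

def rosenbrock(x, y):
    return 100 * (y - x ** 2) ** 2 + (1 - x) ** 2

print(rosenbrock(1.0, 1.0))    # 0.0 at the global minimum
print(rosenbrock(-1.1, -0.4))  # loss at the example's starting point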
d14130c30f776d9b10ab48c993096dce251aba28
|
Add script to get list of HRS station IDs
|
get_hrs_cc_streamflow_list.py
|
get_hrs_cc_streamflow_list.py
|
Python
| 0
|
@@ -0,0 +1,1786 @@
+import pandas as pd%0Afrom kiwis_pie import KIWIS%0A%0Ak = KIWIS('http://www.bom.gov.au/waterdata/services')%0A%0Adef get_cc_hrs_station_list(update = False):%0A %22%22%22%0A Return list of station IDs that exist in HRS and are supplied by providers that license their data under the Creative Commons license.%0A%0A :param update: Flag to indicate if cached station information should be fetched from WISKI again (and saved to disk as CSV).%0A :type update: boolean%0A %22%22%22%0A if update:%0A stations = k.get_timeseries_list(parametertype_name = 'Water Course Discharge', ts_name = 'DMQaQc.Merged.DailyMean.09HR')%0A stations.to_csv('available_watercoursedischarge_stations.csv')%0A else:%0A stations = pd.read_csv('available_watercoursedischarge_stations.csv', index_col=0)%0A%0A hrs_stations = pd.read_csv('hrs_station_list.csv', skiprows=1)%0A%0A station_subset = stations.ix%5Bstations.station_no.isin(hrs_stations.station_id)%5D%0A%0A if update:%0A station_attrs = %5B%5D%0A for i, station in station_subset.iterrows():%0A attrs = k.get_station_list(station_no = station.station_no, parametertype_name = 'Water Course Discharge', return_fields=%5B'station_id','custom_attributes'%5D)%0A station_attrs.append(attrs.set_index('station_id'))%0A%0A station_attributes = pd.concat(station_attrs)%0A station_attributes.to_csv('station_attributes.csv')%0A else:%0A station_attributes = pd.read_csv('station_attributes.csv', index_col=0)%0A%0A cc_providers = pd.read_csv('cc_providers.csv', skiprows=8)%0A%0A station_list = station_attributes.ix%5Bstation_attributes.DATA_OWNER.isin(cc_providers.ProviderID.values)%5D.index.values%0A%0A return station_list%0A%0Aif __name__ == %22__main__%22:%0A for station in get_cc_hrs_station_list():%0A print(station)%0A%0A
|
|
cbb7fd7d31bf103e0e9c7b385926b61d42dbb8ec
|
add __main__ file
|
homework_parser/__main__.py
|
homework_parser/__main__.py
|
Python
| 0.000099
|
@@ -0,0 +1,699 @@
+from homework_parser.file_parser import detect_plugin%0A%0Afrom sys import argv, stdin, stdout, stderr, exit%0A%0Aif __name__ == %22__main__%22:%0A in_format = argv%5B1%5D%0A out_format = argv%5B2%5D%0A out_plugin = detect_plugin(out_format)%0A%0A if out_plugin is None:%0A print %3E%3E stderr, ('out-plugin %25s not found' %25 out_format)%0A exit(-1)%0A%0A in_plugin = detect_plugin(in_format)%0A if in_plugin is None:%0A print %3E%3E stderr, ('in-plugin %25s not found' %25 in_format)%0A exit(-1)%0A %0A if len(argv) == 4:%0A with open(argv%5B3%5D) as f:%0A data = in_plugin.read_from_file(f)%0A else:%0A data = in_plugin.read_from_file(stdin)%0A %0A out_plugin.write_to_file(stdout, data)%0A
|
|
de5c4e57ccedf0b5c9897bc2046b79ac19a18a0c
|
add solution for Remove Duplicates from Sorted List
|
src/removeDuplicatesFromSortedList.py
|
src/removeDuplicatesFromSortedList.py
|
Python
| 0
|
@@ -0,0 +1,449 @@
+# Definition for singly-linked list.%0A# class ListNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.next = None%0A%0A%0Aclass Solution:%0A # @param head, a ListNode%0A # @return a ListNode%0A%0A def deleteDuplicates(self, head):%0A p1 = head%0A while p1:%0A p2 = p1.next%0A while p2 and p1.val == p2.val:%0A p2 = p2.next%0A p1.next = p2%0A p1 = p1.next%0A return head%0A
|
|
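A self-contained version of the deleteDuplicates pass from the commit above, with the ListNode definition (left as a comment in the original) filled in:

class ListNode:
    def __init__(self, val):
        self.val = val
        self.next = None

def delete_duplicates(head):
    node = head
    while node:
        runner = node.next
        while runner and runner.val == node.val:
            runner = runner.next  # skip the run of equal values
        node.next = runner
        node = runner
    return head

# 1 -> 1 -> 2 becomes 1 -> 2
head = ListNode(1); head.next = ListNode(1); head.next.next = ListNode(2)
head = delete_duplicates(head)
print(head.val, head.next.val, head.next.next)  # 1 2 None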
aec88e4f9cf2d9ee7f9fe876a7b884028b6c190c
|
Add script to generate a container schema from Dockerfile
|
bin/buildHierarchiqueDiagram.py
|
bin/buildHierarchiqueDiagram.py
|
Python
| 0.000001
|
@@ -0,0 +1,1896 @@
+#!/usr/bin/env/python%0D%0A%0D%0Afrom datetime import datetime%0D%0Aimport os%0D%0Aimport argparse%0D%0Aimport re%0D%0Afrom graphviz import Digraph%0D%0A%0D%0APATH = os.path.dirname(os.path.abspath(__file__))%0D%0AFROM_REGEX = re.compile(ur'%5EFROM%5Cs+(?P%3Cimage%3E%5B%5E:%5D+)(:(?P%3Ctag%3E.+))?', re.MULTILINE)%0D%0ACONTAINERS = %7B%7D%0D%0A%0D%0Adef get_current_date():%0D%0A import datetime%0D%0A return datetime.date.today().strftime(%22%25d.%25m.%25Y%22)%0D%0A%0D%0Adef processDockerfile(inputFile):%0D%0A outputFile = os.path.splitext(inputFile)%0D%0A outputFile = os.path.join(os.path.dirname(outputFile%5B0%5D),os.path.basename(outputFile%5B0%5D))%0D%0A%0D%0A dockerImage = os.path.basename(os.path.dirname(os.path.dirname(outputFile)))%0D%0A dockerTag = os.path.basename(os.path.dirname(outputFile))%0D%0A%0D%0A with open(inputFile, 'r') as fileInput:%0D%0A DockerfileContent = fileInput.read()%0D%0A data = (%5Bm.groupdict() for m in FROM_REGEX.finditer(DockerfileContent)%5D)%5B0%5D%0D%0A CONTAINERS%5B%22webdevops/%25s%22%25dockerImage%5D = data.get('image')%0D%0A%0D%0Adef main(args):%0D%0A dockerfilePath = os.path.abspath(args.dockerfile)%0D%0A %0D%0A u = Digraph('webdevops', filename='webdevops.gv')%0D%0A u.body.append('size=%2210,10%22')%0D%0A u.body.append(r'label = %22%5Cn%5CnWebdevops Containers%5Cn at :%25s%22' %25 get_current_date() )%0D%0A u.node_attr.update(color='lightblue2', style='filled', shape='box') %0D%0A%0D%0A # Parse Docker file%0D%0A for root, dirs, files in os.walk(dockerfilePath):%0D%0A%0D%0A for file in files:%0D%0A if file.endswith(%22Dockerfile%22):%0D%0A processDockerfile(os.path.join(root, file))%0D%0A%0D%0A # Build and render graph%0D%0A for image, base in CONTAINERS.items():%0D%0A if %22webdevops%22 in base:%0D%0A u.edge(base, image)%0D%0A else:%0D%0A u.node(image)%0D%0A print u.source%0D%0A%0D%0A%0D%0Aif __name__ == '__main__':%0D%0A parser = argparse.ArgumentParser()%0D%0A parser.add_argument('-d','--dockerfile' ,help='',type=str)%0D%0A args = parser.parse_args()%0D%0A main(args)%0D%0A
|
|
77b7b4603466c390bf2dc61428c64e85f7babbb0
|
create a new file test_cut_milestone.py
|
test/unit_test/test_cut_milestone.py
|
test/unit_test/test_cut_milestone.py
|
Python
| 0.000004
|
@@ -0,0 +1,1778 @@
+from lexos.processors.prepare.cutter import cut_by_milestone%0A%0A%0Aclass TestMileStone:%0A def test_milestone_regular(self):%0A text_content = %22The bobcat slept all day..%22%0A milestone = %22bobcat%22%0A assert cut_by_milestone(text_content, milestone) == %5B%22The %22,%0A %22 slept all day..%22%0A %5D%0A%0A def test_milestone_no_milestone_in_text(self):%0A text_content = %22The bobcat slept all day.%22%0A milestone = %22am%22%0A assert cut_by_milestone(text_content, milestone) == %5B%0A %22The bobcat slept all day.%22%5D%0A%0A def test_milestone_longer_than_text(self):%0A text_content = %22The bobcat slept all day.%22%0A milestone = %22The cute bobcat slept all day.%22%0A assert cut_by_milestone(text_content, milestone) == %5B%0A %22The bobcat slept all day.%22%5D%0A%0A def test_milestone_len_zero(self):%0A text_content = %22The bobcat slept all day.%22%0A milestone = %22%22%0A assert cut_by_milestone(text_content, milestone) == %5B%0A %22The bobcat slept all day.%22%5D%0A%0A def test_milestone_empty_text(self):%0A text_content = %22%22%0A milestone = %22bobcat%22%0A assert cut_by_milestone(text_content, milestone) == %5B%5D%0A%0A def test_milestone_check_case_sensative(self):%0A text_content = %22The bobcat slept all day.%22%0A milestone = %22BOBCAT%22%0A assert cut_by_milestone(text_content, milestone) == %5B%22The bobcat %22%0A %22slept all day.%22%5D%0A%0A def test_milestone_whole_text_milestone(self):%0A text_content = %22The bobcat slept all day.%22%0A milestone = %22The bobcat slept all day.%22%0A assert cut_by_milestone(text_content, milestone) == %5B%5D%0A
|
|
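A sketch of a cut_by_milestone that satisfies all seven tests above; this is an assumption about the real lexos implementation, which is not shown in the record:

def cut_by_milestone(text_content, milestone):
    if not text_content:
        return []
    if milestone and milestone in text_content:
        # split on the (case-sensitive) milestone and drop empty pieces
        return [piece for piece in text_content.split(milestone) if piece]
    return [text_content]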
3284a384a4147857c16462c0fde6a4dec39de2b7
|
Read temperature
|
1-wire/ds18b20/python/ds18b20.py
|
1-wire/ds18b20/python/ds18b20.py
|
Python
| 0
|
@@ -0,0 +1,648 @@
+import glob%0Aimport time%0A%0Abase_dir = '/sys/bus/w1/devices/'%0Adevice_folder = glob.glob(base_dir + '28*')%5B0%5D%0Adevice_file = device_folder + '/w1_slave'%0A%0Adef read_temp_raw():%0A%09f = open(device_file, 'r')%0A%09lines = f.readlines()%0A%09f.close()%0A%09return lines%0A%0Adef read_temp():%0A%09lines = read_temp_raw()%0A%09while lines%5B0%5D.strip()%5B-3:%5D != 'YES':%0A%09%09time.sleep(0.2)%0A%09%09lines = read_temp_raw()%0A%09equals_pos = lines%5B1%5D.find('t=')%0A%09if -1 != equals_pos:%0A%09%09temp_string = lines%5B1%5D%5Bequals_pos+2:%5D%0A%09%09return float(temp_string) / 1000.0%0A%09%0AtemperatureC = read_temp()%0AtemperatureF = temperatureC * 9.0 / 5.0 + 32.0%0Aprint(%22Temperature: %25.2fC (%25.2fF)%22 %25 (temperatureC, temperatureF))%0A
|
|
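For reference, the two-line w1_slave format parsed by the commit above looks like the sample below; a hedged parsing sketch (the sensor bytes are illustrative, not real readings):

sample = [
    "72 01 4b 46 7f ff 0e 10 57 : crc=57 YES",
    "72 01 4b 46 7f ff 0e 10 57 t=23125",
]
if sample[0].strip()[-3:] == 'YES':  # CRC line must end in YES
    equals_pos = sample[1].find('t=')
    if equals_pos != -1:
        print(float(sample[1][equals_pos + 2:]) / 1000.0)  # 23.125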
89a65c75ade2629e2b67a9887e27a177617dd39e
|
add armes
|
armes/armes.py
|
armes/armes.py
|
Python
| 0.999991
|
@@ -0,0 +1,195 @@
+class Arme(object) :%0A%0A%09def __init__(self): %0A%09%09'''Caracteristiques de la classe arme'''%0A%09%09pass%0A%0A%09def tirer(self, position, vecteur) :%0A%09%09'''cree et envoie un projectile dans une direction'''%0A%09%09pass
|
|
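The armes diff above is in French; an English translation of the stub, with identifiers kept as-is:

class Arme(object):

    def __init__(self):
        '''Characteristics of the weapon class'''
        pass

    def tirer(self, position, vecteur):
        '''creates and fires a projectile in a direction'''
        pass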
244b7a3b8d3bd32517effdd4b7bab35628a6db61
|
move init db
|
flask_again/init_db.py
|
flask_again/init_db.py
|
Python
| 0
|
@@ -0,0 +1,42 @@
+from aone_app.db import init_db%0Ainit_db()%0A
|
|
d054178a75caecfb20a5c4989dc4e9cd7bf4a853
|
add grayscale conversion test - refs #1454
|
tests/python_tests/grayscale_test.py
|
tests/python_tests/grayscale_test.py
|
Python
| 0
|
@@ -0,0 +1,320 @@
+import mapnik%0Afrom nose.tools import *%0A%0Adef test_grayscale_conversion():%0A im = mapnik.Image(2,2)%0A im.background = mapnik.Color('white')%0A im.set_grayscale_to_alpha()%0A pixel = im.get_pixel(0,0)%0A eq_((pixel %3E%3E 24) & 0xff,255);%0A%0Aif __name__ == %22__main__%22:%0A %5Beval(run)() for run in dir() if 'test_' in run%5D%0A
|
|
a76d8287d5ad0b9d43c4b509b2b42eb0a7fa03a2
|
Add asyncio slackbot
|
slackbot_asyncio.py
|
slackbot_asyncio.py
|
Python
| 0.000003
|
@@ -0,0 +1,2863 @@
+import asyncio%0Aimport json%0Aimport signal%0A%0Aimport aiohttp%0A%0Afrom config import DEBUG, TOKEN%0A%0Aimport websockets%0A%0A%0ARUNNING = True%0A%0A%0Aasync def api_call(method, data=None, file=None, token=TOKEN):%0A %22%22%22Perform an API call to Slack.%0A :param method: Slack API method name.%0A :param type: str%0A :param data: Form data to be sent.%0A :param type: dict%0A :param file: file pointer to send (for files.upload).%0A :param type: file%0A :param token: OAuth2 tokn%0A :param type: str%0A %22%22%22%0A with aiohttp.ClientSession() as session:%0A form = aiohttp.FormData(data or %7B%7D)%0A form.add_field(%22token%22, token)%0A if file:%0A form.add_field(%22file%22, file)%0A async with session.post('https://slack.com/api/%7B0%7D'.format(method),%0A data=form) as response:%0A assert 200 == response.status, (%22%7B0%7D with %7B1%7D failed.%22%0A .format(method, data))%0A return await response.json()%0A%0A%0Aasync def producer(send, timeout=20):%0A %22%22%22Produce a ping message every timeout seconds.%22%22%22%0A while RUNNING:%0A await asyncio.sleep(timeout)%0A send(%7B%22type%22: %22ping%22%7D)%0A%0A%0Aasync def consumer(message):%0A %22%22%22Consume the message by printing it.%22%22%22%0A message = json.loads(message)%0A if message.get('type') == 'message':%0A user_info = await api_call('users.info',%0A dict(user=message.get('user')))%0A print(%22%7Buser%5Buser%5D%5Bname%5D%7D: %7Bmessage%5Btext%5D%7D%22%0A .format(user=user_info, message=message))%0A%0A%0Aasync def bot(get, token=TOKEN):%0A %22%22%22Create a bot that joins Slack.%22%22%22%0A rtm = await api_call(%22rtm.start%22)%0A assert 'ok' in rtm and rtm%5B'ok'%5D, %22Error connecting to RTM.%22%0A%0A async with websockets.connect(rtm%5B%22url%22%5D) as ws:%0A while RUNNING:%0A listener_task = asyncio.ensure_future(ws.recv())%0A producer_task = asyncio.ensure_future(get())%0A%0A done, pending = await asyncio.wait(%0A %5Blistener_task, producer_task%5D,%0A return_when=asyncio.FIRST_COMPLETED%0A )%0A%0A for task in pending:%0A task.cancel()%0A%0A if listener_task in done:%0A message = listener_task.result()%0A asyncio.ensure_future(consumer(message))%0A%0A if producer_task in done:%0A message = producer_task.result()%0A await ws.send(message)%0A%0A%0Adef stop():%0A %22%22%22Gracefully stop the bot.%22%22%22%0A global RUNNING%0A RUNNING = False%0A print(%22Stopping... closing connections.%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A loop = asyncio.get_event_loop()%0A%0A outbox = asyncio.Queue()%0A%0A loop.set_debug(DEBUG)%0A loop.add_signal_handler(signal.SIGINT, stop)%0A loop.run_until_complete(asyncio.wait((bot(outbox.get),%0A producer(outbox.put))))%0A loop.close()%0A
|
|
f4ed2ec503bc12fe645b6d79a330787d2dde6c8e
|
Bump version 0.15.0rc7 --> 0.15.0rc8
|
lbrynet/__init__.py
|
lbrynet/__init__.py
|
import logging
__version__ = "0.15.0rc7"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Python
| 0
|
@@ -36,9 +36,9 @@
.0rc
-7
+8
%22%0Ave
|
1f1da12d49b9aa9b28a937fdf877bb990eb0bd2a
|
add convenience script to sync local from test
|
scratchpad/sync/sync_from_remote.py
|
scratchpad/sync/sync_from_remote.py
|
Python
| 0
|
@@ -0,0 +1,1053 @@
+import esprit%0Afrom portality.core import app%0A%0Aremote = esprit.raw.Connection(%22http://ooz.cottagelabs.com:9200%22, %22doaj%22)%0Alocal = esprit.raw.Connection(%22http://localhost:9200%22, %22doaj%22)%0A%0Aesprit.tasks.copy(remote, %22journal%22, local, %22journal%22)%0Aesprit.tasks.copy(remote, %22account%22, local, %22account%22)%0Aesprit.tasks.copy(remote, %22article%22, local, %22article%22)%0Aesprit.tasks.copy(remote, %22suggestion%22, local, %22suggestion%22)%0Aesprit.tasks.copy(remote, %22upload%22, local, %22upload%22)%0Aesprit.tasks.copy(remote, %22cache%22, local, %22cache%22)%0Aesprit.tasks.copy(remote, %22toc%22, local, %22toc%22)%0Aesprit.tasks.copy(remote, %22lcc%22, local, %22lcc%22)%0Aesprit.tasks.copy(remote, %22article_history%22, local, %22article_history%22)%0Aesprit.tasks.copy(remote, %22editor_group%22, local, %22editor_group%22)%0Aesprit.tasks.copy(remote, %22news%22, local, %22news%22)%0Aesprit.tasks.copy(remote, %22lock%22, local, %22lock%22)%0Aesprit.tasks.copy(remote, %22bulk_reapplication%22, local, %22bulk_reapplication%22)%0Aesprit.tasks.copy(remote, %22bulk_upload%22, local, %22bulk_upload%22)%0Aesprit.tasks.copy(remote, %22journal_history%22, local, %22journal_history%22)%0A
|
|
9d7166e489b425acd64e1294236a821d76270cfc
|
Create letter_game_v1.1.py
|
letter_game_v1.1.py
|
letter_game_v1.1.py
|
Python
| 0.000032
|
@@ -0,0 +1,1631 @@
+# only guess a single letter%0A# only guess an alphabetic%0A# user can play again%0A# strikes max up to 7%0A# draw guesses letter, spaces, and strikes%0A%0Aimport random%0A%0Awords = %5B%0A 'cow',%0A 'cat',%0A 'crocodile',%0A 'lion',%0A 'tiger',%0A 'mouse',%0A 'goat',%0A 'giraffe',%0A 'elephant',%0A 'dear',%0A 'eagle',%0A 'bear'%0A%5D%0A%0Awhile True:%0A start = input(%22Press enter to play or q to quit %22).lower()%0A if start == 'q':%0A break%0A%0A secret_word = random.choice(words)%0A bad_guesses = %5B%5D%0A good_guesses = %5B%5D%0A%0A while len(bad_guesses) %3C 7 and len(good_guesses) != len(list(secret_word)):%0A%0A # draw the letters%0A for letter in secret_word:%0A if letter in good_guesses:%0A print(letter, end='')%0A else:%0A print('.', end='')%0A print('')%0A print('Strikes %7B%7D/7'.format(len(bad_guesses)))%0A%0A guess = input(%22Guess a letter: %22)%0A%0A if len(guess) != 1:%0A print(%22You can only guess a single letter.%22)%0A continue%0A elif guess in good_guesses or guess in bad_guesses:%0A print(%22You already guess that letter.%22)%0A continue%0A elif not guess.isalpha():%0A print(%22You can only guess a letter.%22)%0A continue%0A%0A if guess in secret_word:%0A good_guesses.append(guess)%0A if len(good_guesses) == len(list(secret_word)):%0A print(%22You win! The word was %7B%7D%22.format(secret_word))%0A break%0A else:%0A bad_guesses.append(guess)%0A%0A else:%0A print(%22You lost! The secret word was %7B%7D%22.format(secret_word))%0A continue%0A
|
|
e1021970c445acd8ba3acc24294611bebc63bc5a
|
test if weather forecast saves data in the db
|
server/forecasting/tests/test_weather_forecast.py
|
server/forecasting/tests/test_weather_forecast.py
|
Python
| 0.000001
|
@@ -0,0 +1,402 @@
+#import unittest%0Afrom server.forecasting.forecasting.weather import WeatherForecast%0Afrom django.test import TestCase%0A#from server.models import Device, Sensor, SensorEntry%0A%0A''''class ForecastingTest(unittest.TestCase):%0A%09def test_test(self):%0A%09%09cast = WeatherForecast()%0A'''%0Aclass ForecastingDBTest(TestCase):%0A%09def test_crawled_data_in_data(self):%0A%09%09pass%0A%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
177fae394115dd0777efa9673198137eb57b1a50
|
Test docstrings.
|
test/test_basic_logic.py
|
test/test_basic_logic.py
|
# -*- coding: utf-8 -*-
"""
test_basic_logic
~~~~~~~~~~~~~~~~
Test the basic logic of the h2 state machines.
"""
import pytest
import h2.connection
import h2.exceptions
class TestBasicClient(object):
"""
Basic client-side tests.
"""
example_request_headers = [
(':authority', 'example.com'),
(':path', '/'),
(':scheme', 'https'),
(':method', 'GET'),
]
example_response_headers = [
(':status', '200'),
('server', 'fake-serv/0.1.0')
]
def test_begin_connection(self):
c = h2.connection.H2Connection()
events = c.initiate_connection()
assert not events
assert c.data_to_send.startswith(b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n')
def test_sending_headers(self):
c = h2.connection.H2Connection()
c.initiate_connection()
# Clear the data, then send headers.
c.data_to_send = b''
events = c.send_headers(1, self.example_request_headers)
assert not events
assert c.data_to_send == (
b'\x00\x00\r\x01\x04\x00\x00\x00\x01'
b'A\x88/\x91\xd3]\x05\\\x87\xa7\x84\x87\x82'
)
def test_sending_data(self):
c = h2.connection.H2Connection()
c.initiate_connection()
c.send_headers(1, self.example_request_headers)
# Clear the data, then send some data.
c.data_to_send = b''
events = c.send_data(1, b'some data')
assert not events
assert c.data_to_send == b'\x00\x00\t\x00\x00\x00\x00\x00\x01some data'
class TestBasicServer(object):
"""
Basic server-side tests.
"""
example_request_headers = [
(':authority', 'example.com'),
(':path', '/'),
(':scheme', 'https'),
(':method', 'GET'),
]
example_response_headers = [
(':status', '200'),
('server', 'hyper-h2/0.1.0')
]
def test_ignores_preamble(self):
c = h2.connection.H2Connection(client_side=False)
preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
events = c.receive_data(preamble)
assert not events
assert not c.data_to_send
@pytest.mark.parametrize("chunk_size", range(1, 24))
def test_drip_feed_preamble(self, chunk_size):
c = h2.connection.H2Connection(client_side=False)
preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n'
events = []
for i in range(0, len(preamble), chunk_size):
events += c.receive_data(preamble[i:i+chunk_size])
assert not events
assert not c.data_to_send
def test_no_preamble(self):
c = h2.connection.H2Connection(client_side=False)
encoded_headers_frame = (
b'\x00\x00\r\x01\x04\x00\x00\x00\x01'
b'A\x88/\x91\xd3]\x05\\\x87\xa7\x84\x87\x82'
)
with pytest.raises(h2.exceptions.ProtocolError):
c.receive_data(encoded_headers_frame)
|
Python
| 0
|
@@ -536,32 +536,109 @@
nnection(self):%0A
+ %22%22%22%0A Client connections emit the HTTP/2 preamble.%0A %22%22%22%0A
c = h2.c
@@ -654,32 +654,32 @@
.H2Connection()%0A
-
events =
@@ -836,32 +836,109 @@
_headers(self):%0A
+ %22%22%22%0A Single headers frames are correctly encoded.%0A %22%22%22%0A
c = h2.c
@@ -1338,32 +1338,106 @@
ing_data(self):%0A
+ %22%22%22%0A Single data frames are encoded correctly.%0A %22%22%22%0A
c = h2.c
@@ -2149,32 +2149,128 @@
preamble(self):%0A
+ %22%22%22%0A The preamble does not cause any events or frames to be written.%0A %22%22%22%0A
c = h2.c
@@ -2570,32 +2570,122 @@
f, chunk_size):%0A
+ %22%22%22%0A The preamble can be sent in in less than a single buffer.%0A %22%22%22%0A
c = h2.c
@@ -2997,39 +2997,124 @@
test_no_preamble
-(self):
+_errors(self):%0A %22%22%22%0A Server side connections require the preamble.%0A %22%22%22
%0A c = h2.
|
f7d88f43779f94dc2623e4726bd50f997104865f
|
add compress-the-string
|
contest/pythonist3/compress-the-string/compress-the-string.py
|
contest/pythonist3/compress-the-string/compress-the-string.py
|
Python
| 0.999703
|
@@ -0,0 +1,274 @@
+# -*- coding: utf-8 -*-%0A# @Author: Zeyuan Shang%0A# @Date: 2016-05-13 12:35:11%0A# @Last Modified by: Zeyuan Shang%0A# @Last Modified time: 2016-05-13 12:35:16%0Afrom itertools import groupby%0A%0As = raw_input()%0Afor k, g in groupby(s):%0A print '(%7B%7D, %7B%7D)'.format(len(list(g)), k),
|
|
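The groupby one-liner in the commit above implements run-length encoding; a Python 3 sketch of the same idea:

from itertools import groupby

def run_lengths(s):
    # each (key, group) pair is one run of equal characters
    return [(len(list(group)), key) for key, group in groupby(s)]

print(run_lengths('1222311'))  # [(1, '1'), (3, '2'), (1, '3'), (2, '1')]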
bf7bfce64b2964cd6adb515788420747fcbedeae
|
Add an app.wsgi just in case
|
app.wsgi
|
app.wsgi
|
Python
| 0
|
@@ -0,0 +1,83 @@
+#!/usr/bin/env python%0A%0Aimport itty%0A%0Aimport leapreader%0A%0A%0Aapp = itty.handle_request%0A%0A
|
|
ada3083c38fe75f139079e93b7c544540fe95e1a
|
add sources/ package
|
sources/__init__.py
|
sources/__init__.py
|
Python
| 0
|
@@ -0,0 +1,124 @@
+import sqlaload as sl%0A%0Afrom lobbyfacts.core import app%0A%0Adef etl_engine():%0A return sl.connect(app.config.get('ETL_URL'))%0A%0A
|
|
1e2b5e699114ba4e54dc77a56858cdcd2f29a87c
|
Add support for subscriptions
|
homeassistant/components/switch/wemo.py
|
homeassistant/components/switch/wemo.py
|
"""
homeassistant.components.switch.wemo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support for WeMo switches.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/switch.wemo/
"""
import logging
from homeassistant.components.switch import SwitchDevice
from homeassistant.const import STATE_ON, STATE_OFF, STATE_STANDBY
REQUIREMENTS = ['pywemo==0.3.3']
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument, too-many-function-args
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Find and return WeMo switches. """
import pywemo
import pywemo.discovery as discovery
if discovery_info is not None:
location = discovery_info[2]
mac = discovery_info[3]
device = discovery.device_from_description(location, mac)
if device:
add_devices_callback([WemoSwitch(device)])
return
_LOGGER.info("Scanning for WeMo devices.")
switches = pywemo.discover_devices()
# Filter out the switches and wrap in WemoSwitch object
add_devices_callback(
[WemoSwitch(switch) for switch in switches
if isinstance(switch, pywemo.Switch)])
class WemoSwitch(SwitchDevice):
""" Represents a WeMo switch. """
def __init__(self, wemo):
self.wemo = wemo
self.insight_params = None
self.maker_params = None
@property
def unique_id(self):
""" Returns the id of this WeMo switch """
return "{}.{}".format(self.__class__, self.wemo.serialnumber)
@property
def name(self):
""" Returns the name of the switch if any. """
return self.wemo.name
@property
def state(self):
""" Returns the state. """
is_on = self.is_on
if not is_on:
return STATE_OFF
elif self.is_standby:
return STATE_STANDBY
return STATE_ON
@property
def current_power_mwh(self):
""" Current power usage in mwh. """
if self.insight_params:
return self.insight_params['currentpower']
@property
def today_power_mw(self):
""" Today total power usage in mw. """
if self.insight_params:
return self.insight_params['todaymw']
@property
def is_standby(self):
""" Is the device on - or in standby. """
if self.insight_params:
standby_state = self.insight_params['state']
# Standby is actually '8' but seems more defensive
# to check for the On and Off states
if standby_state == '1' or standby_state == '0':
return False
else:
return True
@property
def sensor_state(self):
""" Is the sensor on or off. """
if self.maker_params and self.has_sensor:
# Note a state of 1 matches the WeMo app 'not triggered'!
if self.maker_params['sensorstate']:
return STATE_OFF
else:
return STATE_ON
@property
def switch_mode(self):
""" Is the switch configured as toggle(0) or momentary (1). """
if self.maker_params:
return self.maker_params['switchmode']
@property
def has_sensor(self):
""" Is the sensor present? """
if self.maker_params:
return self.maker_params['hassensor']
@property
def is_on(self):
""" True if switch is on. """
return self.wemo.get_state()
def turn_on(self, **kwargs):
""" Turns the switch on. """
self.wemo.on()
def turn_off(self):
""" Turns the switch off. """
self.wemo.off()
def update(self):
""" Update WeMo state. """
try:
self.wemo.get_state(True)
if self.wemo.model_name == 'Insight':
self.insight_params = self.wemo.insight_params
self.insight_params['standby_state'] = (
self.wemo.get_standby_state)
elif self.wemo.model_name == 'Maker':
self.maker_params = self.wemo.maker_params
except AttributeError:
_LOGGER.warning('Could not update status for %s', self.name)
|
Python
| 0
|
@@ -370,16 +370,18 @@
TANDBY%0A%0A
+#
REQUIREM
@@ -405,16 +405,16 @@
0.3.3'%5D%0A
-
_LOGGER
@@ -444,16 +444,51 @@
ame__)%0A%0A
+_WEMO_SUBSCRIPTION_REGISTRY = None%0A
%0A# pylin
@@ -718,16 +718,213 @@
covery%0A%0A
+ global _WEMO_SUBSCRIPTION_REGISTRY%0A if _WEMO_SUBSCRIPTION_REGISTRY is None:%0A _WEMO_SUBSCRIPTION_REGISTRY = pywemo.SubscriptionRegistry()%0A _WEMO_SUBSCRIPTION_REGISTRY.start()%0A%0A
if d
@@ -1613,16 +1613,16 @@
= None%0A
-
@@ -1647,16 +1647,670 @@
= None%0A%0A
+ global _WEMO_SUBSCRIPTION_REGISTRY%0A _WEMO_SUBSCRIPTION_REGISTRY.register(wemo)%0A _WEMO_SUBSCRIPTION_REGISTRY.on(wemo, 'BinaryState', self._update_callback)%0A _WEMO_SUBSCRIPTION_REGISTRY.on(wemo, 'attributeList', self._update_callback)%0A%0A def _update_callback(self, _device, _params):%0A _LOGGER.info('Subscription update for %25s, sevice=%25s params=%25s', self.name, _device, _params)%0A # import pdb; pdb.set_trace()%0A self.update()%0A%0A @property%0A def should_poll(self):%0A %22%22%22 No polling should be needed with subscriptions, but leave in for initial version in case of issues. %22%22%22%0A return True%0A%0A
@pro
|
78a8fef6123b81011b3d896af69470d249570b05
|
Add ls.py
|
kadai3/ls.py
|
kadai3/ls.py
|
Python
| 0.000093
|
@@ -0,0 +1,1804 @@
+# -*- coding: utf-8 -*-%0A%0Aimport sys%0Aimport os%0Aimport time%0Aimport argparse%0Aimport re%0Afrom tarfile import filemode%0Aimport pwd%0Aimport grp%0A%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument(%22path%22, %0A metavar=%22path%22,%0A nargs=%22?%22, %0A default=%22%22,%0A type=str, %0A help=%22expect shown directory%22)%0Aparser.add_argument(%22-a%22,%0A action=%22store_true%22,%0A default=False)%0Aparser.add_argument(%22-i%22,%0A action=%22store_true%22,%0A default=False)%0Aparser.add_argument(%22-l%22,%0A action=%22store_true%22,%0A default=False)%0A%0Aargs = parser.parse_args()%0A%0Apattern = re.compile(r'%5E%5C..*$')%0A%0Atry:%0A dir_list = os.listdir(%22./%25s%22 %25 args.path)%0Aexcept OSError as (errorno, error):%0A sys.exit(%22python %25s: %25s: %25s%22 %25 (sys.argv%5B0%5D, args.path, error))%0A%0Aif not args.a:%0A for src in dir_list%5B:%5D:%0A if pattern.match(src):%0A dir_list.remove(src)%0Aelse:%0A dir_list = %5B%22.%22, %22..%22%5D + dir_list%0A%0Aif args.l:%0A for src in dir_list:%0A stat = os.stat(args.path + src)%0A uname = pwd.getpwuid(stat.st_uid).pw_name%0A gname = grp.getgrgid(stat.st_gid).gr_name%0A mtime = time.strftime(%22%25m %25d %25H:%25M%22, time.localtime(stat.st_mtime))%0A inode = stat.st_ino%0A if args.i:%0A print %22%25s %25s %252s %25s %25s %255d %25s %25s%22 %25 (stat.st_ino, filemode(stat.st_mode), stat.st_nlink, uname, gname, stat.st_size, mtime, src)%0A else:%0A print %22%25s %252s %25s %25s %255d %25s %25s%22 %25 (filemode(stat.st_mode), stat.st_nlink, uname, gname, stat.st_size, mtime, src)%0Aelse:%0A if args.i:%0A print %22%5Ct%22.join(%5B %22%25s %25s%22 %25 (os.stat(args.path + src).st_ino, src) for src in dir_list%5D)%0A else:%0A print %22%5Ct%22.join(dir_list)
|
|
0223ae91b669ce12b16d8b89456f3291eeed441e
|
Add log command.
|
src/commands/log.py
|
src/commands/log.py
|
Python
| 0.000001
|
@@ -0,0 +1,2334 @@
+#%0A# Copyright (c) 2012 Joshua Hughes %3Ckivhift@gmail.com%3E%0A#%0Aimport os%0Aimport subprocess%0Aimport tempfile%0Aimport threading%0A%0Aimport qmk%0Aimport pu.utils%0A%0Aclass LogCommand(qmk.Command):%0A '''Make log entries using restructured text.'''%0A def __init__(self):%0A super(LogCommand, self).__init__(self)%0A self._name = 'log'%0A self._help = self.__doc__%0A%0A self._ui = pu.utils.get_user_info()%0A self._base_dir = os.path.join(qmk.base_dir(), 'logs')%0A if not os.path.exists(self._base_dir):%0A os.mkdir(self._base_dir, 0755)%0A%0A @qmk.capture_and_show_exceptions('log')%0A def _make_entry(self, type_):%0A _, entry_tmp_file = tempfile.mkstemp(%0A prefix = type_, suffix = '.rst', dir = self._base_dir)%0A os.close(_)%0A%0A start_time = pu.utils.ISO_8601_time_stamp()%0A subprocess.call(%5Bself._ui.EDITOR, entry_tmp_file%5D)%0A end_time = pu.utils.ISO_8601_time_stamp()%0A%0A if 0 == os.stat(entry_tmp_file).st_size:%0A os.remove(entry_tmp_file)%0A return%0A%0A entry_dir = os.path.join(self._base_dir, type_)%0A if not os.path.exists(entry_dir):%0A os.mkdir(entry_dir, 0755)%0A%0A st = start_time.split('-')%0A YM = '-'.join(st%5B:2%5D)%0A entry_file = os.path.join(entry_dir, '%25s-%25s.rst' %25 (type_, YM))%0A write_title = (%0A not os.path.exists(entry_file)%0A or 0 == os.stat(entry_file).st_size)%0A with open(entry_file, 'ab') as fout:%0A if write_title:%0A title = '%25s log for %25s' %25 (type_, YM)%0A fout.write('%5Cn'.join((title, '=' * len(title), '', '')))%0A%0A with open(entry_tmp_file, 'rb') as fin:%0A fout.write('%5Cn'.join(%0A (start_time, '-' * len(start_time), '', '')))%0A for ln in fin:%0A fout.write(ln.rstrip() + '%5Cn')%0A fout.write('%5Cn'.join(%0A ('', '.. Editing finished: ' + end_time, '', '')))%0A os.remove(entry_tmp_file)%0A%0A def action(self, arg):%0A if arg is None:%0A subj = 'work'%0A else:%0A subj = arg.strip()%0A%0A # Don't want to block so fire off a thread to do the actual work.%0A threading.Thread(target = self._make_entry, args = (subj,)).start()%0A%0Adef commands(): return %5B LogCommand() %5D%0A
|
|
c4d583966ef1a4d9bdb57715ef5e766ba62fbed6
|
Add tests for the Django directory
|
jacquard/directory/tests/test_django.py
|
jacquard/directory/tests/test_django.py
|
Python
| 0
|
@@ -0,0 +1,1601 @@
+from jacquard.directory.base import UserEntry%0Afrom jacquard.directory.django import DjangoDirectory%0A%0Aimport pytest%0Aimport unittest.mock%0A%0Atry:%0A import sqlalchemy%0Aexcept ImportError:%0A sqlalchemy = None%0A%0A%0Aif sqlalchemy is not None:%0A test_database = sqlalchemy.create_engine('sqlite://')%0A test_database.execute(%22%22%22%0A CREATE TABLE auth_user(%0A id INTEGER NOT NULL PRIMARY KEY,%0A date_joined DATETIME NOT NULL,%0A is_superuser BOOLEAN NOT NULL%0A )%0A %22%22%22)%0A%0A test_database.execute(%22%22%22%0A INSERT INTO auth_user(id, date_joined, is_superuser) VALUES%0A (1, date('now'), 1),%0A (2, date('now'), 0),%0A (3, date('now'), 0)%0A %22%22%22)%0A%0A%0A@pytest.mark.skipif(%0A sqlalchemy is None,%0A reason=%22sqlalchemy not installed%22,%0A)%0A@unittest.mock.patch('sqlalchemy.create_engine', lambda *args: test_database)%0Adef test_get_extant_user():%0A directory = DjangoDirectory('')%0A%0A user_one = directory.lookup('1')%0A%0A assert list(user_one.tags) == %5B'superuser'%5D%0A%0A%0A@pytest.mark.skipif(%0A sqlalchemy is None,%0A reason=%22sqlalchemy not installed%22,%0A)%0A@unittest.mock.patch('sqlalchemy.create_engine', lambda *args: test_database)%0Adef test_get_missing_user():%0A directory = DjangoDirectory('')%0A%0A user_zero = directory.lookup('0')%0A%0A assert user_zero is None%0A%0A%0A@pytest.mark.skipif(%0A sqlalchemy is None,%0A reason=%22sqlalchemy not installed%22,%0A)%0A@unittest.mock.patch('sqlalchemy.create_engine', lambda *args: test_database)%0Adef test_get_all_users():%0A directory = DjangoDirectory('')%0A%0A users = directory.all_users()%0A%0A assert %5Bx.id for x in users%5D == %5B1, 2, 3%5D%0A
|
|
6babb6e64e93ed74a72203fdc67955ae8ca3bfb3
|
Add a baseline set of _MultiCall performance tests
|
testing/benchmark.py
|
testing/benchmark.py
|
Python
| 0
|
@@ -0,0 +1,1155 @@
+%22%22%22%0ABenchmarking and performance tests.%0A%22%22%22%0Aimport pytest%0A%0Afrom pluggy import _MultiCall, HookImpl%0Afrom pluggy import HookspecMarker, HookimplMarker%0A%0A%0Ahookspec = HookspecMarker(%22example%22)%0Ahookimpl = HookimplMarker(%22example%22)%0A%0A%0Adef MC(methods, kwargs, firstresult=False):%0A hookfuncs = %5B%5D%0A for method in methods:%0A f = HookImpl(None, %22%3Ctemp%3E%22, method, method.example_impl)%0A hookfuncs.append(f)%0A return _MultiCall(hookfuncs, kwargs, %7B%22firstresult%22: firstresult%7D)%0A%0A%0A@hookimpl(hookwrapper=True)%0Adef m1(arg1, arg2, arg3):%0A yield%0A%0A%0A@hookimpl%0Adef m2(arg1, arg2, arg3):%0A return arg1, arg2, arg3%0A%0A%0A@hookimpl(hookwrapper=True)%0Adef w1(arg1, arg2, arg3):%0A yield%0A%0A%0A@hookimpl(hookwrapper=True)%0Adef w2(arg1, arg2, arg3):%0A yield%0A%0A%0Adef inner_exec(methods):%0A return MC(methods, %7B'arg1': 1, 'arg2': 2, 'arg3': 3%7D).execute()%0A%0A%0A@pytest.mark.benchmark%0Adef test_hookimpls_speed(benchmark):%0A benchmark(inner_exec, %5Bm1, m2%5D)%0A%0A%0A@pytest.mark.benchmark%0Adef test_hookwrappers_speed(benchmark):%0A benchmark(inner_exec, %5Bw1, w2%5D)%0A%0A%0A@pytest.mark.benchmark%0Adef test_impls_and_wrappers_speed(benchmark):%0A benchmark(inner_exec, %5Bm1, m2, w1, w2%5D)%0A
|
|
21dc462b47f5b5577d51119ddd340c518d8cfb94
|
Add script to rename photos in directory
|
photos.py
|
photos.py
|
Python
| 0
|
@@ -0,0 +1,882 @@
+import os%0Afrom datetime import date%0A%0A# Programs at the Coral Gables Art Cinema.%0Aprograms = %5B'1. Main Features', '2. After Hours', '3. Special Screenings',%0A '4. Family Day on Aragon', '5. National Theatre Live',%0A '6. See It in 70mm', '7. Alternative Content'%5D%0A%0Afor program in programs:%0A print(program)%0A%0Aindex = int(input('Select a program by its number: '))%0Aprogram = programs%5Bindex - 1%5D%5B3:%5D%0A%0A%0Atitle = input('Select a film: ')%0Aphoto_dir = input('Location of the photos: ')%0Anew_name = input('Enter new base file name: ')%0A%0Aroot = 'M:/Coral Gables Art Cinema/Programming/'%0Ayear = str(date.today().year)%0A%0Apath = os.path.join(root, program, year, title, photo_dir)%0A%0Anum_suffix = 1%0Afor photo in os.listdir(path):%0A final_name = '%7B%7D %7B%7D.jpg'.format(new_name, num_suffix)%0A os.rename(os.path.join(path, photo), os.path.join(path, final_name))%0A num_suffix += 1%0A
|
|
f182dae6eb0a17f8b7a437694b69b273595f9549
|
Add YAML export
|
jrnl/plugins/yaml_exporter.py
|
jrnl/plugins/yaml_exporter.py
|
Python
| 0.000001
|
@@ -0,0 +1,2836 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0Afrom __future__ import absolute_import, unicode_literals, print_function%0Afrom .text_exporter import TextExporter%0Aimport re%0Aimport sys%0Aimport yaml%0A%0A%0Aclass MarkdownExporter(TextExporter):%0A %22%22%22This Exporter can convert entries and journals into Markdown with YAML front matter.%22%22%22%0A names = %5B%22yaml%22%5D%0A extension = %22md%22%0A%0A @classmethod%0A def export_entry(cls, entry, to_multifile=True):%0A %22%22%22Returns a markdown representation of a single entry, with YAML front matter.%22%22%22%0A if to_multifile is False:%0A print(%22%7B%7DERROR%7B%7D: YAML export must be to individual files. Please specify a directory to export to.%22.format(%22%5C033%5B31m%22, %22%5C033%5B0m%22, file=sys.stderr))%0A return%0A%0A date_str = entry.date.strftime(entry.journal.config%5B'timeformat'%5D)%0A body_wrapper = %22%5Cn%22 if entry.body else %22%22%0A body = body_wrapper + entry.body%0A%0A '''Increase heading levels in body text'''%0A newbody = ''%0A heading = '###'%0A previous_line = ''%0A warn_on_heading_level = False%0A for line in entry.body.splitlines(True):%0A if re.match(r%22#+ %22, line):%0A %22%22%22ATX style headings%22%22%22%0A newbody = newbody + previous_line + heading + line%0A if re.match(r%22#######+ %22, heading + line):%0A warn_on_heading_level = True%0A line = ''%0A elif re.match(r%22=+$%22, line) and not re.match(r%22%5E$%22, previous_line):%0A %22%22%22Setext style H1%22%22%22%0A newbody = newbody + heading + %22# %22 + previous_line%0A line = ''%0A elif re.match(r%22-+$%22, line) and not re.match(r%22%5E$%22, previous_line):%0A %22%22%22Setext style H2%22%22%22%0A newbody = newbody + heading + %22## %22 + previous_line%0A line = ''%0A else:%0A newbody = newbody + previous_line%0A previous_line = line%0A newbody = newbody + previous_line # add very last line%0A%0A if warn_on_heading_level is True:%0A print(%22%7B%7DWARNING%7B%7D: Headings increased past H6 on export - %7B%7D %7B%7D%22.format(%22%5C033%5B33m%22, %22%5C033%5B0m%22, date_str, entry.title), file=sys.stderr)%0A%0A # top = yaml.dump(entry)%0A%0A return %22title: %7Btitle%7D%5Cndate: %7Bdate%7D%5Cnstared: %7Bstared%7D%5Cntags: %7Btags%7D%5Cn%7Bbody%7D %7Bspace%7D%22.format(%0A date=date_str,%0A title=entry.title,%0A stared=entry.starred,%0A tags=', '.join(%5Btag%5B1:%5D for tag in entry.tags%5D),%0A body=newbody,%0A space=%22%22%0A )%0A%0A @classmethod%0A def export_journal(cls, journal):%0A %22%22%22Returns an error, as YAML export requires a directory as a target.%22%22%22%0A print(%22%7B%7DERROR%7B%7D: YAML export must be to individual files. Please specify a directory to export to.%22.format(%22%5C033%5B31m%22, %22%5C033%5B0m%22, file=sys.stderr))%0A return%0A
|
|
41bd33421f14498737aa0088f2d93b00bb521d7b
|
implement a viewset controller, capable of containing controllers
|
julesTk/controller/viewset.py
|
julesTk/controller/viewset.py
|
Python
| 0
|
@@ -0,0 +1,1838 @@
+%0Afrom . import ViewController%0A%0A%0Aclass ViewSetController(ViewController):%0A%0A def __init__(self, parent, view=None):%0A super(ViewSetController, self).__init__(parent, view)%0A self._controllers = %7B%7D%0A%0A @property%0A def controllers(self):%0A %22%22%22 Dictionary with all controllers used in this viewset%0A%0A :return:%0A :rtype: dict%5Bstr, julesTk.controller.BaseController%5D%0A %22%22%22%0A return self._controllers%0A%0A def has_controller(self, name):%0A %22%22%22Whether a controller is registered to this controller using the given name%22%22%22%0A return name in self.controllers.keys()%0A%0A def get_controller(self, name):%0A %22%22%22Return the controller registered under the given name%22%22%22%0A if not self.has_controller(name):%0A raise KeyError(%22No controller registered using the name: %7B%7D%22.format(name))%0A return self.controllers%5Bname%5D%0A%0A def add_controller(self, name, c):%0A %22%22%22Register a controller under a new name%22%22%22%0A if self.has_controller(name):%0A raise KeyError(%22Another controller is already registered under: %7B%7D%22.format(name))%0A self.controllers%5Bname%5D = c%0A%0A def remove_controller(self, name):%0A %22%22%22Remove controller and name from the registry%22%22%22%0A if not self.has_controller(name):%0A raise KeyError(%22No controller registered using the name: %7B%7D%22.format(name))%0A return self.controllers.pop(name)%0A%0A def remove_controllers(self):%0A %22%22%22Remove all controllers from the registry%0A%0A And tell them to stop%0A %22%22%22%0A while len(self.controllers.keys()) %3E 0:%0A key = self.controllers.keys()%5B0%5D%0A self.remove_controller(key).stop()%0A return len(self.controllers.keys()) == 0%0A%0A def _stop(self):%0A self.remove_controllers()%0A super(ViewSetController, self)._stop()%0A
|
|
8c5fb07b37eebf484c33ca735bd2b9dac5d0dede
|
solve 1 problem
|
solutions/nested-list-weight-sum.py
|
solutions/nested-list-weight-sum.py
|
Python
| 0.000027
|
@@ -0,0 +1,1517 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0A%22%22%22%0Anested-list-weight-sum.py%0A %0ACreated by Shuailong on 2016-03-30.%0A%0Ahttps://leetcode.com/problems/nested-list-weight-sum/.%0A%0A%22%22%22%0A%0A# %22%22%22%0A# This is the interface that allows for creating nested lists.%0A# You should not implement it, or speculate about its implementation%0A# %22%22%22%0A# class NestedInteger(object):%0A%0A# def isInteger(self):%0A# %22%22%22%0A# @return True if this NestedInteger holds a single integer, rather than a nested list.%0A# :rtype bool%0A# %22%22%22%0A%0A%0A# def getInteger(self):%0A# %22%22%22%0A# @return the single integer that this NestedInteger holds, if it holds a single integer%0A# Return None if this NestedInteger holds a nested list%0A# :rtype int%0A# %22%22%22%0A%0A# def getList(self):%0A# %22%22%22%0A# @return the nested list that this NestedInteger holds, if it holds a nested list%0A# Return None if this NestedInteger holds a single integer%0A# :rtype List%5BNestedInteger%5D%0A# %22%22%22%0A%0Aclass Solution(object):%0A def depthSum1(self, nestedList, depth):%0A s = 0%0A for nl in nestedList:%0A if nl.isInteger():%0A s += depth * nl.getInteger()%0A else:%0A s += self.depthSum1(nl.getList(), depth+1)%0A return s%0A%0A def depthSum(self, nestedList):%0A %22%22%22%0A :type nestedList: List%5BNestedInteger%5D%0A :rtype: int%0A %22%22%22%0A return self.depthSum1(nestedList, 1)%0A%0A%0Adef main():%0A pass%0A %0Aif __name__ == '__main__':%0A main()%0A%0A
|
|
0738b3816db752b8cb678324ff4c113625660b94
|
add test for pathops.operations.intersection
|
tests/operations_test.py
|
tests/operations_test.py
|
Python
| 0.000025
|
@@ -0,0 +1,1306 @@
+from pathops import Path, PathVerb%0Afrom pathops.operations import union, difference, intersection, reverse_difference, xor%0Aimport pytest%0A%0A%0A@pytest.mark.parametrize(%0A %22subject_path, clip_path, expected%22,%0A %5B%0A %5B%0A %5B%0A (PathVerb.MOVE, ((0, 0),)),%0A (PathVerb.LINE, ((0, 10),)),%0A (PathVerb.LINE, ((10, 10),)),%0A (PathVerb.LINE, ((10, 0),)),%0A (PathVerb.CLOSE, ()),%0A %5D,%0A %5B%0A (PathVerb.MOVE, ((5, 5),)),%0A (PathVerb.LINE, ((5, 15),)),%0A (PathVerb.LINE, ((15, 15),)),%0A (PathVerb.LINE, ((15, 5),)),%0A (PathVerb.CLOSE, ()),%0A %5D,%0A %5B%0A (PathVerb.MOVE, ((5, 5),)),%0A (PathVerb.LINE, ((10, 5),)),%0A (PathVerb.LINE, ((10, 10),)),%0A (PathVerb.LINE, ((5, 10),)),%0A (PathVerb.CLOSE, ()),%0A %5D,%0A %5D%0A %5D,%0A)%0Adef test_intersection(subject_path, clip_path, expected):%0A sub = Path()%0A for verb, pts in subject_path:%0A sub.add(verb, *pts)%0A clip = Path()%0A for verb, pts in clip_path:%0A clip.add(verb, *pts)%0A result = Path()%0A%0A intersection(%5Bsub%5D, %5Bclip%5D, result.getPen())%0A%0A assert list(result) == expected%0A
|
|
f3c11599ef1714f7337191719172614c43b87eff
|
Add tests.test_OrderedSet.
|
tests/test_OrderedSet.py
|
tests/test_OrderedSet.py
|
Python
| 0
|
@@ -0,0 +1,555 @@
+from twisted.trial import unittest%0A%0Afrom better_od import OrderedSet%0A%0A%0Aclass TestOrderedDict(unittest.TestCase):%0A def setUp(self):%0A self.values = 'abcddefg'%0A self.s = OrderedSet(self.values)%0A%0A def test_order(self):%0A expected = list(enumerate('abcdefg'))%0A self.assertEquals(list(enumerate(self.s)), expected)%0A%0A def test_index(self):%0A self.assertEquals(self.s.key_index('c'), 2)%0A%0A def test_add_new_value(self):%0A prev = len(self.s)%0A self.s.add('z')%0A self.assertEqual(len(self.s), prev + 1)%0A
|
|
7df6189dbfd69c881fedf71676dd4fdbc7dba2f0
|
Add test for renormalize migration
|
tests/test_migrations.py
|
tests/test_migrations.py
|
import copy
import pytest
import mock
import scrapi
from scrapi.linter.document import NormalizedDocument
from scrapi import tasks
from scrapi import registry
from scrapi.migrations import delete
from scrapi.migrations import rename
# Need to force cassandra to ignore set keyspace
from scrapi.processing.cassandra import CassandraProcessor, DocumentModel
from . import utils
test_cass = CassandraProcessor()
harvester = utils.TestHarvester()
NORMALIZED = NormalizedDocument(utils.RECORD)
RAW = harvester.harvest()[0]
@pytest.fixture
def harvester():
pass # Need to override this
@pytest.mark.cassandra
def test_rename():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
old_source = NORMALIZED['shareProperties']['source']
assert(queryset[0].source == utils.RECORD['shareProperties']['source'])
assert(queryset[0].source == old_source)
new_record = copy.deepcopy(utils.RECORD)
new_record['shareProperties']['source'] = 'wwe_news'
test_info = registry['test'].__class__()
test_info.short_name = 'wwe_news'
registry['wwe_news'] = test_info
tasks.migrate(rename, source=old_source, target='wwe_news')
queryset = DocumentModel.objects(docID=RAW['docID'], source='wwe_news')
assert(queryset[0].source == 'wwe_news')
assert(len(queryset) == 1)
scrapi.processing.elasticsearch.es = real_es
@pytest.mark.cassandra
def test_delete():
real_es = scrapi.processing.elasticsearch.es
scrapi.processing.elasticsearch.es = mock.MagicMock()
test_cass.process_raw(RAW)
test_cass.process_normalized(RAW, NORMALIZED)
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
assert(len(queryset) == 1)
tasks.migrate(delete, source=RAW['source'])
queryset = DocumentModel.objects(docID=RAW['docID'], source=RAW['source'])
assert(len(queryset) == 0)
scrapi.processing.elasticsearch.es = real_es
|
Python
| 0.000001
|
@@ -228,16 +228,58 @@
t rename
+%0Afrom scrapi.migrations import renormalize
%0A%0A# Need
@@ -2145,28 +2145,589 @@
.elasticsearch.es = real_es%0A
+%0A%0A@pytest.mark.cassandra%0Adef test_renormalize():%0A real_es = scrapi.processing.elasticsearch.es%0A scrapi.processing.elasticsearch.es = mock.MagicMock()%0A test_cass.process_raw(RAW)%0A test_cass.process_normalized(RAW, NORMALIZED)%0A%0A queryset = DocumentModel.objects(docID=RAW%5B'docID'%5D, source=RAW%5B'source'%5D)%0A assert(len(queryset) == 1)%0A%0A tasks.migrate(renormalize, source=RAW%5B'source'%5D)%0A queryset = DocumentModel.objects(docID=RAW%5B'docID'%5D, source=RAW%5B'source'%5D)%0A assert(len(queryset) == 1)%0A scrapi.processing.elasticsearch.es = real_es%0A
|
85fc51ef3d75d2f78e80b346897d22bebf797424
|
add mf_helpers
|
mf2py/mf_helpers.py
|
mf2py/mf_helpers.py
|
Python
| 0.000001
|
@@ -0,0 +1,399 @@
+def get_url(mf):%0A %22%22%22parses the mf dictionary obtained as returns the URL%22%22%22%0A%0A urls = %5B%5D%0A for item in mf:%0A if isinstance(item, basestring):%0A urls.append(item)%0A else:%0A itemtype = %5Bx for x in item.get('type',%5B%5D) if x.startswith('h-')%5D%0A if itemtype is not %5B%5D:%0A urls.extend(item.get('properties',%7B%7D).get('url',%5B%5D))%0A%0A return urls%0A
|
|
1831dbd065a8776a77d18e10b44f84c99bca4c75
|
Add test of simple textcat workflow
|
spacy/tests/textcat/test_textcat.py
|
spacy/tests/textcat/test_textcat.py
|
Python
| 0.000001
|
@@ -0,0 +1,553 @@
+from __future__ import unicode_literals%0Afrom ...language import Language%0A%0Adef test_simple_train():%0A nlp = Language()%0A%0A nlp.add_pipe(nlp.create_pipe('textcat'))%0A nlp.get_pipe('textcat').add_label('is_good')%0A%0A nlp.begin_training()%0A%0A for i in range(5):%0A for text, answer in %5B('aaaa', 1.), ('bbbb', 0), ('aa', 1.),%0A ('bbbbbbbbb', 0.), ('aaaaaa', 1)%5D:%0A nlp.update(%5Btext%5D, %5B%7B'cats': %7B'answer': answer%7D%7D%5D)%0A doc = nlp(u'aaa')%0A assert 'is_good' in doc.cats%0A assert doc.cats%5B'is_good'%5D %3E= 0.5%0A%0A
|
|
47af5fc466936f46e05f4ebaf89257e5c731a38e
|
add test_handle_conversation_after_delete
|
plugin/test/test_handle_conversation_after_delete.py
|
plugin/test/test_handle_conversation_after_delete.py
|
Python
| 0.000008
|
@@ -0,0 +1,676 @@
+import unittest%0Aimport copy%0Afrom unittest.mock import Mock%0A%0Aimport chat_plugin%0Afrom chat_plugin import handle_conversation_after_delete%0A%0Aclass TestHandleConversationAfterDelete(unittest.TestCase):%0A%0A def setUp(self):%0A self.conn = None%0A self.mock_publish_event = Mock()%0A chat_plugin._publish_event = self.mock_publish_event%0A%0A def record(self):%0A return %7B%0A 'participant_ids': %5B'user1', 'user2', 'user3'%5D,%0A 'admin_ids': %5B'user1'%5D%0A %7D%0A%0A def test_publish_event_count_should_be_three(self):%0A handle_conversation_after_delete(self.record(), self.conn)%0A self.assertIs(self.mock_publish_event.call_count, 3)%0A
|
|
7516f369be3723520def3a9141facc6783d3a887
|
remove 4handler
|
githubapp.py
|
githubapp.py
|
import os
import base64
from flask import Flask , request, render_template
import nbconvert.nbconvert as nbconvert
import requests
from nbformat import current as nbformat
from flask import Flask, redirect, abort
import re
import github as gh
from gist import render_content
app = Flask(__name__)
github = gh.Github()
@app.route('/')
def render_url():
return 'you are at root'
@app.route('/<user>/')
def user(user):
return github.get_user(user).name
@app.route('/<user>/<repo>/')
def repo(user,repo):
return redirect('/%(user)s/%(repo)s/tree/master/'%{'user':user, 'repo':repo})
@app.route('/<user>/<repo>/<tree>/<branch>/')
def dummy1(user,repo,tree,branch):
if user == 'static':
return open('static/%s/%s/%s'%(repo,tree,branch)).read()
return file(user,repo,tree,branch,None)
@app.route('/<user>/<repo>/<tree>/<branch>/<path:subfile>')
def file(user,repo,tree,branch, subfile):
#we don't care about tree or branch now...
#convert names to objects
user = github.get_user(user)
repo = user.get_repo(repo)
master = branch if branch else repo.master_branch
branch = [b for b in repo.get_branches() if b.name == master][0]
if subfile:
atroot = False
e = rwt(repo, branch.commit.sha, subfile.strip('/').split('/'))
else :
atroot = True
e = repo.get_git_tree(branch.commit.sha);
if hasattr(e,'type') and e.type == 'blob' :
f = repo.get_git_blob(e.sha)
return render_content(base64.decodestring(f.content))
else :
entries = []
for en in e.tree:
var = {}
var['path'] = en.path
var['url'] = relative_url_for_tree(en)
var['type'] = type_for_tree(en)
entries.append(var)
return render_template('treelist.html', entries=entries, atroot=atroot)
def relative_url_for_tree(obj):
if hasattr(obj, 'type') and obj.type == 'blob' :
return obj.path
else :
return obj.path+'/'
def type_for_tree(obj):
if hasattr(obj, 'type') and obj.type == 'blob' :
return 'blob'
else :
return 'tree'
#recursively walk tree....
def rwt(repo,sha,path):
tree = repo.get_git_tree(sha)
if len(path)==0:
return tree
subpath = path[1:]
key = path[0]
nodes = tree.tree
for n in nodes :
if n.path == key:
if n.type == 'tree':
return rwt(repo, n.sha, subpath)
else :
return n
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT', 5000))
debug = os.path.exists('.debug')
if debug :
print 'DEBUG MODE IS ACTIVATED !!!'
else :
print 'debug is not activated'
app.run(host='0.0.0.0', port=port, debug=debug)
|
Python
| 0.000006
|
@@ -589,16 +589,17 @@
repo%7D)%0A%0A
+#
@app.rou
@@ -632,24 +632,25 @@
%3Cbranch%3E/')%0A
+#
def dummy1(u
@@ -668,24 +668,25 @@
ee,branch):%0A
+#
if user
@@ -698,16 +698,17 @@
tatic':%0A
+#
@@ -764,16 +764,17 @@
.read()%0A
+#
retu
|
a4620f5371cea0a90360c6968c7ecbe426e9e1f4
|
Create genomic_range_query.py
|
codility/genomic_range_query.py
|
codility/genomic_range_query.py
|
Python
| 0.000167
|
@@ -0,0 +1,1341 @@
+%22%22%22%0Ahttps://codility.com/programmers/task/genomic_range_query/%0A%22%22%22%0A%0A%0Afrom collections import Counter%0A%0A%0Adef solution(S, P, Q):%0A # Instead of counters, could've also used four prefix-sum and four suffix-sum%0A # arrays. E.g., %60pref_1%60 would just do a prefix sum across S, summing up%0A # only the ones; %60pref_2%60 would sum up only the twos; etc.%0A values = %7B'A': 1, 'C': 2, 'G': 3, 'T': 4%7D%0A S = tuple(values%5Bchar%5D for char in S)%0A total_counts = Counter(S)%0A pref = prefix_counts(S)%0A suff = suffix_counts(S)%0A%0A def _min_impact_factor(p, q):%0A slice_counts = %7Bval: (count - pref%5Bp%5D%5Bval%5D - suff%5Bq%5D%5Bval%5D) %0A for val,count in total_counts.iteritems()%7D%0A return next(v for v in (1, 2, 3, 4) if v in slice_counts and slice_counts%5Bv%5D %3E 0)%0A%0A return %5B_min_impact_factor(p, q) for p,q in zip(P, Q)%5D%0A%0A%0Adef prefix_counts(A):%0A result = %5BNone%5D * len(A)%0A result%5B0%5D = %7Bval: 0 for val in (1, 2, 3, 4)%7D%0A%0A for i in xrange(1, len(A)):%0A counts = result%5Bi-1%5D.copy()%0A counts%5BA%5Bi-1%5D%5D += 1%0A result%5Bi%5D = counts%0A%0A return result%0A%0A%0Adef suffix_counts(A):%0A result = %5BNone%5D * len(A)%0A result%5B-1%5D = %7Bval: 0 for val in (1, 2, 3, 4)%7D%0A%0A for i in xrange(len(A)-2, -1, -1):%0A counts = result%5Bi+1%5D.copy()%0A counts%5BA%5Bi+1%5D%5D += 1%0A result%5Bi%5D = counts%0A%0A return result%0A
|
|
dfd92f2062f5f8c1f8a483f7af1dc6877502c1b2
|
add myself as author
|
build.py
|
build.py
|
#!/usr/bin/env python
#
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.insert(0, 'src/main/python') # This is only necessary in PyBuilder sources for bootstrap
from pybuilder import bootstrap
from pybuilder.core import Author, init, use_bldsup, use_plugin
bootstrap()
use_plugin("python.core")
use_plugin("python.pytddmon")
use_plugin("python.distutils")
use_plugin("python.install_dependencies")
use_plugin("copy_resources")
use_plugin("filter_resources")
use_plugin("source_distribution")
use_plugin("python.unittest")
use_plugin("python.cram")
use_plugin("python.integrationtest")
use_plugin("python.coverage")
use_plugin("python.flake8")
use_plugin("python.sphinx")
use_plugin('filter_resources')
if not sys.version_info[0] == 3:
use_plugin("python.snakefood")
use_plugin("python.pydev")
use_plugin("python.pycharm")
use_plugin("python.pytddmon")
use_bldsup()
use_plugin("pdoc")
name = "PyBuilder"
summary = "An extensible, easy to use continuous build tool for Python"
description = """PyBuilder is a build automation tool for python.
PyBuilder is a software build tool written in pure Python which mainly targets Python applications.
It is based on the concept of dependency based programming but also comes along with powerful plugin mechanism that
allows the construction of build life cycles similar to those known from other famous build tools like Apache Maven.
"""
authors = [Author("Alexander Metzner", "alexander.metzner@gmail.com"),
Author("Maximilien Riehl", "max@riehl.io"),
Author("Michael Gruber", "aelgru@gmail.com"),
Author("Udo Juettner", "udo.juettner@gmail.com"),
Author("Marcel Wolf", "marcel.wolf@me.com"),
Author("Arcadiy Ivanov", "arcadiy@ivanov.biz"),
]
url = "http://pybuilder.github.io"
license = "Apache License"
version = "0.11.4.dev"
default_task = ["analyze", "publish"]
@init
def initialize(project):
project.build_depends_on("fluentmock")
project.build_depends_on("mock")
project.build_depends_on("mockito-without-hardcoded-distribute-version")
project.build_depends_on("pyfix") # required test framework
project.build_depends_on("pyassert")
project.build_depends_on("pygments")
if sys.version_info[0:2] == (2, 6):
project.build_depends_on("importlib") # for fluentmock
project.depends_on("tblib")
project.depends_on("pip", ">=7.0")
project.depends_on("wheel")
project.set_property("verbose", True)
project.set_property("coverage_break_build", False)
project.set_property("coverage_reset_modules", True)
project.get_property("coverage_exceptions").append("pybuilder.cli")
project.get_property("coverage_exceptions").append("pybuilder.plugins.core_plugin")
# Issue #284
project.set_property("integrationtest_inherit_environment", True)
project.set_property('flake8_break_build', True)
project.set_property('flake8_include_test_sources', True)
project.set_property('flake8_include_scripts', True)
project.set_property('flake8_max_line_length', 130)
project.set_property('frosted_include_test_sources', True)
project.set_property('frosted_include_scripts', True)
project.set_property("copy_resources_target", "$dir_dist")
project.get_property("copy_resources_glob").append("LICENSE")
project.get_property("filter_resources_glob").append("**/pybuilder/__init__.py")
project.set_property("sphinx_doc_author", "PyBuilder Team")
project.set_property("sphinx_doc_builder", "html")
project.set_property("sphinx_project_name", project.name)
project.set_property("sphinx_project_version", project.version)
project.get_property("source_dist_ignore_patterns").append(".project")
project.get_property("source_dist_ignore_patterns").append(".pydevproject")
project.get_property("source_dist_ignore_patterns").append(".settings")
# enable this to build a bdist on vagrant
# project.set_property("distutils_issue8876_workaround_enabled", True)
project.set_property("distutils_console_scripts", ["pyb_ = pybuilder.cli:main"])
project.set_property("distutils_classifiers", [
'Programming Language :: Python',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing'])
|
Python
| 0.000538
|
@@ -2385,24 +2385,84 @@
anov.biz%22),%0A
+ Author(%22Valentin Haenel%22, %22valentin@haenel.co%22),%0A
%5D
|
1cb55aa6b3abd4a3a20ff0f37b6c80c0c89ef1ff
|
Add a dummy pavement file.
|
tools/win32/build_scripts/pavement.py
|
tools/win32/build_scripts/pavement.py
|
Python
| 0
|
@@ -0,0 +1,137 @@
+options(%0A setup=Bunch(%0A name = %22scipy-superpack%22,%0A )%0A)%0A%0A@task%0Adef setup():%0A print %22Setting up package %25s%22 %25 options.name%0A
|
|
cef74f6d84f1d7fec54fd9a314888e7d0e84ac3f
|
Create telnet-cmdrunner.py
|
telnet-cmdrunner.py
|
telnet-cmdrunner.py
|
Python
| 0.000006
|
@@ -0,0 +1,1837 @@
+#!/usr/bin/python%0A%0A%0Afrom __future__ import absolute_import, division, print_function%0A%0Aimport netmiko%0Aimport json%0Aimport tools%0Aimport sys ### Capture and handle signals past from the Operating System.%0Aimport signal%0A%0A%0Asignal.signal(signal.SIGPIPE, signal.SIG_DFL) ### IOERror: Broken pipe%0Asignal.signal(signal.SIGINT, signal.SIG_DFL) ### KeyboardInterrupt: Ctrl-C%0A%0A%0A### If authentication fails, the script will continue to run.%0A### If connection times out, the script will continue to run.%0Anetmiko_exceptions = (netmiko.ssh_exception.NetMikoTimeoutException,%0A netmiko.ssh_exception.NetMikoAuthenticationException)%0A%0A%0Ausername, password = tools.get_credentials()%0A%0A%0Awith open('cisco_ios_telnet_devices.json') as dev_file:%0A cisco_ios_telnet_devices = json.load(dev_file)%0A%0A%0Adomain_name = %5B 'ip domain-name a-corp.com'%5D%0A%0Acrypto_key_gen = %5B 'crypto key generate rsa label SSH mod 2048'%5D%0A%0Assh_commands = %5B 'ip ssh rsa keypair-name SSH',%0A 'ip ssh version 2',%5D%0A%0A%0Afor device in cisco_ios_telnet_devices:%0A device%5B'username'%5D = username%0A device%5B'password'%5D = password%0A try:%0A print()%0A print('='*79)%0A print('Connecting to device:', device%5B'ip'%5D)%0A print('-'*79)%0A ### Establish session to each device in %22cisco_ios_telnet_devices.json%22%0A ### ** is used to unpack the dictonary for Netmiko%0A connection = netmiko.ConnectHandler(**device)%0A%0A print(connection.send_config_set(domain_name))%0A print('-'*79)%0A print(connection.send_config_set(crypto_key_gen, delay_factor=10))%0A print('-'*79)%0A print(connection.send_config_set(ssh_commands))%0A print('-'*79)%0A%0A ### Disconnect sessions.%0A connection.disconnect()%0A%0A except netmiko_exceptions as e:%0A print('Failed to:', device%5B'ip'%5D)%0A print(e)%0A
|
|
ecb6390c800260cedddba655f253a8307e096d76
|
Create setup.py
|
setup.py
|
setup.py
|
Python
| 0.001383
|
@@ -0,0 +1,260 @@
+from distutils.core import setup%0A%0Asetup(name='atmPy',%0A version='0.1',%0A description='Python Distribution Utilities',%0A author='Hagen Telg and Matt Richardson',%0A author_email='matt.richardson@msrconsults.com',%0A packages=%5B'atmPy'%5D,%0A )%0A
|
|
93cb8184fe5fdbf294c1e8f36b45ed8b514b2ce5
|
Allow setup file to enable pip installation
|
setup.py
|
setup.py
|
Python
| 0
|
@@ -0,0 +1,97 @@
+from distutils.core import setup%0A%0Asetup(%0A name='multiworld',%0A packages=('multiworld', ),%0A)%0A
|
|
dad2024344f581aa042f767e4aa473d50a8f78bc
|
Create individual_dist_label.py
|
sandbox/individual_distance/individual_dist_label.py
|
sandbox/individual_distance/individual_dist_label.py
|
Python
| 0.000005
|
@@ -0,0 +1,2158 @@
+#!/usr/bin/python%0A%0Aimport os, numpy as np, scipy as sp, nibabel.freesurfer as fs%0Afrom sklearn.utils.arpack import eigsh %0A%0A# Set defaults:%0Abase_dir = '/scr/liberia1/LEMON_LSD/LSD_rest_surf'%0Aoutput_base_dir = '/scr/liberia1'%0Asubjects = %5B26410%5D%0A %0Afor subject in subjects:%0A for hemi in %5B'lh', 'rh'%5D:%0A%0A%09%09# read in cortical mask%0A%09%09cort = # nodes%0A%0A%09%09dataCorr = # load conn mat and mask out only cortex%0A%09%09fullsize = # length of full cortex%0A%0A embedding = DoFiedler(dataCorr%5Bcort,cort%5D) # see below for details%0A del dataCorr%0A # reinsert zeros:%0A fiedler = np.zeros(fullsize)%0A fiedler%5Bcort%5D = embedding%5B1%5D # check if this reads the first eigenvector correctly%0A %0A # read in distance matrix %0A distmat = # read in%0A # get labels from freesurfer for anatomy%0A fs_labels = # read in%0A label_parietal = fs_labels == # XXX # grab parietal mask%0A for i in %5Blabel1, label2, etc%5D:%0A label_dist = np.mean(distmat(fs_labels == i))%0A # mask fiedler by parietal to get peak of DMN in parietal%0A masked_fiedler = fiedler * label_parietal%0A if masked_fiedler %3E mean(fiedler):%0A anat_dist = label_dist(max(masked_fiedler)) # does that compute elementwise product?%0A else:%0A anat_dist = label_dist(min(masked_fiedler)) # does that compute elementwise product?%0A %0A # save out anat_dist for subject / hemi / anat label%0A # also create images for quality control: fiedler, masked_fiedler%0A%0A%0Adef DoFiedler(conn):%0A # prep for embedding%0A K = (conn + 1) / 2. %0A v = np.sqrt(np.sum(K, axis=1)) %0A A = K/(v%5B:, None%5D * v%5BNone, :%5D) %0A del K%0A A = np.squeeze(A * %5BA %3E 0%5D)%0A%0A # diffusion embedding%0A n_components_embedding = 2%0A lambdas, vectors = eigsh(A, k=n_components_embedding+1) %0A del A%0A lambdas = lambdas%5B::-1%5D %0A vectors = vectors%5B:, ::-1%5D %0A psi = vectors/vectors%5B:, 0%5D%5B:, None%5D %0A lambdas = lambdas%5B1:%5D / (1 - lambdas%5B1:%5D) %0A embedding = psi%5B:, 1:(n_components_embedding + 1 + 1)%5D * lambdas%5B:n_components_embedding+1%5D%5BNone, :%5D %0A%0A return embedding%0A %0A
|
|
a2865b712d0a28e3a0b8943f67703a77b5d90894
|
Add a stub for testing _utils
|
tests/test__utils.py
|
tests/test__utils.py
|
Python
| 0
|
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-%0A
|
|
9a3a619791d34847e07c7dc7b952863d2a6d30c7
|
Add simple test for monthly ghistory
|
tests/test_splits.py
|
tests/test_splits.py
|
import re
from tests.base import IntegrationTest
from tasklib.task import local_zone
from datetime import datetime
class TestBurndownDailySimple(IntegrationTest):
def execute(self):
self.command("TaskWikiBurndownDaily")
assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.daily")
assert "Daily Burndown" in self.read_buffer()[0]
class TestBurndownMonthlySimple(IntegrationTest):
def execute(self):
self.command("TaskWikiBurndownMonthly")
assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.monthly")
assert "Monthly Burndown" in self.read_buffer()[0]
class TestBurndownWeeklySimple(IntegrationTest):
def execute(self):
self.command("TaskWikiBurndownWeekly")
assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer burndown.weekly")
assert "Weekly Burndown" in self.read_buffer()[0]
class TestCalendarSimple(IntegrationTest):
def execute(self):
self.command("TaskWikiCalendar")
assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer calendar")
# Assert each day is displayed at least once.
output = self.read_buffer()
for day in map(str, range(1, 29)):
assert any(day in line for line in output)
class TestGhistorySimple(IntegrationTest):
tasks = [
dict(description="test task"),
dict(description="completed task 1", status="completed", end="now"),
dict(description="completed task 2", status="completed", end="now"),
dict(description="deleted task", status="deleted"),
]
def execute(self):
self.command("TaskWikiGhistoryAnnual")
assert self.command(":py print vim.current.buffer", silent=False).startswith("<buffer ghistory.annual")
output = self.read_buffer()
header_words = ("Year", "Number", "Added", "Completed", "Deleted")
for word in header_words:
assert word in output[0]
legend_words = ("Legend", "Added", "Completed", "Deleted")
for word in legend_words:
assert re.search(word, output[-1], re.IGNORECASE)
current_year = local_zone.localize(datetime.now()).year
assert str(current_year) in '\n'.join(output)
|
Python
| 0
|
@@ -1409,16 +1409,22 @@
Ghistory
+Annual
Simple(I
@@ -2355,16 +2355,1326 @@
n'.join(output)%0A
+%0A%0Aclass TestGhistoryMonthlySimple(IntegrationTest):%0A%0A tasks = %5B%0A dict(description=%22test task%22),%0A dict(description=%22completed task 1%22, status=%22completed%22, end=%22now%22),%0A dict(description=%22completed task 2%22, status=%22completed%22, end=%22now%22),%0A dict(description=%22deleted task%22, status=%22deleted%22),%0A %5D%0A%0A def execute(self):%0A self.command(%22TaskWikiGhistoryMonthly%22)%0A assert self.command(%22:py print vim.current.buffer%22, silent=False).startswith(%22%3Cbuffer ghistory.monthly%22)%0A output = self.read_buffer()%0A%0A header_words = (%22Year%22, %22Month%22, %22Number%22, %22Added%22, %22Completed%22, %22Deleted%22)%0A for word in header_words:%0A assert word in output%5B0%5D%0A%0A legend_words = (%22Legend%22, %22Added%22, %22Completed%22, %22Deleted%22)%0A for word in legend_words:%0A assert re.search(word, output%5B-1%5D, re.IGNORECASE)%0A%0A current_year = local_zone.localize(datetime.now()).year%0A current_month_number = local_zone.localize(datetime.now()).month%0A months = %5B%22January%22, %22February%22, %22March%22, %22April%22,%0A %22May%22, %22June%22, %22July%22, %22August%22,%0A %22September%22, %22October%22, %22November%22, %22December%22%5D%0A%0A assert str(current_year) in '%5Cn'.join(output)%0A assert str(months%5Bcurrent_month_number - 1%5D) in '%5Cn'.join(output)%0A
|
a986397ca1bdc3bdc8894fab8b336803c172b295
|
add settings file for staging (has a database url but no Sentry)
|
txlege84/txlege84/settings/staging.py
|
txlege84/txlege84/settings/staging.py
|
Python
| 0
|
@@ -0,0 +1,1655 @@
+#######################%0A# PRODUCTION SETTINGS #%0A#######################%0A%0Aimport dj_database_url%0A%0Afrom .base import *%0A%0ALOGGING = %7B%0A 'version': 1,%0A 'handlers': %7B%0A 'console':%7B%0A 'level':'DEBUG',%0A 'class':'logging.StreamHandler',%0A %7D,%0A %7D,%0A 'loggers': %7B%0A 'django.request': %7B%0A 'handlers':%5B'console'%5D,%0A 'propagate': True,%0A 'level':'DEBUG',%0A %7D%0A %7D,%0A%7D%0A%0A%0A######################%0A# HOST CONFIGURATION #%0A######################%0A%0A# https://docs.djangoproject.com/en/1.7/ref/settings/#allowed-hosts%0A# https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production%0AALLOWED_HOSTS = %5B'.texastribune.org'%5D #FIXME%0A%0A%0A##########################%0A# DATABASE CONFIGURATION #%0A##########################%0A%0A# https://docs.djangoproject.com/en/1.7/ref/settings/#databases%0A# https://github.com/kennethreitz/dj-database-url%0ADATABASES = %7B%0A 'default': dj_database_url.config()%0A%7D%0A%0A%0A#######################%0A# CACHE CONFIGURATION #%0A#######################%0A%0A# See: https://docs.djangoproject.com/en/1.7/ref/settings/#caches%0ACACHES = %7B%0A 'default': %7B%0A 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',%0A %7D%0A%7D%0A%0A%0A############################%0A# SECRET KEY CONFIGURATION #%0A############################%0A%0A# https://docs.djangoproject.com/en/1.7/ref/settings/#secret-key%0ASECRET_KEY = get_env_setting('SECRET_KEY')%0A%0A%0A################################%0A# DJANGO STORAGE CONFIGURATION #%0A################################%0A%0A# http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html%0A# DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'%0A
|
|
f5ba686196866c78dfeafb34a5f78f5cfc2c50bd
|
Add buildbot.py with required coverage
|
buildbot.py
|
buildbot.py
|
Python
| 0
|
@@ -0,0 +1,239 @@
+%EF%BB%BF#!/usr/bin/env python%0A# encoding: utf-8%0A%0Aproject_name = 'sak'%0A%0Adef configure(options):%0A pass%0A%0Adef build(options):%0A pass%0A%0Adef run_tests(options):%0A pass%0A%0Adef coverage_settings(options):%0A options%5B'required_line_coverage'%5D = 94.9%0A
|
|
7bbf99a60526e1b15aaf7a7fc9f5b7d6889a9efc
|
Create getnotfound.py
|
tools/getnotfound.py
|
tools/getnotfound.py
|
Python
| 0.000001
|
@@ -0,0 +1,2505 @@
+#!/usr/bin/env python%0A%0Afrom __future__ import print_function%0Aimport argparse%0Aimport requests%0Aimport json%0Aimport wget%0Aimport sys%0Aimport os%0A%0A__author__ = %22Vesselin Bontchev %3Cvbontchev@yahoo.com%3E%22%0A__license__ = %22GPL%22%0A__VERSION__ = %221.00%22%0A%0Adef error(e):%0A print(%22Error: %25s.%22 %25 e, file=sys.stderr)%0A sys.exit(-1)%0A%0Adef makeOutputDir(pageNum):%0A outputDir = str(pageNum).zfill(3)%0A try:%0A if (not os.path.exists(outputDir)):%0A os.mkdir(outputDir)%0A except Exception as e:%0A error(e)%0A return outputDir%0A%0Adef downloadTheFiles(jsonData, hashes, elementsPerDir):%0A seen = set()%0A i = 0%0A paginate = False%0A outputDir = %22%22%0A if ((elementsPerDir %3E 0) and (len(jsonData) %3E elementsPerDir)):%0A paginate = True%0A pageNum = 0%0A elementNum = 0%0A outputDir = makeOutputDir(pageNum)%0A for element in jsonData:%0A url = element%5B%22url%22%5D%0A ext = element%5B%22ext%22%5D%0A hash = element%5B%22md5%22%5D.upper()%0A if (hash in hashes and not hash in seen):%0A seen.add(hash)%0A i += 1%0A fileName = hash + %22.%22 + ext%0A if (paginate):%0A fileName = os.path.join(outputDir, fileName)%0A elementNum += 1%0A if (elementNum %3E= elementsPerDir):%0A elementNum = 0%0A pageNum += 1%0A outputDir = makeOutputDir(pageNum)%0A print(%22%5B%22 + str(i) + %22%5D %22 + url + %22 -%3E %22 + fileName, file=sys.stderr)%0A try:%0A outputFile = wget.download(url, out=fileName)%0A except Exception as e:%0A error(e)%0A print(%22%22)%0A%0Aif __name__ == %22__main__%22:%0A parser = argparse.ArgumentParser(version=%22%25(prog)s version %22 + __VERSION__,%0A%09description=%22Downloads suspected malware from Wikileaks.%22)%0A parser.add_argument(%22-e%22, %22--elements%22, type=int, help=%22elements per page%22)%0A parser.add_argument(%22jsonfile%22, help=%22JSON data file%22)%0A parser.add_argument(%22notfoundhashes%22, help=%22file with MD5 hashes of unknown files%22)%0A args = parser.parse_args()%0A elements = args.elements%0A if (elements %3C 1):%0A elements = 0%0A try:%0A with open(args.jsonfile, %22r%22) as contentFile:%0A content = contentFile.read()%0A jsonData = json.loads(content)%0A with open(args.notfoundhashes, %22r%22) as hashFile:%0A hashes = %5Bline.split()%5B0%5D.upper() for line in hashFile%5D%0A except Exception as e:%0A error(e)%0A downloadTheFiles(jsonData, hashes, elements)%0A sys.exit(0)%0A
|
|
1a1e9123313fdedab14700ead90748d9e6182a42
|
Add revision for new boardmoderator columns
|
migrations/versions/da8b38b5bdd5_add_board_moderator_roles.py
|
migrations/versions/da8b38b5bdd5_add_board_moderator_roles.py
|
Python
| 0
|
@@ -0,0 +1,1908 @@
+%22%22%22Add board moderator roles%0A%0ARevision ID: da8b38b5bdd5%0ARevises: 90ac01a2df%0ACreate Date: 2016-05-03 09:32:06.756899%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = 'da8b38b5bdd5'%0Adown_revision = '90ac01a2df'%0Abranch_labels = None%0Adepends_on = None%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0A%0A%0Adef upgrade():%0A op.drop_index(op.f('ix_boardmoderator_board_id'), table_name='boardmoderator')%0A op.drop_index(op.f('ix_boardmoderator_moderator_id'), table_name='boardmoderator')%0A op.drop_table('boardmoderator')%0A%0A op.create_table('boardmoderator',%0A sa.Column('board_id', sa.Integer(), nullable=False),%0A sa.Column('moderator_id', sa.Integer(), nullable=False),%0A sa.Column('roles', postgresql.ARRAY(sa.String()), nullable=False),%0A sa.ForeignKeyConstraint(%5B'board_id'%5D, %5B'board.id'%5D, ),%0A sa.ForeignKeyConstraint(%5B'moderator_id'%5D, %5B'moderator.id'%5D, ),%0A sa.PrimaryKeyConstraint('board_id', 'moderator_id')%0A )%0A op.create_index(op.f('ix_boardmoderator_roles'), 'boardmoderator', %5B'roles'%5D, unique=False)%0A%0A%0Adef downgrade():%0A op.drop_index(op.f('ix_boardmoderator_roles'), table_name='boardmoderator')%0A op.drop_table('boardmoderator')%0A%0A op.create_table('boardmoderator',%0A sa.Column('board_id', sa.Integer(), nullable=True),%0A sa.Column('moderator_id', sa.Integer(), nullable=True),%0A sa.ForeignKeyConstraint(%5B'board_id'%5D, %5B'board.id'%5D, ),%0A sa.ForeignKeyConstraint(%5B'moderator_id'%5D, %5B'moderator.id'%5D, )%0A )%0A op.create_index(op.f('ix_boardmoderator_board_id'), 'boardmoderator', %5B'board_id'%5D, unique=False)%0A op.create_index(op.f('ix_boardmoderator_moderator_id'), 'boardmoderator', %5B'moderator_id'%5D, unique=False)%0A
|
|
4213e9756872cd3a64ca75f374b5bc292e08e3be
|
add scraping script
|
scrapingArticle.py
|
scrapingArticle.py
|
Python
| 0.000001
|
@@ -0,0 +1,1170 @@
+# -*- coding: utf-8 -*-%0A%0Afrom urllib.request import urlopen%0Afrom urllib.error import HTTPError%0Afrom bs4 import BeautifulSoup%0A%0A%0Adef scrapingArticleText(url):%0A %22%22%22%0A %E5%BC%95%E6%95%B0%E3%81%8B%E3%82%89%E5%BE%97%E3%81%9FURL%E3%81%8B%E3%82%89%E3%83%96%E3%83%AD%E3%82%B0%E6%9C%AC%E6%96%87%E3%82%92%E5%8F%96%E5%BE%97%E3%81%97%E3%81%A6%0A %E4%B8%80%E6%96%87%E3%81%9A%E3%81%A4%E5%8C%BA%E5%88%87%E3%81%A3%E3%81%9Fstring%E3%81%AElist%E3%82%92return%E3%81%99%E3%82%8B%0A %22%22%22%0A try:%0A html = urlopen(url)%0A except HTTPError as e:%0A print(e)%0A return None%0A try:%0A soup = BeautifulSoup(html.read(), %22lxml%22)%0A lawArticleText = %22%22%0A%0A date = soup.find('time').string%0A%0A for i in soup.findAll(%22%22, %7B%22class%22: %22articleText%22%7D):%0A lawArticleText += i.get_text()%0A%0A articleText = lawArticleText.replace(u%22%5Cxa0%22, %22%5Cn%22)%0A articleText = articleText.split(%22%5Cn%22)%0A%0A while articleText.count(%22%22) %3E 0:%0A articleText.remove(%22%22)%0A%0A except AttributeError as e:%0A return None%0A%0A return dict(date=date, article=articleText)%0A%0A%0Adef main():%0A url = %22http://ameblo.jp/ogurayui-0815/entry-12145717070.html%22%0A result = scrapingArticleText(url)%0A%0A if result == None:%0A print(%22Can't find the text%22)%0A%0A else:%0A print(result%5B'date'%5D)%0A%0A for i in result%5B'article'%5D:%0A print(i)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
0c5f2c0003ceb1568aa4f6dccce5f6de42b5462e
|
Add a simple monitoring solution
|
scripts/monitor.py
|
scripts/monitor.py
|
Python
| 0.000001
|
@@ -0,0 +1,1452 @@
+#!/usr/bin/python%0A# -*- coding: UTF-8%0A# Copyright: 2014 Tor Hveem %3Cthveem%3E%0A# License: GPL3%0A#%0A# Simple Python script for polling amatyr installation and check latest date%0A#%0A# Usage: python monitor.py %3CAMATYR BASEURL%3E %3CEMAIL RECIPIENT%3E%0A# Check every 5 minute in crontab:%0A# */5 * * * * %3CAMATYRPATH%3E/scripts/monitor.py%0A#%0A%0A%0Aimport urllib2%0Afrom datetime import datetime%0Aimport time%0Aimport sys%0Aimport simplejson%0Afrom email.mime.text import MIMEText%0Afrom subprocess import Popen, PIPE%0Aimport os%0A%0A%0ABASE_DIR = os.path.dirname(os.path.abspath(__file__))%0ASAVEFILENAME = os.path.join(BASE_DIR, '.last')%0A%0Ajson = urllib2.urlopen('http://%25s/api/now'%25(sys.argv%5B1%5D)).read()%0Ajson = simplejson.loads(json)%0A%0Afmt = '%25Y-%25m-%25d %25H:%25M:%25S'%0A%0Ajsontimestamp = json%5B0%5D%5B'datetime'%5D%0Ayrtime = datetime.strptime(jsontimestamp, fmt)%0Anow = datetime.now()%0Atime_d = now - yrtime%0A%0A%0A# Give it an hour%0Aif time_d.total_seconds() %3E 3600:%0A # Check if we alerted for this timestamp before %0A try:%0A oldalert = file(SAVEFILENAME, 'r').read()%0A if oldalert == jsontimestamp:%0A sys.exit(1)%0A except IOError:%0A 'File does not exist'%0A%0A # Save timestamp%0A file(SAVEFILENAME, 'w').write(jsontimestamp)%0A%0A # Alert.%0A msg = MIMEText(%22Please help me.%22)%0A msg%5B%22From%22%5D = %22amatyr@%25s%22 %25BASEURL%0A msg%5B%22To%22%5D = sys.argv%5B2%5D %0A msg%5B%22Subject%22%5D = %22AmatYr stopped %25s ago%22 %25time_d %0A p = Popen(%5B%22/usr/sbin/sendmail%22, %22-t%22%5D, stdin=PIPE)%0A p.communicate(msg.as_string())%0A%0A
|
|
5e20df222456fe17fa78290e8fa08b051a951b38
|
Add events.py
|
octokit/resources/events.py
|
octokit/resources/events.py
|
Python
| 0.000004
|
@@ -0,0 +1,147 @@
+# encoding: utf-8%0A%0A%22%22%22Methods for the Events API%0Ahttp://developer.github.com/v3/activity/events/%0Ahttp://developer.github.com/v3/issues/events/%0A%22%22%22%0A
|
|
3b22994b26db1c224ef0076bf9a031f661953ada
|
create Feed of latest articles on the current site.
|
opps/articles/views/feed.py
|
opps/articles/views/feed.py
|
Python
| 0
|
@@ -0,0 +1,697 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Afrom django.contrib.syndication.views import Feed%0Afrom django.contrib.sites.models import get_current_site%0A%0Afrom opps.articles.models import Article%0A%0A%0Aclass ArticleFeed(Feed):%0A%0A link = %22/RSS%22%0A%0A def __call__(self, request, *args, **kwargs):%0A self.site = get_current_site(request)%0A return super(ArticleFeed, self).__call__(request, *args, **kwargs)%0A%0A def title(self):%0A return %22%7B0%7D's news%22.format(self.site.name)%0A%0A def description(self):%0A return %22Latest news on %7B0%7D's%22.format(self.site.name)%0A%0A def items(self):%0A return Article.objects.filter(site=self.site).order_by(%0A '-date_available')%5B:40%5D%0A
|