commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
7f8c2b4b9991bfa74b486bddae218124fc7c85f4
|
remove hack dbproxy for dev of latest script
|
htdocs/plotting/auto/scripts100/p103.py
|
htdocs/plotting/auto/scripts100/p103.py
|
from pandas.io.sql import read_sql
import psycopg2
from pyiem import network
import sys
import numpy as np
PDICT = {'spring': '1 January - 30 June',
'fall': '1 July - 31 December'}
def get_description():
""" Return a dict describing how to call this plotter """
d = dict()
d['data'] = True
d['description'] = """This plot analyzes the number of steps down in
low temperature during the fall season and the number of steps up in
high temperature during the spring season. These steps are simply having
a newer colder low or warmer high for the season to date period.
"""
d['arguments'] = [
dict(type='station', name='station', default='IA2203',
label='Select Station'),
dict(type='select', name='season', options=PDICT,
label='Select which half of year', default='fall'),
]
return d
def plotter(fdict):
""" Go """
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
pgconn = psycopg2.connect(database='coop', host='iemdb', user='nobody',
port=5555)
station = fdict.get('station', 'IA2203')
season = fdict.get('season', 'fall')
table = "alldata_%s" % (station[:2],)
nt = network.Table("%sCLIMATE" % (station[:2],))
df = read_sql("""
WITH obs as (
SELECT day, year, month, high, low,
case when month > 6 then 'fall' else 'spring' end as season
from """ + table + """ WHERE station = %s),
data as (
SELECT year, season,
max(high) OVER (PARTITION by year, season ORDER by day ASC
ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as mh,
min(low) OVER (PARTITION by year, season ORDER by day ASC
ROWS BETWEEN 183 PRECEDING and CURRENT ROW) as ml
from obs),
lows as (
SELECT distinct year, ml as level, season from data
where season = 'fall'),
highs as (
SELECT distinct year, mh as level, season from data
where season = 'spring')
SELECT year, level, season from lows UNION
SELECT year, level, season from highs
""", pgconn, params=[station])
df2 = df[df['season'] == season]
(fig, ax) = plt.subplots(2, 1)
dyear = df2.groupby(['year']).count()
ax[0].bar(dyear.index, dyear['level'], facecolor='tan', edgecolor='tan')
ax[0].axhline(dyear['level'].mean(), lw=2)
ax[0].set_ylabel("Yearly Events Avg: %.1f" % (dyear['level'].mean(), ))
ax[0].set_xlim(dyear.index.min()-1, dyear.index.max()+1)
title = "%s Steps %s" % (PDICT[season],
"Down" if season == 'fall' else 'Up')
ax[0].set_title("%s [%s]\n%s in Temperature" % (nt.sts[station]['name'],
station, title))
ax[0].grid(True)
ax[1].hist(np.array(df2['level'], 'f'),
bins=np.arange(df2['level'].min(),
df2['level'].max()+1, 2),
normed=True, facecolor='tan')
ax[1].set_ylabel("Probability Density")
ax[1].axvline(32, lw=2)
ax[1].grid(True)
ax[1].set_xlabel("Temperature $^\circ$F, 32 degrees highlighted")
return fig, df
|
Python
| 0
|
@@ -1073,51 +1073,8 @@
ody'
-,%0A port=5555
)%0A%0A
|
4998dc79a5dc70ee3897e9ffd5d24632e87ec8c6
|
set a dtype
|
htdocs/plotting/auto/scripts100/p194.py
|
htdocs/plotting/auto/scripts100/p194.py
|
"""USDM Heatmaps and friends"""
import datetime
import numpy as np
import cartopy.crs as ccrs
from affine import Affine
import pandas as pd
from geopandas import read_postgis
from pyiem.plot import MapPlot
from pyiem.plot.colormaps import stretch_cmap
from pyiem.grid.zs import CachingZonalStats
from pyiem.util import get_autoplot_context, get_dbconn
from pyiem.exceptions import NoDataFound
PDICT = {
"0": "D0: Abnormally Dry",
"1": "D1: Moderate Drought",
"2": "D2: Severe Drought",
"3": "D3: Extreme Drought",
"4": "D4: Exceptional Drought",
}
PDICT2 = {"weeks": "Number of Weeks", "percent": "Percentage of Weeks"}
def get_description():
"""Return a dict describing how to call this plotter"""
desc = dict()
desc["data"] = True
desc["cache"] = 600
desc[
"description"
] = """This application generates a heatmap of the
frequency of a given drought classification. The classification is the
minimal threshold, so if a location is in D3 classification drought, it
would count as D0, D1, D2, and D3 for this analysis. The dates you
specify are rectified to the previous Tuesday on which the USDM analysis
is valid for.
<p><strong>Caution:</strong> This is an unofficial depiction of time
duration of Drought Monitor classfication and due to complexities with
how the grid analysis is done, the exact pixel location is nebulous.
Having said that, it should be close!
"""
today = datetime.date.today()
desc["arguments"] = [
dict(
type="csector",
name="csector",
default="IA",
label="Select state/sector to plot",
),
dict(
type="date",
name="sdate",
default="%s/01/01" % (today.year,),
label="Start Date:",
min="2000/01/04",
max=today.strftime("%Y/%m/%d"),
),
dict(
type="date",
name="edate",
default=today.strftime("%Y/%m/%d"),
label="End Date:",
min="2000/01/04",
max=today.strftime("%Y/%m/%d"),
),
dict(
type="select",
name="d",
default="0",
options=PDICT,
label="Select Drought Classification (at and above counted):",
),
dict(
type="select",
name="w",
default="percent",
options=PDICT2,
label="How to express time for plot:",
),
dict(type="cmap", name="cmap", default="plasma", label="Color Ramp:"),
]
return desc
def make_tuesday(date):
"""Make sure we back up to a tuesday"""
offset = (date.weekday() - 1) % 7
return date - datetime.timedelta(days=offset)
def plotter(fdict):
"""Go"""
ctx = get_autoplot_context(fdict, get_description())
csector = ctx["csector"]
sdate = make_tuesday(ctx["sdate"])
edate = make_tuesday(ctx["edate"])
dlevel = ctx["d"]
griddelta = 0.1
mp = MapPlot(
sector=("state" if len(csector) == 2 else csector),
state=ctx["csector"],
title=('%s at or above "%s" %s - %s')
% (
PDICT2[ctx["w"]],
PDICT[dlevel],
sdate.strftime("%b %-d, %Y"),
edate.strftime("%b %-d, %Y"),
),
subtitle=(
"based on weekly US Drought Monitor Analysis, "
r"%.2f$^\circ$ grid analysis"
)
% (griddelta,),
continentalcolor="white",
titlefontsize=14,
nocaption=True,
twitter=True,
)
# compute the affine
(west, east, south, north) = mp.ax.get_extent(ccrs.PlateCarree())
raster = np.zeros(
(int((north - south) / griddelta), int((east - west) / griddelta))
)
lons = np.arange(raster.shape[1]) * griddelta + west
lats = np.arange(0, 0 - raster.shape[0], -1) * griddelta + north
lats = lats[::-1]
affine = Affine(griddelta, 0.0, west, 0.0, 0 - griddelta, north)
# get the geopandas data
pgconn = get_dbconn("postgis")
df = read_postgis(
"""
with d as (
select valid, (ST_Dump(st_simplify(geom, 0.01))).geom from usdm where
valid >= %s and valid <= %s and dm >= %s and
ST_Intersects(geom, ST_GeomFromEWKT('SRID=4326;POLYGON((%s %s, %s %s,
%s %s, %s %s, %s %s))'))
)
select valid, st_collect(geom) as the_geom from d GROUP by valid
""",
pgconn,
params=(
sdate,
edate,
dlevel,
west,
south,
west,
north,
east,
north,
east,
south,
west,
south,
),
geom_col="the_geom",
)
if df.empty:
raise NoDataFound("No Data Found, sorry!")
# loop over the cached stats
czs = CachingZonalStats(affine)
czs.compute_gridnav(df["the_geom"], raster)
for nav in czs.gridnav:
if nav is None:
continue
grid = np.ones((nav.ysz, nav.xsz))
grid[nav.mask] = 0.0
jslice = slice(nav.y0, nav.y0 + nav.ysz)
islice = slice(nav.x0, nav.x0 + nav.xsz)
raster[jslice, islice] += grid
maxval = 10 if np.max(raster) < 11 else np.max(raster)
ramp = np.linspace(1, maxval + 1, 11, dtype="i")
if ctx["w"] == "percent":
ramp = np.arange(0, 101, 10)
ramp[0] = 0.01
ramp[-1] = 100.1
# we add one since we are rectified to tuesdays, so we have an extra
# week in there
raster = raster / ((edate - sdate).days / 7.0 + 1.0) * 100.0
# plot
cmap = stretch_cmap(ctx["cmap"], ramp)
cmap.set_under("white")
cmap.set_bad("white")
mp.pcolormesh(
lons,
lats,
np.flipud(raster),
ramp,
cmap=cmap,
units="count" if ctx["w"] == "weeks" else "Percent",
)
if len(csector) == 2:
mp.drawcounties()
mp.drawcities()
rows = []
for j in range(raster.shape[0]):
for i in range(raster.shape[1]):
rows.append(dict(lon=lons[i], lat=lats[j], value=raster[j, i]))
return mp.fig, pd.DataFrame(rows)
if __name__ == "__main__":
fig, _df = plotter(dict())
fig.savefig("/tmp/test.png")
|
Python
| 0.00006
|
@@ -5438,16 +5438,29 @@
101, 10
+, dtype=float
)%0A
@@ -6289,22 +6289,32 @@
plotter(
-dict()
+%7B%22w%22: %22percent%22%7D
)%0A fi
|
c729d72fc6d31af4d6a2567cc705c78d42bdb54e
|
Add new example for training new entity types
|
examples/training/train_new_entity_type.py
|
examples/training/train_new_entity_type.py
|
from __future__ import unicode_literals, print_function
import json
import pathlib
import random
import spacy
from spacy.pipeline import EntityRecognizer
from spacy.gold import GoldParse
from spacy.tagger import Tagger
try:
unicode
except:
unicode = str
def train_ner(nlp, train_data, output_dir):
# Add new words to vocab.
for raw_text, _ in train_data:
doc = nlp.make_doc(raw_text)
for word in doc:
_ = nlp.vocab[word.orth]
for itn in range(20):
random.shuffle(train_data)
for raw_text, entity_offsets in train_data:
gold = GoldParse(doc, entities=entity_offsets)
doc = nlp.make_doc(raw_text)
nlp.tagger(doc)
loss = nlp.entity.update(doc, gold)
nlp.save_to_directory(output_dir)
#nlp.end_training(output_dir)
def main(model_name, output_directory=None):
nlp = spacy.load(model_name)
train_data = [
(
"Horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')],
),
(
"horses are too tall and they pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"horses pretend to care about your feelings",
[(0, 6, 'ANIMAL')]
),
(
"they pretend to care about your feelings, those horses",
[(48, 54, 'ANIMAL')]
)
]
nlp.entity.add_label('ANIMAL')
if output_directory is not None:
output_directory = pathlib.Path(output_directory)
ner = train_ner(nlp, train_data, output_directory)
doc = nlp('Do you like horses?')
for ent in doc.ents:
print(ent.label_, ent.text)
nlp2 = spacy.load('en', path=output_directory)
nlp2.entity.add_label('ANIMAL')
doc2 = nlp2('Do you like horses?')
for ent in doc2.ents:
print(ent.label_, ent.text)
if __name__ == '__main__':
import plac
plac.call(main)
|
Python
| 0.000001
|
@@ -771,59 +771,48 @@
nlp.
-save_to_directory(output_dir
+end_training(
)%0A
-#
nlp.
-end_training
+save_to_directory
(out
|
4fb7f37e927fa90a34053ae51c1bb061b127a909
|
Add another uvmap generator.
|
a500/uvgen.py
|
a500/uvgen.py
|
#!/usr/bin/env python
from math import floor, atan2, cos, sin, sqrt
from array import array
def frpart(x):
return x - floor(x)
def lerp(lo, hi, step):
return lo + (hi - lo) * step
def dist(x1, y1, x2, y2):
dx = x2 - x1
dy = y2 - y1
return sqrt(dx * dx + dy * dy)
def generate(width, height, fn):
uvmap = array("H")
for j in range(height):
for i in range(width):
x = lerp(-1.0, 1.0, float(i) / width)
y = lerp(-1.0, 1.0, float(j) / height)
(u, v) = fn(x, y)
u = int(u * 256) & 127
v = int(v * 256) & 127
uvmap.append(u * 128 + v)
return uvmap
def scramble(uvmap):
out = array("H")
i = 0
while i < len(uvmap):
out.append(uvmap[i + 0])
out.append(uvmap[i + 1])
out.append(uvmap[i + 4])
out.append(uvmap[i + 5])
out.append(uvmap[i + 2])
out.append(uvmap[i + 3])
out.append(uvmap[i + 6])
out.append(uvmap[i + 7])
i += 8
out.byteswap()
return out
def FancyEye(x, y):
a = atan2(x, y)
r = dist(x, y, 0.0, 0.0)
if r == 0:
return (0, 0)
u = 0.04 * y + 0.06 * cos(a * 3.0) / r
v = 0.04 * x + 0.06 * sin(a * 3.0) / r
return (u, v)
if __name__ == "__main__":
with open("data/uvmap.bin", "w") as f:
uvmap = generate(160, 100, FancyEye)
scramble(uvmap).tofile(f)
|
Python
| 0
|
@@ -1156,16 +1156,186 @@
u, v)%0A%0A%0A
+def Anamorphosis(x, y):%0A a = atan2(x, y)%0A r = dist(x, y, 0.0, 0.0)%0A%0A if r == 0:%0A return (0, 0)%0A%0A u = cos(a) / (3.0 * r)%0A v = sin(a) / (3.0 * r)%0A%0A return (u, v)%0A%0A
if __nam
|
64eb2f165f5eaeca52baadc4bc9135f771c19cda
|
introduce moving averages
|
daemon19.py
|
daemon19.py
|
#!/usr/bin/env python
# Based on previous work by
# Charles Menguy (see: http://stackoverflow.com/questions/10217067/implementing-a-full-python-unix-style-daemon-process)
# and Sander Marechal (see: http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python/)
# Adapted by M.Hendrix [2015]
# daemon19.py measures the temperature of the diskarray.
import syslog, traceback
import os, sys, time, math, commands
from libdaemon import Daemon
from libsmart2 import SmartDisk
# BEWARE
# The disks identified here as `sda`, `sdb` etc. may not necessarily
# be called `/dev/sda`, `/dev/sdb` etc. on the system!!
sda = SmartDisk("wwn-0x7914297948508409858x")
sdb = SmartDisk("wwn-0x1138954418312597505x")
sdc = SmartDisk("wwn-0x4891478331354402817x")
sdd = SmartDisk("wwn-0x2556643098891800577x")
sde = SmartDisk("wwn-0x13230455334254301185x")
#sdf
#sdg
DEBUG = False
class MyDaemon(Daemon):
def run(self):
sampleptr = 0
samples = 1
datapoints = 7
data = [[None]*datapoints for _ in range(samples)]
sampleTime = 5*60
cycleTime = samples * sampleTime
# sync to whole minute
waitTime = (cycleTime + sampleTime) - (time.time() % cycleTime)
if DEBUG:
print "NOT waiting {0} s.".format(waitTime)
else:
time.sleep(waitTime)
while True:
try:
startTime = time.time()
result = do_work().split(',')
if DEBUG: print result
data[sampleptr] = map(float, result)
# report sample average
sampleptr = sampleptr + 1
if (sampleptr == samples):
somma = map(sum,zip(*data))
averages = [format(s / samples, '.3f') for s in somma]
if DEBUG:print averages
do_report(averages)
sampleptr = 0
waitTime = sampleTime - (time.time() - startTime) - (startTime%sampleTime)
if (waitTime > 0):
if DEBUG:print "Waiting {0} s".format(int(waitTime))
time.sleep(waitTime)
except Exception as e:
if DEBUG:
print("Unexpected error:")
print e.message
syslog.syslog(e.__doc__)
syslog_trace(traceback.format_exc())
raise
def syslog_trace(trace):
'''Log a python stack trace to syslog'''
log_lines = trace.split('\n')
for line in log_lines:
if len(line):
syslog.syslog(line)
def do_work():
# 5 datapoints gathered here
#
sda.smart()
sdb.smart()
sdc.smart()
sdd.smart()
sde.smart()
#sdf
#sdg
# disktemperature
Tsda=sda.getdata('194')
Tsdb=sdb.getdata('194')
Tsdc=sdc.getdata('194')
Tsdd=sdd.getdata('194')
Tsde=sde.getdata('194')
Tsdf=0
Tsdg=0
if DEBUG: print Tsda, Tsdb, Tsdc, Tsdd, Tsde
return '{0}, {1}, {2}, {3}, {4}'.format(Tsda, Tsdb, Tsdc, Tsdd, Tsde)
def do_report(result):
# Get the time and date in human-readable form and UN*X-epoch...
outDate = commands.getoutput("date '+%F %H:%M:%S, %s'")
result = ', '.join(map(str, result))
flock = '/tmp/ubundiagd/19.lock'
lock(flock)
f = file('/tmp/ubundiagd/19-tempdisk.csv', 'a')
# write out a NaN for disks sdf and sdg
f.write('{0}, {1}, NaN, NaN\n'.format(outDate, result) )
f.close()
unlock(flock)
return
def lock(fname):
open(fname, 'a').close()
def unlock(fname):
if os.path.isfile(fname):
os.remove(fname)
if __name__ == "__main__":
daemon = MyDaemon('/tmp/ubundiagd/19.pid')
if len(sys.argv) == 2:
if 'start' == sys.argv[1]:
daemon.start()
elif 'stop' == sys.argv[1]:
daemon.stop()
elif 'restart' == sys.argv[1]:
daemon.restart()
elif 'foreground' == sys.argv[1]:
# assist with debugging.
print "Debug-mode started. Use <Ctrl>+C to stop."
DEBUG = True
if DEBUG:
logtext = "Daemon logging is ON"
syslog.syslog(syslog.LOG_DEBUG, logtext)
daemon.run()
else:
print "Unknown command"
sys.exit(2)
sys.exit(0)
else:
print "usage: %s start|stop|restart|foreground" % sys.argv[0]
sys.exit(2)
|
Python
| 0
|
@@ -942,92 +942,114 @@
-samp
+cyc
les =
-1
+2
%0A
-datapoints = 7
+SamplesPerCycle = 1
%0A
-data = %5B%5BNone%5D*datapoints for _ in range(samples)
+samples = SamplesPerCycle * cycles%0A%0A datapoints = 4%0A data = %5B
%5D%0A%0A
@@ -1448,22 +1448,16 @@
data
-%5Bsampleptr%5D =
+.append(
map(
@@ -1470,16 +1470,17 @@
result)
+)
%0A
@@ -1484,31 +1484,44 @@
-# report sample average
+if (len(data) %3E samples):data.pop(0)
%0A
@@ -1550,16 +1550,49 @@
eptr + 1
+%0A%0A # report sample average
%0A
@@ -1610,18 +1610,30 @@
ptr
-== s
+%25 S
amples
+PerCycle == 0
):%0A
@@ -1702,23 +1702,25 @@
mat(s /
-samples
+len(data)
, '.3f')
@@ -1800,16 +1800,55 @@
erages)%0A
+ if (sampleptr == samples):%0A
|
3f74de582c48f02b5b085f11fe76dbf87189db8b
|
add passing validation for 2012 prez general md contest
|
openelex/us/md/validate.py
|
openelex/us/md/validate.py
|
from openelex.models import Contest, Candidate, Result
def validate_unique_contests():
"""Count of contests should match unique set of election ids"""
elec_ids_count = len(Contest.objects.filter(state='MD').distinct('election_id'))
contest_count = Contest.objects.filter(state='MD').count()
try:
assert elec_ids_count == contest_count
except AssertionError:
raise AssertionError("MD - mismatch between contest count (%s) and election id count (%s)" % (contest_count, elec_ids_count))
def validate_unique_candidates():
#for each election date
#count of unique set of candidates should match Candidate.objects.count()
pass
def validate_name_parsing():
#Check assortment of names
#Check that Other was skipped
pass
|
Python
| 0
|
@@ -53,129 +53,191 @@
lt%0A%0A
-def validate_unique_contests():%0A %22%22%22Count of contests should match unique set of election ids%22%22%22%0A elec_ids_
+#TODO: Genericize this to check unique contests for all elections%0Adef validate_unique_prez_2012_general():%0A %22%22%22Should only be a single contest for 2012 prez general%22%22%22%0A
count =
len(
@@ -236,12 +236,8 @@
t =
-len(
Cont
@@ -259,158 +259,204 @@
ter(
-state='MD').distinct('election_id'))%0A contest_count = Contest.objects.filter(state='MD').count()%0A try:%0A assert elec_ids_count == contest_
+election_id='md-2012-11-06-general', slug='president-vice-pres').count()%0A expected = 1%0A try:%0A assert count == expected%0A print %22PASS: %25s general prez election found for 2012%22 %25
coun
@@ -519,13 +519,8 @@
r(%22M
-D - m
isma
@@ -531,16 +531,34 @@
between
+2012 general prez
contest
@@ -577,17 +577,14 @@
nd e
-lection i
+xpecte
d co
@@ -602,39 +602,26 @@
(co
-ntest_count, elec_ids_count
+unt, expected
))%0A%0A
+#
def
@@ -752,24 +752,25 @@
cts.count()%0A
+#
pass%0A%0Ade
@@ -767,16 +767,17 @@
pass%0A%0A
+#
def vali
@@ -862,16 +862,17 @@
skipped%0A
+#
pass
|
1a546e6f6ca95772f0d7dbc2792477becbb8ea63
|
use f-strings in feature_minchainwork.py
|
test/functional/feature_minchainwork.py
|
test/functional/feature_minchainwork.py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMinimumChainWork on command line.
Nodes don't consider themselves out of "initial block download" until
their active chain has more work than nMinimumChainWork.
Nodes don't download blocks from a peer unless the peer's best known block
has more work than nMinimumChainWork.
While in initial block download, nodes won't relay blocks to their peers, so
test that this parameter functions as intended by verifying that block relay
only succeeds past a given node once its nMinimumChainWork has been exceeded.
"""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
# 2 hashes required per regtest block (with no difficulty adjustment)
REGTEST_WORK_PER_BLOCK = 2
class MinimumChainWorkTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]]
self.node_min_work = [0, 101, 101]
def setup_network(self):
# This test relies on the chain setup being:
# node0 <- node1 <- node2
# Before leaving IBD, nodes prefer to download blocks from outbound
# peers, so ensure that we're mining on an outbound peer and testing
# block relay to inbound peers.
self.setup_nodes()
for i in range(self.num_nodes-1):
self.connect_nodes(i+1, i)
def run_test(self):
# Start building a chain on node0. node2 shouldn't be able to sync until node1's
# minchainwork is exceeded
starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
self.log.info("Testing relay across node %d (minChainWork = %d)", 1, self.node_min_work[1])
starting_blockcount = self.nodes[2].getblockcount()
num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
self.log.info("Generating %d blocks on node0", num_blocks_to_generate)
hashes = self.nodes[0].generatetoaddress(num_blocks_to_generate,
self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork'])
# Sleep a few seconds and verify that node2 didn't get any new blocks
# or headers. We sleep, rather than sync_blocks(node0, node1) because
# it's reasonable either way for node1 to get the blocks, or not get
# them (since they're below node1's minchainwork).
time.sleep(3)
self.log.info("Verifying node 2 has no more blocks than before")
self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
# Node2 shouldn't have any new headers yet, because node1 should not
# have relayed anything.
assert_equal(len(self.nodes[2].getchaintips()), 1)
assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash()
assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
self.log.info("Generating one more block")
self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
self.log.info("Verifying nodes are all synced")
# Because nodes in regtest are all manual connections (eg using
# addnode), node1 should not have disconnected node0. If not for that,
# we'd expect node1 to have disconnected node0 for serving an
# insufficient work chain, in which case we'd need to reconnect them to
# continue the test.
self.sync_all()
self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
if __name__ == '__main__':
MinimumChainWorkTest().main()
|
Python
| 0.000011
|
@@ -1911,16 +1911,17 @@
og.info(
+f
%22Testing
@@ -1939,18 +1939,17 @@
ss node
-%25d
+1
(minCha
@@ -1961,17 +1961,9 @@
k =
-%25d)%22, 1,
+%7B
self
@@ -1979,16 +1979,19 @@
_work%5B1%5D
+%7D)%22
)%0A%0A
@@ -2169,32 +2169,33 @@
self.log.info(
+f
%22Generating %25d b
@@ -2194,29 +2194,9 @@
ing
-%25d blocks on node0%22,
+%7B
num_
@@ -2213,16 +2213,34 @@
generate
+%7D blocks on node0%22
)%0A
@@ -2430,16 +2430,17 @@
og.info(
+f
%22Node0 c
@@ -2458,21 +2458,17 @@
n work:
-%25s%22,
+%7B
self.nod
@@ -2512,16 +2512,18 @@
inwork'%5D
+%7D%22
)%0A%0A
@@ -2920,32 +2920,33 @@
self.log.info(
+f
%22Blockcounts: %25s
@@ -2935,37 +2935,33 @@
(f%22Blockcounts:
-%25s%22,
+%7B
%5Bn.getblockcount
@@ -2983,16 +2983,18 @@
f.nodes%5D
+%7D%22
)%0A
@@ -3963,16 +3963,17 @@
og.info(
+f
%22Blockco
@@ -3982,13 +3982,9 @@
ts:
-%25s%22,
+%7B
%5Bn.g
@@ -4018,16 +4018,18 @@
f.nodes%5D
+%7D%22
)%0A%0Aif __
|
df0957a71251c96d363c8457befde6b33da8a8fb
|
test teams view : united messages are applied
|
buildbuild/teams/views.py
|
buildbuild/teams/views.py
|
from django.shortcuts import render
from django.http import HttpResponseRedirect,request
from django.http import HttpResponse
from django.views.generic.base import RedirectView
from django.views.generic.edit import FormView
from django.views.generic.list import ListView
from django.views.generic import DetailView
from django.contrib.auth import authenticate
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.db import IntegrityError, OperationalError
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from teams.forms import MakeTeamForm
from teams.models import Team, Membership, WaitList
from users.models import User
from projects.models import Project
from django.shortcuts import render
from teams.models import AlreadyMemberError, AlreadyWaitMemberError
# Warning : create team operation from view automatically make MtoM relationship
def join_team(request, team_id):
already_member = "the user is already team member"
already_wait_member = "the user already sent a request to join that team"
request_join_team = "the request to join the team sended"
wait_member = request.user
team = Team.objects.get(id=team_id)
try:
WaitList.objects.create_wait_list(team, wait_member)
except AlreadyMemberError:
messages.error(request, already_member)
return HttpResponseRedirect(reverse("home"))
except AlreadyWaitMemberError:
messages.error(request, already_wait_member)
return HttpResponseRedirect(reverse("home"))
messages.success(request, request_join_team)
return HttpResponseRedirect(reverse("home"))
def search_team(request):
search_team = request.GET['search_team']
# Case insensitive filtering
teams = Team.objects.filter(name__icontains = search_team)
return render(
request,
"teams/search_team_result.html",
{ "teams" : teams },
)
class MakeTeamView(FormView):
template_name = "teams/maketeam.html"
form_class = MakeTeamForm
def form_valid(self, form):
team_invalid = "ERROR : invalid team name"
team_already_exist = "ERROR : The team name already exists"
team_make_success = "Team created successfully"
# name field required
name = self.request.POST["teams_team_name"]
# valid team name test
try:
Team.objects.validate_name(name)
except ValidationError:
messages.error(self.request, team_invalid)
return HttpResponseRedirect(reverse("teams:maketeam"))
# unique team test
try:
Team.objects.get(name = name)
except ObjectDoesNotExist:
pass
else:
messages.error(self.request, team_already_exist)
return HttpResponseRedirect(reverse("teams:maketeam"))
# Login check is programmed in buildbuild/urls.py
# link user to team using Membership
user = self.request.user
team = Team.objects.create_team(name)
membership = Membership.objects.create_membership(
team = team,
user = user,
)
membership.is_admin = True
membership.save()
messages.success(self.request, team_make_success)
return HttpResponseRedirect(reverse("home"))
|
Python
| 0
|
@@ -824,16 +824,50 @@
berError
+%0Afrom buildbuild import custom_msg
%0A%0A# Warn
@@ -979,204 +979,8 @@
d):%0A
- already_member = %22the user is already team member%22%0A already_wait_member = %22the user already sent a request to join that team%22%0A request_join_team = %22the request to join the team sended%22%0A%0A
@@ -1171,32 +1171,43 @@
.error(request,
+custom_msg.
already_member)%0A
@@ -1326,16 +1326,27 @@
equest,
+custom_msg.
already_
@@ -1442,16 +1442,27 @@
equest,
+custom_msg.
request_
@@ -1988,184 +1988,8 @@
m):%0A
- team_invalid = %22ERROR : invalid team name%22%0A team_already_exist = %22ERROR : The team name already exists%22%0A team_make_success = %22Team created successfully%22%0A%0A
@@ -2234,24 +2234,35 @@
lf.request,
+custom_msg.
team_invalid
@@ -2517,24 +2517,35 @@
lf.request,
+custom_msg.
team_already
@@ -3034,24 +3034,35 @@
lf.request,
+custom_msg.
team_make_su
|
e0749f69d9a75ddcaa2ca927ac72ce75f0c75250
|
Update env_detect.py
|
device/src/env_detect.py
|
device/src/env_detect.py
|
#!/usr/bin/env python
#Weather station.
#detect environment information from several sensors:
#water leverl, air humity, raining, air temperature, light sensitivity.
#Air temperature&humity sensor: DHT11.
#Add dht.py in micropython/stmhal/modules, refer to esp8266
#Compile the DHT in firmware, then use DHT lib in application.
#Raining, same to soil moisture.
#Raining ? DO value: 0
from pyb import Pin
p_in = Pin('Y12', Pin.IN, Pin.PULL_UP)
p_in.value
adc = pyb.ADC(Pin('Y11')) # create an analog object from a pin
adc = pyb.ADC(pyb.Pin.board.Y11)
val = adc.read() # read an analog value
#-----------------------------------------#
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address) <--> None
#Command list:
#0x00 power off
#0x01 power on
#0x07 reset
#0x10 Continuously H-Resolution Mode, 1lx resolution, measurement time is typically 120ms
#0x11 0.5lx, 120ms
#0x13 4lx, 16ms
from pyb import I2C
i2c = I2C(1, I2C.MASTER) # create and init as a master
i2c.send(0x10, 0x23) # send 3 bytes to slave with address 0x23
i2c.is_ready(0x23) # check if slave 0x23 is ready
i2c.scan() # scan for slaves on the bus, returning
i2c.mem_read(3, 0x23, 2) # read 3 bytes from memory of slave 0x23,
# starting at address 2 in the slave
|
Python
| 0.000001
|
@@ -1,25 +1,260 @@
#
-!/usr/bin/env python
+--------------------------------------%0A# ____ ____%0A# / __/__ / __/%0A# _%5C %5C/ _ %5C_%5C %5C%0A# /___/ .__/___/%0A# /_/%0A#%0A# dev_detect.py%0A# Environment detection.%0A#%0A# Author : Arvin%0A# Date : 15/09/2017%0A#--------------------------------------%0A
%0A#We
|
f70bf550de5476d23bdd1e5fc375d9c703990bce
|
Drop version info from our sbt builds
|
builds/build_sbt_image.py
|
builds/build_sbt_image.py
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
"""
Build a Docker image for one of our sbt applications.
Usage:
build_sbt_image.py --project=<PROJECT> [--version=<VERSION>] [--env=<BUILD_ENV>]
build_sbt_image.py -h | --help
Options:
-h --help Show this screen.
--project=<PROJECT> Name of the sbt project (e.g. api, transformer)
--version=<VERSION> Version to use in the release ID
--env=<BUILD_ENV> Build environment (dev, prod, etc.)
"""
import os
import shutil
import subprocess
import docopt
from tooling import (
write_release_id,
CURRENT_COMMIT,
ROOT,
PLATFORM_ENV
)
DEFAULT_VERSION = '0.0.1'
if __name__ == '__main__':
args = docopt.docopt(__doc__)
# Read arguments from docopt
project = args['--project']
version = args['--version'] or DEFAULT_VERSION
build_env = args['--env'] or PLATFORM_ENV
print('*** Building sbt Docker image for %s' % project)
# Construct the release ID and the tag
release_id = '%s-%s_%s' % (version, CURRENT_COMMIT, build_env)
tag = '%s:%s' % (project, release_id)
print('*** Image will be tagged %s' % tag)
print('*** Building the Scala binaries')
subprocess.check_call(['sbt', 'project %s' % project, 'stage'])
source_target = os.path.join(ROOT, project, 'target', 'universal', 'stage')
docker_root = os.path.join(ROOT, 'docker', 'scala_service')
dest_target = os.path.join(docker_root, 'target', project)
print('*** Copying build artefacts to %s from %s' % (dest_target, source_target))
shutil.rmtree(dest_target, ignore_errors=True)
shutil.copytree(source_target, dest_target)
print('*** Building the new Docker image')
print('*** Dockerfile is at %s' % docker_root)
subprocess.check_call([
'docker', 'build',
'--file', os.path.join(docker_root, 'Dockerfile'),
'--tag', tag,
'--build-arg', 'project=%s' % project,
docker_root
])
print('*** Saving the release ID to .releases')
write_release_id(project=project, release_id=release_id)
|
Python
| 0
|
@@ -152,30 +152,8 @@
ECT%3E
- %5B--version=%3CVERSION%3E%5D
%5B--
@@ -169,16 +169,16 @@
D_ENV%3E%5D%0A
+
build_
@@ -340,70 +340,8 @@
er)%0A
- --version=%3CVERSION%3E Version to use in the release ID%0A
--
@@ -564,36 +564,8 @@
)%0A%0A%0A
-DEFAULT_VERSION = '0.0.1'%0A%0A%0A
if _
@@ -691,59 +691,8 @@
t'%5D%0A
- version = args%5B'--version'%5D or DEFAULT_VERSION%0A
@@ -862,19 +862,16 @@
'%25s
--%25s
_%25s' %25 (
vers
@@ -870,17 +870,8 @@
%25 (
-version,
CURR
|
a37a2818fb2d61dd4a1bba5358d6c22f166132e6
|
Fix a local rule reference
|
test/mac/archs/test-archs-multiarch.gyp
|
test/mac/archs/test-archs-multiarch.gyp
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'static_32_64',
'type': 'static_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'shared_32_64',
'type': 'shared_library',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'module_32_64',
'type': 'loadable_module',
'sources': [ 'my_file.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64',
'type': 'executable',
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
{
'target_name': 'exe_32_64_bundle',
'product_name': 'Test App',
'type': 'executable',
'mac_bundle': 1,
'sources': [ 'empty_main.cc' ],
'xcode_settings': {
'ARCHS': [ 'i386', 'x86_64' ],
},
},
# This only needs to compile.
{
'target_name': 'precompiled_prefix_header_mm_32_64',
'type': 'shared_library',
'sources': [ 'file.mm', ],
'xcode_settings': {
'GCC_PREFIX_HEADER': 'header.h',
'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
},
},
# This does not compile but should not cause generation errors.
{
'target_name': 'exe_32_64_no_sources',
'type': 'executable',
'dependencies': [
':static_32_64',
],
'sources': [],
'xcode_settings': {
'ARCHS': ['i386', 'x86_64'],
},
},
]
}
|
Python
| 0.999981
|
@@ -1653,17 +1653,16 @@
'
-:
static_3
|
fcee6aca22007c3ff64b4d87de3f1772b3c7eb99
|
add encoding, re: "non-ascii character" syntax error
|
kdtree/__init__.py
|
kdtree/__init__.py
|
"""A Python implemntation of a kd-tree
This package provides a simple implementation of a kd-tree in Python.
https://en.wikipedia.org/wiki/K-d_tree
"""
__author__ = 'Stefan Kögl <stefan@skoegl.net>'
__version__ = '0.1'
__website__ = 'https://github.com/stefankoegl/kdtree'
__license__ = 'GNU General Public License v3 or later'
|
Python
| 0.000001
|
@@ -1,8 +1,35 @@
+# -*- coding: latin-1 -*-%0A%0A
%22%22%22A Pyt
|
943a312c152ec6adeeff1e7a8b5b4b46334dfc0d
|
Fix dims in regression tf example
|
examples/regression_offset_tensorflow.py
|
examples/regression_offset_tensorflow.py
|
import tensorflow as tf
import numpy as np
from pymanopt import Problem
from pymanopt.solvers import TrustRegions
from pymanopt.manifolds import Euclidean, Product
if __name__ == "__main__":
# Generate random data
X = np.random.randn(3, 100).astype('float32')
Y = (X[0:1, :] - 2*X[1:2, :] + np.random.randn(1, 100) + 5).astype(
'float32')
# Cost function is the sqaured test error
w = tf.Variable(tf.zeros([3, 1]))
b = tf.Variable(tf.zeros([1]))
cost = tf.reduce_mean(tf.square(Y - tf.matmul(tf.transpose(w), X) - b))
# first-order, second-order
solver = TrustRegions()
# R^3 x R^1
manifold = Product([Euclidean(3, 1), Euclidean(1, 1)])
# Solve the problem with pymanopt
problem = Problem(manifold=manifold, cost=cost, arg=[w, b], verbosity=0)
wopt = solver.solve(problem)
print('Weights found by pymanopt (top) / '
'closed form solution (bottom)')
print(wopt[0].T)
print(wopt[1])
X1 = np.concatenate((X, np.ones((1, 100))), axis=0)
wclosed = np.linalg.inv(X1.dot(X1.T)).dot(X1).dot(Y.T)
print(wclosed[0:3].T)
print(wclosed[3])
|
Python
| 0.000006
|
@@ -469,16 +469,19 @@
.zeros(%5B
+1,
1%5D))%0A
|
4e4fa994f89f70ca04fa96c23febca8c360698c2
|
make datepublisher save aware datetimes
|
feincms/module/extensions/datepublisher.py
|
feincms/module/extensions/datepublisher.py
|
"""
Allows setting a date range for when the page is active. Modifies the active()
manager method so that only pages inside the given range are used in the default
views and the template tags.
Depends on the page class having a "active_filters" list that will be used by
the page's manager to determine which entries are to be considered active.
"""
# ------------------------------------------------------------------------
from datetime import datetime
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
# ------------------------------------------------------------------------
def format_date(d, if_none=''):
"""
Format a date in a nice human readable way: Omit the year if it's the current
year. Also return a default value if no date is passed in.
"""
if d is None: return if_none
now = timezone.now()
fmt = (d.year == now.year) and '%d.%m' or '%d.%m.%Y'
return d.strftime(fmt)
def latest_children(self):
return self.get_children().order_by('-publication_date')
# ------------------------------------------------------------------------
def granular_now(n=None):
"""
A datetime.now look-alike that returns times rounded to a five minute
boundary. This helps the backend database to optimize/reuse/cache its
queries by not creating a brand new query each time.
Also useful if you are using johnny-cache or a similar queryset cache.
"""
if n is None:
n = timezone.now()
return datetime(n.year, n.month, n.day, n.hour, (n.minute // 5) * 5)
# ------------------------------------------------------------------------
def register(cls, admin_cls):
cls.add_to_class('publication_date', models.DateTimeField(_('publication date'),
default=granular_now))
cls.add_to_class('publication_end_date', models.DateTimeField(_('publication end date'),
blank=True, null=True,
help_text=_('Leave empty if the entry should stay active forever.')))
cls.add_to_class('latest_children', latest_children)
# Patch in rounding the pub and pub_end dates on save
orig_save = cls.save
def granular_save(obj, *args, **kwargs):
if obj.publication_date:
obj.publication_date = granular_now(obj.publication_date)
if obj.publication_end_date:
obj.publication_end_date = granular_now(obj.publication_end_date)
orig_save(obj, *args, **kwargs)
cls.save = granular_save
# Append publication date active check
if hasattr(cls._default_manager, 'add_to_active_filters'):
cls._default_manager.add_to_active_filters(
Q(publication_date__lte=granular_now) &
(Q(publication_end_date__isnull=True) | Q(publication_end_date__gt=granular_now)),
key='datepublisher')
def datepublisher_admin(self, page):
return u'%s – %s' % (
format_date(page.publication_date),
format_date(page.publication_end_date, '∞'),
)
datepublisher_admin.allow_tags = True
datepublisher_admin.short_description = _('visible from - to')
admin_cls.datepublisher_admin = datepublisher_admin
try:
pos = admin_cls.list_display.index('is_visible_admin')
except ValueError:
pos = len(admin_cls.list_display)
admin_cls.list_display.insert(pos + 1, 'datepublisher_admin')
admin_cls.add_extension_options(_('Date-based publishing'), {
'fields': ('publication_date', 'publication_end_date'),
})
# ------------------------------------------------------------------------
|
Python
| 0.000001
|
@@ -1558,16 +1558,36 @@
return
+timezone.make_aware(
datetime
@@ -1618,16 +1618,56 @@
n.hour,
+%0A
(n.minu
@@ -1679,16 +1679,27 @@
5) * 5)
+, n.tzinfo)
%0A%0A# ----
@@ -1833,24 +1833,56 @@
ation_date',
+%0A
models.Date
@@ -1989,16 +1989,48 @@
d_date',
+%0A
models.
@@ -2876,16 +2876,17 @@
+
(Q(publi
@@ -2916,16 +2916,30 @@
=True) %7C
+%0A
Q(publi
|
f4b7f90c2a7d5d575592efc21cfc83fb999fa57e
|
fix todo module path expansion
|
bumblebee/modules/todo.py
|
bumblebee/modules/todo.py
|
# pylint: disable=C0111,R0903
"""Displays the number of todo items from a text file
Parameters:
* todo.file: File to read TODOs from (defaults to ~/Documents/todo.txt)
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.output)
)
self._todos = self.count_items()
def output(self, widget):
self._todos = self.count_items()
return str(self._todos)
def state(self, widgets):
if self._todos == 0:
return "empty"
return "items"
def count_items(filename):
try:
i = -1
doc = self.parameter("file", "~/Documents/todo.txt")
with open(doc) as f:
for i, l in enumerate(f):
pass
return i+1
except Exception:
return 0
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Python
| 0
|
@@ -243,16 +243,31 @@
.engine%0A
+import os.path%0A
%0A%0Aclass
@@ -463,16 +463,103 @@
)%0A
+ self._doc = os.path.expanduser(self.parameter(%22file%22, %22~/Documents/todo.txt%22))%0A
@@ -831,16 +831,12 @@
ems(
-filename
+self
):%0A
@@ -870,73 +870,8 @@
-1%0A
- doc = self.parameter(%22file%22, %22~/Documents/todo.txt%22)%0A
@@ -888,16 +888,22 @@
th open(
+self._
doc) as
@@ -1021,16 +1021,16 @@
eption:%0A
+
@@ -1046,59 +1046,4 @@
n 0%0A
-%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
|
c1d18bb5f2838e9bf5c0fe59ab9978ff1fbe6b0f
|
Extend limit
|
tests/mock_vws/test_database_summary.py
|
tests/mock_vws/test_database_summary.py
|
"""
Tests for the mock of the database summary endpoint.
"""
from time import sleep
import pytest
import requests
import timeout_decorator
from requests import codes
from requests_mock import GET
from common.constants import ResultCodes
from tests.mock_vws.utils import assert_vws_response
from tests.utils import VuforiaServerCredentials
from vws._request_utils import target_api_request
def database_summary(
vuforia_server_credentials: VuforiaServerCredentials,
) -> requests.Response:
"""
Return the response of a request to the database summary endpoint.
Args:
vuforia_server_credentials: The credentials to use to connect to
Vuforia.
"""
return target_api_request(
access_key=vuforia_server_credentials.access_key,
secret_key=vuforia_server_credentials.secret_key,
method=GET,
content=b'',
request_path='/summary',
)
@timeout_decorator.timeout(seconds=200)
def wait_for_image_numbers(
vuforia_server_credentials: VuforiaServerCredentials,
active_images: int,
inactive_images: int,
failed_images: int,
processing_images: int,
) -> None:
"""
Wait up to 200 seconds (arbitrary) for the number of images in various
categories to match the expected number.
This is necessary because the database summary endpoint lags behind the
real data.
This is susceptible to false positives because if, for example, we expect
no images, and the endpoint adds images with a delay, we will not know.
Args:
vuforia_server_credentials: The credentials to use to connect to
Vuforia.
active_images: The expected number of active images.
inactive_images: The expected number of inactive images.
failed_images: The expected number of failed images.
processing_images: The expected number of processing images.
Raises:
TimeoutError: The numbers of images in various categories do not match
within the time limit.
"""
requirements = {
'active_images': active_images,
'inactive_images': inactive_images,
'failed_images': failed_images,
'processing_images': processing_images,
}
while True:
response = database_summary(
vuforia_server_credentials=vuforia_server_credentials
)
for requirement in requirements:
if response.json()[requirement] == requirements[requirement]:
requirements.pop(requirement)
if not requirements:
return
# We wait 0.2 seconds rather than less than that to decrease the number
# of calls made to the API, to decrease the likelihood of hitting the
# request quota.
sleep(0.2)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestDatabaseSummary:
"""
Tests for the mock of the database summary endpoint at `GET /summary`.
"""
def test_success(
self,
vuforia_server_credentials: VuforiaServerCredentials,
) -> None:
"""
It is possible to get a success response.
"""
response = database_summary(
vuforia_server_credentials=vuforia_server_credentials
)
assert_vws_response(
response=response,
status_code=codes.OK,
result_code=ResultCodes.SUCCESS,
)
assert response.json().keys() == {
'active_images',
'current_month_recos',
'failed_images',
'inactive_images',
'name',
'previous_month_recos',
'processing_images',
'reco_threshold',
'request_quota',
'request_usage',
'result_code',
'target_quota',
'total_recos',
'transaction_id',
}
assert response.json()['name'] == (
vuforia_server_credentials.database_name
)
wait_for_image_numbers(
vuforia_server_credentials=vuforia_server_credentials,
active_images=0,
inactive_images=0,
failed_images=0,
processing_images=0,
)
@pytest.mark.usefixtures('verify_mock_vuforia_inactive')
class TestInactiveProject:
"""
Tests for inactive projects.
"""
def test_inactive_project(
self,
inactive_server_credentials: VuforiaServerCredentials,
) -> None:
"""
The project's active state does not affect the database summary.
"""
response = database_summary(
vuforia_server_credentials=inactive_server_credentials,
)
assert_vws_response(
response=response,
status_code=codes.OK,
result_code=ResultCodes.SUCCESS,
)
|
Python
| 0.000012
|
@@ -947,17 +947,17 @@
seconds=
-2
+3
00)%0Adef
@@ -1174,17 +1174,17 @@
t up to
-2
+3
00 secon
@@ -2344,32 +2344,33 @@
ials%0A )%0A%0A
+%0A
for requ
|
d9a37dffd8e6c5ab44f6b355de4fc07aa64aea9f
|
Fix unreliability of pipeline publisher.
|
zeeko/telemetry/tests/test_pipeline.py
|
zeeko/telemetry/tests/test_pipeline.py
|
import pytest
import h5py
import time
import numpy as np
from ..pipeline import create_pipeline
@pytest.fixture
def chunksize():
"""The size of chunks."""
return 10
@pytest.fixture
def pipeline(address, context, chunksize, filename):
"""Pipeline"""
ioloop = create_pipeline(address, context, chunksize, filename)
yield ioloop
ioloop.cancel()
def test_create_pipeline(address, context, chunksize, filename):
"""Test creating a pipeline."""
ioloop = create_pipeline(address, context, chunksize, filename)
print("Created")
ioloop.cancel(timeout=0.1)
print("Canceled")
def test_run_pipeline(pipeline, Publisher, pub, filename, chunksize):
"""Test running the pipeline."""
with pipeline.running(timeout=0.1):
pipeline.state.selected("RUN").wait(timeout=0.1)
for i in range(10):
Publisher.update()
Publisher.publish(pub)
time.sleep(0.1)
pipeline.record.recorder.pushed.wait(timeout=3.0)
pipeline.write.fired.wait(timeout=3.0)
pipeline.state.selected("STOP").wait(timeout=1.0)
print(pipeline.record.recorder.complete)
for chunk in pipeline.record.recorder:
print("{0}: {1}".format(chunk, pipeline.record.recorder[chunk].lastindex))
assert pipeline.record.recorder.pushed.is_set()
assert pipeline.write.fired.is_set()
assert pipeline.record.recorder.chunkcount == 1
with h5py.File(filename, 'r') as f:
for name in Publisher.keys():
assert name in f
g = f[name]
assert g['data'].shape[0] == chunksize
np.testing.assert_allclose(g['data'][-1], Publisher[name].array)
|
Python
| 0
|
@@ -822,25 +822,57 @@
-for i in range(10
+while not pipeline.record.recorder.pushed.is_set(
):%0A
|
52ceae8d6ca99b3521d6d5cd3e20592f00870945
|
Fix regression which made libsecret backend unusable in v23.8.0
|
keyring/backend.py
|
keyring/backend.py
|
"""
Keyring implementation support
"""
import os
import abc
import logging
import operator
import copy
from typing import Optional
from .py310compat import metadata
from . import credentials, errors, util
from .util import properties
log = logging.getLogger(__name__)
by_priority = operator.attrgetter('priority')
_limit = None
class KeyringBackendMeta(abc.ABCMeta):
"""
A metaclass that's both an ABCMeta and a type that keeps a registry of
all (non-abstract) types.
"""
def __init__(cls, name, bases, dict):
super().__init__(name, bases, dict)
if not hasattr(cls, '_classes'):
cls._classes = set()
classes = cls._classes
if not cls.__abstractmethods__:
classes.add(cls)
class KeyringBackend(metaclass=KeyringBackendMeta):
"""The abstract base class of the keyring, every backend must implement
this interface.
"""
def __init__(self):
self.set_properties_from_env()
# @abc.abstractproperty
def priority(cls):
"""
Each backend class must supply a priority, a number (float or integer)
indicating the priority of the backend relative to all other backends.
The priority need not be static -- it may (and should) vary based
attributes of the environment in which is runs (platform, available
packages, etc.).
A higher number indicates a higher priority. The priority should raise
a RuntimeError with a message indicating the underlying cause if the
backend is not suitable for the current environment.
As a rule of thumb, a priority between zero but less than one is
suitable, but a priority of one or greater is recommended.
"""
@properties.ClassProperty
@classmethod
def viable(cls):
with errors.ExceptionRaisedContext() as exc:
cls.priority
return not exc
@classmethod
def get_viable_backends(cls):
"""
Return all subclasses deemed viable.
"""
return filter(operator.attrgetter('viable'), cls._classes)
@properties.ClassProperty
@classmethod
def name(cls):
"""
The keyring name, suitable for display.
The name is derived from module and class name.
"""
parent, sep, mod_name = cls.__module__.rpartition('.')
mod_name = mod_name.replace('_', ' ')
return ' '.join([mod_name, cls.__name__])
def __str__(self):
keyring_class = type(self)
return "{}.{} (priority: {:g})".format(
keyring_class.__module__, keyring_class.__name__, keyring_class.priority
)
@abc.abstractmethod
def get_password(self, service: str, username: str) -> Optional[str]:
"""Get password of the username for the service"""
return None
@abc.abstractmethod
def set_password(self, service: str, username: str, password: str) -> None:
"""Set password for the username of the service.
If the backend cannot store passwords, raise
PasswordSetError.
"""
raise errors.PasswordSetError("reason")
# for backward-compatibility, don't require a backend to implement
# delete_password
# @abc.abstractmethod
def delete_password(self, service: str, username: str) -> None:
"""Delete the password for the username of the service.
If the backend cannot delete passwords, raise
PasswordDeleteError.
"""
raise errors.PasswordDeleteError("reason")
# for backward-compatibility, don't require a backend to implement
# get_credential
# @abc.abstractmethod
def get_credential(
self,
service: str,
username: Optional[str],
) -> Optional[credentials.Credential]:
"""Gets the username and password for the service.
Returns a Credential instance.
The *username* argument is optional and may be omitted by
the caller or ignored by the backend. Callers must use the
returned username.
"""
# The default implementation requires a username here.
if username is not None:
password = self.get_password(service, username)
if password is not None:
return credentials.SimpleCredential(username, password)
return None
def set_properties_from_env(self):
"""For all KEYRING_PROPERTY_* env var, set that property."""
def parse(item):
key, value = item
pre, sep, name = key.partition('KEYRING_PROPERTY_')
return sep and (name.lower(), value)
props = filter(None, map(parse, os.environ.items()))
for name, value in props:
setattr(self, name, value)
def with_properties(self, **kwargs):
alt = copy.copy(self)
vars(alt).update(kwargs)
return alt
class Crypter:
"""Base class providing encryption and decryption"""
@abc.abstractmethod
def encrypt(self, value):
"""Encrypt the value."""
pass
@abc.abstractmethod
def decrypt(self, value):
"""Decrypt the value."""
pass
class NullCrypter(Crypter):
"""A crypter that does nothing"""
def encrypt(self, value):
return value
def decrypt(self, value):
return value
def _load_plugins():
"""
Locate all setuptools entry points by the name 'keyring backends'
and initialize them.
Any third-party library may register an entry point by adding the
following to their setup.cfg::
[options.entry_points]
keyring.backends =
plugin_name = mylib.mymodule:initialize_func
`plugin_name` can be anything, and is only used to display the name
of the plugin at initialization time.
`initialize_func` is optional, but will be invoked if callable.
"""
for ep in metadata.entry_points(group='keyring.backends'):
try:
log.debug('Loading %s', ep.name)
init_func = ep.load()
if callable(init_func):
init_func()
except Exception:
log.exception(f"Error initializing plugin {ep}.")
@util.once
def get_all_keyring():
"""
Return a list of all implemented keyrings that can be constructed without
parameters.
"""
_load_plugins()
viable_classes = KeyringBackend.get_viable_backends()
rings = util.suppress_exceptions(viable_classes, exceptions=TypeError)
return list(rings)
class SchemeSelectable:
"""
Allow a backend to select different "schemes" for the
username and service.
>>> backend = SchemeSelectable()
>>> backend._query('contoso', 'alice')
{'username': 'alice', 'service': 'contoso'}
>>> backend._query('contoso')
{'service': 'contoso'}
>>> backend.scheme = 'KeePassXC'
>>> backend._query('contoso', 'alice')
{'UserName': 'alice', 'Title': 'contoso'}
>>> backend._query('contoso', 'alice', foo='bar')
{'UserName': 'alice', 'Title': 'contoso', 'foo': 'bar'}
"""
scheme = 'default'
schemes = dict(
default=dict(username='username', service='service'),
KeePassXC=dict(username='UserName', service='Title'),
)
def _query(self, service, username=None, **base):
scheme = self.schemes[self.scheme]
return dict(
{
scheme['username']: username,
scheme['service']: service,
}
if username
else {
scheme['service']: service,
},
**base,
)
|
Python
| 0.999997
|
@@ -7486,24 +7486,36 @@
if username
+ is not None
%0A
|
5776162edb630e8de7967d6bc2319f253581b16c
|
Check to see whether trackInfo is null before getting track number.
|
PlayItem.py
|
PlayItem.py
|
#
# # ### / ###
# / /### / #/ ###
# / / ###/ ## ##
# / ## ## ## ##
# / ### ## ##
# ## ## ## /## /### ## #### ##
# ## ## ### ## / ### / ### / ## ### / ##
# ## ## /### / ##/ ### / ###/ ## ###/ ##
# ## ## / ###/ ## ## ## ## ## ## ##
# ## ##/ ## ## ## ## ## ## ## ##
# ## ## # ## ## ## ## ## ## ##
# ## # / ## ## ## ## ## ## ##
# ### / ## ## ## ## ## /# ##
# ######/ ## ## ###### ######/ ## ### /
# ### ## ## #### ##### ## ##/
# /
# /
# /
# /
#
# Haunting Three D Radio's Graveyard Slots
# Copyright 2014 Michael Marner <michael@20papercups.net>
# Release under MIT Licence
import urllib
class PlayItem:
##
# Store the local path for the file for this song. Used by FileManager.
# @param path The path to the file for this song.
def setLocalPath(self, path):
self.path = path
##
# Returns the local path for this file
def getLocalPath(self):
return self.path
##
# Returns a friendly string for printing to log files, etc.
def getDetails(self):
pass
##
# Returns this song's info as a CSV line
def getCSVLine(self):
pass
class Message(PlayItem):
def __init__(self, category, code, filename):
self.category = category
self.filename = filename
catPath = category.lower()[:12]
#self.setLocalPath(urllib.quote(Message.basePath + catPath + '/' + filename ))
self.setLocalPath(Message.basePath + catPath + '/' + filename)
self.code = code
def getDetails(self):
return self.category + " - " + self.code
##
# Represents a single song from the music catalogue
#
class Song(PlayItem):
##
# Constructor, takes the data from the database to encapsulate
# @param cdInfo the dictionary of data fro the row in the CD table
# @param trackInfo the dictionary of data fro the row in the cdtrack table
def __init__(self, cdInfo, trackInfo):
self.cdInfo = cdInfo
self.trackInfo = trackInfo
##
# Returns the title of the track.
def getTrackTitle(self):
return self.trackInfo['tracktitle']
##
# Returns the name of the artist.
def getArtistName(self):
if not self.trackInfo['trackartist']:
return self.cdInfo['artist']
else:
return self.trackInfo['trackartist']
##
# Returns a friendly string for printing to log files, etc.
def getDetails(self):
return self.getArtistName() + ' - ' + self.getTrackTitle()
##
# Returns this song's info as a CSV line
def getCSVLine(self):
return '"' + self.getArtistName() + '",' + \
'"' + self.getTrackTitle() + '",' + \
'"' + `self.isDemo()` + '",' + \
'"' + `self.isLocal()` + '",' + \
'"' + `self.isAustralian()` + '",' + \
'"' + `self.hasFemale()` +'"'
##
# Returns the release ID
def getReleaseID(self):
return self.cdInfo['id']
##
# Returns the track ID used by the database.
def getTrackID(self):
return self.trackInfo['trackid']
##
# Returns the track number from this song's release.
def getTrackNumber(self):
return self.trackInfo['tracknum']
def isLocal(self):
return self.cdInfo['local'] == 2
def isAustralian(self):
return self.cdInfo['cpa'] in Song.ausNames
def isDemo(self):
return self.cdInfo['demo'] == 2
def hasFemale(self):
return self.cdInfo['female'] == 2
|
Python
| 0
|
@@ -3977,32 +3977,88 @@
ckNumber(self):%0A
+ if self.trackInfo == None:%0A return 0%0A
return s
|
29b23bc1cc29f9d611fd06bc34d46fafab001504
|
add {{primary sources}} to template list
|
trunk/pywiki/datebot.py
|
trunk/pywiki/datebot.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This script is a clone of [[en:User:SmackBot]]
Syntax: python datebot.py
"""
#
# (C) Legoktm 2008-2009, MIT License
#
import re, sys, time
import os
import wiki, pagegen, config
# Define global constants
readDelay = 20 # seconds
writeDelay = 60 # seconds
usernames = {
'en.wikipedia':'Legobot II'
}
def checktalk():
page = wiki.Page('%s/Stop' %(usernames[config.wiki]))
try:
wikitext = page.get()
except:
sys.exit()
if wikitext.lower() != 'run':
sys.exit()
def process_article(page):
try:
wikitext = state1 = page.get()
except wiki.IsRedirectPage:
return
# Fix Casing (Reduces the number of possible expressions)
wikitext = re.compile(r'\{\{\s*(template:|)fact', re.IGNORECASE).sub(r'{{Fact', wikitext)
# Fix some redirects
wikitext = re.compile(r'\{\{\s*(template:|)cn\}\}', re.IGNORECASE).sub(r'{{Fact}}', wikitext)
wikitext = re.compile(r'\{\{\s*(template:|)citation needed', re.IGNORECASE).sub(r'{{Fact', wikitext)
wikitext = re.compile(r'\{\{\s*(template:|)proveit', re.IGNORECASE).sub(r'{{Fact', wikitext)
wikitext = re.compile(r'\{\{\s*(template:|)sourceme', re.IGNORECASE).sub(r'{{Fact', wikitext)
wikitext = re.compile(r'\{\{\s*(template:|)fct', re.IGNORECASE).sub(r'{{Fact', wikitext)
# State point. Count any changes as needing an update if they're after this line
state0 = wikitext
# Date the tags
wikitext = re.compile(r'\{\{\s*fact\}\}', re.IGNORECASE).sub(r'{{Fact|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*wikify\}\}', re.IGNORECASE).sub(r'{{Wikify|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*orphan\}\}', re.IGNORECASE).sub(r'{{Orphan|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*uncategorized\}\}', re.IGNORECASE).sub(r'{{Uncategorized|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*uncatstub\}\}', re.IGNORECASE).sub(r'{{Uncatstub|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*cleanup\}\}', re.IGNORECASE).sub(r'{{Cleanup|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*unreferenced\}\}', re.IGNORECASE).sub(r'{{Unreferenced|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*importance\}\}', re.IGNORECASE).sub(r'{{importance|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*Expand\}\}', re.IGNORECASE).sub(r'{{Expand|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
# wikitext = re.compile(r'\{\{\s*merge(.*?)\}\}', re.IGNORECASE).sub(r'{{Merge\\1|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*copyedit\}\}', re.IGNORECASE).sub(r'{{Copyedit|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
wikitext = re.compile(r'\{\{\s*refimprove\}\}', re.IGNORECASE).sub(r'{{Refimprove|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}', wikitext)
EditMsg = "Date maintenance tags"
if state1 != state0:
EditMsg = EditMsg + " and general fixes"
# If the text has changed at all since the state point, upload it
if wikitext != state0:
# try:
print 'Editing ' + page.title()
print 'WRITE: Adding %s bytes.' % str(len(wikitext)-len(state0))
# wikipedia.showDiff(state1, wikitext)
try:
page.put(wikitext, EditMsg)
except wiki.LockedPage:
print 'SKIP: ' + page.title() + ' is locked.'
# except KeyboardInterrupt:
# quit()
# except:
# print 'ERROR: Except raised while writing.'
else:
print 'Skipping ' + page.title() + ' due to no changes made after state point.'
def docat(cat2):
gen = pagegen.category(wiki.Page('Category:' + cat2))
for page in gen:
if page.namespace() == 0:
process_article(page)
checktalk()
else:
print 'Skipping %s because it is not in the mainspace' %(page.title())
print 'Done with Category:%s' %(cat2)
def main():
docat("Articles with unsourced statements")
docat("Articles that need to be wikified")
docat("Orphaned articles")
docat("Category needed")
docat("Uncategorized stubs")
docat("Wikipedia cleanup")
docat("Articles lacking sources")
docat("Articles to be expanded")
docat("Articles with topics of unclear notability")
# docat("Articles to be merged")
docat("Wikipedia articles needing copy edit")
docat("Articles needing additional references")
print 'Done'
if __name__ == "__main__":
while True:
main()
print 'Sleeping 60 seconds'
time.sleep(60)
|
Python
| 0.000005
|
@@ -3187,16 +3187,179 @@
kitext)%0A
+%09%09wikitext = re.compile(r'%5C%7B%5C%7B%5Cs*primary sources%5C%7D%5C%7D', re.IGNORECASE).sub(r'%7B%7BPrimary sources%7Cdate=%7B%7Bsubst:CURRENTMONTHNAME%7D%7D %7B%7Bsubst:CURRENTYEAR%7D%7D%7D%7D', wikitext)%0A%0A
%09%09EditMs
@@ -4755,16 +4755,63 @@
ences%22)%0A
+%09docat(%22Articles lacking reliable references%22)%0A
%09print '
|
05ee29728d26cbdbab2e23bc89e0c5ffc4f9369b
|
Fix long message decryption error
|
simplecrypto/__init__.py
|
simplecrypto/__init__.py
|
import hashlib
import math
from os import path
from base64 import b64encode, b64decode
from binascii import hexlify, unhexlify
from Crypto.Cipher import DES, AES, PKCS1_OAEP
from Crypto.Signature import PKCS1_PSS
from Crypto.PublicKey import RSA as _RSA
from Crypto.Hash import SHA as RSA_SHA
from Crypto import Random
_random_instance = Random.new()
def md5(message):
"""
Returns the hexadecimal representation of the MD5 hash digest.
"""
return hashlib.md5(to_bytes(message)).hexdigest()
def sha1(message):
"""
Returns the hexadecimal representation of the SHA1 hash digest.
"""
return hashlib.sha1(to_bytes(message)).hexdigest()
def sha256(message):
"""
Returns the hexadecimal representation of the SHA256 hash digest.
"""
return hashlib.sha256(to_bytes(message)).hexdigest()
def sha512(message):
"""
Returns the hexadecimal representation of the SHA512 hash digest.
"""
return hashlib.sha512(to_bytes(message)).hexdigest()
# Available hash functions.
hashes = [sha1, md5, sha256, sha512]
# Default hash function.
hash = sha1
def to_base64(message):
"""
Returns the base64 representation of a string or bytes.
"""
return b64encode(to_bytes(message)).decode('ascii')
def from_base64(message):
"""
Returns the bytes from a base64 representation.
"""
return b64decode(to_bytes(message), validate=True)
def to_hex(message):
"""
Returns the (string) hexadecimal representation of a string or bytes.
"""
return hexlify(to_bytes(message)).decode('ascii')
def from_hex(message):
"""
Returns the bytes from a (string) hexadecimal representation.
"""
return unhexlify(message)
def to_bytes(message):
"""
Returns the bytes representation of an arbitrary message.
"""
if isinstance(message, str):
return message.encode('utf-8')
else:
return bytes(message)
def to_str(message):
"""
Returns the string representation of an arbitrary message.
"""
if isinstance(message, str):
return message
else:
return message.decode('utf-8')
def _append_newline(s):
return s + '\n'
def _replace_backslashes(s):
return s.replace('\\', '/')
# Shorthands.
base64 = to_base64
hex = to_hex
def pad(message, length, padding=b'0'):
"""
Pads a message with binary zeroes until a given length is reached.
"""
message = to_bytes(message)
return message + (length - len(message)) * padding
def pad_multiple(message, len_multiple, padding=b'0'):
"""
Pads a message with binary zeroes until the length is a desired multiple.
"""
next_length = math.ceil(len(message) / float(len_multiple)) * len_multiple
return pad(message, int(next_length), padding)
def random(n_bytes):
"""
Returns `n_bytes` of cryptographically secure random bytes.
"""
return _random_instance.read(n_bytes)
def encrypt(message, key):
"""
Encrypts `message` with the `key`. If `key` is bytes or str, it is used as
symmetric AES256 key.
"""
if type(key) in [str, bytes]:
key = AesKey(key)
return key.encrypt(message)
def decrypt(message, key):
"""
Decrypts `message` with the `key`. If `key` is bytes or str, it is used as
symmetric AES256 key.
"""
if type(key) in [str, bytes]:
key = AesKey(key)
return key.decrypt(message)
class AesKey(object):
"""
Class for symmetric AES with 256 bits block size.
"""
def __init__(self, key):
self.key = key
self.algorithm = 'AES-256'
self.block_size = 256 / 8
def encrypt(self, message):
iv = random(AES.block_size)
instance = AES.new(pad_multiple(self.key, 16),
AES.MODE_CFB,
iv)
return to_base64(iv + instance.encrypt(to_bytes(message)))
def decrypt(self, message):
message = from_base64(message)
iv, message = message[:AES.block_size], message[AES.block_size:]
instance = AES.new(pad_multiple(self.key, 16),
AES.MODE_CFB,
iv)
return instance.decrypt(message)
class RsaPublicKey(object):
"""
Class for asymmetric public RSA key.
"""
def __init__(self, key, algorithm, block_size):
self.oaep = PKCS1_OAEP.new(key)
self.pss = PKCS1_PSS.new(key)
self.algorithm = algorithm
self.block_size = block_size
def encrypt(self, message):
m = to_bytes(message)
if len(m) <= self.block_size:
return self.oaep.encrypt(m)
symmetric_key = random(AES.block_size)
encrypted_symmetric_key = self.oaep.encrypt(symmetric_key)
return encrypted_symmetric_key + from_base64(encrypt(m, symmetric_key))
def verify(self, message, signature):
h = RSA_SHA.new()
h.update(to_bytes(message))
return self.pss.verify(h, signature)
class RsaKeypair(object):
"""
Class for asymmetric RSA keypair.
"""
def __init__(self, nbits=2048):
self.rsa = _RSA.generate(nbits, random)
self.oaep = PKCS1_OAEP.new(self.rsa)
self.pss = PKCS1_PSS.new(self.rsa)
self.algorithm = 'RSA-' + str(nbits)
self.block_size = nbits // 8
self.publickey = RsaPublicKey(self.rsa.publickey(),
self.algorithm,
self.block_size)
def encrypt(self, message):
# Delegate to public key.
return self.publickey.encrypt(message)
def verify(self, message, signature):
# Delegate to public key.
return self.publickey.verify(message, signature)
def decrypt(self, message):
m = to_bytes(message)
if len(message) <= self.block_size:
return self.oaep.decrypt(message)
encrypted_symmetric_key, m = m[:self.block_size], m[self.block_size:]
symmetric_key = self.oaep.decrypt(encrypted_symmetric_key)
return decrypt(m, symmetric_key)
def sign(self, message):
h = RSA_SHA.new()
h.update(to_bytes(message))
return self.pss.sign(h)
def encrypt_to(self, message, recipient):
raise NotImplementedError
|
Python
| 0.999994
|
@@ -6040,25 +6040,33 @@
urn decrypt(
-m
+base64(m)
, symmetric_
|
acac9582ba7a948b1662d3e4a6a2a06db11ce59f
|
FIX assert_greater message
|
sklearn/utils/testing.py
|
sklearn/utils/testing.py
|
"""Testing utilities."""
# Copyright (c) 2011 Pietro Berkes
# License: Simplified BSD
from .fixes import savemat
import urllib2
from StringIO import StringIO
import scipy as sp
try:
from nose.tools import assert_in, assert_not_in
except ImportError:
# Nose < 1.0.0
from nose.tools import assert_true, assert_false
def assert_in(x, container):
assert_true(x in container, msg="%r in %r" % (x, container))
def assert_not_in(x, container):
assert_false(x in container, msg="%r in %r" % (x, container))
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
try:
from nose.tools import assert_less
except ImportError:
assert_less = _assert_less
try:
from nose.tools import assert_greater
except ImportError:
assert_greater = _assert_greater
def fake_mldata_cache(columns_dict, dataname, matfile, ordering=None):
"""Create a fake mldata data set in the cache_path.
Parameters
----------
columns_dict: contains data as
columns_dict[column_name] = array of data
dataname: name of data set
matfile: file-like object or file name
ordering: list of column_names, determines the ordering in the data set
Note: this function transposes all arrays, while fetch_mldata only
transposes 'data', keep that into account in the tests.
"""
datasets = dict(columns_dict)
# transpose all variables
for name in datasets:
datasets[name] = datasets[name].T
if ordering is None:
ordering = sorted(list(datasets.keys()))
# NOTE: setting up this array is tricky, because of the way Matlab
# re-packages 1D arrays
datasets['mldata_descr_ordering'] = sp.empty((1, len(ordering)),
dtype='object')
for i, name in enumerate(ordering):
datasets['mldata_descr_ordering'][0, i] = name
savemat(matfile, datasets, oned_as='column')
class mock_urllib2(object):
def __init__(self, mock_datasets):
"""Object that mocks the urllib2 module to fake requests to mldata.
`mock_datasets` is a dictionary of {dataset_name: data_dict}, or
{dataset_name: (data_dict, ordering).
`data_dict` itself is a dictionary of {column_name: data_array},
and `ordering` is a list of column_names to determine the ordering
in the data set (see `fake_mldata_cache` for details).
When requesting a dataset with a name that is in mock_datasets,
this object creates a fake dataset in a StringIO object and
returns it. Otherwise, it raises an URLError.
"""
self.mock_datasets = mock_datasets
class HTTPError(urllib2.URLError):
code = 404
def urlopen(self, urlname):
dataset_name = urlname.split('/')[-1]
if dataset_name in self.mock_datasets:
resource_name = '_' + dataset_name
matfile = StringIO()
dataset = self.mock_datasets[dataset_name]
ordering = None
if isinstance(dataset, tuple):
dataset, ordering = dataset
fake_mldata_cache(dataset, resource_name, matfile, ordering)
matfile.seek(0)
return matfile
else:
raise mock_urllib2.HTTPError('%s not found.' % urlname)
def quote(self, string, safe='/'):
return urllib2.quote(string, safe)
|
Python
| 0.000001
|
@@ -700,16 +700,17 @@
essage%0A%0A
+%0A
def _ass
@@ -755,35 +755,37 @@
ge = %22%25r is not
-low
+great
er than %25r%22 %25 (a
|
5978ab00f200ea48216d8c67bf2f8684bdddaf00
|
Update dahlquist.py
|
pySDC/projects/Resilience/dahlquist.py
|
pySDC/projects/Resilience/dahlquist.py
|
# script to run a simple advection problem
from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right
from pySDC.implementations.problem_classes.TestEquation_0D import testequation0d
from pySDC.implementations.sweeper_classes.generic_implicit import generic_implicit
from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
from pySDC.core.Hooks import hooks
from pySDC.helpers.stats_helper import get_sorted
import numpy as np
import matplotlib.pyplot as plt
class log_data(hooks):
def post_iteration(self, step, level_number):
super(log_data, self).post_iteration(step, level_number)
# some abbreviations
L = step.levels[level_number]
L.sweep.compute_end_point()
self.add_to_stats(
process=step.status.slot,
time=L.time + L.dt,
level=L.level_index,
iter=step.status.iter,
sweep=L.status.sweep,
type='u',
value=L.uend,
)
self.add_to_stats(
process=step.status.slot,
time=L.time,
level=L.level_index,
iter=0,
sweep=L.status.sweep,
type='dt',
value=L.dt,
)
def pre_run(self, step, level_number):
super(log_data, self).pre_run(step, level_number)
L = step.levels[level_number]
self.add_to_stats(process=0, time=0, level=0, iter=0, sweep=0, type='lambdas', value=L.prob.params.lambdas)
def run_dahlquist(
custom_description=None,
num_procs=1,
Tend=1.0,
hook_class=log_data,
fault_stuff=None,
custom_controller_params=None,
custom_problem_params=None,
):
# initialize level parameters
level_params = dict()
level_params['dt'] = 1.0
# initialize sweeper parameters
sweeper_params = dict()
sweeper_params['collocation_class'] = CollGaussRadau_Right
sweeper_params['num_nodes'] = 3
sweeper_params['QI'] = 'LMMpar'
# build lambdas
re = np.linspace(-30, 30, 400)
im = np.linspace(-50, 50, 400)
lambdas = np.array([[complex(re[i], im[j]) for i in range(len(re))] for j in range(len(im))]).reshape(
(len(re) * len(im))
)
problem_params = {
'lambdas': lambdas,
'u0': 1.0,
}
if custom_problem_params is not None:
problem_params = {**problem_params, **custom_problem_params}
# initialize step parameters
step_params = dict()
step_params['maxiter'] = 5
# initialize controller parameters
controller_params = dict()
controller_params['logger_level'] = 30
controller_params['hook_class'] = hook_class
controller_params['mssdc_jac'] = False
if custom_controller_params is not None:
controller_params = {**controller_params, **custom_controller_params}
# fill description dictionary for easy step instantiation
description = dict()
description['problem_class'] = testequation0d # pass problem class
description['problem_params'] = problem_params # pass problem parameters
description['sweeper_class'] = generic_implicit # pass sweeper
description['sweeper_params'] = sweeper_params # pass sweeper parameters
description['level_params'] = level_params # pass level parameters
description['step_params'] = step_params
if custom_description is not None:
for k in custom_description.keys():
if k == 'sweeper_class':
description[k] = custom_description[k]
continue
description[k] = {**description.get(k, {}), **custom_description.get(k, {})}
# set time parameters
t0 = 0.0
# instantiate controller
controller = controller_nonMPI(num_procs=num_procs, controller_params=controller_params, description=description)
# insert faults
if fault_stuff is not None:
raise NotImplementedError('No fault stuff here...')
# get initial values on finest level
P = controller.MS[0].levels[0].prob
uinit = P.u_exact(t0)
# call main function to get things done...
uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)
return stats, controller, Tend
def plot_stability(stats, ax=None, iter=None, colors=None, crosshair=True, fill=False):
lambdas = get_sorted(stats, type='lambdas')[0][1]
u = get_sorted(stats, type='u', sortby='iter')
# decorate
if crosshair:
ax.axhline(0, color='black', alpha=1.0)
ax.axvline(0, color='black', alpha=1.0)
if ax is None:
fig, ax = plt.subplots(1, 1)
iter = [1] if iter is None else iter
colors = ['blue', 'red', 'violet', 'green'] if colors is None else colors
for i in iter:
# isolate the solutions from the iteration you want
U = np.reshape([me[1] for me in u if me[0] == i], (len(np.unique(lambdas.real)), len(np.unique(lambdas.imag))))
# get a grid for plotting
X, Y = np.meshgrid(np.unique(lambdas.real), np.unique(lambdas.imag))
if fill:
ax.contourf(X, Y, abs(U), levels=[-np.inf, 1 - np.finfo(float).eps], colors=colors[i - 1], alpha=0.5)
ax.contour(X, Y, abs(U), levels=[1], colors=colors[i - 1])
ax.plot([None], [None], color=colors[i - 1], label=f'k={i}')
ax.legend(frameon=False)
if __name__ == '__main__':
custom_description = None
stats, controller, Tend = run_dahlquist(custom_description=custom_description)
plot_stability(stats, iter=[1, 2, 3])
plt.show()
|
Python
| 0
|
@@ -40,101 +40,8 @@
lem%0A
-from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right%0A
from
@@ -1797,50 +1797,35 @@
ms%5B'
-collocation_class'%5D = CollGaussRadau_Right
+quad_type'%5D = 'RADAU-RIGHT'
%0A
|
f9f01b57e6e2085786908a5ebcf0be61fefe3a51
|
Change version
|
slack_client/__init__.py
|
slack_client/__init__.py
|
from .api import SlackAPI
from .channel import SlackChannel
__version__ = '0.2.16'
|
Python
| 0
|
@@ -75,10 +75,9 @@
'0.
-2.16
+3.0
'%0A
|
646a073aa1e2e63f06a8cfd56e467bd2f67bceff
|
use one timezone
|
SunCycle.py
|
SunCycle.py
|
import sublime
from datetime import datetime
from timezone import LocalTimezone
from sun import Sun
class Settings():
def __init__(self, onChange=None):
self.loaded = False
self.onChange = onChange
self.load()
def load(self):
settings = sublime.load_settings(__name__ + '.sublime-settings')
settings.clear_on_change(__name__)
settings.add_on_change(__name__, self.load)
if not settings.has('day'):
raise KeyError('SunCycle: missing day setting')
if not settings.has('night'):
raise KeyError('SunCycle: missing night setting')
self.day = settings.get('day')
self.night = settings.get('night')
self.sun = Sun(settings.get('latitude', 0), settings.get('longitude', 0))
print('SunCycle: sunrise at %s' % self.sun.sunrise())
print('SunCycle: sunset at %s' % self.sun.sunset())
if self.loaded and self.onChange:
self.onChange()
self.loaded = True
class SunCycle():
def __init__(self):
self.dayPart = None
self.halt = False
sublime.set_timeout(self.start, 500) # delay execution so settings can load
def getDayOrNight(self):
s = self.settings.sun
now = datetime.now(tz=LocalTimezone())
return 'day' if now >= s.sunrise(now) and now <= s.sunset(now) else 'night'
def cycle(self):
sublimeSettings = sublime.load_settings('Preferences.sublime-settings')
config = getattr(self.settings, self.getDayOrNight())
if sublimeSettings is None:
raise Exception('Preferences not loaded')
sublimeSettingsChanged = False
newColorScheme = config.get('color_scheme')
if newColorScheme and newColorScheme != sublimeSettings.get('color_scheme'):
print('SunCycle: switching to new color scheme: %s' % newColorScheme)
sublimeSettings.set('color_scheme', newColorScheme)
sublimeSettingsChanged = True
newTheme = config.get('theme')
if newTheme and newTheme != sublimeSettings.get('theme'):
print('SunCycle: switching to new theme: %s' % newTheme)
sublimeSettings.set('theme', newTheme)
sublimeSettingsChanged = True
if sublimeSettingsChanged:
sublime.save_settings('Preferences.sublime-settings')
def start(self):
self.settings = Settings(onChange=self.cycle)
self.run()
def run(self):
if not self.halt:
sublime.set_timeout(self.run, 60000) # check time every minute
self.cycle()
def stop(self):
self.halt = True
if 'sunCycle' in globals():
globals()['sunCycle'].stop()
sunCycle = SunCycle()
|
Python
| 0.977404
|
@@ -94,16 +94,73 @@
rt Sun%0A%0A
+def logToConsole(str):%0A print(__name__ + ': ' + str)%0A%0A
class Se
@@ -286,24 +286,93 @@
elf.load()%0A%0A
+ def getTimeZone(self, lat, lon):%0A return LocalTimezone()%0A%0A
def load
@@ -841,23 +841,14 @@
-self.sun = Sun(
+lat =
sett
@@ -870,17 +870,30 @@
ude', 0)
-,
+%0A lon =
setting
@@ -917,35 +917,149 @@
, 0)
-)%0A print('SunCycle:
+%0A self.sun = Sun(lat, lon)%0A self.tz = self.getTimeZone(lat, lon)%0A%0A now = datetime.now(tz=self.tz)%0A logToConsole('
sunr
@@ -1088,16 +1088,19 @@
sunrise(
+now
))%0A
@@ -1102,33 +1102,30 @@
-print('SunCycle:
+logToConsole('
sunset a
@@ -1148,16 +1148,19 @@
.sunset(
+now
))%0A%0A
@@ -1524,31 +1524,32 @@
.now(tz=
-LocalTimezone()
+self.settings.tz
)%0A
@@ -2071,33 +2071,30 @@
-print('SunCycle:
+logToConsole('
switchin
@@ -2248,21 +2248,9 @@
rue%0A
-
%0A
+
@@ -2366,25 +2366,22 @@
-print('SunCycle:
+logToConsole('
swit
@@ -2899,16 +2899,41 @@
= True%0A%0A
+# stop previous instance%0A
if 'sunC
@@ -2986,16 +2986,30 @@
stop()%0A%0A
+# start cycle%0A
sunCycle
|
e710200d6b589fb149e0dcadf84513c0bfd9382c
|
Fix some default field values
|
discovery/domain/apis.py
|
discovery/domain/apis.py
|
# -*- coding: utf-8 -*-
"""
discovery.domain.apis.py
~~~~~~~~~~~~~~~~~~~~~~~~
'apis' resource and schema settings.
:copyright: (c) 2015 by Nicola Iarocci and CIR2000.
:license: BSD, see LICENSE for more details.
"""
_schema = {
'name': {
'type': 'string',
'required': True,
'unique': True
},
'title': {
'type': 'string',
},
'description': {
'type': 'string',
},
'owner': {
'type': 'dict',
'schema': {
'name': {'type': 'string', 'required': True},
'uri': {'type': 'string'},
'contact': {'type': 'string'}
}
},
'kind': {
'type': 'string',
'required': True,
'allowed': ['Authentication', 'Discovery', 'UserData']
},
'services': {
'type': 'list',
'required': True,
'schema': {
'type': 'dict',
'schema': {
'base_address': {
'type': 'string',
'required': True
},
'status': {
'type': 'string',
'default': 'Undetermined',
'allowed': ['Active', 'Inactive', 'Suspended',
'Undetermined']
},
'documentation': {'type': 'string'},
'version': {
'type': 'dict',
'default': {'major': 0, 'minor': 0, 'build': 1},
'schema': {
'major': {'type': 'integer', 'required': True},
'minor': {'type': 'integer', 'default': 0},
'build': {'type': 'integer', 'default': 0}
}
},
'deprecated': {'type': 'boolean', 'default': False},
'discovery': {'type': 'string'},
'authentication': {
'type': 'string',
'required': True,
'allowed': ['BearerToken', 'Basic', 'None'],
}
}
}
}
}
url = 'apis'
definition = {
'url': url,
'schema': _schema,
}
|
Python
| 0.000018
|
@@ -1490,17 +1490,17 @@
build':
-1
+0
%7D,%0A
@@ -1983,39 +1983,40 @@
'
-required': True
+default': 'None'
,%0A
|
d3847357c446c4a1ac50735b983b20cf57f9c7c6
|
Fix args and return of CounterController functions
|
malcolm/controllers/countercontroller.py
|
malcolm/controllers/countercontroller.py
|
from malcolm.core.controller import Controller
from malcolm.core.attribute import Attribute
from malcolm.core.numbermeta import NumberMeta
from malcolm.core.method import takes
import numpy as np
class CounterController(Controller):
def create_attributes(self):
self.counter = Attribute(NumberMeta("counter", "A counter", np.int32))
self.counter.set_put_function(self.counter.set_value)
self.counter.set_value(0)
yield self.counter
@takes()
def reset(self):
self.counter.set_value(0)
@takes()
def increment(self):
self.counter.set_value(self.counter.value + 1)
|
Python
| 0
|
@@ -169,16 +169,25 @@
rt takes
+, returns
%0A%0Aimport
@@ -479,32 +479,47 @@
r%0A%0A @takes()%0A
+ @returns()%0A
def reset(se
@@ -516,24 +516,35 @@
f reset(self
+, args=None
):%0A s
@@ -567,16 +567,34 @@
value(0)
+%0A return %7B%7D
%0A%0A @t
@@ -596,24 +596,39 @@
@takes()%0A
+ @returns()%0A
def incr
@@ -633,24 +633,35 @@
crement(self
+, args=None
):%0A s
@@ -706,8 +706,26 @@
ue + 1)%0A
+ return %7B%7D%0A
|
270af43ffbe8974698d17ff6d5cae20fbf410f73
|
Add url enter delete element on riak
|
admin/urls.py
|
admin/urls.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .views import CubeHandler, ConnectionHandler
from .views import ElementHandler, DashboardHandler, APIElementCubeHandler
INCLUDE_URLS = [
(r"/admin/connection/?(?P<slug>[\w-]+)?", ConnectionHandler),
(r"/admin/cube/?(?P<slug>[\w-]+)?", CubeHandler),
(r"/admin/api/element/cube/?(?P<slug>[\w-]+)?", APIElementCubeHandler),
(r"/admin/element/?(?P<slug>[\w-]+)?", ElementHandler),
(r"/admin/dashboard/?(?P<slug>[\w-]+)?", DashboardHandler),
]
|
Python
| 0
|
@@ -88,16 +88,31 @@
nHandler
+, DeleteHandler
%0Afrom .v
@@ -198,16 +198,91 @@
RLS = %5B%0A
+ (r%22/admin/delete/(?P%3Cbucket%3E%5B%5Cw-%5D+)/(?P%3Cslug%3E%5B%5Cw-%5D+)%22, DeleteHandler),%0A
(r%22/
|
c08222ee89916d46d23310ca28ac32f6e69fce37
|
Fix adding software to agent
|
pyfarm/master/user_interface/agents.py
|
pyfarm/master/user_interface/agents.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment GmbH & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from httplib import BAD_REQUEST, NOT_FOUND, SEE_OTHER
except ImportError: # pragma: no cover
from http.client import BAD_REQUEST, NOT_FOUND, SEE_OTHER
from flask import render_template, request, url_for, redirect, flash
from sqlalchemy import or_
from pyfarm.core.enums import WorkState
from pyfarm.models.agent import Agent
from pyfarm.models.tag import Tag
from pyfarm.models.task import Task
from pyfarm.models.software import Software, SoftwareVersion
from pyfarm.master.application import db
def agents():
agents_query = Agent.query
filters = {}
if "tags" in request.args:
filters["tags"] = request.args.get("tags")
tags = request.args.get("tags").split(",")
tags = [x for x in tags if not x == ""]
if tags:
agents_query = agents_query.filter(Agent.tags.any(Tag.tag.in_(tags)))
if "state" in request.args:
state = request.args.get("state")
filters["state"] = state
# TODO Use the actual AgentState enum here
if state not in ["online", "offline", "disabled", "running", ""]:
return (render_template(
"pyfarm/error.html", error="unknown state"), BAD_REQUEST)
if state != "":
agents_query = agents_query.filter(Agent.state == state)
if "hostname" in request.args:
hostname = request.args.get("hostname")
filters["hostname"] = hostname
if hostname != "":
agents_query = agents_query.filter(
Agent.hostname.ilike("%%%s%%" % hostname))
order_dir = "asc"
order_by = "hostname"
if "order_by" in request.args:
order_by = request.args.get("order_by")
if order_by not in ["hostname", "remote_ip", "state"]:
return (render_template(
"pyfarm/error.html", error="unknown order key"), BAD_REQUEST)
if "order_dir" in request.args:
order_dir = request.args.get("order_dir")
if order_dir not in ["asc", "desc"]:
return (render_template(
"pyfarm/error.html", error="unknown order dir"), BAD_REQUEST)
agents_query = agents_query.order_by("%s %s" % (order_by, order_dir))
agents = agents_query.all()
return render_template("pyfarm/user_interface/agents.html",
agents=agents, filters=filters, order_by=order_by,
order_dir=order_dir,
order={"order_by": order_by, "order_dir": order_dir})
def single_agent(agent_id):
agent = Agent.query.filter_by(id=agent_id).first()
if not agent:
return (render_template(
"pyfarm/error.html", error="Agent %s not found" % agent_id),
NOT_FOUND)
tasks = Task.query.filter(Task.agent == agent,
or_(Task.state == None,
Task.state == WorkState.RUNNING)).\
order_by(Task.job_id, Task.frame)
return render_template("pyfarm/user_interface/agent.html", agent=agent,
tasks=tasks, software_items=Software.query)
def delete_single_agent(agent_id):
agent = Agent.query.filter_by(id=agent_id).first()
if not agent:
return (render_template(
"pyfarm/error.html", error="Agent %s not found" % agent_id),
NOT_FOUND)
db.session.delete(agent)
db.session.commit()
flash("Agent %s has been deleted" % agent.hostname)
return redirect(url_for("agents_index_ui"), SEE_OTHER)
def agent_add_software(agent_id):
agent = Agent.query.filter_by(id=agent_id).first()
if not agent:
return (render_template(
"pyfarm/error.html", error="Agent %s not found" % agent_id),
NOT_FOUND)
software = Software.query.filter_by(id=int(request.form["software"])).first()
if not software:
return (render_template(
"pyfarm/error.html", error="Software %s not found" %
request.form["software"]), NOT_FOUND)
version = SoftwareVersion.query.filter_by(
id=int(request.form["software"]), software=software).first()
if not version:
return (render_template(
"pyfarm/error.html", error="Software version %s not found" %
request.form["version"]), NOT_FOUND)
agent.software_versions.append(version)
db.session.add(agent)
db.session.add(version)
db.session.commit()
flash("Software %s %s has been added to agent %s" %
(software.software, version.version, agent.hostname))
return redirect(url_for("single_agent_ui", agent_id=agent.id), SEE_OTHER)
def agent_delete_software(agent_id, version_id):
agent = Agent.query.filter_by(id=agent_id).first()
if not agent:
return (render_template(
"pyfarm/error.html", error="Agent %s not found" % agent_id),
NOT_FOUND)
version = SoftwareVersion.query.filter_by(id=version_id).first()
if not version:
return (render_template(
"pyfarm/error.html", error="Software version %s not found" %
version_id), NOT_FOUND)
agent.software_versions.remove(version)
db.session.add(agent)
db.session.commit()
flash("Software %s %s removed from agent %s" %
(version.software.software, version.version, agent.hostname))
return redirect(url_for("single_agent_ui", agent_id=agent.id), SEE_OTHER)
|
Python
| 0.000005
|
@@ -4804,32 +4804,31 @@
quest.form%5B%22
-software
+version
%22%5D), softwar
|
c4fadf89161e99514037e8af7953fca0ab13b28e
|
Fix import.
|
pymatgen/symmetry/tests/test_groups.py
|
pymatgen/symmetry/tests/test_groups.py
|
#!/usr/bin/env python
"""
TODO: Modify unittest doc.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "ongsp@ucsd.edu"
__date__ = "4/10/14"
import unittest
import numpy as np
from symmetry.groups import PointGroup, SpaceGroup
class PointGroupTest(unittest.TestCase):
def test_order(self):
order = {"mmm": 8, "432": 24, "-6m2": 12}
for k, v in order.items():
pg = PointGroup(k)
self.assertEqual(order[k], len(pg.symmetry_ops))
class SpaceGroupTest(unittest.TestCase):
def test_order_symm_ops(self):
for name in SpaceGroup.SG_SYMBOLS:
sg = SpaceGroup(name)
self.assertEqual(len(sg.symmetry_ops), sg.order)
def test_crystal_system(self):
sg = SpaceGroup("R-3c")
self.assertEqual(sg.crystal_system, "Trigonal")
sg = SpaceGroup("R-3cH")
self.assertEqual(sg.crystal_system, "Trigonal")
def test_get_orbit(self):
sg = SpaceGroup("Fm-3m")
p = np.random.random_integers(0, 100, size=(3,))
p /= 100
self.assertLessEqual(len(sg.get_orbit(p)), sg.order)
if __name__ == '__main__':
unittest.main()
|
Python
| 0.999591
|
@@ -321,16 +321,25 @@
p%0A%0Afrom
+pymatgen.
symmetry
|
48e589b200894121f32bd96b39f29ad5c0120991
|
add test_delete_task_id_not_integer
|
tests/test_agent/test_http_api_tasks.py
|
tests/test_agent/test_http_api_tasks.py
|
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid
from json import dumps
try:
from httplib import OK
except ImportError: # pragma: no cover
from http.client import OK
from json import loads
from datetime import datetime
from twisted.web.server import NOT_DONE_YET
from pyfarm.agent.config import config
from pyfarm.agent.testutil import BaseAPITestCase
from pyfarm.agent.http.api.tasks import Tasks
class TestGetTasks(BaseAPITestCase):
URI = "/tasks/"
CLASS = Tasks
def test_master_contacted(self):
try:
last_master_contact = config["last_master_contact"]
except KeyError:
last_master_contact = None
request = self.get(headers={"User-Agent": config["master_user_agent"]})
tasks = Tasks()
tasks.render(request)
self.assertNotEqual(last_master_contact, config["last_master_contact"])
def test_returns_current_assignments(self):
# NOTE: current_assignments is improperly constructed here but we
# only care about the values.
config["current_assignments"] = {
"a": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 1}]},
"b": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 2}]},
"c": {u"tasks": [{u"id": unicode(uuid.uuid4()), u"frame": 3}]}
}
current_tasks = []
for item in config["current_assignments"].values():
current_tasks += item["tasks"]
request = self.get()
tasks = Tasks()
tasks.render(request)
self.assertEqual(request.written, [dumps(current_tasks)])
|
Python
| 0.000587
|
@@ -691,16 +691,29 @@
mport OK
+, BAD_REQUEST
%0Aexcept
@@ -779,107 +779,22 @@
t OK
-%0A%0Afrom json import loads%0Afrom datetime import datetime%0A%0Afrom twisted.web.server import NOT_DONE_YET
+, BAD_REQUEST%0A
%0A%0Afr
@@ -942,11 +942,8 @@
Test
-Get
Task
@@ -2111,12 +2111,399 @@
t_tasks)%5D)%0A%0A
+ def test_delete_task_id_not_integer(self):%0A request = self.delete(%0A uri=%5B%22aaa%22%5D,%0A headers=%7B%22User-Agent%22: config%5B%22master_user_agent%22%5D%7D)%0A%0A tasks = Tasks()%0A tasks.render(request)%0A self.assertEqual(%0A request.written, %5B'%7B%22error%22: %22Task id was not an integer%22%7D'%5D)%0A self.assertEqual(request.responseCode, BAD_REQUEST)%0A%0A%0A%0A%0A
|
222067de26b3c43e719896df27ced9dc2398655a
|
make sirepo default
|
sirepo/feature_config.py
|
sirepo/feature_config.py
|
# -*- coding: utf-8 -*-
u"""List of features available
:copyright: Copyright (c) 2016 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
# defer all imports so *_CODES is available to testing functions
#: Codes that depend on other codes. [x][0] depends on [x][1]
_DEPENDENT_CODES = [
['jspec', 'elegant'],
['controls', 'madx'],
]
#: Codes on prod
PROD_FOSS_CODES = frozenset((
'controls',
'elegant',
'jspec',
'madx',
'ml',
'opal',
'radia',
'shadow',
'srw',
'synergia',
'warppba',
'warpvnd',
'zgoubi',
))
#: Codes on dev, alpha, and beta
_NON_PROD_FOSS_CODES = frozenset((
'irad',
'myapp',
'rcscon',
'rs4pi',
'silas',
))
#: All possible open source codes
FOSS_CODES = PROD_FOSS_CODES.union(_NON_PROD_FOSS_CODES)
#: Configuration
_cfg = None
def auth_controlled_sim_types():
"""All sim types that require granted authentication to access
Returns:
frozenset: enabled sim types that require role
"""
return frozenset(
cfg().proprietary_sim_types.union(cfg().default_proprietary_sim_types),
)
def cfg():
"""global configuration
Returns:
dict: configurated features
"""
global _cfg
return _cfg or _init()
def for_sim_type(sim_type):
"""Get cfg for simulation type
Args:
sim_type (str): srw, warppba, etc.
Returns:
dict: application specific config
"""
import pykern.pkcollections
c = cfg()
return pykern.pkcollections.PKDict(
c[sim_type] if sim_type in c else {}
)
def _init():
from pykern import pkconfig
global _cfg
def b(msg, dev=False):
return (
pkconfig.channel_in('dev') if dev else pkconfig.channel_in_internal_test(),
bool,
msg,
)
_cfg = pkconfig.init(
# No secrets should be stored here (see sirepo.job.agent_env)
api_modules=((), set, 'optional api modules, e.g. status'),
default_proprietary_sim_types=(set(), set, 'codes where all users are authorized by default but that authorization can be revoked'),
jspec=dict(
derbenevskrinsky_force_formula=b('Include Derbenev-Skrinsky force formula'),
),
package_path=(
tuple(),
tuple,
'Names of root packages that should be checked for codes and resources. Order is important, the first package with a matching code/resource will be used. sirepo added automatically.',
),
proprietary_sim_types=(set(), set, 'codes that require authorization'),
#TODO(robnagler) make this a sim_type config like srw and warpvnd
rs4pi_dose_calc=(False, bool, 'run the real dose calculator'),
sim_types=(set(), set, 'simulation types (codes) to be imported'),
srw=dict(
app_url=('/en/xray-beamlines.html', str, 'URL for SRW link'),
beamline3d=b('Show 3D beamline plot'),
hide_guest_warning=b('Hide the guest warning in the UI', dev=True),
mask_in_toolbar=b('Show the mask element in toolbar'),
show_open_shadow=(pkconfig.channel_in_internal_test(), bool, 'Show "Open as a New Shadow Simulation" menu item'),
show_rsopt_ml=(pkconfig.channel_in_internal_test(), bool, 'Show "Export ML Script" menu item'),
),
warpvnd=dict(
allow_3d_mode=(True, bool, 'Include 3D features in the Warp VND UI'),
display_test_boxes=b('Display test boxes to visualize 3D -> 2D projections'),
),
)
i = _cfg.proprietary_sim_types.intersection(_cfg.default_proprietary_sim_types)
assert not i, \
f'{i}: cannot be in proprietary_sim_types and default_proprietary_sim_types'
s = set(
_cfg.sim_types or (
PROD_FOSS_CODES if pkconfig.channel_in('prod') else FOSS_CODES
)
)
s.update(_cfg.proprietary_sim_types, _cfg.default_proprietary_sim_types)
for v in _DEPENDENT_CODES:
if v[0] in s:
s.add(v[1])
_cfg.sim_types = frozenset(s)
_check_packages(_cfg.package_path)
return _cfg
def _check_packages(packages):
import importlib
for p in packages:
importlib.import_module(p)
|
Python
| 0.000009
|
@@ -2473,16 +2473,26 @@
tuple(
+%5B'sirepo'%5D
),%0A
|
c59c91200331c8981aa8bc95eccff1e418f5b332
|
Fix tests
|
tests/test_archives/test_serializers.py
|
tests/test_archives/test_serializers.py
|
import pytest
from api.archives.serializers import (
ArchivedBuildJobSerializer,
ArchivedExperimentSerializer,
ArchivedExperimentGroupSerializer,
ArchivedJobSerializer,
ArchivedProjectSerializer,
)
from api.build_jobs.serializers import BookmarkedBuildJobSerializer
from api.experiment_groups.serializers import BookmarkedExperimentGroupSerializer
from api.experiments.serializers import BookmarkedExperimentSerializer
from api.jobs.serializers import BookmarkedJobSerializer
from api.projects.serializers import BookmarkedProjectSerializer
from db.models.build_jobs import BuildJob
from db.models.experiment_groups import ExperimentGroup
from db.models.experiments import Experiment
from db.models.jobs import Job
from db.models.projects import Project
from factories.factory_build_jobs import BuildJobFactory
from factories.factory_experiment_groups import ExperimentGroupFactory
from factories.factory_experiments import ExperimentFactory
from factories.factory_jobs import JobFactory
from factories.factory_projects import ProjectFactory
from factories.factory_users import UserFactory
from tests.utils import BaseTest
@pytest.mark.archives_mark
class TestArchiveSerializer(BaseTest):
serializer_class = None
model_serializer_class = None
model_class = None
factory_class = None
def setUp(self):
super().setUp()
self.user = UserFactory()
self.obj1 = self.factory_class() # pylint:disable=not-callable
self.obj2 = self.factory_class() # pylint:disable=not-callable
def test_serialize_one(self):
data = self.serializer_class(self.obj1).data # pylint:disable=not-callable
assert 'deleted' in set(data.keys())
data.pop('deleted')
assert data == self.model_serializer_class(self.obj1).data # noqa
def test_serialize_many(self):
data = self.serializer_class(self.model_class.archived.all(), many=True).data # noqa
assert len(data) == 2
for d in data:
assert 'deleted' in set(d.keys())
@pytest.mark.archives_mark
class TestArchiveBuildJobSerializer(TestArchiveSerializer):
serializer_class = ArchivedBuildJobSerializer
model_serializer_class = BookmarkedBuildJobSerializer
model_class = BuildJob
factory_class = BuildJobFactory
@pytest.mark.archives_mark
class TestArchiveJobSerializer(TestArchiveSerializer):
serializer_class = ArchivedJobSerializer
model_serializer_class = BookmarkedJobSerializer
model_class = Job
factory_class = JobFactory
@pytest.mark.archives_mark
class TestArchiveExperimentSerializer(TestArchiveSerializer):
serializer_class = ArchivedExperimentSerializer
model_serializer_class = BookmarkedExperimentSerializer
model_class = Experiment
factory_class = ExperimentFactory
@pytest.mark.archives_mark
class TestArchiveExperimentGroupSerializer(TestArchiveSerializer):
serializer_class = ArchivedExperimentGroupSerializer
model_serializer_class = BookmarkedExperimentGroupSerializer
model_class = ExperimentGroup
factory_class = ExperimentGroupFactory
@pytest.mark.archives_mark
class TestArchiveProjectSerializer(TestArchiveSerializer):
serializer_class = ArchivedProjectSerializer
model_serializer_class = BookmarkedProjectSerializer
model_class = Project
factory_class = ProjectFactory
del TestArchiveSerializer
|
Python
| 0.000003
|
@@ -1426,32 +1426,44 @@
f.factory_class(
+deleted=True
) # pylint:disa
@@ -1518,16 +1518,28 @@
y_class(
+deleted=True
) # pyl
|
cb4752c102ddaa11be24b60081ce32e0c749fa73
|
Use actual dbus-python API to call GetHandleOwners in test-muc-ownership
|
tests/twisted/muc/test-muc-ownership.py
|
tests/twisted/muc/test-muc-ownership.py
|
"""
Test support for the HANDLE_OWNERS_NOT_AVAILABLE group flag, and calling
GetHandleOwners on MUC members.
By default, MUC channels should have the flag set. The flag should be unset
when presence is received that includes the MUC JID's owner JID.
"""
import dbus
from twisted.words.xish import domish, xpath
from gabbletest import go, make_result_iq, exec_test
from servicetest import call_async, lazy, match, tp_name_prefix, EventPattern
def test(q, bus, conn, stream):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged', args=[0, 1])
# Need to call this asynchronously as it involves Gabble sending us a
# query
call_async(q, conn, 'RequestHandles', 2, ['chat@conf.localhost'])
event = q.expect('stream-iq', to='conf.localhost',
query_ns='http://jabber.org/protocol/disco#info')
result = make_result_iq(stream, event.stanza)
feature = result.firstChildElement().addElement('feature')
feature['var'] = 'http://jabber.org/protocol/muc'
stream.send(result)
event = q.expect('dbus-return', method='RequestHandles')
room_handle = event.value[0][0]
call_async(q, conn, 'RequestChannel',
'org.freedesktop.Telepathy.Channel.Type.Text', 2, room_handle, True)
gfc, _, _ = q.expect_many(
EventPattern('dbus-signal', signal='GroupFlagsChanged'),
EventPattern('dbus-signal', signal='MembersChanged',
args=[u'', [], [], [], [2], 0, 0]),
EventPattern('stream-presence', to='chat@conf.localhost/test'))
assert gfc.args[1] == 0
event = q.expect('dbus-signal', signal='GroupFlagsChanged')
assert event.args == [0, 1]
# Send presence for anonymous other member of room.
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/bob'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'owner'
item['role'] = 'moderator'
stream.send(presence)
# Send presence for anonymous other member of room (2)
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/brian'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'owner'
item['role'] = 'moderator'
stream.send(presence)
# Send presence for nonymous other member of room.
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/che'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'none'
item['role'] = 'participant'
item['jid'] = 'che@foo.com'
stream.send(presence)
# Send presence for nonymous other member of room (2)
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/chris'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'none'
item['role'] = 'participant'
item['jid'] = 'chris@foo.com'
stream.send(presence)
# Send presence for own membership of room.
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/test'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'none'
item['role'] = 'participant'
stream.send(presence)
event = q.expect('dbus-signal', signal='GroupFlagsChanged')
# Since we received MUC presence that contains an owner JID, the
# OWNERS_NOT_AVAILABLE flag should be removed.
assert event.args == [0, 1024]
event = q.expect('dbus-signal', signal='HandleOwnersChanged',
args=[{2: 0, 3: 0, 4: 0, 5: 6, 7: 8}, []])
event = q.expect('dbus-signal', signal='MembersChanged',
args=[u'', [2, 3, 4, 5, 7], [], [], [], 0, 0])
assert conn.InspectHandles(1, [2]) == [
'chat@conf.localhost/test']
assert conn.InspectHandles(1, [3]) == [
'chat@conf.localhost/bob']
assert conn.InspectHandles(1, [4]) == [
'chat@conf.localhost/brian']
assert conn.InspectHandles(1, [5]) == [
'chat@conf.localhost/che']
assert conn.InspectHandles(1, [6]) == [
'che@foo.com']
assert conn.InspectHandles(1, [7]) == [
'chat@conf.localhost/chris']
assert conn.InspectHandles(1, [8]) == [
'chris@foo.com']
event = q.expect('dbus-return', method='RequestChannel')
# Check that GetHandleOwners works.
# FIXME: using non-API!
bus = conn._bus
chan = bus.get_object(conn._named_service, event.value[0])
group = dbus.Interface(chan,
'org.freedesktop.Telepathy.Channel.Interface.Group')
assert group.GetHandleOwners([5, 7]) == [6, 8]
conn.Disconnect()
q.expect('dbus-signal', signal='StatusChanged', args=[2, 1])
if __name__ == '__main__':
exec_test(test)
|
Python
| 0.000001
|
@@ -4577,51 +4577,31 @@
-# FIXME: using non-API!%0A bus = conn._bus
+bus = dbus.SessionBus()
%0A
@@ -4632,22 +4632,16 @@
onn.
+bus
_name
-d_service
, ev
|
7bdf0ba2ffa74d5a768274573171b11441179713
|
Add processLine() function
|
TwircBot.py
|
TwircBot.py
|
import socket
import sys
class TwircBot(object):
"""
Basic Bot class that reads in a config file, connects to chat rooms,
and logs the results.
"""
def __init__(self, config_file_name):
"""Parse the configuration file to retrieve the config parameters """
self.host='irc.twitch.tv'
self.port=6667
config_file = open(config_file_name,"r")
for line in config_file:
words = line.split()
if words[0] == "oauth:":
self.oauth = line.split()[1]
elif words[0] == "nick:":
self.nick = line.split()[1]
elif words[0] == "channels:":
self.channel_list = line.split()[1:]
elif words[0] == "log:":
self.log_file_name = line.split()[1]
config_file.close()
def connect(self):
"""Connect to twitch chat"""
user_string = bytes('USER ' + self.nick + ' \r\n', 'utf-8')
nick_string = bytes('NICK ' + self.nick + ' \r\n', 'utf-8')
oauth_string = bytes('PASS oauth:' + self.oauth + ' \r\n', 'utf-8')
cap_req_string = bytes('CAP REQ :twitch.tv/membership \r\n', 'utf-8')
irc = socket.socket()
irc.connect((self.host, self.port))
irc.send(user_string)
irc.send(oauth_string)
irc.send(nick_string)
irc.send(cap_req_string)
for channels in self.channel_list:
channel_string = bytes('JOIN #' + channels + ' \r\n', 'utf-8')
irc.send(channel_string)
while True:
data = irc.recv(4096)
if data:
print(data.decode('utf-8'))
log_file = open(self.log_file_name,"a")
log_file.write(data.decode('utf-8'))
log_file.close()
def print_config(self):
"""
Prints a string that contains all the configuration variables
for a given TwircBot instance.
"""
config_string = "\n***** TwircBot config *****\n"
config_string += "Connecting to " + self.nick + "@"
config_string += self.host + ":" + str(self.port) + "\n"
config_string += "Channels: "
for channels in self.channel_list:
config_string += str(channels) + ", "
config_string = config_string[:-2] #Remove last comma and space
config_string += "\nLog file: " + self.log_file_name
config_string += "\n***** TwircBot config *****\n"
print(config_string)
|
Python
| 0.000015
|
@@ -277,24 +277,59 @@
ameters %22%22%22%0A
+ self.irc = socket.socket()%0A
self
@@ -864,32 +864,505 @@
g_file.close()%0A%0A
+ def processLine(self, line):%0A words = line.split()%0A if words%5B0%5D == 'PING': %0A self.irc.send(bytes('PONG :tmi.twitch.tv%5Cr%5Cn', 'utf-8'))%0A print('I have received a ping!!')%0A irc.send(bytes('PRIVMSG #' + self.nick + ' :I received a PING!!%5Cr%5Cn', 'utf-8'));%0A for word in words:%0A if word == bytes('smart','utf-8'):%0A self.irc.send(bytes('PRIVMSG #' + self.nick + ' :You are smart%5Cr%5Cn', 'utf-8'));%0A%0A
%0A def connect
@@ -1709,38 +1709,13 @@
-irc = socket.socket()%0A
+self.
irc.
@@ -1746,32 +1746,37 @@
.port))%0A
+self.
irc.send(user_st
@@ -1782,32 +1782,37 @@
tring) %0A
+self.
irc.send(oauth_s
@@ -1819,32 +1819,37 @@
tring) %0A
+self.
irc.send(nick_st
@@ -1855,32 +1855,37 @@
tring) %0A
+self.
irc.send(cap_req
@@ -2017,32 +2017,37 @@
8')%0A
+self.
irc.send(channel
@@ -2097,16 +2097,21 @@
data =
+self.
irc.recv
@@ -2324,16 +2324,55 @@
close()%0A
+ self.processLine(data)%0A
%0A%0A de
@@ -3066,8 +3066,9 @@
string)%0A
+%0A
|
4d04a9f96c994e4718a036f9c984231bf350918a
|
Optimize single term search
|
TxtIndex.py
|
TxtIndex.py
|
from TxtReader import TxtReader
from StopWords import StopWords
import re
import string
class TxtIndex:
def __init__(self, fh):
self.stop_words = StopWords()
self.__fh = fh
self.__reader = TxtReader(fh)
self.build_index()
def build_index(self):
self.keyword2pointers = {}
self.__reader.seek(0)
while True:
word = self.__reader.nextWord()
if word == None:
break
word = self.norm_word(word)
if self.stop_words.is_stop_word(word):
continue
if word not in self.keyword2pointers:
self.keyword2pointers[word] = []
self.keyword2pointers[word].append(self.__reader.startPointer())
def norm_word(self, word):
word = word.lower()
word = word.rstrip(string.punctuation)
return word
def norm_phrase(self, phrase):
phrase = re.sub('\s+', ' ', phrase)
return ' '.join([self.norm_word(x) for x in phrase.split(' ')])
def get_pointers(self, word):
return self.keyword2pointers.get(self.norm_word(word))
def exact_search(self, phrase):
phrase = self.norm_phrase(phrase)
words = phrase.split(' ')
try_word = None
try_word_idx = None
try_word_pointers = []
for i in range(len(words)):
word = words[i]
if self.stop_words.is_stop_word(word):
continue
pointers = self.get_pointers(word)
if pointers == None:
return None
if try_word == None or len(try_word_pointers) > len(pointers):
try_word = word
try_word_idx = i
try_word_pointers = pointers
extend_left_by = len(' '.join(words[0:i])) + len(words[0:i])
extend_right_by = len(' '.join(words[i:])) + len(words[i:])
phrase_re = re.compile(phrase.replace(' ', '\s+'), re.I)
found = []
for pointer in try_word_pointers:
s = pointer - extend_left_by
l = extend_left_by + extend_right_by
if s < 0:
s = 0
self.__fh.seek(s)
excerpt = self.__fh.read(l)
m = phrase_re.search(excerpt)
if m:
found.append(s + m.start())
if len(found) > 0:
return found
else:
return None
|
Python
| 0.000653
|
@@ -1246,16 +1246,93 @@
it(' ')%0A
+%0A if len(words) == 1:%0A return self.get_pointers(words%5B0%5D)%0A%0A
|
ddcd57017fa9451e85fccf92ec716ae18f91467c
|
Set default model argument
|
examples/memnn/train_memnn.py
|
examples/memnn/train_memnn.py
|
#!/usr/bin/env python
import argparse
import collections
import chainer
from chainer.training import extensions
import babi
import memnn
def train(train_data_path, test_data_path, args):
vocab = collections.defaultdict(lambda: len(vocab))
vocab['<unk>'] = 0
train_data = babi.read_data(vocab, train_data_path)
test_data = babi.read_data(vocab, test_data_path)
print('Training data: %s: %d' % (train_data_path, len(train_data)))
print('Test data: %s: %d' % (test_data_path, len(test_data)))
train_data = memnn.convert_data(train_data, args.max_memory)
test_data = memnn.convert_data(test_data, args.max_memory)
encoder = memnn.make_encoder(args.sentence_repr)
network = memnn.MemNN(
args.unit, len(vocab), encoder, args.max_memory, args.hop)
model = chainer.links.Classifier(network, label_key='answer')
opt = chainer.optimizers.Adam()
if args.gpu >= 0:
chainer.cuda.get_device(args.gpu).use()
model.to_gpu()
opt.setup(model)
train_iter = chainer.iterators.SerialIterator(
train_data, args.batchsize)
test_iter = chainer.iterators.SerialIterator(
test_data, args.batchsize, repeat=False, shuffle=False)
updater = chainer.training.StandardUpdater(
train_iter, opt, device=args.gpu)
trainer = chainer.training.Trainer(updater, (args.epoch, 'epoch'))
@chainer.training.make_extension()
def fix_ignore_label(trainer):
network.fix_ignore_label()
trainer.extend(fix_ignore_label)
trainer.extend(extensions.Evaluator(test_iter, model, device=args.gpu))
trainer.extend(extensions.LogReport())
trainer.extend(extensions.PrintReport(
['epoch', 'main/loss', 'validation/main/loss',
'main/accuracy', 'validation/main/accuracy']))
trainer.extend(extensions.ProgressBar(update_interval=10))
trainer.run()
if args.model:
memnn.save_model(args.model, model, vocab)
def main():
parser = argparse.ArgumentParser(
description='Chainer example: End-to-end memory networks')
parser.add_argument('TRAIN_DATA',
help='Path to training data in bAbI dataset '
'(e.g. "qa1_single-supporting-fact_train.txt")')
parser.add_argument('TEST_DATA',
help='Path to test data in bAbI dataset '
'(e.g. "qa1_single-supporting-fact_test.txt")')
parser.add_argument('--model', '-m',
help='Model directory where it stors trained model')
parser.add_argument('--batchsize', '-b', type=int, default=100,
help='Number of images in each mini batch')
parser.add_argument('--epoch', '-e', type=int, default=100,
help='Number of sweeps over the dataset to train')
parser.add_argument('--gpu', '-g', type=int, default=-1,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--unit', '-u', type=int, default=20,
help='Number of units')
parser.add_argument('--hop', '-H', type=int, default=3,
help='Number of hops')
parser.add_argument('--max-memory', type=int, default=50,
help='Maximum number of memory')
parser.add_argument('--sentence-repr',
choices=['bow', 'pe'], default='bow',
help='Sentence representation. '
'Select from BoW ("bow") or position encoding ("pe")')
args = parser.parse_args()
train(args.TRAIN_DATA, args.TEST_DATA, args)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -2459,16 +2459,33 @@
', '-m',
+ default='model',
%0A
@@ -2536,16 +2536,17 @@
it stor
+e
s traine
|
60504711a3685a0842d66cc5b9beac1c3f5fbf71
|
Fix duplicate COPY lines in Dockerfile
|
cage/container/handler.py
|
cage/container/handler.py
|
import os
import re
import urllib.request
from docker import Client
# TODO: Check if Docker daemon is running. Start it if it's not.
class DockerNotInstalledError(Exception):
pass
class ContainerHandler:
def __init__(self, cage_path, app_path):
self.__path = cage_path
self.__app_path = app_path
self.__name = os.path.basename(os.path.normpath(cage_path))
self.__client = Client(base_url='unix://var/run/docker.sock')
self.__container = None
self.__image_name = "cage/" + self.__name
@staticmethod
def get_python_versions():
# TODO: Download in a better place. Current dir does not seem like a good idea
manifest_url = "https://raw.githubusercontent.com/docker-library/official-images/master/library/python"
local_manifest_path = "manifest.txt"
if not os.path.exists(local_manifest_path):
response = urllib.request.urlopen(manifest_url)
data = response.read()
text = data.decode("utf-8")
with open(local_manifest_path, "w") as manifest_file:
manifest_file.write(text)
versions = None
with open(local_manifest_path, "r") as manifest_file:
versions = ContainerHandler.__parse_manifest_file(manifest_file)
return versions
@staticmethod
def __parse_manifest_file(manifest):
regex = re.compile("^Directory: (?P<version>\d.\d)$")
versions = []
for line in manifest:
match = regex.search(line)
if match is not None:
versions.append(match.group("version"))
return versions
@staticmethod
def check_docker_installed():
# TODO: Add check here
pass
def create_image(self, python_version=None):
if python_version is not None and not os.path.exists(os.path.join(self.__app_path, "Dockerfile")):
dockerfile_content = "FROM python:{}\n" \
"RUN mkdir -p /usr/src/app\n" \
"WORKDIR /usr/src/app\n" \
.format(python_version)
with open(os.path.join(self.__app_path, "Dockerfile"), "w") as dockerfile:
dockerfile.write(dockerfile_content)
response = self.__client.build(path=self.__app_path, tag=self.__image_name, rm=True)
return response
def start(self, command):
res = self.create_image()
for line in res:
print(line)
container = self.__client.create_container(self.__image_name, command=command)
self.__client.start(container)
return self.redirect_logs(container)
def add_files(self, path):
self.__write_to_dockerfile("COPY {} /usr/src/app\n".format(path))
def redirect_logs(self, container):
logs = self.__client.logs(container, stream=True)
return logs
def __write_to_dockerfile(self, line):
with open(os.path.join(self.__app_path, "Dockerfile"), "a+") as dockerfile:
if line not in dockerfile.read():
dockerfile.write(line)
|
Python
| 0.000107
|
@@ -2759,18 +2759,16 @@
/src/app
-%5Cn
%22.format
@@ -2775,16 +2775,16 @@
(path))%0A
+
%0A def
@@ -3006,10 +3006,9 @@
), %22
-a+
+r
%22) a
@@ -3037,40 +3037,193 @@
-if line not in dockerfile.read()
+dockerfile.seek(0)%0A line_exists = line in dockerfile.read()%0A%0A if not line_exists:%0A with open(os.path.join(self.__app_path, %22Dockerfile%22), %22a%22) as dockerfile
:%0A
@@ -3255,12 +3255,51 @@
write(line)%0A
+ dockerfile.write(%22%5Cn%22)%0A
|
6c3fffe4a30e4664d44418081f008c0b47537bc0
|
Add `noplot` option to MNIST example
|
examples/mnist/train_mnist.py
|
examples/mnist/train_mnist.py
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import chainer
import chainer.functions as F
import chainer.links as L
from chainer import training
from chainer.training import extensions
# Network definition
class MLP(chainer.Chain):
def __init__(self, n_units, n_out):
super(MLP, self).__init__()
with self.init_scope():
# the size of the inputs to each layer will be inferred
self.l1 = L.Linear(None, n_units) # n_in -> n_units
self.l2 = L.Linear(None, n_units) # n_units -> n_units
self.l3 = L.Linear(None, n_out) # n_units -> n_out
def __call__(self, x):
h1 = F.relu(self.l1(x))
h2 = F.relu(self.l2(h1))
return self.l3(h2)
def main():
parser = argparse.ArgumentParser(description='Chainer example: MNIST')
parser.add_argument('--batchsize', '-b', type=int, default=100,
help='Number of images in each mini-batch')
parser.add_argument('--epoch', '-e', type=int, default=20,
help='Number of sweeps over the dataset to train')
parser.add_argument('--frequency', '-f', type=int, default=-1,
help='Frequency of taking a snapshot')
parser.add_argument('--gpu', '-g', type=int, default=-1,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--out', '-o', default='result',
help='Directory to output the result')
parser.add_argument('--resume', '-r', default='',
help='Resume the training from snapshot')
parser.add_argument('--unit', '-u', type=int, default=1000,
help='Number of units')
args = parser.parse_args()
print('GPU: {}'.format(args.gpu))
print('# unit: {}'.format(args.unit))
print('# Minibatch-size: {}'.format(args.batchsize))
print('# epoch: {}'.format(args.epoch))
print('')
# Set up a neural network to train
# Classifier reports softmax cross entropy loss and accuracy at every
# iteration, which will be used by the PrintReport extension below.
model = L.Classifier(MLP(args.unit, 10))
if args.gpu >= 0:
# Make a specified GPU current
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu() # Copy the model to the GPU
# Setup an optimizer
optimizer = chainer.optimizers.Adam()
optimizer.setup(model)
# Load the MNIST dataset
train, test = chainer.datasets.get_mnist()
train_iter = chainer.iterators.SerialIterator(train, args.batchsize)
test_iter = chainer.iterators.SerialIterator(test, args.batchsize,
repeat=False, shuffle=False)
# Set up a trainer
updater = training.StandardUpdater(train_iter, optimizer, device=args.gpu)
trainer = training.Trainer(updater, (args.epoch, 'epoch'), out=args.out)
# Evaluate the model with the test dataset for each epoch
trainer.extend(extensions.Evaluator(test_iter, model, device=args.gpu))
# Dump a computational graph from 'loss' variable at the first iteration
# The "main" refers to the target link of the "main" optimizer.
trainer.extend(extensions.dump_graph('main/loss'))
# Take a snapshot for each specified epoch
frequency = args.epoch if args.frequency == -1 else max(1, args.frequency)
trainer.extend(extensions.snapshot(), trigger=(frequency, 'epoch'))
# Write a log of evaluation statistics for each epoch
trainer.extend(extensions.LogReport())
# Save two plot images to the result dir
if extensions.PlotReport.available():
trainer.extend(
extensions.PlotReport(['main/loss', 'validation/main/loss'],
'epoch', file_name='loss.png'))
trainer.extend(
extensions.PlotReport(
['main/accuracy', 'validation/main/accuracy'],
'epoch', file_name='accuracy.png'))
# Print selected entries of the log to stdout
# Here "main" refers to the target link of the "main" optimizer again, and
# "validation" refers to the default name of the Evaluator extension.
# Entries other than 'epoch' are reported by the Classifier link, called by
# either the updater or the evaluator.
trainer.extend(extensions.PrintReport(
['epoch', 'main/loss', 'validation/main/loss',
'main/accuracy', 'validation/main/accuracy', 'elapsed_time']))
# Print a progress bar to stdout
trainer.extend(extensions.ProgressBar())
if args.resume:
# Resume from a snapshot
chainer.serializers.load_npz(args.resume, trainer)
# Run the training
trainer.run()
if __name__ == '__main__':
main()
|
Python
| 0.000005
|
@@ -1731,16 +1731,148 @@
units')%0A
+ parser.add_argument('--noplot', dest='plot', action='store_false',%0A help='Disable PlotReport extension')%0A
args
@@ -3759,16 +3759,30 @@
%0A if
+args.plot and
extensio
|
a639d3313d5a4d3134461fd51376777c4898d4cc
|
make the exception catching generic =(
|
examples/n2hp_cube_example.py
|
examples/n2hp_cube_example.py
|
import astropy
import pyspeckit
import os
import astropy.units as u
import warnings
import socket
from astropy import wcs
if not os.path.exists('n2hp_cube.fit'):
import astropy.utils.data as aud
from astropy.io import fits
try:
f = aud.download_file('ftp://cdsarc.u-strasbg.fr/pub/cats/J/A%2BA/472/519/fits/opha_n2h.fit')
except socket.timeout as ex:
# travis-ci can't handle ftp:
# https://blog.travis-ci.com/2018-07-23-the-tale-of-ftp-at-travis-ci
print("Failed to download from ftp. Exception was: {0}".format(ex))
f = aud.download_file('http://cdsarc.u-strasbg.fr/ftp/cats/J/A+A/472/519/fits/opha_n2h.fit')
with fits.open(f) as ff:
ff[0].header['CUNIT3'] = 'm/s'
for kw in ['CTYPE4','CRVAL4','CDELT4','CRPIX4','CROTA4']:
if kw in ff[0].header:
del ff[0].header[kw]
ff.writeto('n2hp_cube.fit')
# Load the spectral cube cropped in the middle for efficiency
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=wcs.FITSFixedWarning)
spc = pyspeckit.Cube('n2hp_cube.fit')[:,25:28,12:15]
# Set the velocity convention: in the future, this may be read directly from
# the file, but for now it cannot be.
spc.xarr.refX = 93176265000.0*u.Hz
spc.xarr.velocity_convention = 'radio'
spc.xarr.convert_to_unit('km/s')
# Register the fitter
# The N2H+ fitter is 'built-in' but is not registered by default; this example
# shows how to register a fitting procedure
# 'multi' indicates that it is possible to fit multiple components and a
# background will not automatically be fit 4 is the number of parameters in the
# model (excitation temperature, optical depth, line center, and line width)
spc.Registry.add_fitter('n2hp_vtau', pyspeckit.models.n2hp.n2hp_vtau_fitter, 4)
# Get a measurement of the error per pixel
errmap = spc.slice(20, 28, unit='km/s').cube.std(axis=0)
# A good way to write a cube fitter is to have it load from disk if the cube
# fit was completed successfully in the past
if os.path.exists('n2hp_fitted_parameters.fits'):
spc.load_model_fit('n2hp_fitted_parameters.fits', npars=4, npeaks=1)
else:
# Run the fitter
# Estimated time to completion ~ 2 minutes
spc.fiteach(fittype='n2hp_vtau',
guesses=[5,0.5,3,1], # Tex=5K, tau=0.5, v_center=12, width=1 km/s
signal_cut=3, # minimize the # of pixels fit for the example
start_from_point=(2,2), # start at a pixel with signal
errmap=errmap,
)
# There are a huge number of parameters for the fiteach procedure. See:
# http://pyspeckit.readthedocs.org/en/latest/example_nh3_cube.html
# http://pyspeckit.readthedocs.org/en/latest/cubes.html?highlight=fiteach#pyspeckit.cubes.SpectralCube.Cube.fiteach
#
# Unfortunately, a complete tutorial on this stuff is on the to-do list;
# right now the use of many of these parameters is at a research level.
# However, pyspeckit@gmail.com will support them! They are being used
# in current and pending publications
# Save the fitted parameters to a FITS file, and overwrite one if one exists
spc.write_fit('n2hp_fitted_parameters.fits', overwrite=True)
# Show an integrated image
spc.mapplot()
# you can click on any pixel to see its spectrum & fit
# plot one of the fitted spectra
spc.plot_spectrum(2, 2, plot_fit=True)
# spc.parcube[:,27,14] = [ 14.82569198, 1.77055642, 3.15740051, 0.16035407]
# Note that the optical depth is the "total" optical depth, which is
# distributed among 15 hyperfine components. You can see this in
# pyspeckit.spectrum.models.n2hp.line_strength_dict
# As a sanity check, you can see that the brightest line has 0.259 of the total
# optical depth, so the peak line brightness is:
# (14.825-2.73) * (1-np.exp(-1.77 * 0.259)) = 4.45
# which matches the peak of 4.67 pretty well
# Show an image of the best-fit velocity
spc.mapplot.plane = spc.parcube[2,:,:]
spc.mapplot(estimator=None)
# running in script mode, the figures won't show by default on some systems
# import pylab as pl
# pl.draw()
# pl.show()
|
Python
| 0.000001
|
@@ -81,22 +81,8 @@
ngs%0A
-import socket%0A
from
@@ -333,16 +333,115 @@
except
+ Exception as ex:%0A # this might be any number of different timeout errors (urllib2.URLError,
socket.
@@ -447,23 +447,22 @@
.timeout
- as ex:
+, etc)
%0A
|
49ac27c3863f1dc372ae0e44abaf5398205aeb91
|
add subhead
|
boxpub/__init__.py
|
boxpub/__init__.py
|
import logging
import config
from datetime import datetime
import jinja2
import markdown
import re
import dropbox
from dropbox import client, session
# from dropbox.rest import ErrorResponse
from werkzeug.routing import BaseConverter
from flask import Flask, request
from postutils import split_markdown, process_markdown
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s %(module)s.%(funcName)s (%(lineno)d) %(levelname)s: %(message)s'
)
log = logging.getLogger('boxpub')
boxpub = Flask('boxpub')
boxpub.debug = True
def render_template(template_string, context):
template_globals = {
'HOST': request.host,
# 'PAGE_URL_FULL': request.path_url,
'QUERY_STRING': request.query_string,
'URL': request.url,
'PATH': request.path,
'settings': config,
'config': config,
'site': {
'title': 'monkinetic',
'subhead': '',
'description': '',
'url': '',
'time': datetime.now(),
},
}
template_globals.update(context)
jinja_environment = jinja2.Environment(
extensions=['jinja2.ext.autoescape'])
template = jinja_environment.from_string(template_string)
resp_body = template.render(template_globals)
return resp_body
def render_file_with_template(target_file, target_template):
"""
"""
client = dropbox.client.DropboxClient(config.DROPBOX_PRIVATE_TOKEN)
file_response, dropbox_meta = client.get_file_and_metadata(
target_file)
file_content = file_response.read()
f = process_markdown(
target_file, file_content)
log.debug(f)
if 'meta' in f:
fmeta = f['meta']
fmeta.update(dropbox_meta)
if 'Title' in fmeta:
fmeta['title'] = fmeta['Title']
f['meta'] = fmeta
else:
f['meta'] = dropbox_meta
# data['published'] = data['modified']
# data['created'] = data['modified']
# fix title
f.update(f['meta'])
if 'Title' in f:
f['title'] = f['Title']
# permalink
f['permalink'] = url_for_path(f['path'])
template_response, meta = client.get_file_and_metadata(
'templates/%s' % target_template)
template_content = template_response.read()
page_content = render_template(template_content, {
'page': f,
'post': f,
})
return page_content
def url_for_path(path):
if 'posts' in path:
year, month, day, filename = re.match(
'/posts/([\d]{4})-([\d]{2})-([\d]{2})-([\w-]+)\.md',
path).groups()
return "/%s/%s/%s/%s" % (year, month, day, filename)
elif 'page' in path:
filename = re.match(
'/pages/([\w-]+)\.md',
path).group(1)
return "/pages/%s" % filename
class RegexConverter(BaseConverter):
def __init__(self, url_map, *items):
super(RegexConverter, self).__init__(url_map)
self.regex = items[0]
boxpub.url_map.converters['regex'] = RegexConverter
############################################################
# web handlers
#
@boxpub.route('/webhooks/dropbox', methods=['GET'])
def dropbox_webhook_verify():
log.info('Dropbox verification request')
return request.args.get('challenge')
@boxpub.route('/webhooks/dropbox', methods=['POST'])
def dropbox_webhook_handle():
log.info('Dropbox post request')
return ""
@boxpub.route('/')
def blog_index_handle(template='index.html'):
log.debug('blog_index_handle()')
target_file = "posts"
client = dropbox.client.DropboxClient(config.DROPBOX_PRIVATE_TOKEN)
dropbox_response = client.metadata(
target_file, list=True)
files = dropbox_response['contents']
files = sorted(
files,
key=lambda f: f['path'],
reverse=True)
files = files[:10]
log.debug(files)
for f in files:
log.debug(f['path'])
file_response, dropbox_meta = client.get_file_and_metadata(
f['path'])
f.update(dropbox_meta)
log.debug(f['path'])
file_content = file_response.read()
fdata = process_markdown(
target_file, file_content)
log.debug(fdata)
f.update(fdata)
log.debug(f['path'])
# fix title
f.update(f['meta'])
if 'Title' in f:
f['title'] = f['Title']
# permalink
f['permalink'] = url_for_path(f['path'])
log.debug(f)
# log.debug(files)
template_response, meta = client.get_file_and_metadata(
'templates/%s' % template)
template_content = template_response.read()
page_content = render_template(template_content, {
'posts': files,
})
return page_content
@boxpub.route('/page/<page>')
def blog_page_handle(page, template='post.html'):
log.debug('blog_page_handle()')
target_file = "/pages/%s.md" % (page)
page_content = render_file_with_template(target_file, template)
return page_content
@boxpub.route('/<regex("[\d]{4}"):year>/<regex("[\d]{2}"):month>/<regex("[\d]{2}"):day>/<filename>')
def blog_post_handle(year, month, day, filename, template='post.html'):
log.debug('blog_post_handle()')
log.info('Dropbox post request')
target_file = "/posts/%s-%s-%s-%s.md" % (year, month, day, filename)
page_content = render_file_with_template(target_file, template)
return page_content
if __name__ == "__main__":
boxpub.run()
|
Python
| 0.00018
|
@@ -923,24 +923,51 @@
'subhead': '
+Steve Ivy%5C's weblog, XI Ed.
',%0A
@@ -1004,24 +1004,45 @@
'url': '
+http://monkinetic.com
',%0A
|
cdc99912ef99718d587aa21dd1b55b230ff8745b
|
Thinking AboutWithStatements: DONE
|
python2/koans/about_with_statements.py
|
python2/koans/about_with_statements.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutSandwichCode in the Ruby Koans
#
from runner.koan import *
import re # For regular expression string comparisons
class AboutWithStatements(Koan):
    """Koan exercises for context managers ("sandwich code").

    The bare ``__`` placeholders are intentional: the student replaces
    them with the expected values to make each assertion pass.
    """

    def count_lines(self, file_name):
        # Classic try/finally "sandwich": open, do the work, always close.
        try:
            f = open(file_name)
            try:
                count = 0
                for line in f.readlines():
                    count += 1
                return count
            finally:
                f.close()
        except IOError:
            # should never happen
            self.fail()

    def test_counting_lines(self):
        self.assertEqual(__, self.count_lines("example_file.txt"))

    # ------------------------------------------------------------------

    def find_line(self, file_name):
        # Same open/close sandwich, different "meat": return the first
        # line containing the letter 'e'.
        try:
            f = open(file_name)
            try:
                for line in f.readlines():
                    match = re.search('e', line)
                    if match:
                        return line
            finally:
                f.close()
        except IOError:
            # should never happen
            self.fail()

    def test_finding_lines(self):
        self.assertEqual(__, self.find_line("example_file.txt"))

    ## ------------------------------------------------------------------
    ## THINK ABOUT IT:
    ##
    ## The count_lines and find_line are similar, and yet different.
    ## They both follow the pattern of "sandwich code".
    ##
    ## Sandwich code is code that comes in three parts: (1) the top slice
    ## of bread, (2) the meat, and (3) the bottom slice of bread.
    ## The bread part of the sandwich almost always goes together, but
    ## the meat part changes all the time.
    ##
    ## Because the changing part of the sandwich code is in the middle,
    ## abstracting the top and bottom bread slices to a library can be
    ## difficult in many languages.
    ##
    ## (Aside for C++ programmers: The idiom of capturing allocated
    ## pointers in a smart pointer constructor is an attempt to deal with
    ## the problem of sandwich code for resource allocation.)
    ##
    ## Python solves the problem using Context Managers. Consider the
    ## following code:
    ##

    class FileContextManager():
        # Minimal context manager: __enter__ opens the file, __exit__
        # closes it regardless of how the with-block exits.
        def __init__(self, file_name):
            self._file_name = file_name
            self._file = None

        def __enter__(self):
            self._file = open(self._file_name)
            return self._file

        def __exit__(self, cls, value, tb):
            self._file.close()

    # Now we write:

    def count_lines2(self, file_name):
        with self.FileContextManager(file_name) as f:
            count = 0
            for line in f.readlines():
                count += 1
            return count

    def test_counting_lines2(self):
        self.assertEqual(__, self.count_lines2("example_file.txt"))

    # ------------------------------------------------------------------

    def find_line2(self, file_name):
        # Rewrite find_line using the Context Manager.
        pass

    def test_finding_lines2(self):
        self.assertEqual(__, self.find_line2("example_file.txt"))
        self.assertNotEqual(None, self.find_line2("example_file.txt"))

    # ------------------------------------------------------------------

    def count_lines3(self, file_name):
        # open() is itself a context manager, so no custom class is needed.
        with open(file_name) as f:
            count = 0
            for line in f.readlines():
                count += 1
            return count

    def test_open_already_has_its_own_built_in_context_manager(self):
        self.assertEqual(__, self.count_lines3("example_file.txt"))
|
Python
| 0.998796
|
@@ -620,34 +620,33 @@
elf.assertEqual(
-__
+4
, self.count_lin
@@ -1181,34 +1181,40 @@
elf.assertEqual(
-__
+'test%5Cn'
, self.find_line
@@ -2833,34 +2833,33 @@
elf.assertEqual(
-__
+4
, self.count_lin
@@ -3007,67 +3007,195 @@
-# Rewrite find_line using the
+with self.File
Context
-
Manager
-.%0A pass
+(file_name) as f:%0A for line in f.readlines():%0A match = re.search('e', line)%0A if match:%0A return line
%0A%0A
@@ -3248,26 +3248,32 @@
assertEqual(
-__
+'test%5Cn'
, self.find_
@@ -3728,18 +3728,17 @@
rtEqual(
-__
+4
, self.c
|
6148e22d2056c37be300db482b37753cb94cae78
|
Change database#get_source() to return bytes, always.
|
database.py
|
database.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
>>> db = Database()
>>> rev = '1b8f23c763d08130ec2081c35e7f9fe0d392d700'
>>> repo = Repository.create('github', 'example', 'mit', rev)
>>> ret = db.add_repository(repo)
>>> ret == repo
True
>>> source_a = SourceFile.create(repo, b'void 0;', 'index.js')
>>> ret = db.add_source_file(source_a)
>>> ret == source_a
True
>>> source_b = SourceFile.create(repo, b'void 0;', 'undefined.js')
>>> source_a != source_b
True
>>> ret = db.add_source_file(source_b)
Traceback (most recent call last):
...
database.DuplicateFileError: duplicate file contents
>>> parsed = ParsedSource(source_a.hash, [], {})
>>> ret = db.add_parsed_source(parsed)
>>> parsed == ret
True
>>> db.set_failure(source_b)
>>> db.get_source(source_a.hash)
'void 0;'
"""
import logging
import sqlite3
from path import Path
from datatypes import Repository, SourceFile, ParsedSource
from utils import is_hash
logger = logging.getLogger(__name__)
SCHEMA_FILENAME = Path(__file__).parent / 'schema.sql'
with open(SCHEMA_FILENAME, encoding='UTF-8') as schema_file:
SCHEMA = schema_file.read()
del schema_file
class DuplicateFileError(Exception):
    """Raised when a file's content hash already exists in the database."""

    def __init__(self, hash_):
        assert is_hash(hash_)
        # Keep the offending hash around so callers can report it.
        self.hash = hash_
        super().__init__("duplicate file contents")
class Database:
    """SQLite-backed store for repositories, source files and parse results.

    The schema is applied automatically the first time an empty database
    is used.
    """

    def __init__(self, connection=None):
        """Wrap *connection*, defaulting to a throwaway in-memory database."""
        if connection is None:
            logger.warn("Using in memory database!")
            self.conn = sqlite3.connect(':memory:')
        else:
            self.conn = connection
        self._initialize_db()

    def _initialize_db(self):
        # Apply the schema only to a brand-new database.
        conn = self.conn
        if self._is_database_empty():
            conn.executescript(SCHEMA)
            conn.commit()

    def _is_database_empty(self):
        # No tables at all means the schema has never been applied.
        cur = self.conn.cursor()
        cur.execute("SELECT COUNT(*) FROM sqlite_master WHERE type='table'")
        answer, = cur.fetchone()
        return int(answer) == 0

    def add_repository(self, repo):
        """Insert *repo* and return it."""
        assert isinstance(repo, Repository)
        cur = self.conn.cursor()
        with self.conn:
            cur.execute(r"""
                INSERT INTO repository (owner, repo, license, revision)
                VALUES (?, ?, ?, ?);
            """, (repo.owner, repo.name, repo.license, repo.revision))
        return repo

    def add_source_file(self, source_file):
        """Insert *source_file*; raise DuplicateFileError on a hash clash.

        Raises:
            DuplicateFileError: another file with the same contents hash
                is already stored (unique constraint violation).
        """
        assert isinstance(source_file, SourceFile)
        cur = self.conn.cursor()
        try:
            with self.conn:
                cur.execute(r"""
                    INSERT INTO source_file (hash, owner, repo, path, source)
                    VALUES (?, ?, ?, ?, ?);
                """, (source_file.hash, source_file.owner, source_file.name,
                      source_file.path, source_file.source))
        except sqlite3.IntegrityError:
            raise DuplicateFileError(source_file.hash)
        return source_file

    def get_source(self, hash_):
        """Return the stored file contents for *hash_*, always as bytes.

        Sources are inserted as bytes, but depending on column affinity
        SQLite can hand the value back as ``str``; normalize so callers
        always receive bytes.
        """
        assert is_hash(hash_)
        cur = self.conn.cursor()
        cur.execute('SELECT source FROM source_file WHERE hash = ?', (hash_,))
        source, = cur.fetchone()
        if isinstance(source, str):
            return source.encode('utf-8')
        return source

    def add_parsed_source(self, parsed_source):
        """Insert the parse results (AST and token JSON) and return them."""
        assert isinstance(parsed_source, ParsedSource)
        cur = self.conn.cursor()
        with self.conn:
            cur.execute(r"""
                INSERT INTO parsed_source (hash, ast, tokens)
                VALUES (?, ?, ?)
            """, (parsed_source.hash,
                  parsed_source.ast_as_json, parsed_source.tokens_as_json))
        return parsed_source

    def set_failure(self, source_file):
        """Record that *source_file* could not be parsed."""
        assert isinstance(source_file, SourceFile)
        cur = self.conn.cursor()
        with self.conn:
            cur.execute(r"""
                INSERT INTO failure (hash) VALUES (?)
            """, (source_file.hash,))
|
Python
| 0
|
@@ -763,16 +763,17 @@
a.hash)%0A
+b
'void 0;
@@ -3124,32 +3124,128 @@
cur.fetchone()%0A
+ if isinstance(source, str):%0A return source.encode('utf-8')%0A else:%0A
return s
|
4e12aea0a5479bad8289cbf6c9f460931d51f701
|
Add autocommit to 1 to avoid select cache ¿WTF?
|
database.py
|
database.py
|
import MySQLdb
class database(object):
    """Thin wrapper around a MySQLdb connection.

    Connection parameters (host, user, password, database) are read from
    ``config.py``.

    NOTE(review): all three methods build SQL by string concatenation;
    callers must never pass untrusted input (no parameterization here).
    """

    def __init__(self):
        config = {}
        execfile("config.py", config)
        self.db = MySQLdb.connect(config["host"], config["user"],
                                  config["password"], config["database"])

    def insert(self, txt):
        """Execute ``INSERT INTO <txt>``; return True on success."""
        dbc = self.db.cursor()
        try:
            dbc.execute("insert into " + txt)
            dbc.close()
            self.db.commit()
        except Exception as e:
            print(e)
            return False
        return True

    def update(self, txt):
        """Execute ``UPDATE <txt>``; return True on success.

        Bug fix: the statement was previously built as ``"update from " +
        txt``, which is never valid SQL (MySQL's syntax is
        ``UPDATE table SET ...``), so every call failed.
        """
        dbc = self.db.cursor()
        try:
            dbc.execute("update " + txt)
            dbc.close()
            self.db.commit()
        except Exception as e:
            print(e)
            return False
        return True

    def select(self, txt):
        """Execute ``SELECT <txt>`` and return all rows, or None on error."""
        dbc = self.db.cursor()
        try:
            dbc.execute("select " + txt)
            result = dbc.fetchall()
        except Exception as e:
            print(e)
            result = None
        dbc.close()
        return result
|
Python
| 0
|
@@ -202,16 +202,45 @@
abase%22%5D)
+%0A self.db.autocommit(True)
%0A%0A def
|
e5d2ed715d83be506ec452ecdd0a22748a84a007
|
Fix test_pull_doc (missing request id when creating messages)
|
bokeh/server/protocol/messages/tests/test_pull_doc.py
|
bokeh/server/protocol/messages/tests/test_pull_doc.py
|
from __future__ import absolute_import, print_function
import unittest
import bokeh.document as document
from bokeh.plot_object import PlotObject
from bokeh.properties import Int, Instance
from bokeh.server.protocol import Protocol
class AnotherModel(PlotObject):
    """Minimal model used as a child reference in the tests below."""
    bar = Int(1)
class SomeModel(PlotObject):
    """Minimal model with an optional child, used to build sample documents."""
    foo = Int(2)
    child = Instance(PlotObject)
class TestPullDocument(unittest.TestCase):
    """Creation and round-trip tests for PULL-DOC-REQ / PULL-DOC-REPLY."""

    def _sample_doc(self):
        # Two roots: one holding a child model, one standalone.
        doc = document.Document()
        another = AnotherModel()
        doc.add_root(SomeModel(child=another))
        doc.add_root(SomeModel())
        return doc

    def test_create_req(self):
        # Only checks that message creation does not raise.
        msg = Protocol("1.0").create("PULL-DOC-REQ", 'fakesession')

    def test_create_reply(self):
        sample = self._sample_doc()
        # NOTE(review): Protocol.create for PULL-DOC-REPLY may also require
        # a request-id argument -- confirm against the protocol definition.
        msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakesession', sample)

    def test_create_reply_then_parse(self):
        # Serialize a document into a reply, apply it to a fresh document,
        # and check the roots survived the round trip.
        sample = self._sample_doc()
        msg = Protocol("1.0").create("PULL-DOC-REPLY", 'fakesession', sample)
        copy = document.Document()
        msg.push_to_document(copy)
        assert len(sample.roots) == 2
        assert len(copy.roots) == 2
|
Python
| 0
|
@@ -812,32 +812,45 @@
PULL-DOC-REPLY%22,
+ 'fakereqid',
'fakesession',
@@ -988,24 +988,37 @@
-DOC-REPLY%22,
+ 'fakereqid',
'fakesessio
|
80d8f68d7eccdb114c47196d6359fe370fe922d7
|
Remove a couple of spurious test dependencies in production code.
|
ceee/ie/common/common.gyp
|
ceee/ie/common/common.gyp
|
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../../../build/common.gypi',
],
'targets': [
{
'target_name': 'ie_common_settings',
'type': 'none',
'direct_dependent_settings': {
'defines': [
# TODO(joi@chromium.org) Put into an include somewhere.
'_WIN32_WINDOWS=0x0410',
'_WIN32_IE=0x0600',
'_ATL_CSTRING_EXPLICIT_CONSTRUCTORS',
'_ATL_STATIC_REGISTRY',
'_WTL_NO_CSTRING',
],
'include_dirs': [
'../../../third_party/wtl/include',
],
},
},
{
'target_name': 'ie_guids',
'type': 'static_library',
'dependencies': [
'ie_common_settings',
'../plugin/toolband/toolband.gyp:toolband_idl',
'<(DEPTH)/chrome_frame/chrome_frame.gyp:chrome_tab_idl',
],
'sources': [
'ie_guids.cc',
],
'include_dirs': [
'../../..',
# For chrome_tab.h
'<(SHARED_INTERMEDIATE_DIR)',
],
},
{
'target_name': 'ie_common',
'type': 'static_library',
'dependencies': [
'ie_common_settings',
'../../../base/base.gyp:base',
'../../../breakpad/breakpad.gyp:breakpad_handler',
'../../../build/temp_gyp/googleurl.gyp:googleurl',
'../../../net/net.gyp:net_base',
'../../../ceee/common/common.gyp:initializing_coclass',
'../../../ceee/common/common.gyp:ceee_common',
# TODO(joi@chromium.org) Why? Can we remove?
'../../../ceee/testing/utils/test_utils.gyp:test_utils',
'../../../ceee/testing/utils/test_utils.gyp:mshtml_mocks',
'<(DEPTH)/chrome/chrome.gyp:chrome_version_header',
'<(DEPTH)/chrome_frame/chrome_frame.gyp:chrome_tab_idl',
'../plugin/toolband/toolband.gyp:toolband_idl',
],
'sources': [
'api_registration.h',
'chrome_frame_host.cc',
'chrome_frame_host.h',
'constants.cc',
'constants.h',
'crash_reporter.cc',
'crash_reporter.h',
'extension_manifest.cc',
'extension_manifest.h',
'ie_tab_interfaces.cc',
'ie_tab_interfaces.h',
'ie_util.cc',
'ie_util.h',
'mock_ie_tab_interfaces.h',
'ceee_module_util.cc',
'ceee_module_util.h',
'metrics_util.h',
# TODO(joi@chromium.org) Refactor to use chrome/common library.
'../../../chrome/browser/automation/extension_automation_constants.cc',
'../../../chrome/browser/extensions/'
'extension_bookmarks_module_constants.cc',
'../../../chrome/browser/extensions/extension_event_names.cc',
'../../../chrome/browser/extensions/'
'extension_page_actions_module_constants.cc',
'../../../chrome/browser/extensions/extension_cookies_api_constants.cc',
'../../../chrome/browser/extensions/'
'extension_infobar_module_constants.cc',
'../../../chrome/browser/extensions/extension_tabs_module_constants.cc',
'../../../chrome/browser/extensions/'
'extension_webnavigation_api_constants.cc',
'../../../chrome/browser/extensions/'
'extension_webrequest_api_constants.cc',
'../../../chrome/common/chrome_switches.cc',
'../../../chrome/common/chrome_switches.h',
'../../../chrome/common/url_constants.cc',
'../../../chrome/common/url_constants.h',
'../../../chrome/common/extensions/extension_constants.cc',
'../../../chrome/common/extensions/extension_constants.h',
'../../../chrome/common/extensions/extension_error_utils.cc',
'../../../chrome/common/extensions/extension_error_utils.h',
'../../../chrome/common/extensions/url_pattern.cc',
'../../../chrome/common/extensions/url_pattern.h',
'../../../chrome/common/extensions/user_script.cc',
'../../../chrome/common/extensions/user_script.h',
],
'include_dirs': [
# For chrome_tab.h and version.h
'<(SHARED_INTERMEDIATE_DIR)',
],
'direct_dependent_settings': {
'include_dirs': [
# Because we use some of the chrome files above directly, we need
# to specify thess include paths which they depend on.
'../../../skia/config/win',
'../../../third_party/skia/include/config',
],
},
},
]
}
|
Python
| 0.999991
|
@@ -1637,193 +1637,8 @@
n',%0A
- # TODO(joi@chromium.org) Why? Can we remove?%0A '../../../ceee/testing/utils/test_utils.gyp:test_utils',%0A '../../../ceee/testing/utils/test_utils.gyp:mshtml_mocks',%0A
|
a74fbbd6c822b1384d9cd5f1501c8a01fb2ed9fe
|
Update deauthorization callback
|
django4facebook/views.py
|
django4facebook/views.py
|
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseBadRequest
def deauthorize_callback(request):
    """Deactivate a user who removed this application on Facebook."""
    fb = request.facebook
    if not fb:
        return HttpResponseBadRequest()
    User.objects.filter(pk=fb.uid).update(is_active=False)
    return HttpResponse('Ok')
|
Python
| 0
|
@@ -98,17 +98,127 @@
Request%0A
-%0A
+from django.views.decorators.csrf import csrf_exempt%0A%0Aimport facebook%0Afrom .conf import settings%0A%0A%0A@csrf_exempt
%0Adef dea
@@ -374,31 +374,283 @@
-if not request.facebook
+signed_request = request.REQUEST.get('signed_request')%0A if not signed_request:%0A return HttpResponseBadRequest()%0A data = facebook.parse_signed_request(signed_request,%0A settings.SECRET_KEY)%0A if not data.get('user_id')
:%0A
@@ -718,28 +718,23 @@
(pk=
-request.facebook.uid
+data%5B'user_id'%5D
).up
|
19a3ead211cc4c00b219329ac63177420cdb71e6
|
Make all functions available from raysect.core.math.function.
|
raysect/core/math/function/__init__.py
|
raysect/core/math/function/__init__.py
|
# cython: language_level=3
# Copyright (c) 2014-2018, Dr Alex Meakins, Raysect Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Raysect Project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from .function1d import Function1D
from .function2d import Function2D
from .function3d import Function3D
|
Python
| 0
|
@@ -1655,18 +1655,9 @@
ort
-Function1D
+*
%0Afro
@@ -1681,18 +1681,9 @@
ort
-Function2D
+*
%0Afro
@@ -1707,17 +1707,8 @@
ort
-Function3D
+*
%0A%0A%0A
|
f25e0fe435f334e19fc84a9c9458a1bea4a051f9
|
Allow to reverse the order of the CSV for a proper reading
|
money/parser/__init__.py
|
money/parser/__init__.py
|
import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
    """Parse CSV content with *parser* and return the parsed rows.

    raw_csv: any iterable of CSV lines (open file, list of strings, ...).
    parser: object exposing ``parse_row(row)``, applied to each data row.
    header_lines: number of leading lines to skip.
    reverse_order: when True, return the rows last-to-first (useful for
        exports that list the newest entry first).
    """
    reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
    rows = []
    for row in reader:
        # Skip the header region and any blank lines.
        if reader.line_num > header_lines and row:
            rows.append(parser.parse_row(row))
    if reverse_order:
        rows.reverse()
    return rows
def import_movements(data, bank_account):
    """Store parsed rows as Movement objects for *bank_account*.

    Returns an ``(accepted_count, rejected_rows)`` tuple; a row is
    rejected when an identical movement already exists.
    """
    rejected = []
    accepted = 0
    for row in data:
        fields = dict(
            bank_account=bank_account,
            description=row["description"],
            amount=row["amount"],
            date=row["date"],
        )
        _, created = Movement.objects.get_or_create(**fields)
        if created:
            accepted += 1
            continue
        rejected.append(row)
    return accepted, rejected
|
Python
| 0.000186
|
@@ -86,20 +86,44 @@
_lines=0
-):%0A%09
+, reverse_order=False):%0A
reader =
@@ -173,17 +173,20 @@
ar='%22')%0A
-%09
+
rows = %5B
@@ -188,17 +188,20 @@
s = %5B%5D%0A%0A
-%09
+
for row
@@ -211,18 +211,24 @@
reader:%0A
-%09%09
+
if reade
@@ -262,19 +262,28 @@
nd row:%0A
-%09%09%09
+
rows.app
@@ -309,17 +309,65 @@
w(row))%0A
-%09
+ if reverse_order:%0A rows.reverse()%0A
return r
@@ -414,17 +414,20 @@
count):%0A
-%09
+
rejected
@@ -432,17 +432,20 @@
ed = %5B%5D%0A
-%09
+
accepted
@@ -449,17 +449,20 @@
ted = 0%0A
-%09
+
for row
@@ -470,18 +470,24 @@
n data:%0A
-%09%09
+
obj, cre
@@ -525,19 +525,28 @@
create(%0A
-%09%09%09
+
bank_acc
@@ -564,19 +564,28 @@
ccount,%0A
-%09%09%09
+
descript
@@ -608,19 +608,28 @@
tion%22%5D,%0A
-%09%09%09
+
amount=r
@@ -642,19 +642,28 @@
ount%22%5D,%0A
-%09%09%09
+
date=row
@@ -676,54 +676,90 @@
%22%5D,%0A
-%09%09)%0A%09%09if created:%0A%09%09%09accepted += 1%0A%09%09else:%0A%09%09%09
+ )%0A if created:%0A accepted += 1%0A else:%0A
reje
@@ -779,9 +779,12 @@
ow)%0A
-%09
+
retu
@@ -796,16 +796,17 @@
cepted, rejected
+%0A
|
52ca7c024caa83dadbbf9b7bc74297f2d02d2fe7
|
Add cleanup casks step
|
HOME/bin/lib/homebrew.py
|
HOME/bin/lib/homebrew.py
|
"""A library for controlling Homebrew through Python."""
import getpass
import logging
import os
import pwd
import shutil
import subprocess
log = logging.getLogger(__name__)
def workflow(settings, fix_repo=False):
    """Run an entire Homebrew update workflow.

    settings: mapping with optional 'taps', 'formulas' and 'casks' lists,
        plus a required 'post_install' entry (list of shell commands,
        possibly empty).
    fix_repo: when True, hard-reset the Homebrew git checkout first.
    """
    if not is_installed():
        # todo: install homebrew if not installed
        raise Exception("Homebrew must be installed")
    if fix_repo:
        fix_repository()
    ensure_correct_permissions()
    ensure_command_line_tools_installed()
    update()
    update_taps(settings.get('taps', []))
    update_formulas(settings.get('formulas', []))
    update_casks(settings.get('casks', []))
    run_post_install(settings['post_install'])
def get_installed_formulas():
    """Return the names of all installed Homebrew formulas."""
    log.info("Getting installed formulas")
    return _get_command_output(['brew', 'list'])
def get_installed_casks():
    """Return the names of all installed Homebrew casks."""
    log.info("Getting installed casks")
    return _get_command_output(['brew', 'cask', 'list'])
def get_installed_taps():
    """Return the names of all enabled Homebrew taps."""
    log.info("Getting installed taps")
    return _get_command_output(['brew', 'tap'])
def install_formula(formula):
    """Install a single Homebrew formula."""
    log.info("Installing formula: {}".format(formula))
    _execute(['brew', 'install', formula])
def install_cask(cask):
    """Install a single Homebrew cask."""
    # note: brew cask doesn't support upgrade yet:
    # https://github.com/caskroom/homebrew-cask/issues/4678
    log.info("Installing cask: {}".format(cask))
    _execute(['brew', 'cask', 'install', cask])
def install_tap(tap):
    """Enable ("tap") an additional Homebrew repository."""
    log.info("Installing tap: {}".format(tap))
    _execute(['brew', 'tap', tap])
def cleanup():
    """Run ``brew cleanup``."""
    log.info("Running cleanup")
    _execute(['brew', 'cleanup'])
def update():
    """Run ``brew update``."""
    log.info("Updating Homebrew")
    _execute(['brew', 'update'])
def upgrade():
    """Run ``brew upgrade --all``, tolerating a failing exit status."""
    log.info("Running upgrade")
    # Homebrew can return an error code in cases that aren't errors:
    # https://github.com/Homebrew/homebrew/issues/27048
    # so tolerate a non-zero exit and rely on inspecting the output.
    # Catch only the command failure: the previous bare ``except:`` also
    # swallowed KeyboardInterrupt/SystemExit.
    try:
        _execute(['brew', 'upgrade', '--all'])
    except subprocess.CalledProcessError:
        pass
def install_missing(type, expected):
    """Install every *expected* item of *type* that is not yet installed.

    type: one of 'formula', 'cask' or 'tap'; used to look up the matching
        get_installed_*s / install_* helpers in this module's globals.
    expected: iterable of item names that should be present.
    """
    assert type in ('formula', 'cask', 'tap')
    get_installed = globals()['get_installed_{}s'.format(type)]
    install = globals()['install_{}'.format(type)]
    log.info("Expected {}s are: {}".format(type, ', '.join(sorted(expected))))
    installed = get_installed()
    log.info("Currently installed {}s are: {}".format(type, ', '.join(installed)))
    missing = sorted(set(expected) - set(installed))
    log.info("Missing {}s are: {}".format(type, ', '.join(missing)))
    for item in missing:
        install(item)
def update_formulas(formulas):
    """Upgrade everything, then install any missing *formulas* and clean up."""
    log.info("Updating formulas")
    upgrade()
    install_missing('formula', formulas)
    cleanup()
    # possible todo: remove things not in settings, but that'd delete things you installed manually
    # maybe provide option to list things that "shouldn't" be installed so they can be
    # removed manually
def update_casks(casks):
    """Install any casks from *casks* that are missing."""
    install_missing('cask', casks)
def update_taps(taps):
    """Enable any taps from *taps* that are missing."""
    install_missing('tap', taps)
def ensure_command_line_tools_installed():
    """Ensure command line tools are installed."""
    log.info("Ensuring command line tools are installed")
    try:
        _execute(['xcode-select', '--install'])
    except subprocess.CalledProcessError as error:
        # exit status 1 is treated as "already installed";
        # anything else is a real failure and is re-raised.
        if error.returncode == 1:
            log.info("Command line tools already installed")
        else:
            raise
def ensure_correct_permissions(*args, **kwargs):
    """Ensure that the Homebrew formula installation directory has correct permissions.

    Extra positional/keyword arguments are accepted but ignored.
    """
    user = getpass.getuser()
    uid = os.stat('/usr/local').st_uid
    local_owner = pwd.getpwuid(uid).pw_name
    log.debug("Currently logged in user is {!r}, owner of /usr/local is {!r}".format(
        user, local_owner))
    # Only chown when the current user does not already own /usr/local.
    if user != local_owner:
        log.info("Fixing permissions on /usr/local before running Homebrew")
        # stupid that there's a shutil.chown but no shutil.chown -R
        cmd = ['sudo', 'chown', '-R', user, '/usr/local']
        _execute(cmd)
def is_installed():
    """Return True if Homebrew command is installed."""
    # shutil.which yields the executable's path, or None when absent.
    brew_path = shutil.which('brew')
    return brew_path is not None
def install():
    """Install Homebrew on a system that doesn't have it."""
    # http://brew.sh/
    # ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    # TODO: not implemented yet; the command above is the upstream bootstrap.
    pass
def run_post_install(post_install):
    """
    Run any post-install operations.

    'post_install' is a list of shell commands. Each shell command can be a string or a list
    of strings. If a string, will pass shell=True to the subprocess call.
    """
    if not post_install:
        return
    log.info("Running post-install operations")
    # _execute decides shell execution based on the command's type.
    for cmd in post_install:
        _execute(cmd)
def fix_repository():
    """Hard-reset the Homebrew git checkout to origin/master."""
    # http://stackoverflow.com/questions/14113427/brew-update-failed
    log.info("Fixing Homebrew repository")
    # Passed as a single string so _execute runs it through the shell.
    _execute('cd `brew --prefix`; git reset --hard origin/master')
def _get_command_output(cmd):
    """Execute the specified command, parse its output, and return a list of items in the output."""
    # bytes.decode defaults to utf-8, which *should* also be the default system encoding
    # but I suppose to really do this correctly I should check that. However, pretty sure
    # all Homebrew package names should be ascii anyway so it's fine
    log.debug("Executing: {}".format(cmd))
    # Output is split on arbitrary whitespace, one token per list item.
    return subprocess.check_output(cmd).decode().split()
def _execute(cmd, shell=False):
    """Run *cmd*, raising subprocess.CalledProcessError on failure.

    shell: force execution through the shell. String commands are always
        run through the shell; previously this parameter was accepted but
        silently ignored.
    """
    log.debug("Executing: {}".format(cmd))
    subprocess.check_call(cmd, shell=shell or isinstance(cmd, str))
|
Python
| 0
|
@@ -1559,16 +1559,25 @@
cleanup
+_formulas
():%0A
@@ -1601,16 +1601,26 @@
cleanup
+: formulas
%22)%0A _
@@ -1646,24 +1646,128 @@
leanup'%5D)%0A%0A%0A
+def cleanup_casks():%0A log.info(%22Running cleanup: casks%22)%0A _execute(%5B'brew', 'cask', 'cleanup'%5D)%0A%0A%0A
def update()
@@ -2868,16 +2868,25 @@
cleanup
+_formulas
()%0A%0A
@@ -3153,16 +3153,36 @@
casks)%0A
+ cleanup_casks()%0A
%0A%0Adef up
|
fa067545657d3b1bb80a4047f175353c4856dd7c
|
Implement extension normalizer for NamedAccess
|
thinglang/parser/values/named_access.py
|
thinglang/parser/values/named_access.py
|
from thinglang.compiler.buffer import CompilationBuffer
from thinglang.compiler.opcodes import OpcodePopDereferenced, OpcodeDereference
from thinglang.lexer.tokens.access import LexicalAccess
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.numeric import NumericValue
from thinglang.parser.errors import InvalidIndexedAccess
from thinglang.parser.nodes.base_node import BaseNode
from thinglang.parser.rule import ParserRule
from thinglang.utils.type_descriptors import ValueType
class NamedAccess(BaseNode, ValueType):
    """
    Represents a named dereference operation.
    Examples:
        person.walk
        person.info.age
    """

    def __init__(self, target):
        super(NamedAccess, self).__init__(target)
        # target: the ordered list of access components (at least 2,
        # enforced by __len__).
        self.target = target
        self.type = None
        self.arguments = []

    def __repr__(self):
        return '{}'.format('.'.join(str(x) for x in self.target))

    def transpile(self):
        # C++-style member access for the generated code.
        return '->'.join(x.transpile() for x in self.target)

    def compile(self, context: CompilationBuffer, pop_last=False, without_last=False):
        """Emit dereference opcodes for this access chain into *context*."""
        # With no extensions (a plain two-element access) and the last
        # element dropped, only the first element remains to compile.
        if without_last and not self.extensions:
            return self[0].compile(context)
        # Push the two-element root, then dereference one extension at a time.
        ref = context.push_ref(context.resolve(self.root), self.source_ref)
        for ext, last in self.extensions:
            if last and without_last:
                break
            ref = context.symbols.resolve_partial(ref, ext)
            # The final dereference may pop the object reference instead
            # of leaving it on the stack.
            cls = OpcodePopDereferenced if pop_last and last else OpcodeDereference
            context.append(cls(ref.element_index), self.source_ref)
        return ref

    @property
    def root(self):
        # The first two components form the root access.
        return NamedAccess(self.target[:2])

    @property
    def extensions(self):
        # Components beyond the root, each paired with an is-last flag.
        last = self.target[-1]
        return [(x, x is last) for x in self.target[2:]]

    def __getitem__(self, item):
        return self.target[item]

    def __eq__(self, other):
        return type(self) == type(other) and self.target == other.target

    def __len__(self):
        size = len(self.target)
        assert size >= 2
        return size

    def append(self, other):
        # Extend the chain in place and return self for chaining.
        self.target.append(other)
        return self
|
Python
| 0
|
@@ -2172,8 +2172,240 @@
rn self%0A
+%0A @classmethod%0A def extend(cls, base, extension: Identifier) -%3E 'NamedAccess':%0A if isinstance(base, NamedAccess):%0A return NamedAccess(base.target + %5Bextension%5D)%0A%0A return NamedAccess(%5Bbase, extension%5D)%0A
|
a259a5f2c42b58c236f3ec1fa28ea9fa5218fc29
|
Exclude EMC plugin since it cannot be run non-interactively
|
testcases/cloud_admin/run_sos_report.py
|
testcases/cloud_admin/run_sos_report.py
|
#!/usr/bin/python
import os
import time
from eucaops import Eucaops
from eutester.eutestcase import EutesterTestCase
from eutester.machine import Machine
class SOSreport(EutesterTestCase):
    """Install, run, and download sosreport output from all cloud machines.

    Bug fix: the vmware skip previously compared with ``is``
    (string identity), which is effectively always False for attribute
    strings, so vmware machines were never skipped; use ``==`` instead.
    """

    def __init__(self):
        self.setuptestcase()
        self.setup_parser()
        self.start_time = int(time.time())
        self.parser.add_argument("--ticket-number", default=str(self.start_time))
        self.parser.add_argument("--remote-dir", default="/root/")
        self.parser.add_argument("--local-dir", default=os.getcwd())
        self.parser.add_argument("--package-url", default="http://mongo.beldurnik.com/RPMS/eucalyptus-sos-plugins-0.1.1-0.el6.noarch.rpm")
        self.get_args()
        self.remote_dir = self.args.remote_dir + "/euca-sosreport-" + self.args.ticket_number + "/"
        # Setup basic eutester object
        self.tester = Eucaops( config_file=self.args.config,password=self.args.password, download_creds=False)

    def clean_method(self):
        # Nothing to clean up for this test case.
        pass

    def Install(self):
        """Install the sos package plus the Eucalyptus plugins on each machine."""
        for machine in self.tester.get_component_machines():
            assert isinstance(machine, Machine)
            if machine.distro.name == "vmware":
                continue
            machine.install("sos")
            machine.sys("yum install -y " + self.args.package_url)

    def Run(self):
        """Run sosreport on each machine, writing into the remote directory."""
        for machine in self.tester.get_component_machines():
            assert isinstance(machine, Machine)
            if machine.distro.name == "vmware":
                continue
            machine.sys("mkdir -p " + self.args.remote_dir)
            machine.sys("sosreport --batch --tmp-dir " + self.args.remote_dir + " --ticket-number " + str(self.args.ticket_number),code=0)

    def Download(self):
        """Fetch each machine's generated tarball into the local directory."""
        for machine in self.tester.get_component_machines():
            assert isinstance(machine, Machine)
            if machine.distro.name == "vmware":
                continue
            remote_tarball_path = machine.sys("ls -1 " + self.args.remote_dir + "*" + str(self.args.ticket_number) + "*", code=0)[0]
            tarball = remote_tarball_path.split("/")[-1]
            local_tarball_path = self.args.local_dir + '/' + tarball
            self.tester.debug("Downloading file to: " + local_tarball_path)
            machine.sftp.get(remote_tarball_path, local_tarball_path)

    def RunAll(self):
        """Convenience wrapper: install, run, then download."""
        self.Install()
        self.Run()
        self.Download()
if __name__ == "__main__":
    testcase = SOSreport()
    ### Use the list of tests passed from config/command line to determine what subset of tests to run
    ### or use a predefined list
    # NOTE(review): 'list' shadows the builtin of the same name here.
    list = testcase.args.tests or ["RunAll"]
    ### Convert test suite methods to EutesterUnitTest objects
    unit_list = [ ]
    for test in list:
        unit_list.append( testcase.create_testunit_by_name(test) )
    ### Run the EutesterUnitTest objects
    result = testcase.run_test_case_list(unit_list,clean_on_exit=True)
    exit(result)
|
Python
| 0.000002
|
@@ -1666,16 +1666,35 @@
--batch
+--skip-plugins=emc
--tmp-di
|
308c778e1846769e8bba799b6d0bca5679e1161c
|
move code
|
examples/tests/test_cp_api.py
|
examples/tests/test_cp_api.py
|
# Various calls to CP api from python to verify they work.
from ortools.constraint_solver import pywrapcp
from ortools.constraint_solver import model_pb2
from ortools.constraint_solver import search_limit_pb2
def test_member():
    """Build a Member constraint over an IntVar and print it."""
    solver = pywrapcp.Solver('test member')
    x = solver.IntVar(1, 10, 'x')
    ct = x.Member([1, 2, 3, 5])
    print(ct)
def test_sparse_var():
    """Build an IntVar from a sparse domain list."""
    solver = pywrapcp.Solver('test sparse')
    x = solver.IntVar([1, 3, 5], 'x')
    print(x)
def test_modulo():
    """Exercise modulo by a constant and by another IntVar."""
    solver = pywrapcp.Solver('test modulo')
    x = solver.IntVar(0, 10, 'x')
    y = solver.IntVar(2, 4, 'y')
    print(x % 3)
    print(x % y)
def test_limit():
    """Build a SearchLimitProto and wrap it in a solver limit."""
    solver = pywrapcp.Solver('test limit')
    limit_proto = search_limit_pb2.SearchLimitProto()
    limit_proto.time = 10000
    limit_proto.branches = 10
    print limit_proto
    limit = solver.Limit(limit_proto)
    print limit
def test_export():
    """Export a small model into a CPModelProto and print both reprs."""
    solver = pywrapcp.Solver('test export')
    x = solver.IntVar(1, 10, 'x')
    ct = x.Member([1, 2, 3, 5])
    solver.Add(ct)
    proto = model_pb2.CPModelProto()
    proto.model = 'wrong name'
    solver.ExportModel(proto)
    print repr(proto)
    print str(proto)
class SearchMonitorTest(pywrapcp.SearchMonitor):
    """Monitor that prints the watched expression around initial propagation."""

    def __init__(self, solver, nexts):
        pywrapcp.SearchMonitor.__init__(self, solver)
        self._nexts = nexts

    def BeginInitialPropagation(self):
        print self._nexts

    def EndInitialPropagation(self):
        print self._nexts
def test_search_monitor():
    """Attach a custom SearchMonitor to a trivial solve."""
    solver = pywrapcp.Solver('test export')
    x = solver.IntVar(1, 10, 'x')
    ct = (x == 3)
    solver.Add(ct)
    db = solver.Phase([x], solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE)
    monitor = SearchMonitorTest(solver, x)
    solver.Solve(db, monitor)
class DemonTest(pywrapcp.PyDemon):
    """Demon that reports the expression it watches when triggered."""

    def __init__(self, x):
        pywrapcp.Demon.__init__(self)
        self._x = x
        print 'Demon built'

    def Run(self, solver):
        print 'in Run(), saw ' + str(self._x)
class ConstraintTest(pywrapcp.PyConstraint):
    """Custom constraint that attaches a demon and raises x's minimum to 5."""

    def __init__(self, solver, x):
        pywrapcp.Constraint.__init__(self, solver)
        self._x = x
        print 'Constraint built'

    def Post(self):
        # Attach the demon so it fires whenever x becomes bound.
        print 'in Post()'
        self._demon = DemonTest(self._x)
        self._x.WhenBound(self._demon)
        print 'out of Post()'

    def InitialPropagate(self):
        print 'in InitialPropagate()'
        self._x.SetMin(5)
        print self._x
        print 'out of InitialPropagate()'
class InitialPropagateDemon(pywrapcp.PyDemon):
    """Demon that re-runs a constraint's InitialPropagate when triggered."""

    def __init__(self, ct):
        pywrapcp.Demon.__init__(self)
        self._ct = ct

    def Run(self, solver):
        self._ct.InitialPropagate()
def test_demon():
    """Build a demon and invoke its Run hook directly."""
    solver = pywrapcp.Solver('test export')
    x = solver.IntVar(1, 10, 'x')
    demon = DemonTest(x)
    demon.Run(solver)
def test_constraint():
    """Post a custom Python constraint and solve."""
    solver = pywrapcp.Solver('test export')
    x = solver.IntVar(1, 10, 'x')
    myct = ConstraintTest(solver, x)
    solver.Add(myct)
    db = solver.Phase([x], solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE)
    solver.Solve(db)
class DumbGreaterOrEqualToFive(pywrapcp.PyConstraint):
    """Constraint rejecting bound values below five via solver failure."""

    def __init__(self, solver, x):
        pywrapcp.Constraint.__init__(self, solver)
        self._x = x

    def Post(self):
        # Re-check the constraint each time x becomes bound.
        self._demon = InitialPropagateDemon(self)
        self._x.WhenBound(self._demon)

    def InitialPropagate(self):
        if self._x.Bound():
            if self._x.Value() < 5:
                print 'Reject %d' % self._x.Value()
                self.solver().Fail()
            else:
                print 'Accept %d' % self._x.Value()
def test_failing_constraint():
solver = pywrapcp.Solver('test export')
x = solver.IntVar(1, 10, 'x')
myct = DumbGreaterOrEqualToFive(solver, x)
solver.Add(myct)
db = solver.Phase([x], solver.CHOOSE_FIRST_UNBOUND, solver.ASSIGN_MIN_VALUE)
solver.Solve(db)
def main():
test_member()
test_sparse_var()
test_modulo()
test_limit()
test_export()
test_search_monitor()
test_demon()
test_constraint()
test_failing_constraint()
if __name__ == '__main__':
main()
|
Python
| 0.000002
|
@@ -2342,192 +2342,8 @@
)'%0A%0A
-class InitialPropagateDemon(pywrapcp.PyDemon):%0A def __init__(self, ct):%0A pywrapcp.Demon.__init__(self)%0A self._ct = ct%0A%0A def Run(self, solver):%0A self._ct.InitialPropagate()%0A%0A
%0Adef
@@ -2723,24 +2723,209 @@
Solve(db)%0A%0A%0A
+class InitialPropagateDemon(pywrapcp.PyDemon):%0A def __init__(self, ct):%0A pywrapcp.Demon.__init__(self)%0A self._ct = ct%0A%0A def Run(self, solver):%0A self._ct.InitialPropagate()%0A%0A%0A
class DumbGr
|
a48e50da90765b65f754a6c6eaefce7cb7e22521
|
Modify unicode text for Crew model
|
ITDB/ITDB_Main/models.py
|
ITDB/ITDB_Main/models.py
|
from django.db import models
import datetime
# Create your models here.
class Theater(models.Model):
name = models.CharField(max_length=100)
street_address = models.CharField(max_length=40, blank=True)
city = models.CharField(max_length=40)
state_or_province = models.CharField(max_length=50, blank=True)
country = models.CharField(max_length=50)
def __unicode__(self):
return "{0}: {1}, {2}".format(self.name, self.city, self.state_or_province)
class Play(models.Model):
title = models.CharField(max_length=100)
synopsis = models.TextField(blank=True)
def __unicode__(self):
return self.title
class People(models.Model):
name = models.CharField(max_length=40)
short_bio = models.TextField(blank=True)
headshot = models.ImageField(blank=True, upload_to='photos')
def __unicode__(self):
return self.name
# A production is defined as a Play that ran at a particular Theater between given Dates
class Production(models.Model):
start_date = models.DateField()
end_date = models.DateField()
production_company = models.CharField(max_length=100)
play = models.ForeignKey(Play)
theater = models.ForeignKey(Theater)
notes = models.TextField(blank=True)
def display_year(self):
start = self.start_date.year
end = self.end_date.year
if end == start:
return "{0}".format(start)
else:
return "{0} - {1}".format(start, end)
def __unicode__(self):
return "{0} at The {1} ({2}) : {3}".format(self.play.title, self.theater.name, self.theater.city, self.start_date.year)
# A cast is defined as the list of People playing characters in a particular Production
class Cast(models.Model):
person = models.ForeignKey(People)
character = models.CharField(max_length=40)
production = models.ForeignKey(Production)
billed_as = models.CharField(max_length = 40, blank=True)
def __unicode__(self):
return "{0} in {1} at The {2} ({3}), played by {4}".format(self.character, self.production.play.title, self.production.theater.name, self.production.theater.city, self.person)
# A crew is defined as the list of off-stage People working on a particular Production
class Crew(models.Model):
person = models.ForeignKey(People)
job = models.CharField(max_length=40)
production = models.ForeignKey(Production)
is_director = models.BooleanField(default=False)
is_writer = models.BooleanField(default=False)
is_producer = models.BooleanField(default=False)
def __unicode__(self):
return "{0} in {1} at The {2} ({3}), played by {4}".format(self.job, self.production.play.title, self.production.theater.name, self.production.theater.city, self.person)
class Theater_pictures(models.Model):
image = models.ImageField(blank=True, upload_to='photos')
theater = models.ForeignKey(Theater)
# kinds of photos:
# headshots (person)
# scenes (several persons)
# theaters
# playbills
# each person, theater, play, or production can have multiple pictures, but one primary picture
# people will use headshots and scenes
# plays will use ?
# productions will use scenes and playbills
# theaters will use only theater pictures
# Want to target images at average of 20KB each
#TODO: Add class for production companies
#TODO: Add class for musical numbers, connect them to plays
|
Python
| 0.000013
|
@@ -2583,33 +2583,38 @@
return %22
+%7B1%7D,
%7B0%7D
-in %7B1
+by %7B4
%7D at The %7B2%7D
@@ -2615,39 +2615,24 @@
he %7B2%7D (%7B3%7D)
-, played by %7B4%7D
%22.format(sel
@@ -2740,16 +2740,17 @@
erson)%0A%0A
+%0A
class Th
|
27fe88a325251c4b12a4b5f020c1d6c5e83b4b59
|
Change var to be more consistent
|
untz_manager/encoder.py
|
untz_manager/encoder.py
|
"""Encoding related operations"""
import logging
import subprocess
import sys
import taglib
LOGGER = logging.getLogger(__name__)
def _get_vorbis_comments(audio_file, pattern):
macros = (('%g', 'GENRE'),
('%n', 'TRACKNUMBER'),
('%t', 'TITLE'),
('%d', 'DATE'))
params_dict = {'%g': '-G',
'%n': '-N',
'%t': '-t',
'%d': '-d'}
vorbis_comments = {}
afp = taglib.File(audio_file) # pylint: disable=E1103
for macro, tag in macros:
if macro in pattern:
vorbis_comments[params_dict[macro]] = afp.tags.get(tag)[0] or '(none)'
vorbis_comments['-a'] = (afp.tags.get('ALBUM ARTIST', [None])[0] or
afp.tags.get('ARTIST', [None])[0] or
'Unknown artist')
vorbis_comments['-l'] = afp.tags.get('ALBUM', [None])[0] or 'Unknown album'
afp.close()
return vorbis_comments
def encode_file(audio_file, output_dir, pattern, quality, passthrough):
"""Run oggenc and encode file, storing in a logical manner."""
process_args = ['oggenc',
'-q', str(quality),
'-n', '{output_dir}/%a/%l/{pattern}.ogg'.format(output_dir=output_dir,
pattern=pattern)]
if passthrough:
process_args.append(passthrough)
vorbis_comments = _get_vorbis_comments(audio_file, pattern)
for tag, value in vorbis_comments.items():
process_args.append(tag)
process_args.append(value)
process_args.append(audio_file)
LOGGER.debug('Running "%s"', ' '.join(process_args))
process = subprocess.Popen(process_args)
process.communicate()
if process.returncode:
LOGGER.critical('Non-zero return code. Exiting.')
sys.exit(process.returncode)
|
Python
| 0
|
@@ -996,22 +996,20 @@
o_file,
-output
+base
_dir, pa
@@ -1204,22 +1204,20 @@
'-n', '%7B
-output
+base
_dir%7D/%25a
@@ -1246,33 +1246,27 @@
mat(
-output_dir=output
+base_dir=base
_dir,%0A
-
|
234609000de3da9449dacb363e58bf60c0e3a4d8
|
Change DATABASES default db to PostgreSQL
|
site/litlong/settings.py
|
site/litlong/settings.py
|
"""
Django settings for litlong project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from YamJam import yamjam
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_PATH = os.path.abspath(
os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
CFG = yamjam(os.path.join(PROJECT_PATH, 'etc/yamjam/config.yaml'))['litlong']
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = CFG['secret-key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'litlong.urls'
WSGI_APPLICATION = 'litlong.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
Python
| 0
|
@@ -1823,90 +1823,250 @@
E':
-'django.db.backends.sqlite3',%0A 'NAME': os.path.join(BASE_DIR, 'db.sqlite3')
+CFG%5B'database'%5D%5B'engine'%5D,%0A 'NAME': CFG%5B'database'%5D%5B'name'%5D,%0A 'USER': CFG%5B'database'%5D%5B'username'%5D,%0A 'PASSWORD': CFG%5B'database'%5D%5B'password'%5D,%0A 'HOST': CFG%5B'database'%5D%5B'host'%5D,%0A 'PORT': CFG%5B'database'%5D%5B'port'%5D
,%0A
|
d6452848521bba37fa01fd7b82fe27d725edd2cf
|
The real PER_PAGE limit is 100.
|
uservoice/collection.py
|
uservoice/collection.py
|
PER_PAGE = 500
class Collection:
def __init__(self, client, query, limit=2**60):
self.client = client
self.query = query
self.limit = limit
self.per_page = min(self.limit, PER_PAGE)
self.pages = {}
self.response_data = None
def __len__(self):
if not self.response_data:
try:
self[0]
except IndexError:
pass
return min(self.response_data['total_records'], self.limit)
def __getitem__(self, i):
if i == 0 or (i > 0 and i < len(self)):
return self.load_page(int(i/float(PER_PAGE)) + 1)[i % PER_PAGE]
else:
raise IndexError
def __iter__(self):
for index in range(len(self)):
yield self[index]
def load_page(self, i):
if not i in self.pages:
url = self.query
if '?' in self.query:
url += '&'
else:
url += '?'
result = self.client.get(url + "per_page=" + str(self.per_page) + "&page=" + str(i))
if 'response_data' in result:
self.response_data = result.pop('response_data')
if len(result.values()) > 0:
self.pages[i] = result.values()[0]
else:
raise uservoice.NotFound.new('The resource you requested is not a collection')
return self.pages[i]
|
Python
| 0.999177
|
@@ -8,9 +8,9 @@
E =
-5
+1
00%0Ac
|
438bc2afc5802bed737fb88c38dcf1eabe4b568d
|
Correct test
|
myria/test/test_plans.py
|
myria/test/test_plans.py
|
import unittest
import myria.plans
from myria.schema import MyriaSchema
QUALIFIED_NAME = {'userName': 'public',
'programName': 'adhoc',
'relationName': 'relation'}
SCHEMA = MyriaSchema({'columnNames': ['column'],
'columnTypes': ['INT_TYPE']})
WORK = [(0, 'http://input-uri-0'), (1, 'http://input-uri-1')]
class TestPlans(unittest.TestCase):
def test_parallel_plan(self):
text = 'This is logical relational algebra'
plan = myria.plans.get_parallel_import_plan(SCHEMA,
WORK,
QUALIFIED_NAME,
text=text)
self.assertDictContainsSubset({'rawQuery': text,
'logicalRa': text}, plan)
self.assertEquals(len(plan['fragments']), len(WORK))
def test_worker_assignment(self):
plan = myria.plans.get_parallel_import_plan(SCHEMA,
WORK,
QUALIFIED_NAME)
fragments = plan['fragments']
workers = reduce(lambda a, f: a + f['overrideWorkers'], fragments, [])
self.assertListEqual(workers, [worker for worker, _ in WORK])
def test_scan(self):
scan_type = 'UNITTEST-SCAN'
scan_parameters = {'metadata': 'foo'}
plan = myria.plans.get_parallel_import_plan(
SCHEMA, WORK, QUALIFIED_NAME,
scan_type=scan_type,
scan_parameters=scan_parameters)
for fragment in plan['fragments']:
scan_operator = fragment['operators'][0]
self.assertEquals(scan_operator['opType'], scan_type)
self.assertEquals(scan_operator['metadata'], 'foo')
def test_insert(self):
insert_type = 'UNITTEST-INSERT'
insert_parameters = {'metadata': 'bar'}
plan = myria.plans.get_parallel_import_plan(
SCHEMA, WORK, QUALIFIED_NAME,
insert_type=insert_type,
insert_parameters=insert_parameters)
for fragment in plan['fragments']:
insert_operator = fragment['operators'][-1]
self.assertEquals(insert_operator['opType'], insert_type)
self.assertEquals(insert_operator['metadata'], 'bar')
|
Python
| 0.001917
|
@@ -1368,24 +1368,39 @@
scan_type =
+ %7B'readerType':
'UNITTEST-S
@@ -1403,16 +1403,17 @@
ST-SCAN'
+%7D
%0A
@@ -1776,16 +1776,86 @@
pType'%5D,
+ 'TupleSource')%0A self.assertEquals(scan_operator%5B'reader'%5D,
scan_ty
|
b13edc289905dd4d2c331eddffa490305f9ef827
|
fix a typo
|
bugzilla/agents.py
|
bugzilla/agents.py
|
import urllib
from bugzilla.models import *
from bugzilla.utils import *
class InvalidAPI_ROOT(Exception):
def __str__(self):
return "Invalid API url specified. " + \
"Please set BZ_API_ROOT in your environment " + \
"or pass it to the agent constructor"
class BugzillaAgent(object):
def __init__(self, api_root=None, api_key=None)
if not api_root:
api_root = os.environ.get('BZ_API_ROOT')
if not api_root:
raise InvalidAPI_ROOT
self.API_ROOT = api_root
self.api_key = api_key
def get_bug(self, bug, include_fields='_default,token,cc,keywords,whiteboard,comments', exclude_fields=None, params={}):
params['include_fields'] = [include_fields]
params['exclude_fields'] = [exclude_fields]
url = urljoin(self.API_ROOT, 'bug/%s?%s' % (bug, self.qs(**params)))
return Bug.get(url)
def get_bug_list(self, params={}):
params = urllib.urlencode(params) + '&Bugzilla_api_key=%s' % self.api_key
url = self.API_ROOT + 'bug/?' + params
return BugSearch.get(url).bugs
def qs(self, **params):
if self.api_key:
params['api_key'] = [self.api_key]
return params
class BMOAgent(BugzillaAgent):
def __init__(self, api_key=None):
super(BMOAgent, self).__init__('https://bugzilla.mozilla.org/bzapi/', api_key)
|
Python
| 1
|
@@ -375,16 +375,17 @@
ey=None)
+:
%0A%0A
|
c4ef7fe24477d9160214c1cd2938aa8f5135d84b
|
Add other needed method stubs
|
utils/database_setup.py
|
utils/database_setup.py
|
import pandas
def load_excel(filepath):
"""
Returns a Pandas datafile that contains the contents of a Microsoft Excel
Spreadsheet
Params:
filepath - A string containing the path to the file
Returns:
A Pandas datafile
"""
return pandas.read_excel(filepath)
def get_column_names(datafile):
"""
Returns a list containing the column names of a Pandas datafile as Python
strings
Params:
datafile - A Pandas datafile
Returns:
A list of strings
"""
return [ str(s) for s in datafile.columns ]
def check_if_mysql_installed():
"""
Verifies if MySQL is installed on the current system. Will throw an
error if MySQL fails to run
"""
pass
def set_up_mysql_schema():
"""
Sets up MySQL with a table with a set schema
"""
pass
|
Python
| 0.000001
|
@@ -10,18 +10,33 @@
ndas
+%0Aimport argparse
%0A%0Adef
-load
+get
_exc
@@ -759,102 +759,1053 @@
def
-set_up_mysql_schema():%0A %22%22%22%0A Sets up MySQL with a table with a set schema%0A %22%22%22%0A pass
+is_table_set_up():%0A %22%22%22%0A Returns True if this project's MySQL table is set up, False otherwise%0A %22%22%22%0A pass%0A%0Adef create_project_table(column_names):%0A %22%22%22%0A Sets up MySQL with a table with a set schema given a list of column_names.%0A Does nothing if the table is already set up.%0A%0A Params:%0A column_names - A list of strings containing column names%0A%0A Returns:%0A None%0A %22%22%22%0A pass%0A%0Adef delete_project_table():%0A %22%22%22%0A Deletes the table in MySQL that this project uses. Will do nothing if the%0A table does not yet exist.%0A %22%22%22%0A pass%0A%0Adef reset_project_table():%0A %22%22%22%0A Resets the table for this project by calling 'delete' and 'setup'%0A %22%22%22%0A delete_project_table()%0A create_project_table()%0A%0Adef load_excel_file(datafile):%0A %22%22%22%0A Takes a Pandas datafile and inserts the data into the project's MySQL table.%0A If the project's table is not yet created, this function will call 'create'.%0A %22%22%22%0A if not is_table_set_up():%0A create_project_table()%0A # TODO: Fill in this part
%0A%0A
|
e838370958c90ce1123aa1a5ab0823169257cfa9
|
Make configuration per model instead of per project.
|
adminfilters/admin.py
|
adminfilters/admin.py
|
from django.contrib.admin.views.main import ChangeList
from django.contrib.admin.options import ModelAdmin
from django.contrib.admin.filterspecs import FilterSpec
from django.conf import settings
GENERIC_FILTERS_ON_TOP = getattr(settings, "GENERIC_FILTERS_ON_TOP", False)
class GenericFilterSpec(FilterSpec):
def __init__(self, data, request, title):
self.data = data
self.request = request
self._title = title
def title(self):
return self._title
def has_output(self):
return True
def choices(self, changelist):
if callable(self.data):
choices = list(self.data())
else:
choices = list(self.data)
for choice in [dict(zip(['selected', 'query_string', 'display'], x)) for x in choices]:
yield choice
class GenericFilterChangeList(ChangeList):
def __init__(self, request, *args, **kwargs):
self.request = request
super(GenericFilterChangeList, self).__init__(request, *args, **kwargs)
@property
def generic_filters(self):
return getattr(self.model_admin, 'generic_filters', None)
def build_filter_spec(self, choices, title):
return GenericFilterSpec(choices, self.request, title)
def get_filters(self, request):
"""
Extend ChangeList.get_filters to include generic_filters.
"""
filter_specs = super(GenericFilterChangeList, self).get_filters(request)[0]
generic_filters = []
if self.generic_filters:
for fname in self.generic_filters:
func = getattr(self.model_admin, fname)
spec = func(request, self)
if spec and spec.has_output():
generic_filters.append(spec)
if GENERIC_FILTERS_ON_TOP:
filter_specs = generic_filters + filter_specs
else:
filter_specs = filter_specs + generic_filters
return filter_specs, bool(filter_specs)
class GenericFilterAdmin(ModelAdmin):
def get_changelist(self, request, **kwargs):
return GenericFilterChangeList
|
Python
| 0
|
@@ -160,118 +160,8 @@
pec%0A
-from django.conf import settings%0A%0AGENERIC_FILTERS_ON_TOP = getattr(settings, %22GENERIC_FILTERS_ON_TOP%22, False)%0A
%0A%0Acl
@@ -1017,24 +1017,151 @@
rs', None)%0A%0A
+ @property%0A def generic_filters_on_top(self):%0A return getattr(self.model_admin, 'generic_filters_on_top', False)%0A%0A
def buil
@@ -1790,30 +1790,35 @@
if
-GENERIC_FILTERS_ON_TOP
+self.generic_filters_on_top
:%0A
|
e99d60975bca123f001fd9a536931a3c41798eb5
|
Fix syntax error
|
validatish/validator.py
|
validatish/validator.py
|
from validatish import validate
from error import Invalid
#####
# Validator base classes.
#
class Validator(object):
""" Abstract Base class for all validators """
def __call__(self, value):
""" A method that will raise an Invalid error """
def __repr__(self):
return 'validatish.%s()'%self.__class__.__name__
class CompoundValidator(Validator):
""" Abstract Base class for compound validators """
validators = None
#####
# Validators types.
#
class Required(Validator):
""" Checks that the value is not empty
"""
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_required(v, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class String(Validator):
""" Checks whether value can be converted to an integer """
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_string(v)
except Invalid, e:
raise Invalid(e.message, validator=self)
class PlainText(Validator):
""" Checks whether value is a 'simple' string"""
def __init__(self, extra='', messages=None):
self.extra = extra
self.messages = messages
def __call__(self, v):
try:
validate.is_plaintext(v,extra=self.extra, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class Email(Validator):
""" Checks whether value looks like an email address. """
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_email(v, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class DomainName(Validator):
""" Checks whether value looks like a domain name. """
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_domain_name(v, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class URL(Validator):
""" Checks whether value is a url"""
def __init__(self, full=True, absolute=True, relative=True, with_scheme=False, messages=None)
self.full = full
self.absolute = absolute
self.relative = relative
if with_schema:
self.absolute = False
self.relative = False
self.messages = messages
def __call__(self, v):
try:
validate.is_url(v, full=self.full, absolute=self.absolute, relative=self.relative, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
def __repr__(self):
return 'validatish.%s(with_schema=%s)'%(self.__class__.__name__, self.with_scheme)
class Integer(Validator):
""" Checks whether value can be converted to an integer """
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_integer(v, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class Number(Validator):
""" Checks whether value can be converted to a number and is not a string """
def __init__(self, messages=None):
self.messages = messages
def __call__(self, v):
try:
validate.is_number(v, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
class Equal(Validator):
"""
Validator that checks a value is equal to the comparison value, equal_to.
"""
def __init__(self, compared_to, messages=None):
self.compared_to = compared_to
self.messages = messages
def __call__(self, v):
try:
validate.is_equal(v, self.compared_to, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
def __repr__(self):
return 'validatish.%s(%s)'%(self.__class__.__name__, self.compared_to)
class OneOf(Validator):
""" Checks whether value is one of a supplied list of values"""
def __init__(self, set_of_values, messages=None):
self.set_of_values = set_of_values
self.messages=messages
def __call__(self, v):
try:
validate.is_one_of(v, self.set_of_values, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
def __repr__(self):
return 'validatish.%s(%s)'%(self.__class__.__name__, self.set_of_values)
class Length(Validator):
""" Check whether the length of the value is not outside min/max bound(s) """
def __init__(self, min=None, max=None, messages=None):
self.max = max
self.min = min
self.messages = messages
def __call__(self, v):
try:
validate.has_length(v, min=self.min, max=self.max, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
def __repr__(self):
return 'validatish.%s(min=%s, max=%s)'%(self.__class__.__name__, self.min, self.max)
class Range(Validator):
""" Check whether the value is not outside min/max bound(s) """
def __init__(self, min=None, max=None, messages=None):
self.max = max
self.min = min
self.messages = messages
def __call__(self, v):
try:
validate.is_in_range(v, min=self.min, max=self.max, messages=self.messages)
except Invalid, e:
raise Invalid(e.message, validator=self)
def __repr__(self):
return 'validatish.%s(min=%s, max=%s)'%(self.__class__.__name__, self.min, self.max)
class Any(CompoundValidator):
"""
Combines multiple validators together, raising an exception only if they
all fail (i.e. validation succeeds if any validator passes).
"""
def __init__(self, *args, **kw):
self.validators=args
self.messages = kw.get('messages')
def __call__(self, v):
exceptions = []
for validator in self.validators:
try:
validator(v)
except Invalid, e :
exceptions.append(Invalid(e.message, e.exceptions, validator))
else:
return
message = '; '.join(e.message for e in exceptions)
_messages = {
'please-fix': "Please fix any of: %(errors)s",
}
if self.messages:
_messages.update(messages)
raise Invalid(_messages['please-fix']%{'errors':message}, exceptions, self)
def __repr__(self):
return 'validatish.%s%s'%(self.__class__.__name__, self.validators)
class All(CompoundValidator):
""" Combines multiple validators together, raising an exception unless they all pass """
def __init__(self, *args):
self.validators = args
def __call__(self, v):
exceptions = []
for validator in self.validators:
try:
validator(v)
except Invalid, e:
exceptions.append(Invalid(e.message, e.exceptions, validator))
if len(exceptions):
message = '; '.join(e.message for e in exceptions)
raise Invalid(message, exceptions, self)
def __repr__(self):
return 'validatish.%s%s'%(self.__class__.__name__, self.validators)
class Always(Validator):
"""
A validator that always passes, mostly useful as a default.
This validator tests False to make it seem "invisible" to discourage anyone
bothering actually calling it.
"""
def __call__(self, v):
pass
def __nonzero__(self):
return False
|
Python
| 0.000585
|
@@ -2380,16 +2380,17 @@
es=None)
+:
%0A
|
81f6bbbd52acc1aa8eba6d5f14d21988f86549e2
|
Fix formatting of exceptions
|
alltheitems/obtaining.py
|
alltheitems/obtaining.py
|
import bottle
import more_itertools
import alltheitems.util
METHODS = {}
def method(name):
def wrapper(f):
METHODS[name] = f
return f
return wrapper
@method('craftingShaped')
def crafting_shaped(i, item_info, method, **kwargs):
return bottle.template("""
<p>{{item_info['name']}} can {{'' if i == 0 else 'also '}}be crafted using the following recipe:</p>
{{!inventory_table(chunked(method['recipe'], 3), style={'margin-left': 'auto', 'margin-right': 'auto'})}}
%if method.get('outputAmount', 1) > 1:
<p>This will create {{method['outputAmount']}} items per crafting process.</p>
%end
""", chunked=more_itertools.chunked, i=i, inventory_table=alltheitems.util.inventory_table, item_info=item_info, method=method)
@method('craftingShapeless')
def crafting_shapeless(i, item_info, method, **kwargs):
if len(method['recipe']) == 9 and all(l == r for l, r in more_itertools.stagger(method['recipe'], offsets=(0, 1))):
return crafting_shaped(i, item_info, method, **kwargs)
return bottle.template("""
<p>{{item_info['name']}} can {{'' if i == 0 else 'also '}}be crafted using the following shapeless recipe:</p>
{{!inventory_table([method['recipe']], style={'margin-left': 'auto', 'margin-right': 'auto'})}}
%if method.get('outputAmount', 1) > 1:
<p>This will create {{method['outputAmount']}} items per crafting process.</p>
%end
""", i=i, inventory_table=alltheitems.util.inventory_table, item_info=item_info, method=method)
def render(**kwargs):
method_type = kwargs['method']['type']
if method_type in METHODS:
try:
return METHODS[method_type](**kwargs)
except Exception as e:
return bottle.template("""
%import io, json, traceback
<p>There was an error rendering this obtaining method:</p>
<pre>{{e.__class__.__name__}}: {{e}}</pre>
<h2>Traceback:</h2>
%buf = io.StringIO()
%traceback.print_exc(file=buf)
<pre style="text-align: left;">{{buf.getvalue()}}</pre>
<p>Sorry about that. Here's the raw method data:</p>
<pre style="text-align: left;">{{json.dumps(method, indent=4, sort_keys=True)}}</pre>
""", e=e, **kwargs)
else:
return bottle.template("""
%import json
<p>{{item_info['name']}} can {{'' if i == 0 else 'also '}}be obtained via a method called <code>{{method['type']}}</code> in the database. All The Items does not currently support rendering it, so here's the raw data::</p>
<pre style="text-align: left;">{{json.dumps(method, indent=4, sort_keys=True)}}</pre>
""", **kwargs)
|
Python
| 0.000017
|
@@ -1986,18 +1986,17 @@
%3C
-h2
+p
%3ETraceba
@@ -2004,10 +2004,9 @@
k:%3C/
-h2
+p
%3E%0A
@@ -2661,17 +2661,16 @@
aw data:
-:
%3C/p%3E%0A
|
8860810f9643b5647402ac2ff774245d18c08924
|
fix comment
|
scripts/colab_install.py
|
scripts/colab_install.py
|
"""
Original code by @philopon
https://gist.github.com/philopon/a75a33919d9ae41dbed5bc6a39f5ede2
"""
import sys
import os
import requests
import subprocess
import shutil
from logging import getLogger, StreamHandler, INFO
logger = getLogger(__name__)
logger.addHandler(StreamHandler())
logger.setLevel(INFO)
def install(
chunk_size=4096,
file_name="Miniconda3-latest-Linux-x86_64.sh",
url_base="https://repo.continuum.io/miniconda/",
conda_path=os.path.expanduser(os.path.join("~", "miniconda")),
version=None,
gpu=True,
add_python_path=True,
force=False):
"""install deepchem from miniconda on Google Colab
For GPU notebook
(if you don't set the version, this script installs the latest package)
```
import deepchem_installer
deepchem_installer.install()
```
For CPU notebook
```
import deepchem_installer
deepchem_installer.install(version="2.3.0", gpu=False)
```
"""
python_path = os.path.join(
conda_path,
"lib",
"python{0}.{1}".format(*sys.version_info),
"site-packages",
)
if add_python_path and python_path not in sys.path:
logger.info("add {} to PYTHONPATH".format(python_path))
sys.path.append(python_path)
if os.path.isdir(os.path.join(python_path, "deepchem")):
logger.info("deepchem is already installed")
if not force:
return
logger.info("force re-install")
url = url_base + file_name
python_version = "{0}.{1}.{2}".format(*sys.version_info)
logger.info("python version: {}".format(python_version))
if os.path.isdir(conda_path):
logger.warning("remove current miniconda")
shutil.rmtree(conda_path)
elif os.path.isfile(conda_path):
logger.warning("remove {}".format(conda_path))
os.remove(conda_path)
logger.info('fetching installer from {}'.format(url))
res = requests.get(url, stream=True)
res.raise_for_status()
with open(file_name, 'wb') as f:
for chunk in res.iter_content(chunk_size):
f.write(chunk)
logger.info('done')
logger.info('installing miniconda to {}'.format(conda_path))
subprocess.check_call(["bash", file_name, "-b", "-p", conda_path])
logger.info('done')
logger.info("installing deepchem")
deepchem_package = "deepchem-gpu" if gpu else "deepchem"
subprocess.check_call([
os.path.join(conda_path, "bin", "conda"),
"install",
"--yes",
"-c", "deepchem",
"-c", "rdkit",
"-c", "conda-forge",
"-c", "omnia",
"python=={}".format(python_version),
deepchem_package if version is None else "{}=={}".format(deepchem_package, version)])
logger.info("done")
import deepchem
logger.info("deepchem-{} installation finished!".format(deepchem.__version__))
if __name__ == "__main__":
install()
|
Python
| 0
|
@@ -746,16 +746,20 @@
ipt
+will
install
-s
the
|
aff03ceb63ddc37227c4302c4bd43549c71591b5
|
Change the name for get_final_E to read_final_E
|
vasp_tool/patch_vasp.py
|
vasp_tool/patch_vasp.py
|
#####################################################################
# The patcher for factory ase.calculator.vasp.Vasp class #
# will change the behavior of the POSCAR writer to use vasp5 format #
#####################################################################
from ase.calculators.vasp.create_input import GenerateVaspInput
from ase.calculators.vasp import Vasp
from pymatgen.io.vasp import Vasprun
import os, os.path, shutil
# Tell vasp calculator to write the POSCAR using vasp5 style
def _new_write_input(self, atoms, directory='./', direct=True, vasp5=True):
from ase.io.vasp import write_vasp
from os.path import join
write_vasp(join(directory, 'POSCAR'),
self.atoms_sorted,
direct=direct,
symbol_count=self.symbol_count, vasp5=vasp5)
self.write_incar(atoms, directory=directory)
self.write_potcar(directory=directory)
self.write_kpoints(directory=directory)
self.write_sort_file(directory=directory)
# Hot patch for the GenerateVaspInput class
GenerateVaspInput.write_input = _new_write_input
def _load_vasprun(self, filename="vasprun.xml"):
self.vasprun = Vasprun(filename)
# read the bandgap from vasprun.xml
def _read_bandgap(self):
if not hasattr(self, "vasprun"):
self.load_vasprun()
# From DOS
dos = self.vasprun.complete_dos
bg_dos = dos.get_gap()
# From Band structure
bs = self.vasprun.get_band_structure()
bg_bs = bs.get_band_gap()
# Return the bandgaps calculated by DOS or band structure
return (bg_dos, bg_bs)
def _read_extern_stress(self, form="kB", filename="OUTCAR"):
stress = None
for line in open(filename):
if line.find('external pressure') != -1:
stress = line.split()[3]
if form != "kB":
# in GPa
stress = stress * 0.1 * GPa
return stress
def _copy_files(self, tag="tag"):
# copy_file is supposed to be used only after the calculation!
if hasattr(self, "tag"):
tag = self.tag
for fname in ["INCAR", "OUTCAR", "WAVECAR", "CONTCAR",
"WAVEDER", "DOSCAR", "vasprun.xml"]:
if os.path.exists(fname):
f_new = ".".join((fname, tag))
shutil.copy(fname, f_new)
# Get the final potential from vasprun.xml
def _get_final_E(self, filename="vasprun.xml"):
v = Vasprun(filename)
fe = v.final_energy()
return fe
# Hot patch to the Vasp class
Vasp.read_bandgap = _read_bandgap
Vasp.load_vasprun = _load_vasprun
Vasp.read_extern_stress = _read_extern_stress
Vasp.copy_files = _copy_files
Vasp.get_final_E = _get_final_E
|
Python
| 0.999997
|
@@ -2710,19 +2710,20 @@
es%0AVasp.
-get
+read
_final_E
|
bb12ec846e6c7f1a4ce458645595c2c30ffb46bc
|
Rename fixture
|
tests/mock_vws/test_invalid_given_id.py
|
tests/mock_vws/test_invalid_given_id.py
|
"""
Tests for passing invalid endpoints which require a target ID to be given.
"""
import uuid
from urllib.parse import urljoin
import pytest
import requests
from _pytest.fixtures import SubRequest
from requests import codes
from requests_mock import GET
from common.constants import ResultCodes
from tests.conftest import VuforiaServerCredentials
from tests.mock_vws.utils import assert_vws_failure
from vws._request_utils import authorization_header, rfc_1123_date
class Endpoint:
"""
Details of endpoints to be called in tests.
"""
def __init__(self, path: str, method: str) -> None:
"""
Args:
path: The path for the endpoint.
method: The HTTP method for the endpoint.
"""
self.path = path
self.method = method
@pytest.fixture()
def target_list() -> Endpoint:
"""
Return details of the endpoint for getting a list of targets.
"""
return Endpoint(path='/targets', method=GET)
@pytest.fixture()
def get_duplicates() -> Endpoint:
"""
Return details of the endpoint for getting details of a target.
"""
return Endpoint(path='/duplicates', method=GET)
@pytest.fixture(params=['target_list', 'get_duplicates'])
def endpoint(request: SubRequest) -> Endpoint:
"""
Return details of an endpoint which takes a target ID in the path.
"""
return request.getfixturevalue(request.param)
@pytest.mark.usefixtures('verify_mock_vuforia')
class TestInvalidGivenID:
"""
Tests for giving an invalid ID to endpoints which require a target ID to
be given.
"""
def test_not_real_id(self,
vuforia_server_credentials: VuforiaServerCredentials,
endpoint: Endpoint,
) -> None:
"""
A `NOT_FOUND` error is returned when an endpoint is given a target ID
of a target which does not exist.
"""
request_path = endpoint.path + '/' + uuid.uuid4().hex
date = rfc_1123_date()
authorization_string = authorization_header(
access_key=vuforia_server_credentials.access_key,
secret_key=vuforia_server_credentials.secret_key,
method=GET,
content=b'',
content_type='',
date=date,
request_path=request_path,
)
headers = {
"Authorization": authorization_string,
"Date": date,
}
url = urljoin('https://vws.vuforia.com/', request_path)
response = requests.request(
method=GET,
url=url,
headers=headers,
data=b'',
)
assert_vws_failure(
response=response,
status_code=codes.NOT_FOUND,
result_code=ResultCodes.UNKNOWN_TARGET,
)
|
Python
| 0.000001
|
@@ -1237,16 +1237,38 @@
endpoint
+_which_takes_target_id
(request
@@ -1758,16 +1758,38 @@
endpoint
+_which_takes_target_id
: Endpoi
@@ -1995,24 +1995,25 @@
_path =
+(
endpoint
.path +
@@ -2008,21 +2008,67 @@
oint
-.path + '/' +
+_which_takes_target_id.path + '/' +%0A
uui
@@ -2080,16 +2080,17 @@
d4().hex
+)
%0A
|
cce88a16cc367ef8df9533b848e6fae29ac8a4d1
|
update build setup
|
build_win_setup.py
|
build_win_setup.py
|
"""
@file
@brief Builds a setup for the teachings: ensae_teaching_cs
"""
try:
import pymyinstall
except ImportError:
import sys
sys.path.append("../pymyinstall/src")
import pymyinstall
try:
import pyquickhelper
except ImportError:
import sys
sys.path.append("../pyquickhelper/src")
import pyquickhelper
if __name__ == "__main__":
import sys
sys.path.append("src")
from pyquickhelper import fLOG
fLOG(OutputPrint=True)
from actuariat_python.automation.win_setup_helper import last_function
from pymyinstall import win_python_setup
from pymyinstall.packaged import ensae_fullset
list_modules = ensae_fullset()
win_python_setup(module_list=list_modules, verbose=True,
download_only=False,
no_setup=False,
last_function=last_function,
selection={"R", "VS"},
documentation=False,
fLOG=fLOG)
|
Python
| 0.000001
|
@@ -329,16 +329,20 @@
khelper%0A
+
%0A%0Aif __n
@@ -480,24 +480,25 @@
rom
-actuariat_python
+ensae_teaching_cs
.aut
|
ed3b48c1cccc080fa209e35bf42b757b29d8693a
|
Remove unused includes
|
butter/fanotify.py
|
butter/fanotify.py
|
#!/usr/bin/env python
"""fanotify: wrapper aroudn the fanotify family of syscalls for watching for file modifcation"""
from cffi import FFI as _FFI
from os import O_RDONLY, O_WRONLY, O_RDWR
from os import fdopen
import errno as _errno
READ_EVENTS_MAX = 10
_ffi = _FFI()
_ffi.cdef("""
#define FAN_CLOEXEC ...
#define FAN_NONBLOCK ...
#define FAN_CLASS_NOTIF ...
#define FAN_CLASS_CONTENT ...
#define FAN_CLASS_PRE_CONTENT ...
#define FAN_UNLIMITED_QUEUE ...
#define FAN_UNLIMITED_MARKS ...
#define FAN_MARK_ADD ...
#define FAN_MARK_REMOVE ...
#define FAN_MARK_DONT_FOLLOW ...
#define FAN_MARK_ONLYDIR ...
#define FAN_MARK_MOUNT ...
#define FAN_MARK_IGNORED_MASK ...
#define FAN_MARK_IGNORED_SURV_MODIFY ...
#define FAN_MARK_FLUSH ...
#define FAN_ALL_MARK_FLAGS ...
#define FAN_ACCESS ...
#define FAN_MODIFY ...
#define FAN_CLOSE_WRITE ...
#define FAN_CLOSE_NOWRITE ...
#define FAN_OPEN ...
#define FAN_Q_OVERFLOW ...
#define FAN_OPEN_PERM ...
#define FAN_ACCESS_PERM ...
#define FAN_ONDIR ...
#define FAN_EVENT_ON_CHILD ...
// FAN_CLOSE_WRITE|FAN_CLOSE_NOWRITE
#define FAN_CLOSE ...
// Access control flags
#define FAN_ALLOW ...
#define FAN_DENY ...
// #define FAN_EVENT_OK ...
// #define FAN_EVENT_NEXT ...
struct fanotify_response {
int32_t fd;
uint32_t response;
};
//#define __aligned_u64 __u64 __attribute__((aligned(8)))
struct fanotify_event_metadata {
uint32_t event_len;
uint8_t vers;
uint8_t reserved;
uint16_t metadata_len;
uint64_t mask;
int32_t fd;
int32_t pid;
};
int fanotify_init(unsigned int flags, unsigned int event_f_flags);
int fanotify_mark (int fanotify_fd, unsigned int flags, uint64_t mask, int dfd, const char *pathname);
""")
_C = _ffi.verify("""
#include <linux/fcntl.h>
#include <sys/fanotify.h>
""", libraries=[])
#include <stdint.h>
#include <asm-generic/int-ll64.h>
#include <linux/types.h>
class FANotify(object):
def __init__(self, flags=0, mode='r'):
"""Create a new fanotify context to track file modification/creation/deleteion
:param integer flags: Define what type of fanotify context you wish to open, can be a mix of the following
FAN_CLOEXEC, FAN_NONBLOCK, FAN_CLASS_NOTIF, FAN_CLASS_CONTENT, FAN_CLASS_PRE_CONTENT
FAN_UNLIMITED_QUEUE, FAN_UNLIMITED_MARKS
:param integer mode: str or int. when used with a str, behaves identically to the 'mode' keyword in open()
:raises ValueError: If ethier flags or mode contains an invalid value, this will be raised
:raises IOError: Raised for any of the following conditions:
Number of listeners exceeds FANOTIFY_DEFAULT_MAX_LISTENERS
flag FAN_UNLIMITED_QUEUE was set without the CAP_SYS_ADMIN capability
flag FAN_UNLIMITED_MARKS was set without the CAP_SYS_ADMIN capability
:raises OSError: Raised on permissions issue or Non memory avalible
"""
if isinstance(mode, str):
if '+' in mode:
mode = O_RDWR
elif 'w' in mode:
mode = O_WRONLY
elif 'r' in mode:
mode = O_RDONLY
fd = _C.fanotify_init(flags, mode)
if fd < 0:
# handle error cases
error = _ffi.errno
if error == _errno.EINVAL:
# EINVAL: Flags contains invalid options $FAN_ALL_INIT_FLAGS indicates valid flags
raise ValueError('"flags" contains invalid values')
elif error == _errno.EMFILE:
# EMFILE: indicates one of the following situations:
# - The number of listeners exceeds FANOTIFY_DEFAULT_MAX_LISTENERS.
# - Flag FAN_UNLIMITED_QUEUE was set without owning the CAP_SYS_ADMIN capability.
# - Flag FAN_UNLIMITED_MARKS was set without owning the CAP_SYS_ADMIN capability.
raise IOError('Max listeners exceeded or do not have CAP_SYS_ADMIN')
elif error == _errno.ENOMEM:
# ENOMEM: No mem avalible
raise OSError('No Mem avalibel to service request')
elif error == _errno.EPERM:
# EPERM: Operation not permitted, may need root/CAP_SYS_ADMIN
raise OSError('Operation not permitted')
else:
raise Exception('Unknown Error')
self._fd = fd
pass
def fileno(self):
"""Returns the file descriptor associated with the fanotify handle
:returns int: The file descriptor used for the fanotify handle
"""
return self._fd
def add_watch(self):
pass
def del_watch(self):
pass
def flush_watches(self):
pass
def fanotify_mark():
"""Add a file to a fanotify context"""
"""
EBADF: an invalid fd was passed in
EINVAL: an invalid flag or mask was passed in
ENOENT: directory is invalid or directory/mount not marked
ENOMEM: no mem avalible
ENOSPC: Too many marks
"""
pass
def main():
fd = _C.fanotify_init(_C.FAN_CLASS_NOTIF, O_RDONLY)
if fd < 0:
print 'fd error'
exit(1)
f = fdopen(fd)
err = _C.fanotify_mark(f.fileno(), _C.FAN_MARK_ADD, _C.FAN_MODIFY, 0, '/tmp/testing')
import errno
if err > 0:
print err, _ffi.errno, errno.errorcode[_ffi.errno]
else:
print "all good"
# read_size = READ_EVENTS_MAX * _ffi.sizeof('fanotify_event_metadata')
read_size = 1 * _ffi.sizeof('struct fanotify_event_metadata')
print 'Read size: {}'.format(read_size)
buf = f.read(read_size)
str_buf = _ffi.new('char[]', len(buf))
str_buf[0:len(buf)] = buf
# events = _ffi.new('struct fanotify_event_metadata *',)
events = _ffi.cast('struct fanotify_event_metadata *', str_buf)
print 'Writer PID:', events.pid
# make things a tiny bit more accsessable rather than going via the '__C' object
import fanotify
for key, val in _C.__dict__.iteritems():
if key.startswith('FAN_'):
fanotify.__dict__[key] = val
del fanotify
if __name__ == "__main__":
main()
|
Python
| 0
|
@@ -1791,88 +1791,8 @@
%5B%5D)%0A
-#include %3Cstdint.h%3E%0A#include %3Casm-generic/int-ll64.h%3E%0A#include %3Clinux/types.h%3E%0A%0A
%0Acla
|
2ef5fa81d1c1edf00078c6d038e8b1ff494e9074
|
Set up app.py for future updates
|
RTDT/app.py
|
RTDT/app.py
|
from flask import Flask, render_template, jsonify, request, g
from flask_googlemaps import GoogleMaps
from flask_googlemaps import Map
from transit import get_bus_data_from_csv
from transit import convert_df_to_list
from transit import get_entities
from transit import get_gtfs_data
from transit import get_markers_for_list_entities
from transit import get_real_time_data_request_response
import json
from collections import namedtuple
import datetime
import os
from helper import merge_two_dicts
app = Flask(__name__, template_folder="./templates")
GoogleMaps(app)
@app.route("/update")
def update():
get_gtfs_data(force=True)
return("Update complete")
@app.route("/data")
def data():
try:
current_location = json.loads(request.args.get('pos'))
except:
current_location = {u'lat': 39.7433814, u'lng': -104.98910989999999}
if current_location is None:
current_location = {u'lat': 39.7433814, u'lng': -104.98910989999999}
print(current_location)
print("before gtfs - {}".format(datetime.datetime.now()))
get_gtfs_data()
print("after gtfs - {}".format(datetime.datetime.now()))
print("before csv - {}".format(datetime.datetime.now()))
bus20_east_df, bus20_west_df, all_buses_df, stops_df = get_bus_data_from_csv()
print("after csv - {}".format(datetime.datetime.now()))
print("before list - {}".format(datetime.datetime.now()))
bus20_east_list = convert_df_to_list(bus20_east_df)
bus20_west_list = convert_df_to_list(bus20_west_df)
# all_buses_list = convert_df_to_list(all_buses_df)
print("after list - {}".format(datetime.datetime.now()))
print("before header - {}".format(datetime.datetime.now()))
headers = get_real_time_data_request_response(header=True)
last_modified = headers['Last-Modified']
print("after header - {}".format(datetime.datetime.now()))
print("before entities - {}".format(datetime.datetime.now()))
l1 = get_entities(bus20_east_list)
l2 = get_entities(bus20_west_list)
# l3 = get_entities(all_buses_list)
print("after entities - {}".format(datetime.datetime.now()))
bus_20_east_dict = {'/static/transit-east.png': get_markers_for_list_entities(l1, stops_df, current_location),
}
bus_20_west_dict = {'/static/transit-west.png': get_markers_for_list_entities(l2, stops_df, current_location),
}
print("Created dicts")
# jall_buses_dict = {'/static/transit.png': get_markers_for_list_entities(l3, stops_df, current_location)}
all_buses_dict = {'/static/transit.png': []}
print("Created all dicts")
markers = merge_two_dicts(all_buses_dict, merge_two_dicts(bus_20_east_dict, bus_20_west_dict))
print("Merged everything")
lat_lng = {'lat': 39.7392, 'lng': -104.9903} # Denver downtown
UTC_OFFSET = int(os.getenv("OFFSET", 7))
dt = datetime.datetime.strptime(last_modified, '%a, %d %b %Y %H:%M:%S GMT')
dt = dt - datetime.timedelta(hours=UTC_OFFSET)
last_modified = dt.strftime('%a, %d %b %Y %H:%M:%S MST')
print("return everything")
data = {'last_modified': last_modified,
'location': lat_lng,
'markers': markers}
# data = {"last_modified": "Wed, 10 Feb 2016 20:13:39 MST", "location": {"lat": 39.7434915, "lng": -104.9890398}, "markers": {"/static/transit-west.png": [], "/static/transit-east.png": [[39.7434915, -104.9890398]]}}
return json.dumps(data)
@app.route("/")
def mapview():
# creating a map in the view
headers = get_real_time_data_request_response(header=True)
last_modified = headers['Last-Modified']
UTC_OFFSET = 7
dt = datetime.datetime.strptime(last_modified, '%a, %d %b %Y %H:%M:%S GMT')
dt = dt - datetime.timedelta(hours=UTC_OFFSET)
last_modified = dt.strftime('%a, %d %b %Y %H:%M:%S MST')
return render_template('map.html', last_modified=last_modified, json_api_key=os.getenv('JSON_API'))
if __name__ == "__main__":
app.debug = os.getenv('DEBUG', False)
app.threaded = os.getenv('THREADED', False)
app.run()
|
Python
| 0
|
@@ -1525,18 +1525,16 @@
_df)%0A
- #
all_bus
@@ -2021,18 +2021,16 @@
ist)%0A
- #
l3 = ge
@@ -2431,17 +2431,16 @@
%0A%0A #
-j
all_buse
|
ec295698b683dd5f04df1fff49f9d1e2afdf0a86
|
fix bugs in writing out failures
|
tests/selenium/remotecontrol/test_ol.py
|
tests/selenium/remotecontrol/test_ol.py
|
from selenium import selenium
import time
import sys
from ConfigParser import ConfigParser
if len(sys.argv) > 2:
filename = sys.argv[2]
else:
filename = "config.cfg"
c = ConfigParser()
c.read(filename)
targets = {}
sections = c.sections()
for s in sections:
targets[s] = dict(c.items(s))
targets[s]['name'] = s
if sys.argv[1] == "all":
browsers = list(targets.values())
elif sys.argv[1] not in targets:
print "Invalid target"
sys.exit()
else:
browsers = [targets[sys.argv[1]]]
if 1:
for b in browsers:
print "Running %s on %s" % (b['name'], b['host'])
s = selenium(b['host'], 4444, "*%s" % b['browsercmd'], "http://openlayers.org/")
s.start()
try:
s.open("/dev/tests/run-tests.html?run=all")
count = 0
while count == 0:
count = int(s.get_eval("window.document.getElementById('testtable').getElementsByTagName('tr').length"))
time.sleep(5)
ok = 0
fail = 0
while True:
ok = int(s.get_eval('window.Test.AnotherWay._g_ok_pages'))
fail = int(s.get_eval('window.Test.AnotherWay._g_fail_pages'))
if (ok + fail) >= count:
break
time.sleep(10)
if fail:
print "Failed: %s" % fail
html = s.get_eval("window.document.getElementById('results').innerHTML").decode("utf-8")
all_html = """<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type" />
</head>
<body>%s</body></html>""" % html
f = open("fail.%s.%s.html" % (time.time(), b['name']), "w")
f.write(all_html.encode)
f.close()
except Exception, E:
print "Error: ", E
s.stop()
|
Python
| 0.007053
|
@@ -1508,18 +1508,18 @@
rHTML%22).
-d
e
+n
code(%22ut
@@ -1802,23 +1802,16 @@
all_html
-.encode
)%0A
|
be92cf747a718bc004bd2024dbdcb527205d9b44
|
remove an extra import
|
scripts/lib/flattened.py
|
scripts/lib/flattened.py
|
import collections
def flatten(l):
# from http://stackoverflow.com/a/2158532/2347774
for el in l:
if isinstance(el, list) and not isinstance(el, str):
yield from flatten(el)
else:
yield el
|
Python
| 0.000006
|
@@ -1,24 +1,4 @@
-import collections%0A%0A
def
|
56ade9d8c571c3836148ecbd1c306fa3c7796279
|
use parser.error() instead of raise ValueError
|
Lib/fontmake/__main__.py
|
Lib/fontmake/__main__.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import ArgumentParser
from fontmake.font_project import FontProject
def main():
parser = ArgumentParser()
parser.add_argument('-g', '--glyphs-path')
parser.add_argument('-u', '--ufo-paths', nargs='+')
parser.add_argument('-m', '--mm-designspace')
parser.add_argument('-o', '--output', nargs='+', default=('otf', 'ttf'),
choices=('ufo', 'otf', 'ttf', 'ttf-interpolatable'))
parser.add_argument('-i', '--interpolate', action='store_true',
help='interpolate masters (for Glyphs or MutatorMath '
'sources only)')
parser.add_argument('--mti-source')
parser.add_argument('--family-name', help='Family name to use for masters,'
' and to filter output instances by')
parser.add_argument('--use-afdko', action='store_true')
parser.add_argument('--keep-overlaps', dest="remove_overlaps",
action='store_false',
help='Do not remove any overlap.')
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--production-names', dest='use_production_names',
action='store_true', help='Rename glyphs with '
'production names if available otherwise use uninames.')
group.add_argument('--no-production-names', dest='use_production_names',
action='store_false',
help='Do not rename glyphs with production names. '
'Keeps original glyph names')
parser.set_defaults(use_production_names=None)
parser.add_argument('--timing', action='store_true')
args = vars(parser.parse_args())
project = FontProject(timing=args.pop('timing'))
glyphs_path = args.pop('glyphs_path')
ufo_paths = args.pop('ufo_paths')
designspace_path = args.pop('mm_designspace')
if not sum(1 for p in [glyphs_path, ufo_paths, designspace_path] if p) == 1:
raise ValueError('Exactly one source type required (Glyphs, UFO, or '
'MutatorMath).')
if glyphs_path:
project.run_from_glyphs(glyphs_path, **args)
elif designspace_path:
project.run_from_designspace(designspace_path, **args)
else:
excluded = 'interpolate'
if args[excluded]:
raise ValueError(
'"%s" argument only available for Glyphs or MutatorMath source'
% excluded)
del args[excluded]
if ufo_paths:
project.run_from_ufos(ufo_paths, **args)
if __name__ == '__main__':
main()
|
Python
| 0.000007
|
@@ -2585,36 +2585,32 @@
1:%0A
-raise ValueE
+parser.e
rror('Exactl
@@ -2655,20 +2655,16 @@
O, or '%0A
-
@@ -2945,20 +2945,16 @@
-raise ValueE
+parser.e
rror
|
bf163f45d1e7a28db34396b20209778668103f0a
|
remove password option for redis
|
Run/main.py
|
Run/main.py
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: main.py
Description : 运行主函数
Author : JHao
date: 2017/4/1
-------------------------------------------------
Change Activity:
2017/4/1:
-------------------------------------------------
"""
__author__ = 'JHao'
import sys
from multiprocessing import Process
from Util.GetConfig import GetConfig
sys.path.append('../')
from Api.ProxyApi import run as ProxyApiRun
from Schedule.ProxyValidSchedule import run as ValidRun
from Schedule.ProxyRefreshSchedule import run as RefreshRun
def run(host,port):
p_list = list()
p1 = Process(target=ProxyApiRun, name='ProxyApiRun')
p_list.append(p1)
p2 = Process(target=ValidRun, name='ValidRun')
p_list.append(p2)
p3 = Process(target=RefreshRun, name='RefreshRun')
p_list.append(p3)
for p in p_list:
p.start()
for p in p_list:
p.join()
if __name__ == '__main__':
run()
|
Python
| 0.000003
|
@@ -399,44 +399,8 @@
ess%0A
-from Util.GetConfig import GetConfig
%0A%0Asy
@@ -581,16 +581,52 @@
reshRun%0A
+from Util.GetConfig import GetConfig
%0A%0Adef ru
|
a1dd1c0a8b91cb75ef773ed9566fc93b232bc2b7
|
Fix a broken test_dbm_gnu as introducted by r67380.
|
Lib/test/test_dbm_gnu.py
|
Lib/test/test_dbm_gnu.py
|
import dbm.gnu as gdbm
import unittest
import os
from test.support import verbose, TESTFN, run_unittest, unlink
filename = TESTFN
class TestGdbm(unittest.TestCase):
def setUp(self):
self.g = None
def tearDown(self):
if self.g is not None:
self.g.close()
unlink(filename)
def test_key_methods(self):
self.g = gdbm.open(filename, 'c')
self.assertEqual(self.g.keys(), [])
self.g['a'] = 'b'
self.g['12345678910'] = '019237410982340912840198242'
self.g[b'bytes'] = b'data'
key_set = set(self.g.keys())
self.assertEqual(key_set, set([b'a', b'12345678910']))
self.assert_(b'a' in self.g)
self.assertEqual(self.g[b'bytes'], b'data')
key = self.g.firstkey()
while key:
self.assert_(key in key_set)
key_set.remove(key)
key = self.g.nextkey(key)
self.assertRaises(KeyError, lambda: self.g['xxx'])
def test_error_conditions(self):
# Try to open a non-existent database.
unlink(filename)
self.assertRaises(gdbm.error, gdbm.open, filename, 'r')
# Try to access a closed database.
self.g = gdbm.open(filename, 'c')
self.g.close()
self.assertRaises(gdbm.error, lambda: self.g['a'])
# try pass an invalid open flag
self.assertRaises(gdbm.error, lambda: gdbm.open(filename, 'rx').close())
def test_flags(self):
# Test the flag parameter open() by trying all supported flag modes.
all = set(gdbm.open_flags)
# Test standard flags (presumably "crwn").
modes = all - set('fsu')
for mode in modes:
self.g = gdbm.open(filename, mode)
self.g.close()
# Test additional flags (presumably "fsu").
flags = all - set('crwn')
for mode in modes:
for flag in flags:
self.g = gdbm.open(filename, mode + flag)
self.g.close()
def test_reorganize(self):
self.g = gdbm.open(filename, 'c')
size0 = os.path.getsize(filename)
self.g['x'] = 'x' * 10000
size1 = os.path.getsize(filename)
self.assert_(size0 < size1)
del self.g['x']
# 'size' is supposed to be the same even after deleting an entry.
self.assertEqual(os.path.getsize(filename), size1)
self.g.reorganize()
size2 = os.path.getsize(filename)
self.assert_(size1 > size2 >= size0)
def test_main():
run_unittest(TestGdbm)
if __name__ == '__main__':
test_main()
|
Python
| 0.000002
|
@@ -635,16 +635,26 @@
t(%5Bb'a',
+ b'bytes',
b'12345
|
01923b0c16732277e64bcf10b101eb339bd8c0e5
|
Add tests for fnmatch.filter and translate.
|
Lib/test/test_fnmatch.py
|
Lib/test/test_fnmatch.py
|
"""Test cases for the fnmatch module."""
from test import support
import unittest
from fnmatch import fnmatch, fnmatchcase, _MAXCACHE, _cache, _cacheb, purge
class FnmatchTestCase(unittest.TestCase):
def tearDown(self):
purge()
def check_match(self, filename, pattern, should_match=1, fn=fnmatch):
if should_match:
self.assertTrue(fn(filename, pattern),
"expected %r to match pattern %r"
% (filename, pattern))
else:
self.assertTrue(not fn(filename, pattern),
"expected %r not to match pattern %r"
% (filename, pattern))
def test_fnmatch(self):
check = self.check_match
check('abc', 'abc')
check('abc', '?*?')
check('abc', '???*')
check('abc', '*???')
check('abc', '???')
check('abc', '*')
check('abc', 'ab[cd]')
check('abc', 'ab[!de]')
check('abc', 'ab[de]', 0)
check('a', '??', 0)
check('a', 'b', 0)
# these test that '\' is handled correctly in character sets;
# see SF bug #409651
check('\\', r'[\]')
check('a', r'[!\]')
check('\\', r'[!\]', 0)
# test that filenames with newlines in them are handled correctly.
# http://bugs.python.org/issue6665
check('foo\nbar', 'foo*')
check('foo\nbar\n', 'foo*')
check('\nfoo', 'foo*', False)
check('\n', '*')
def test_mix_bytes_str(self):
self.assertRaises(TypeError, fnmatch, 'test', b'*')
self.assertRaises(TypeError, fnmatch, b'test', '*')
self.assertRaises(TypeError, fnmatchcase, 'test', b'*')
self.assertRaises(TypeError, fnmatchcase, b'test', '*')
def test_fnmatchcase(self):
check = self.check_match
check('AbC', 'abc', 0, fnmatchcase)
check('abc', 'AbC', 0, fnmatchcase)
def test_bytes(self):
self.check_match(b'test', b'te*')
self.check_match(b'test\xff', b'te*\xff')
self.check_match(b'foo\nbar', b'foo*')
def test_cache_clearing(self):
# check that caches do not grow too large
# http://bugs.python.org/issue7846
# string pattern cache
for i in range(_MAXCACHE + 1):
fnmatch('foo', '?' * i)
self.assertLessEqual(len(_cache), _MAXCACHE)
# bytes pattern cache
for i in range(_MAXCACHE + 1):
fnmatch(b'foo', b'?' * i)
self.assertLessEqual(len(_cacheb), _MAXCACHE)
def test_main():
support.run_unittest(FnmatchTestCase)
if __name__ == "__main__":
test_main()
|
Python
| 0
|
@@ -97,16 +97,17 @@
import
+(
fnmatch,
@@ -153,16 +153,60 @@
b, purge
+,%0A translate, filter)
%0A%0A%0Aclass
@@ -2604,57 +2604,809 @@
)%0A%0A%0A
-def test_main():%0A support.run_unittest(Fnmatch
+class TranslateTestCase(unittest.TestCase):%0A%0A def test_translate(self):%0A self.assertEqual(translate('*'), '.*%5CZ(?ms)')%0A self.assertEqual(translate('?'), '.%5CZ(?ms)')%0A self.assertEqual(translate('a?b*'), 'a.b.*%5CZ(?ms)')%0A self.assertEqual(translate('%5Babc%5D'), '%5Babc%5D%5CZ(?ms)')%0A self.assertEqual(translate('%5B%5D%5D'), '%5B%5D%5D%5CZ(?ms)')%0A self.assertEqual(translate('%5B!x%5D'), '%5B%5Ex%5D%5CZ(?ms)')%0A self.assertEqual(translate('%5B%5Ex%5D'), '%5B%5C%5C%5Ex%5D%5CZ(?ms)')%0A self.assertEqual(translate('%5Bx'), '%5C%5C%5Bx%5CZ(?ms)')%0A%0A%0Aclass FilterTestCase(unittest.TestCase):%0A%0A def test_filter(self):%0A self.assertEqual(filter(%5B'a', 'b'%5D, 'a'), %5B'a'%5D)%0A%0A%0Adef test_main():%0A support.run_unittest(FnmatchTestCase,%0A TranslateTestCase,%0A Filter
Test
|
a157ee8bc8c740ba7482f8e4e9116213fb18c935
|
fix of type in merging
|
src/gl_lr.py
|
src/gl_lr.py
|
from __future__ import division
__author__ = 'Vladimir Iglovikov'
'''
I will try to use logistic regression from Graphlab to predict
'''
import graphlab as gl
import os
print 'reading train'
train = gl.SFrame(os.path.join('..', 'data', 'trainSearch_1'))
print
print 'train shape'
print train.shape
print 'reading AdsInfo'
ads = gl.SFrame(os.path.join('..', 'data', 'ads_1'))
print
print 'ads shape'
print ads.shape
print 'merging train and ads'
training = train.join(ads, on='AdId')
print
print 'training shape'
print training.shape
print 'splitting set'
sf_train, sf_test = training.random_split(0.5,
seed=42)
features=['Position',
'HistCTR',
'Price',
'CategoryID',
'AdID',
'LocationID']
model = gl.logistic_classifier.create(training,
target='IsClick',
features=features)
|
Python
| 0.000001
|
@@ -482,17 +482,17 @@
on='AdI
-d
+D
')%0A%0Aprin
|
b0614a15cedd53ba752beca9107698636ee0f8cf
|
replace wtf-deprecated stuff
|
app.py
|
app.py
|
from flask import Flask, render_template, request
from flask_bootstrap import Bootstrap
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.orm.properties import ColumnProperty
from flask_wtf import Form
from wtforms import TextField, SubmitField, validators
from wtforms.validators import ValidationError
import praw
import os
from utils import youtube_video_id, is_live_stream
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
app = Flask(__name__)
app.secret_key = os.environ['SECRET_KEY']
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DATABASE_URL']
Bootstrap(app)
db = SQLAlchemy(app)
reddit_user_agent = "/r/WatchPeopleCode app"
youtube_api_key = os.environ['ytokkey']
class CurrentLiveStreams:
ids = None
_last_time_checked = None
@classmethod
def get_ids(self):
if self._last_time_checked is None or datetime.now() - self._last_time_checked > timedelta(seconds=59):
print "GETTING IDS"
self._last_time_checked = datetime.now()
self.ids = self._get_current_live_streams_ids()
else:
print "CACHED"
return self.ids
@classmethod
def _extract_links_from_selftexts(self, selftext_html):
soup = BeautifulSoup(selftext_html)
return [a['href'] for a in soup.findAll('a')]
@classmethod
def _get_current_live_streams_ids(self):
r = praw.Reddit(user_agent=reddit_user_agent)
r.config.decode_html_entities = True
submissions = list(r.get_subreddit('watchpeoplecode').get_new(limit=20))
submission_urls = [s.url for s in submissions]
selfposts_urls = sum([self._extract_links_from_selftexts(s.selftext_html) for s in submissions if s.selftext_html], [])
youtube_ids = set(filter(None, [youtube_video_id(s) for s in selfposts_urls + submission_urls]))
live_stream_ids = [yt_id for yt_id in youtube_ids if is_live_stream(yt_id, youtube_api_key)]
return live_stream_ids
class CaseInsensitiveComparator(ColumnProperty.Comparator):
def __eq__(self, other):
return db.func.lower(self.__clause_element__()) == db.func.lower(other)
class Subscriber(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.column_property(db.Column(db.String(256), unique=True, nullable=False), comparator_factory=CaseInsensitiveComparator)
def validate_email_unique(form, field):
email = field.data
if Subscriber.query.filter_by(email=email).first() is not None:
raise ValidationError('This email is already in the database.')
class SubscribeForm(Form):
email = TextField("Email address", [validators.Required(), validators.Email(), validate_email_unique])
submit_button = SubmitField('Subscribe')
@app.route('/', methods=['GET', 'POST'])
def index():
live_stream_ids = CurrentLiveStreams.get_ids()
form = SubscribeForm()
added_successfully = False
if request.method == "POST" and form.validate_on_submit():
subscriber = Subscriber()
form.populate_obj(subscriber)
db.session.add(subscriber)
db.session.commit()
added_successfully = True
return render_template('index.html', form=form, live_stream_ids=live_stream_ids, added_successfully=added_successfully)
if __name__ == '__main__':
app.run(debug=True)
|
Python
| 0.000152
|
@@ -225,20 +225,22 @@
import
-Text
+String
Field, S
@@ -2620,12 +2620,14 @@
l =
-Text
+String
Fiel
@@ -2657,16 +2657,20 @@
idators.
+Data
Required
|
a9a3b1532d7835156703319491648da7c107ca58
|
Fix secret key to env
|
app.py
|
app.py
|
#!/usr/bin/env python
# Setting up twitter API conection requiren oauth authentication
import oauth2 as oauth
import urllib2 as urllib
import json
from csv import DictReader
import os # Personal keys
apiKey = os.environ['apiKey']
apiSecret = os.environ['apiSecret']
accessTokenKey = os.environ['accessTokenKey']
accessTokenSecret = os.environ['accessTokenSecret']
# Create oauth tokens and signature
oauthToken = oauth.Token(key=accessTokenKey, secret=accessTokenSecret)
oauthConsumer = oauth.Consumer(key=apiKey, secret=apiSecret)
signatureMethod = oauth.SignatureMethod_HMAC_SHA1()
# Create req handler
httpMethod = 'GET'
httpHandler = urllib.HTTPHandler(debuglevel=0)
httpsHandler = urllib.HTTPSHandler(debuglevel=0)
def twitterreq(url, method, parameters):
'''Request handler for twitter API'''
req = oauth.Request.from_consumer_and_token(oauthConsumer,
token=oauthToken,
http_method=httpMethod,
http_url=url,
parameters=parameters)
req.sign_request(signatureMethod, oauthConsumer, oauthToken)
headers = req.to_header()
if httpMethod == 'POST':
encodedPostData = req.to_postdata()
else:
encodedPostData = None
url = req.to_url()
opener = urllib.OpenerDirector()
opener.add_handler(httpHandler)
opener.add_handler(httpsHandler)
response = opener.open(url, encodedPostData)
return response
def fetch(term):
if term:
url = 'https://stream.twitter.com/1.1/statuses/filter.json?language=en&track=' + term
else:
url = 'https://stream.twitter.com/1.1/statuses/sample.json'
parameters = [] # FEATURE: Could ask for specific parameters from API
response = twitterreq(url, "GET", parameters)
for line in response:
yield line
# Emotional processing
cols = ['anger',
'anticipation',
'disgust',
'fear',
'joy',
'negative',
'positive',
'sadness',
'surprise',
'trust']
dictFile = 'static/dict.csv'
mainDict = {}
# Read dictionary from csv
with open(dictFile) as csvFile:
reader = DictReader(csvFile)
for row in reader:
mainDict[row['Word']] = [int(row[i]) for i in cols]
def score(data):
'''Score tweet by lexical analysis'''
global mainDict
try:
tweet = json.loads(data)
# FEATURE: change replace for regex
line = tweet[u'text'].replace('.','').replace(',','').replace(';','').replace(':','').replace('\t',' ').replace('\n',' ')
words = line.split(' ')
tweetScore = [0] * 10
for word in words:
if word in mainDict:
for i in range(len(tweetScore)):
tweetScore[i] += mainDict[word][i]
if tweetScore:
return tweetScore #, tweet[u'user'][u'screen_name'], tweet[u'text']
except: # If there is any error while reading the json, skip
pass
# Setting variable asyncMode
asyncMode = None
if asyncMode is None:
try:
import eventlet
asyncMode = 'eventlet'
except ImportError:
pass
if asyncMode is None:
try:
from gevent import monkey
asyncMode = 'gevent'
except ImportError:
pass
if asyncMode is None:
asyncMode = 'threading'
print('asyncMode is ' + asyncMode)
# monkey patching is necessary because this application uses a background
# thread
if asyncMode == 'eventlet':
import eventlet
eventlet.monkey_patch()
elif asyncMode == 'gevent':
from gevent import monkey
monkey.patch_all()
# Start flask app
from flask import Flask, render_template, session, request
from flask_socketio import SocketIO, emit, disconnect
from threading import Thread
app = Flask(__name__)
app.config['SECRET_KEY'] = myKeys.secretKey
socketio = SocketIO(app, async_mode=asyncMode)
streamThread = None
term = ''
def backgroundThread():
'''Constantly emiting vectors'''
global term
try:
for line in fetch(term):
vector = score(line)
if vector == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]:
continue
if vector == None:
continue
socketio.emit('vector', vector, namespace='/', broadcast=True)
except:
print "ERROR: Stream stopped"
raise
@app.route("/")
def index():
global streamThread
if streamThread is None:
streamThread = Thread(target=backgroundThread)
streamThread.daemon = True
streamThread.start()
return render_template('index.html', name=index)
@socketio.on('term') # FEATURE: authentication can be added here
def threadStream(word):
'''Change the term being looked up'''
global term
term = word[u'data']
# print "Looking up term:", word[u'data']
@socketio.on('connect')
def connect():
emit('status', {'data': 'Connected'})
@socketio.on('disconnect')
def test_disconnect():
print('Client disconnected', request.sid)
if __name__ == "__main__":
socketio.run(app, host='0.0.0.0')
|
Python
| 0.99919
|
@@ -3949,24 +3949,32 @@
%5D =
-myKeys.secretKey
+os.environ%5B'SECRET_KEY'%5D
%0Asoc
|
44d7f67eec87957dbb024e85713de01d778fe6e5
|
Add extra app routes for stability
|
app.py
|
app.py
|
import os
import module
from flask import Flask, render_template, request, session, redirect, url_for, send_from_directory
from werkzeug import secure_filename
app = Flask(__name__)
# Configure upload locations
app.config['UPLOAD_FOLDER'] = 'uploads/'
app.config['ALLOWED_EXTENSIONS'] = set(['txt']) # Change this to whatever filetype to accept
# Checks if uploaded file is a valid file
def allowed_file(filename):
return '.' in filename and filename.rsplit('.',1)[1] in app.config['ALLOWED_EXTENSIONS']
@app.route("/")
@app.route("/home")
def home():
if 'username' in session:
return redirect(url_for('profile'))
return render_template("home.html")
@app.route("/login", methods=["GET","POST"])
def login():
if request.method == "GET":
return render_template("login.html")
else:
if 'username' in request.form and 'pass' in request.form and module.authenticate(request.form['username'], request.form['pass']):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for('home'))
return render_template("login.html")
@app.route("/register", methods=["POST"])
def register():
if 'username' in request.form and 'pass' in request.form and 'pass2' in request.form:
if not request.form["pass"] == request.form["pass2"]:
return redirect(url_for('home'))
if module.newUser(request.form["username"], request.form["pass"]):
session['authenticated'] = True
session['username'] = request.form['username']
return redirect(url_for('home'))
return redirect(url_for('home'))
@app.route("/about")
def about():
if 'username' in session:
return render_template("about.html", un=session['username']) # Jinja for username stuff
return render_template("about.html")
@app.route("/download", methods=["GET", "POST"])
def download():
if 'username' in session:
return render_template('download.html', un=session['username']) # For when the Jinja is configured
return redirect(url_for('home'))
# Uploads the file to the upload folder with the format of USER_bot.ext
@app.route("/upload", methods=["GET","POST"])
def upload():
if 'username' in session and session['username'] !=0:
if request.method=="GET":
return render_template("upload.html")
else:
file = request.files['upload_bot']
if file and allowed_file(file.filename):
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], session['username'] + '_bot.ext'))
return redirect(url_for('profile'))
else:
return redirect(url_for('home'))
@app.route("/leaderboards", methods=["GET", "POST"])
def leaderboards():
table = [];
table.append({"rank":1,"name":"Jijiglobe","elo":1200})
table.append({"rank":2,"name":"name2","elo":1300})
table.append({"rank":3,"name":"name3","elo":1400})
if 'username' in session and session['username']!=0:
return render_template("loginleaderboards.html", table=table)
return render_template("leaderboards.html", table=table)
@app.route("/profile", methods=["GET","POST"])
def profile():
if 'username' in session and session['username']!=0:
#retrieve user data here
dict = module.getUser(session['username'])
#dict = {"rank":1,"elo":1400,"wins":100,"losses":50,"stalemates":0}
return render_template("profile.html", username=session['username'],dict=dict)
return render_template("home.html")
if __name__ == "__main__":
app.debug = True
app.secret_key = str(os.urandom(24))
app.run(host="0.0.0.0", port=8000)
|
Python
| 0
|
@@ -541,16 +541,37 @@
/home%22)%0A
+@app.route(%22/home/%22)%0A
def home
@@ -730,24 +730,70 @@
T%22,%22POST%22%5D)%0A
+@app.route(%22/login/%22, methods=%5B%22GET%22,%22POST%22%5D)%0A
def login():
@@ -1250,24 +1250,67 @@
s=%5B%22POST%22%5D)%0A
+@app.route(%22/register/%22, methods=%5B%22POST%22%5D)%0A
def register
@@ -1792,16 +1792,38 @@
about%22)%0A
+@app.route(%22/about/%22)%0A
def abou
@@ -2040,24 +2040,74 @@
%22, %22POST%22%5D)%0A
+@app.route(%22/download/%22, methods=%5B%22GET%22, %22POST%22%5D)%0A
def download
@@ -2399,24 +2399,71 @@
T%22,%22POST%22%5D)%0A
+@app.route(%22/upload/%22, methods=%5B%22GET%22,%22POST%22%5D)%0A
def upload()
@@ -3034,24 +3034,78 @@
%22, %22POST%22%5D)%0A
+@app.route(%22/leaderboards/%22, methods=%5B%22GET%22, %22POST%22%5D)%0A
def leaderbo
@@ -3486,16 +3486,16 @@
table)%0A%0A
-
@app.rou
@@ -3529,24 +3529,72 @@
T%22,%22POST%22%5D)%0A
+@app.route(%22/profile/%22, methods=%5B%22GET%22,%22POST%22%5D)%0A
def profile(
|
b914f700687f6fbf6ccc0aac08d92ffaac76f89a
|
bump retry timeout, 20s is too low during meltdowns
|
flumine/streams/basestream.py
|
flumine/streams/basestream.py
|
import threading
import queue
import logging
import betfairlightweight
from betfairlightweight import StreamListener
from tenacity import wait_exponential
logger = logging.getLogger(__name__)
class BaseStream(threading.Thread):
LISTENER = StreamListener
MAX_LATENCY = 0.5
RETRY_WAIT = wait_exponential(multiplier=1, min=2, max=20)
def __init__(
self,
flumine,
stream_id: int,
streaming_timeout: float, # snaps listener if no update
conflate_ms: int,
market_filter: dict = None,
market_data_filter: dict = None,
client=None,
output_queue: bool = True,
event_processing: bool = False,
event_id: str = None,
operation: str = "marketSubscription",
**listener_kwargs,
):
threading.Thread.__init__(self, daemon=True, name=self.__class__.__name__)
self.flumine = flumine
self.stream_id = stream_id
self.market_filter = market_filter
self.market_data_filter = market_data_filter
self.streaming_timeout = streaming_timeout
self.conflate_ms = conflate_ms
self._client = client
self._stream = None
self._output_queue = queue.Queue() if output_queue else None
self.event_processing = event_processing
self.event_id = event_id
self.operation = operation
self._listener = self.LISTENER(
output_queue=self._output_queue,
max_latency=self.MAX_LATENCY,
**listener_kwargs,
)
self._output_thread = threading.Thread(
name="{0}_output_thread".format(self.name),
target=self.handle_output,
daemon=True,
)
def run(self) -> None:
raise NotImplementedError
def handle_output(self) -> None:
raise NotImplementedError
def stop(self) -> None:
if self._stream:
self._stream.stop()
@property
def betting_client(self) -> betfairlightweight.APIClient:
return self.client.betting_client
@property
def client(self):
if self._client:
return self._client
else:
return self.flumine.client
|
Python
| 0
|
@@ -336,17 +336,17 @@
=2, max=
-2
+6
0)%0A%0A
|
c3529def6c32bdf7d9f948374ff3aba634d5b8f7
|
UPDATE 4th-Trial
|
app.py
|
app.py
|
#!/usr/bin/env python
import urllib
import json
import os
from flask import Flask
from flask import request
from flask import make_response
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = processRequest(req)
res = json.dumps(res, indent=4)
# print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def processRequest(req):
if req.get("result").get("action") != "yahooWeatherForecast":
return {
res = {
"speech": "Esto es una prueba del servidor",
"displayText": "Esto es una prueba del servidor",
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
}
baseurl = "https://query.yahooapis.com/v1/public/yql?"
yql_query = makeYqlQuery(req)
if yql_query is None:
return {}
yql_url = baseurl + urllib.urlencode({'q': yql_query}) + "&format=json"
result = urllib.urlopen(yql_url).read()
data = json.loads(result)
res = makeWebhookResult(data)
return res
def makeYqlQuery(req):
result = req.get("result")
parameters = result.get("parameters")
city = parameters.get("geo-city")
if city is None:
return None
return "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='" + city + "')"
def makeWebhookResult(data):
query = data.get('query')
if query is None:
return {}
result = query.get('results')
if result is None:
return {}
channel = result.get('channel')
if channel is None:
return {}
item = channel.get('item')
location = channel.get('location')
units = channel.get('units')
if (location is None) or (item is None) or (units is None):
return {}
condition = item.get('condition')
if condition is None:
return {}
# print(json.dumps(item, indent=4))
speech = "Hoy la temperatura en Madrid es de + " + condition.get('temp') + " " + units.get('temperature')
print("Response:")
print(speech)
return {
"speech": speech,
"displayText": speech,
# "data": data,
# "contextOut": [],
"source": "apiai-weather-webhook-sample"
}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
print "Starting app on port %d" % port
app.run(debug=False, port=port, host='0.0.0.0')
|
Python
| 0
|
@@ -660,33 +660,8 @@
n %7B%0A
- res = %7B%0A
@@ -721,28 +721,24 @@
-
-
%22displayText
@@ -783,28 +783,24 @@
-
# %22data%22: da
@@ -811,28 +811,24 @@
-
-
# %22contextOu
@@ -831,28 +831,24 @@
xtOut%22: %5B%5D,%0A
-
@@ -892,22 +892,8 @@
le%22%0A
- %7D%0A
|
277e3ef7544a64ddb2fa9f31b66597036a38e65b
|
Remove widget and test paths.
|
app.py
|
app.py
|
#!/usr/bin/env python
import argparse
from flask import Flask, render_template
import app_config
from render_utils import make_context, urlencode_filter
import static
app = Flask(app_config.PROJECT_NAME)
app.jinja_env.filters['urlencode'] = urlencode_filter
# Example application views
@app.route('/')
def index():
"""
Example view demonstrating rendering a simple HTML page.
"""
context = make_context()
# Nav needs to be a list of lists.
# The inner list should only have four objects max.
# Because of reasons.
context['nav'] = []
contents = list(context['COPY']['content'])
not_yet_four = []
for idx, row in enumerate(contents):
row = dict(zip(row.__dict__['_columns'], row.__dict__['_row']))
row_title = row.get('chapter_title', None)
if row_title:
if row_title not in [u'chapter_title']:
not_yet_four.append(row)
if len(not_yet_four) == 4:
context['nav'].append(not_yet_four)
not_yet_four = []
if (idx + 1) == len(contents):
if len(not_yet_four) > 0:
context['nav'].append(not_yet_four)
return render_template('index.html', **context)
@app.route('/widget.html')
def widget():
"""
Embeddable widget example page.
"""
return render_template('widget.html', **make_context())
@app.route('/test_widget.html')
def test_widget():
"""
Example page displaying widget at different embed sizes.
"""
return render_template('test_widget.html', **make_context())
@app.route('/test/test.html')
def test_dir():
return render_template('index.html', **make_context())
app.register_blueprint(static.static)
# Boilerplate
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port')
args = parser.parse_args()
server_port = 8000
if args.port:
server_port = int(args.port)
app.run(host='0.0.0.0', port=server_port, debug=app_config.DEBUG)
|
Python
| 0
|
@@ -1242,462 +1242,8 @@
t)%0A%0A
-@app.route('/widget.html')%0Adef widget():%0A %22%22%22%0A Embeddable widget example page.%0A %22%22%22%0A return render_template('widget.html', **make_context())%0A%0A@app.route('/test_widget.html')%0Adef test_widget():%0A %22%22%22%0A Example page displaying widget at different embed sizes.%0A %22%22%22%0A return render_template('test_widget.html', **make_context())%0A%0A@app.route('/test/test.html')%0Adef test_dir():%0A return render_template('index.html', **make_context())%0A%0A
app.
|
a0883d386f6c35f8cb70c6d21ad1cc37dccb90b6
|
Update host
|
app.py
|
app.py
|
from flask import Flask
# from image_classification import ImageClassifier
app = Flask(__name__)
PORT = 33507
@app.route('/')
def home():
return 'Hello classification world!'
if __name__ == '__main__':
app.run(port=PORT)
|
Python
| 0
|
@@ -105,16 +105,33 @@
= 33507%0A
+HOST = '0.0.0.0'%0A
%0A%0A@app.r
@@ -234,16 +234,27 @@
app.run(
+host=HOST,
port=POR
|
355629e1e2e2423a4ea1ad859506e380e6ddbc89
|
define special route for twitter
|
app.py
|
app.py
|
# ingredients: tinydb joblib.Memory
from flask import Flask
from ml import store_feedback
from ml import learn
from proxy import proxy
import sys
import trace
import logging as log
# should be set at the project level
log.basicConfig(filename="./log", level=log.INFO)
# create a Trace object, telling it what to ignore, and whether to
# do tracing or line-counting or both.
_tracer = trace.Trace(
ignoredirs=[sys.prefix, sys.exec_prefix], trace=1, count=0)
app = Flask(__name__)
# app.wsgiapp = ProfilerMiddleware(app.wsgiapp)
@app.route('/feed/<path:url>')
def _feed(url):
return proxy(url)
@app.route('/feedback/<feedback>/<path:url>')
def _feedback(feedback, url):
store_feedback(url=url, feedback=feedback == 'l', explicit=True)
log.info("storing feedback {feedback} for {url}".format(
feedback=feedback, url=url))
return ('''
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-US" xml:lang="en-US">
<head>
<title>Thanks for your feedback</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
</head>
<body onload="self.close()">
<h1>Thank you for your feedback, your filter has been updated</h1>
</body>
</html>
''', 200, {})
@app.route('/learn')
def _learn():
learn()
return ("Done", 204, {})
if __name__ == 'main':
app.run()
|
Python
| 0.000184
|
@@ -603,16 +603,141 @@
(url)%0A%0A%0A
+@app.route('/twitter/%3Cstring:id%3E')%0Adef _twitter(id):%0A return proxy('https://twitrss.me/twitter_user_to_rss/?user=' + id)%0A%0A
%0A@app.ro
|
07f342a2690558b8f4ce3d1bbb545c6ef3c320d1
|
Change the expected number of fonts for the web target.
|
scripts/run_web_tests.py
|
scripts/run_web_tests.py
|
#!/usr/bin/python
"""Test assumptions that web fonts rely on."""
import glob
import json
import unittest
from fontTools import ttLib
from nototools import coverage
from nototools import font_data
from nototools import render
from nototools import unicode_data
def load_fonts():
"""Load all web fonts."""
all_font_files = glob.glob('out/web/*.ttf')
all_fonts = [ttLib.TTFont(font) for font in all_font_files]
assert len(all_font_files) == 12
return all_font_files, all_fonts
class TestVerticalMetrics(unittest.TestCase):
"""Test the vertical metrics of fonts."""
def setUp(self):
_, self.fonts = load_fonts()
def test_ymin_ymax(self):
"""Tests yMin and yMax to be equal to the old values."""
for font in self.fonts:
head_table = font['head']
self.assertEqual(head_table.yMin, -555)
self.assertEqual(head_table.yMax, 2163)
def test_other_metrics(self):
"""Tests other vertical metrics to be equal to the old values."""
for font in self.fonts:
hhea_table = font['hhea']
self.assertEqual(hhea_table.descent, -500)
self.assertEqual(hhea_table.ascent, 1900)
os2_table = font['OS/2']
self.assertEqual(os2_table.sTypoDescender, -512)
self.assertEqual(os2_table.sTypoAscender, 1536)
self.assertEqual(os2_table.sTypoLineGap, 102)
self.assertEqual(os2_table.usWinDescent, 512)
self.assertEqual(os2_table.usWinAscent, 1946)
class TestCharacterCoverage(unittest.TestCase):
"""Tests character coverage."""
def setUp(self):
_, self.fonts = load_fonts()
class TestNames(unittest.TestCase):
"""Tests various strings in the name table."""
def setUp(self):
self.family_name = 'RobotoDraft'
_, self.fonts = load_fonts()
self.names = []
for font in self.fonts:
self.names.append(font_data.get_name_records(font))
def test_copyright(self):
"""Tests the copyright message."""
for records in self.names:
self.assertEqual(
records[0],
'Copyright 2014 Google Inc. All Rights Reserved.')
def test_family_name(self):
"""Tests the family name."""
for records in self.names:
self.assertEqual(records[1], self.family_name)
if 16 in records:
self.assertEqual(records[16], self.family_name)
def test_unique_identifier_and_full_name(self):
"""Tests the unique identifier and full name."""
for records in self.names:
expected_name = records[1] + ' ' + records[2]
self.assertEqual(records[3], expected_name)
self.assertEqual(records[4], expected_name)
self.assertFalse(records.has_key(18))
def test_postscript_name(self):
"""Tests the postscript name."""
for records in self.names:
expected_name = records[1] + '-' + records[2].replace(' ', '')
self.assertEqual(records[6], expected_name)
class TestHints(unittest.TestCase):
"""Tests hints."""
def setUp(self):
_, self.fonts = load_fonts()
def test_existance_of_hints(self):
"""Tests all glyphs and makes sure non-composite ones have hints."""
missing_hints = []
for font in self.fonts:
glyf_table = font['glyf']
for glyph_name in font.getGlyphOrder():
glyph = glyf_table[glyph_name]
if glyph.numberOfContours <= 0: # composite or empty glyph
continue
if len(glyph.program.bytecode) <= 0:
missing_hints.append(
(glyph_name, font_data.font_name(font)))
self.assertTrue(missing_hints == [])
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000278
|
@@ -452,17 +452,17 @@
es) == 1
-2
+8
%0A ret
|
b61ba8c62a9a954e1d172e0e9fca6c20fdc1c617
|
Update web tests
|
scripts/run_web_tests.py
|
scripts/run_web_tests.py
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test assumptions that web fonts rely on."""
import unittest
from nototools.unittests import font_tests
import run_general_tests
FONTS = font_tests.load_fonts(
['out/web/*.ttf'],
expected_count=18)
class TestItalicAngle(run_general_tests.TestItalicAngle):
loaded_fonts = FONTS
class TestMetaInfo(font_tests.TestMetaInfo):
loaded_fonts = FONTS
mark_heavier_as_bold = True
# Since different font files are hinted at different times, the actual
# outlines differ slightly. So we are keeping the version numbers as a hint.
test_version_numbers = None
# fsType of 0 marks the font free for installation, embedding, etc.
expected_os2_fsType = 0
expected_os2_achVendID = 'GOOG'
class TestNames(run_general_tests.TestNames):
"""Bugs:
https://github.com/google/roboto/issues/37
"""
loaded_fonts = FONTS
def expected_unique_id(self, family, style):
expected = family
if style != 'Regular':
expected += ' ' + style
return expected
class TestDigitWidths(font_tests.TestDigitWidths):
loaded_fonts = FONTS
# disable this test while *.frac and *superior glyphs are separate
# the webfont glyph subset contains *.frac but not *superior
test_superscript_digits = False
class TestCharacterCoverage(font_tests.TestCharacterCoverage):
loaded_fonts = FONTS
include = frozenset([
0xEE01, 0xEE02, 0xF6C3]) # legacy PUA
exclude = frozenset([
0x2072, 0x2073, 0x208F] + # unassigned characters
range(0xE000, 0xF8FF + 1) + range(0xF0000, 0x10FFFF + 1) # other PUA
) - include # don't exclude legacy PUA
class TestVerticalMetrics(font_tests.TestVerticalMetrics):
loaded_fonts = FONTS
# tests yMin and yMax to be equal to Roboto v1 values
# android requires this, and web fonts expect this
expected_head_yMin = -555
expected_head_yMax = 2163
# test ascent, descent, and lineGap to be equal to Roboto v1 values
expected_hhea_descent = -500
expected_hhea_ascent = 1900
expected_hhea_lineGap = 0
# test OS/2 vertical metrics to be equal to the old values
expected_os2_sTypoDescender = -512
expected_os2_sTypoAscender = 1536
expected_os2_sTypoLineGap = 102
expected_os2_usWinDescent = 512
expected_os2_usWinAscent = 1946
class TestLigatures(run_general_tests.TestLigatures):
loaded_fonts = FONTS
class TestFeatures(run_general_tests.TestFeatures):
loaded_fonts = FONTS
class TestGlyphBounds(run_general_tests.TestGlyphBounds):
loaded_fonts = FONTS
# a bug in which monotonic and polytonic glyphs extend too far left is
# fixed in the unhinted output, but still present in the hinted binaries and
# not fixed by the web target
should_not_exceed = ()
class TestHints(font_tests.TestHints):
loaded_fonts = FONTS
if __name__ == '__main__':
unittest.main()
|
Python
| 0
|
@@ -3036,87 +3036,8 @@
S%0A%0A%0A
-class TestFeatures(run_general_tests.TestFeatures):%0A loaded_fonts = FONTS%0A%0A%0A
clas
|
3f051ae379e47c5163050a488ea9ca7e5951d4e6
|
test GP DB
|
app.py
|
app.py
|
#!/usr/bin/env python
from Data import Database
import urllib
import json
import os
from flask import Flask
from flask import request
from flask import make_response
import psycopg2
import urlparse
global name
global singletonObject
singletonObject = None
# Flask app should start in global layout
app = Flask(__name__)
@app.route('/webhook', methods=['POST','GET'])
def webhook():
req = request.get_json(silent=True, force=True)
print("Request:")
print(json.dumps(req, indent=4))
res = makeWebhookResult(req)
res = json.dumps(res, indent=4)
print(res)
r = make_response(res)
r.headers['Content-Type'] = 'application/json'
return r
def requestGame(req):
originalRequest = req.get("originalRequest")
data = originalRequest.get("data")
sender = data.get("sender")
id = sender.get("id")
parameter = req.get("result").get("parameters").get("requestParam")
print "-------------- " + parameter + " ------------"
return {
"speech" : "playing a game",
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def connectDB():
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
### conn = psycopg2.connect(database="testpgdp", user="postgres", password="pgAdmin_postgreSQL", host="127.0.0.1", port="5432")
print "Opened database successfully"
return conn
def createTable(conn):
cur = conn.cursor()
cur.execute('''CREATE TABLE "USER"
(ID INT PRIMARY KEY NOT NULL,
NAME TEXT NOT NULL,
AGE INT NOT NULL);''')
print "Table created successfully"
def createTable_Answers(conn):
print "--------in Database createTable_Answers--------"
cur = conn.cursor()
cur.execute('''CREATE TABLE "AnswersOut"
(ID SERIAL PRIMARY KEY NOT NULL,
Answer TEXT NOT NULL);''')
print "--------Table Answers created successfully--------"
def insertIntoDB(conn):
cur = conn.cursor()
cur.execute("INSERT INTO \"USER\" (ID,NAME,AGE) \
VALUES (1, 'Paul', 32)");
cur.execute("INSERT INTO \"USER\" (ID,NAME,AGE) \
VALUES (2, 'Allen', 25)");
conn.commit()
print "Records created successfully";
def selectDB(conn):
cur = conn.cursor()
cur.execute("SELECT id, name from \"USER\"")
rows = cur.fetchall()
for row in rows:
global name
name = row[1]
print "ID = ", row[0]
print "NAME = ", row[1], "\n"
print "Operation done successfully";
return name
def requestDB(req):
name = "Empty";
conn = connectDB()
createTable_Answers(conn)
createTable(conn)
### insertIntoDB(conn)
print "before " + name
name = selectDB(conn)
print "after " + name
conn.close()
return {
"speech" : name,
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def requestEvent(req):
return {
"speech" : "",
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python",
"followupEvent":{
"name":"test-event",
"data":{
"event":"inside event"
}
}
}
def requestSingleton(req):
global singletonObject
if singletonObject is None:
print "---- not singleton -----"
singletonObject = "updated"
print singletonObject
return {
"speech" : singletonObject,
"displayText": "",
"data": {},
"contextOut": [],
"source": "test-python"
}
def makeWebhookResult(req):
if req.get("result").get("action") == "request-game":
return requestGame(req)
elif req.get("result").get("action") == "get-from-db":
return requestDB(req)
elif req.get("result").get("action") == "test-event":
return requestEvent(req)
elif req.get("result").get("action") == "test-singleton":
return requestSingleton(req)
elif req.get("result").get("action") == "createDB":
requestDB(req)
conn = Database.Database()
return conn.__createTables__()
else:
return {}
if __name__ == '__main__':
port = int(os.getenv('PORT', 5000))
#print "Starting app on port %d" % port
app.run(debug=True, port=port, host='0.0.0.0')
|
Python
| 0.000001
|
@@ -1678,16 +1678,20 @@
LE %22USER
+NEXT
%22%0A
@@ -2873,24 +2873,27 @@
s(conn)%0A
+###
createTable(
|
027033d55efc1be05b6dc2ffdc422fdfe2b2db1b
|
Add csrf_token to cookies if there isn't one
|
app.py
|
app.py
|
import os
import datetime
from flask import (Flask, render_template, redirect, request, abort, url_for,
make_response)
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_wtf.csrf import CSRFProtect, generate_csrf
app = Flask(__name__)
CSRFProtect().init_app(app)
# Configurations
# Using environmental variables for secret key and database url
# Secret key is used to sign cookies, so it's secure to keep it hidden
app.config.update(
SECRET_KEY=os.environ["SECRET_KEY"],
SQLALCHEMY_DATABASE_URI=os.environ["DATABASE_URL"],
SQLALCHEMY_TRACK_MODIFICATIONS=False,
)
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
# Must import after db is defined, not pretty
from models import User
@app.after_request
def add_csrf_to_cookie(response):
return_response = make_response(response)
if request.cookies.get("csrf_token") is not None:
return_response.set_cookie("csrf_token", generate_csrf())
return return_response
@app.route('/')
def index():
return render_template("index.html")
# For testing purposes
@app.route('/users')
def users():
return "<br>".join(map(lambda x: str(x), User.query.all()))
@app.route('/register')
def register():
errormessage = request.args.get("errormessage", default="")
return render_template("register.html", errormessage=errormessage)
@app.route('/register/submit', methods=["POST"])
def submit():
username = request.form.get("username")
name = request.form.get("name", default="")
email = request.form.get("email")
password = request.form.get("password")
# Quick way to check if any of these is empty
# Should implement javascript password checker to prevent this
if "" in [username, email, password]:
errormessage = "Username, email, and password are required."
return redirect(url_for("register", errormessage=errormessage))
new_user = User(username, email, name, password)
db.session.add(new_user)
db.session.commit()
response = redirect(url_for("user"))
return add_logged_in_cookie(response, username)
@app.route('/user')
def user():
username = request.cookies.get("username")
return render_template("user.html", username=username)
@app.route('/login')
def login():
return render_template("login.html")
@app.route('/login/validate', methods=["POST"])
def validate():
username = request.form.get("username")
password = request.form.get("password")
if "" in [username, password]:
return redirect(url_for("login"))
if validate_password(username, password):
response = redirect(url_for("user"))
return add_logged_in_cookie(response, username)
else:
return redirect(url_for("login"))
def validate_password(username, password):
user = User.query.filter_by(username=username).first()
if user:
return user.check_password(password)
else:
return False
def add_logged_in_cookie(response, username):
return_response = make_response(response)
expiry_date = datetime.datetime.now()
expiry_date += datetime.timedelta(days=30)
return_response.set_cookie("username", username, expires=expiry_date)
return return_response
|
Python
| 0
|
@@ -880,20 +880,16 @@
en%22) is
-not
None:%0A
|
58dfa1e8df073cafc23871e76d317172758b05a6
|
change app.py
|
app.py
|
app.py
|
from bottle import route, run
from bottle import static_file, request
from bottle import template, get, error
import os
# static routes
@get('/<filename:re:.*\.css>')
def stylesheets(filename):
return static_file(filename, root='static/css')
@get('/<filename:re:.*\.js>')
def javascripts(filename):
return static_file(filename, root='static/js')
@get('/<filename:re:.*\.(jpg|png|gif|ico)>')
def images(filename):
return static_file(filename, root='static/img')
@get('/<filename:re:.*\.(eot|ttf|woff|svg)>')
def fonts(filename):
return static_file(filename, root='static/fonts')
@route('/')
def index():
return template('index')
@route('/skills')
def skills():
return template('skills')
@route('/about')
def about():
return template('about')
@route('/courses')
def courses():
return template('courses')
@route('/contact', method='POST')
def acao_login():
name = request.forms.get('name')
email = request.forms.get('email')
site = request.forms.get('site')
message = request.forms.get('message')
print(message)
return template('contact', name=name)
@error(404)
def error404(error):
return template('oops')
if __name__ == "__main__":
if os.environ.get('APP_LOCATION') == 'heroku':
run(host="0.0.0.0", port=int(os.environ.get("PORT", 5000)))
else:
print('ENTROU AQUI')
run(host='localhost', port=8080, debug=True, reloader=True)
|
Python
| 0.000003
|
@@ -1271,31 +1271,8 @@
se:%0A
-%09%09print('ENTROU AQUI')%0A
%09%09ru
|
d6d67b8a831959d79a94e927bae3373bcbd4ef0a
|
print request args
|
app.py
|
app.py
|
from flask import Flask, request, jsonify
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
@app.route("/paid", methods=['POST'])
def paid():
# print(request.args.get('invoice'))
return jsonify(request.args)
if __name__ == "__main__":
app.run()
|
Python
| 0.000003
|
@@ -1,8 +1,20 @@
+import json%0A
from fla
@@ -184,17 +184,26 @@
%0A
- #
print(
+json.dumps(
requ
@@ -214,22 +214,8 @@
args
-.get('invoice'
))%0A
|
48c777958bb1a5dd6c7e975733ddca62134c2941
|
Fix indentation error
|
app.py
|
app.py
|
from flask import Flask
from flask import render_template
from flask import render_template_string
from flask import request
from networkx.readwrite import json_graph
import networkx as nx
import git
import json
app = Flask(__name__)
@app.route('/', methods=['POST', 'GET'])
def form_post():
if request.method == 'POST':
repo_path = request.form['text']
return render_template("index.html", repo_path=repo_path)
else:
return render_template("index.html")
def branch_name(node, repo):
for b in repo.branches:
if b.commit.hexsha == node:
return b.name
return None
def head_name(node, repo):
if node == repo.head.commit.hexsha:
return "HEAD"
else:
return None
def breadth_first_add(networkx_graph, commit, N):
"""
Traverse a graph breadth first and add commits on the way
N is number of commmits you want to traverse
"""
# add the commit to a queue
queue = []
queue.append(commit)
# add the commit to the graph
networkx_graph.add_node(commit.hexsha, message=commit.message.split("\n")[0])
while len(networkx_graph.nodes()) < N:
# if queue is empty -> break
if len(queue)==0:
break
# get the commit in the queue and add all its parents to the graph and to the queue
commit = queue.pop()
for c in commit.parents:
networkx_graph.add_edge(commit.hexsha, c.hexsha)
networkx_graph.add_node(c.hexsha, message=c.message.split("\n")[0])
queue.append(c)
@app.route("/data")
def data():
repo_path = request.args.get('repo_path', '')
repo = git.Repo(repo_path)
networkx_graph = nx.DiGraph()
commit = repo.head.commit
diff = commit.diff(create_patch=True)
workingdiff = commit.diff(None, create_patch=True)
breadth_first_add(networkx_graph, commit, 200)
position=nx.graphviz_layout(networkx_graph, prog='dot')
add_diff_to(networkx_graph, position, workingdiff, diff)
data = json_graph.node_link_data(networkx_graph)
store_branch_labels(data, position, repo)
# store_diff_in(data, diff, workingdiff)
j = json.dumps(data)
return(j)
def add_diff_to(networkx_graph, position, workingdiff=[], diff=[]):
maximumX, maximumY = find_max_xy(position)
if len(workingdiff) > 0:
networkx_graph.add_node("diff", message="working")
position['diff'] = [200+300,maximumY+100]
elif len(diff) > 0:
networkx_graph.add_node("diff", message="diff between previous commit")
else:
try:
position['diff'] = [0,0]
except Exception, e:
print(e)
def find_max_xy(position):
tempx = 0
tempy = 0
for node in position:
tempx = max(position[node][0], tempx)
tempy = max(position[node][1], tempy)
return tempx, tempy
def store_branch_labels(data, position, repo):
data['labels'] = []
# search all the nodes if they are either "HEAD" or branch names
for node in data['nodes']:
if branch_name(node['id'], repo):
# e.g. data["master"] = 8e007c2a86789b88ffe5ce350746750bf78bfdfb
data[branch_name(node['id'], repo)] = node['id']
# e.g. data['labels'] = ['HEAD', 'master']
data['labels'].append(branch_name(node['id'], repo))
if head_name(node['id'], repo):
# e.g. data["HEAD"] = 8e007c2a86789b88ffe5ce350746750bf78bfdfb
data[head_name(node['id'], repo)] = node['id']
# store the position of every node
node['pos'] = position[node['id']]
if __name__ == "__main__":
import os
port = 5000
# Open a web browser pointing at the app.
# os.system("open http://localhost:{0}".format(port))
app.config['DEBUG'] = True
app.debug = True
app.run(port=port)
|
Python
| 0.000285
|
@@ -2690,20 +2690,17 @@
rint(e)%0A
-
+%0A
def find
|
1599c85d2ff27ed46580679ed119cb487c230a8d
|
add port, host, noconfig parameters
|
app.py
|
app.py
|
#!/usr/bin/env python2.7
import sys
from ConfigParser import SafeConfigParser
from bottle import route, post, run, request, view, response, static_file
from sh import cmus_remote
def read_config(config_file):
r = {}
parser = SafeConfigParser()
n = parser.read(config_file)
if not len(n): raise(Exception('File not found: {}.'.format(config_file)))
section = 'cmus_app'
required = ['cmus_host', 'cmus_passwd']
for S in required:
try:
r[S] = parser.get(section, S)
except:
raise(Exception('{} does not specify {}.'.format(config_file,S)))
optional = [('app_host', r['cmus_host']), ('app_port',8080)]
for S in optional:
try:
r[S[0]] = parser.get(section, S[0])
except:
r[S[0]] = S[1]
return r
@route('/')
@view('main')
def index():
return {'host':settings['cmus_host']}
@post('/cmd')
def run_command():
legal_commands = {'Play':'player-play', 'Stop':'player-stop', 'Next':'player-next', 'Previous':'player-prev', 'Increase Volume':'vol +1%', 'Reduce Volume':'vol -1%', 'Mute':'vol 0'}
command = request.POST.get('command', default=None)
if legal_commands.has_key(command):
try:
out = Remote('-C', legal_commands[command])
return {'result':out.exit_code, 'output':out.stdout}
except:
return {'result':False}
else:
pass
@route('/status')
def get_status():
try:
out = Remote('-Q').stdout.split('\n')
r = {}
play = out[0].split()[1]
if play == 'playing': r['playing'] = True
elif play == 'stopped': r['playing'] = False
info = filter(lambda x: x if x.startswith('tag') or x.startswith('set') else None, out)
for i in info:
k, v = i.split()[1], i.split()[2:]
if len(v): r[k] = ' '.join(v)
return r
except:
pass
@route('/static/<file>')
def static(file):
response.set_header('Cache-Control', 'max-age=604800')
return static_file(file, root='static')
@route('/favicon.ico')
def favicon():
response.set_header('Cache-Control', 'max-age=604800')
return static_file('favicon.ico', root='static')
if __name__ == '__main__':
# configuration file either supplied via command line
# or assumed to be in one of the default locations
if len(sys.argv) > 1:
print sys.argv
CONFIG=sys.argv[1:]
else:
CONFIG=['config','config.ini','.config']
settings = read_config(CONFIG)
Remote = cmus_remote.bake('--server', settings['cmus_host'],'--passwd', settings['cmus_passwd'])
run(host=settings['app_host'], port=settings['app_port'])
|
Python
| 0.000011
|
@@ -172,16 +172,432 @@
_remote%0A
+from optparse import OptionParser%0A%0Aparser = OptionParser()%0Aparser.add_option(%22-n%22, %22--noconfig%22, action=%22store_true%22, dest=%22noconfig%22,%0A help=%22do not use config file%22, default=False)%0Aparser.add_option(%22-p%22, %22--port%22, dest=%22port%22,%0A help=%22cmus_app port%22, default=8080)%0Aparser.add_option(%22-a%22, %22--app-host%22, dest=%22app_host%22,%0A help=%22cmus_app host%22, default='localhost')%0A
%0A%0Adef re
@@ -2765,16 +2765,264 @@
cations%0A
+%0A (options, args) = parser.parse_args()%0A if options.noconfig is True:%0A settings = %7B%0A 'cmus_host': 'localhost'%0A %7D%0A Remote = cmus_remote.bake()%0A run(host=options.app_host, port=options.port)%0A else:%0A
if l
@@ -3039,16 +3039,20 @@
v) %3E 1:%0A
+
@@ -3070,24 +3070,28 @@
rgv%0A
+
+
CONFIG=sys.a
@@ -3098,16 +3098,20 @@
rgv%5B1:%5D%0A
+
else
@@ -3104,32 +3104,36 @@
%5D%0A else:%0A
+
CONFIG=%5B
@@ -3165,16 +3165,21 @@
onfig'%5D%0A
+%0A
sett
@@ -3205,16 +3205,20 @@
CONFIG)%0A
+
Remo
@@ -3239,16 +3239,17 @@
te.bake(
+%5B
'--serve
@@ -3274,16 +3274,17 @@
_host'%5D,
+
'--passw
@@ -3310,19 +3310,23 @@
passwd'%5D
-) %0A
+%5D)%0A
run(
|
c274325f89ef9a8fa25128b85b6d25dc634fe4a2
|
Fix flood control exception
|
bot.py
|
bot.py
|
import os
import sys
import logging
from time import sleep
from flask import request
import telegram
from telegram.error import NetworkError, Unauthorized
from leonard import Leonard
from libs import shrt
WEBHOOK_HOSTNAME = os.environ.get('WEBHOOK_HOSTNAME', 'https://leonardbot.herokuapp.com')
debug = False
if 'BOT_DEBUG' in os.environ and os.environ['BOT_DEBUG'] == '1':
debug = True
print('Starting bot')
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('leonard')
logger.setLevel(logging.INFO)
print('Creating bot')
telegram_client = telegram.Bot(os.environ['BOT_TOKEN'])
bot = Leonard(telegram_client, debug)
print('Collecting plugins')
bot.collect_plugins()
print('Setting routes')
@bot.app.route('/webhook/<token>', methods=['POST'])
def webhook(token):
if token != os.environ['BOT_TOKEN']:
return 'bad token'
update = telegram.Update.de_json(request.get_json(force=True), bot.telegram)
bot.process_update(update)
return 'ok'
shrt.get_link_route = bot.app.route('/l/<query>')(shrt.get_link_route)
if len(sys.argv) > 1 and sys.argv[1] == 'polling':
bot.telegram.setWebhook('')
try:
update_id = telegram_client.getUpdates()[0].update_id
except IndexError:
update_id = None
while True:
try:
for update in telegram_client.getUpdates(offset=update_id, timeout=10):
update_id = update.update_id + 1
bot.process_update(update)
except NetworkError:
sleep(1)
except Unauthorized:
update_id += 1
exit()
print('Setting webhook')
# Register webhook
webhook_url = WEBHOOK_HOSTNAME + '/webhook/' + os.environ['BOT_TOKEN']
try:
bot.telegram.setWebhook(webhook_url)
except NetworkError:
sleep(1)
bot.telegram.setWebhook(webhook_url)
if __name__ == '__main__':
bot.app.run(port=8888)
|
Python
| 0.000005
|
@@ -149,16 +149,28 @@
thorized
+, RetryAfter
%0A%0Afrom l
@@ -1803,24 +1803,25 @@
url)%0Aexcept
+(
NetworkError
@@ -1816,24 +1816,37 @@
NetworkError
+, RetryAfter)
:%0A sleep(
|
1a522469da3a9ca96b43bace7fdd2d4047c52e32
|
Update handled exception
|
bot.py
|
bot.py
|
import json
import requests
import boto3
import validatesns
from flask import Flask, request, abort
from flow import Flow
from raven.contrib.flask import Sentry
from config import ORG_ID, CHANNEL_MAP, SENTRY_DSN, BOTNAME, BOTPW
app = Flask(__name__)
try:
flow = Flow(BOTNAME)
except flow.FlowError as e:
flow = Flow()
flow.create_device(BOTNAME, BOTPW)
app.logger.info('Device for bot {} created'.format(BOTNAME))
if SENTRY_DSN:
sentry = Sentry(app, dsn=SENTRY_DSN)
@app.route('/')
def index():
channel_id = CHANNEL_MAP['semabot']
flow.send_message(ORG_ID, channel_id, 'botbotbot')
return 'foo'
@app.route('/pong/')
def pong():
try:
from deployment import DEPLOYMENT_ID
except ImportError:
abort(401)
return DEPLOYMENT_ID
@app.route('/deployments/', methods=['POST'])
def deployments():
data = json.loads(request.data.decode('utf-8'))
try:
validatesns.validate(data)
except validatesns.ValidationError:
abort(400)
message_type = data['Type']
if message_type == 'SubscriptionConfirmation':
requests.get(data['SubscribeURL'])
elif message_type == 'Notification':
try:
message_data = json.loads(data['Message'])
except KeyError:
# This handles the case where the notification is not an actual
# deployment. This happens when you setup a new trigger
channel_id = CHANNEL_MAP['semabot']
flow.send_message(ORG_ID, channel_id, data['Message'])
return 'foop'
logs = []
if message_data['status'] == 'FAILED':
client = boto3.client('codedeploy')
group = client.get_deployment_group(applicationName=message_data['applicationName'],
deploymentGroupName=message_data['deploymentGroupName'])
ec2_filters = []
for setlist in group['deploymentGroupInfo']['ec2TagSet']['ec2TagSetList']:
for tag in setlist:
filters = [
{'Name': 'tag-key', 'Values': [tag['Key']]},
{'Name': 'tag-value', 'Values': [tag['Value']]}
]
ec2_filters.extend(filters)
client = boto3.client('ec2')
instances = client.describe_instances(Filters=ec2_filters)
instance_ids = []
for reservation in instances['Reservations']:
for instance in reservation['Instances']:
instance_ids.append(instance['InstanceId'])
client = boto3.client('codedeploy')
for instance in instance_ids:
deployment_instances = client.get_deployment_instance(deploymentId=message_data['deploymentId'],
instanceId=instance)
for event in deployment_instances['instanceSummary']['lifecycleEvents']:
if event['status'] == 'Failed':
logs.append({'event': event['lifecycleEventName'],
'log': event['diagnostics']['logTail']})
message = '**{subject} ({group})**'.format(subject=data['Subject'],
group=message_data['deploymentGroupName'])
if logs:
message += '\n\n'
for log in logs:
message += '**[Lifecycle Event: {event}]**\n\n```\n{log}\n```'.format(**log)
channel_id = CHANNEL_MAP[message_data['applicationName']]
flow.send_message(ORG_ID, channel_id, message)
return 'foop'
if __name__ == "__main__":
import sys
port = int(sys.argv[1])
app.run(port=port, debug=True)
|
Python
| 0.000001
|
@@ -4,16 +4,49 @@
ort json
+%0Afrom json import JSONDecodeError
%0A%0Aimport
@@ -1302,19 +1302,26 @@
except
-Key
+JSONDecode
Error:%0A
|
44f33a5aacb90c66f187d88417a80320545600e8
|
remove the line : 'ghe' : ghe_main,
|
bot.py
|
bot.py
|
import boto3
import logging
import urllib2
import json
import re
import wukong.ghe_command
import wukong.config
#setup simple logging for INFO
log = logging.getLogger()
log.setLevel(logging.DEBUG)
s3 = boto3.resource('s3')
# Starts a instance given an Amazon instance
def start_instance(instance):
try:
log.info("Starting instance " + instance.id)
instance.start()
log.info("Finish running start instance")
return 'starting the ec2 instance {0} with IP address {1}'.format(instance.id, instance.private_ip_address)
except Exception, e2:
log.error("Unable to start instance " + instance.id)
error2 = "Error2: %s" % str(e2)
return 'start the ec2 instance ' + instance.id + ' failed: ' + error2
# starts a machine given the IP address of the machine
def start_machine(ip):
# Filter criteria
filters = [
{
'Name' : 'private-ip-address',
'Values': [ip]
}
]
ec2 = boto3.resource('ec2')
filtered = ec2.instances.filter(Filters=filters)
#return start_instance(filtered[0]);
# Should return only 1 instance, but we need to use for loop
# as we can't access individual array element
for instance in filtered:
return start_instance(instance)
# identify what problem we have and what instance is causing the problem
def identify_problem(err_msg):
regex_str = r'^PROBLEM Service Alert: (.*) for ghe-primary \((.*)\) is (.+): (.*)'
regex = re.compile(regex_str)
match = regex.search(err_msg)
if match:
what = match.group(1)
ip = match.group(2)
severity = match.group(3)
msg = match.group(4)
return start_machine(ip)
print "What - {0}".format(what);
print "IP/Hostname - {0}".format(ip)
print "severity - {0}".format(severity)
print "msg - {0}".format(msg)
else:
return "No match : Unable to find in {0}".format(err_msg)
# shows the help page
def show_help_and_exit():
return """
help - print this help page
ghe orgs - Lists orgs using GHEs
ghe users - List github users
ghe repos - List github reposes
ghe license - Show Github license status
ghe monitor cpu [1d,1w,1mon] - Show the cpu monitor gragh of github server
ghe monitor memory [1d,1w,1mon] - Show the memory monitor gragh of github server
"""
def alert():
print "alert";
def lambda_handler(event, context):
feature_list = {
'help' : show_help_and_exit,
'ghe' : ghe_main,
'alert': alert
}
#assert context
#log.debug(event)
bot_event = event
trigger_word = bot_event['trigger_word']
raw_text = bot_event['text']
raw_args = raw_text.replace(trigger_word, '').strip()
args = raw_args.split()
log.debug("[lambda_handler] args:{0}".format(args))
if len(args) >= 1:
feature = args[0]
command = None
if len(args) >= 2:
command = args[1]
options = ''
if len(args) >= 3:
options = args[2:]
log.debug("[lambda_handler] feature:'{0}' command:'{1}' options:'{2}'".format(
feature, command, options))
log.debug ('feature: ' + str(feature))
if (feature == 'help'):
log.debug("showing help and exiting..")
return {
'text' : show_help_and_exit()
}
if (feature == 'ghe'):
return {
'text' : wukong.ghe_command.ghe_main(command, options)
}
if (feature == 'PROBLEM'):
log.debug("Problem encountered")
return {
'text' : identify_problem(raw_args)
}
return {
'text': "{0}".format('sorry, it is too complex for me...')
}
|
Python
| 0.999999
|
@@ -2580,35 +2580,8 @@
it,%0A
- 'ghe' : ghe_main,%0A
|
e671dc2daa225437d6fcf9eebc839071a3a51cd8
|
remove password from user-agent, fix #8
|
bot.py
|
bot.py
|
"""
@author Harrison Shoebridge <@paked/@hcwool>
The r/WatchPeopleCode sidebar updater script/bot!
In order to use the bot you will need a config.json file, following the format of the provided example_config.json
"""
from praw import *
import random
import os
import requests
import json
import time
from urllib.parse import urlparse, parse_qs
class Bot:
"""
Bot represents a basic bot, with a reddit username, password and subreddit
"""
def __init__(self, username, password, subreddit="watchpeoplecode", debug=False):
self._username = username
self._password = password
self._subreddit = subreddit
self.debug = debug
self.r = Reddit("{0} a bot for /r/{1} to set top WPC streams!".format(self._username, self._password))
self.login()
def login(self):
self.log("I'm logging in now!")
self.r.login(self._username, self._password)
def log(self, message):
if not self.debug:
return
print("[{}] {}".format(self._username, message))
class SidebarBot(Bot):
"""
Sidebar bot, a bot to update your sidebar!
"""
def __init__(self, username, password, description, subreddit="watchpeoplecode", mode="live", debug=False):
super().__init__(username, password, subreddit, debug)
self.description = description
self.mode = mode
def update(self):
self.log("Just about to start making that sweet description of yours!")
description = self.generate_description(self.choose_streams())
self.log("Whew! Just gonna get that specified subreddit for you!")
subreddit = self.r.get_subreddit(self._subreddit)
self.log("Updating your sidebar now...")
subreddit.update_settings(description=description.encode('utf8'))
self.log("Updated your sidebar")
def generate_description(self, streams):
output = self.description["pre"] + "\n\n"
output += self.description["viewers_template"].format(str(self._get_total_viewers())) + "\n\n"
for stream in streams:
output += self.description["template"].format(stream["username"], stream["title"], stream["url"], self._get_viewers(stream)) + "\n\n"
output += "\n\n" + self.description["post"]
return output
def choose_streams(self):
streams = self._get_streams()
live_streams = streams[self.mode]
if len(live_streams) <= 3:
return live_streams
else:
return random.sample(live_streams, 3)
def _get_total_viewers(self):
streams = self._get_streams()
live_streams = streams[self.mode]
viewers = 0
if len(live_streams) == 0:
return viewers
else:
for stream in live_streams:
viewers += self._get_viewers(stream)
return viewers
def _get_viewers(self, stream):
if "twitch.tv" in stream["url"]:
twitch_name = stream["url"].split("/")
if twitch_name[-1] == "":
twitch_name = twitch_name[-2]
else:
twitch_name = twitch_name[-1]
stream_json = requests.get("https://api.twitch.tv/kraken/streams?channel=" + twitch_name).json()
return stream_json["streams"][0]["viewers"]
elif "youtube.com" in stream["url"]:
# WARNING - Only accepts Youtube urls in the yt.com/watch?v=... format.
url_data = urlparse(stream["url"])
video_id = parse_qs(url_data.query)["v"][0]
stream_viewers = requests.get("https://www.youtube.com/live_stats?v=" + video_id)
return int(stream_viewers.text)
def _get_streams(self):
return requests.get("http://www.watchpeoplecode.com/json").json()
if __name__ == '__main__':
if not os.environ.get('ENV_MODE'):
print("In JSON mode")
config_data = open('config.json')
config = json.load(config_data)
username = config["bot"]["username"]
password = config["bot"]["password"]
description = config["description"]
subreddit = config["subreddit"]
timer = int(config["timer"])
mode = config["mode"]
debug = config["debug"]
config_data.close()
else:
username = os.environ['BOT_USERNAME']
password = os.environ['BOT_PASSWORD']
mode = os.environ['MODE']
description = {"pre": os.environ['DESCRIPTION_PRE'],
"viewers_template": os.environ['DESCRIPTION_VIEWERS_TEMPLATE'],
"template": os.environ['DESCRIPTION_TEMPLATE'],
"post": os.environ['DESCRIPTION_POST']}
subreddit = os.environ['SUBREDDIT']
debug = os.environ['DEBUG']
timer = int(os.environ['TIMER'])
sb = SidebarBot(username,
password,
description,
subreddit=subreddit,
mode=mode,
debug=debug)
while True:
try:
sb.update()
except errors.InvalidCaptcha:
print("WARNING: No Captcha Supplied\nIt worked anyway though...\nSuppress this message by getting 2 link karma.")
except Exception as e:
print("FAILED: todo mail harrison and aaron")
print(e)
# mail harrison, and aaron.
time.sleep(timer)
|
Python
| 0.000019
|
@@ -725,24 +725,25 @@
, self._
-password
+subreddit
))%0A%0A%09%09se
|
0153ff44dc484cb0d74a33e202a5decb8b714b81
|
Bot responds to tweets
|
bot.py
|
bot.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tweepy, time, sys
from keys import *
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
infile = open('response.txt','r')
f = infile.readlines()
infile.close()
cursor = tweepy.Cursor(api.search, q="tweetthewolf")
#print("cursor:\n" + str(cursor.items()))
for status in cursor.items():
#if bot has not replied:
print("status:\n" + str(status.id))
print("file:\n" + str(f[0]))
outfile = open('request.txt','w')
outfile.write(str(status.id))
outfile.close()
#api.update_status(status=str(f[0]), in_reply_to_status_id = status.id)
|
Python
| 0.999976
|
@@ -44,52 +44,178 @@
*-%0A%0A
-import tweepy, time, sys%0Afrom keys import *%0A
+from wolf import Wolf%0Aimport json%0Aimport requests%0Aimport sys%0Aimport time%0Aimport tweepy%0Aimport wolframalpha%0Afrom keys import *%0A%0A# authenticate with twitter & wolfram alpha
%0Aaut
@@ -346,80 +346,46 @@
h)%0A%0A
-infile = open('response.txt','r')%0Af = infile.readlines()%0Ainfile.close()%0A
+# grab all tweets with %22#tweetthewolf%22
%0Acur
@@ -438,51 +438,8 @@
f%22)%0A
-#print(%22cursor:%5Cn%22 + str(cursor.items()))%0A%0A
for
@@ -502,242 +502,1017 @@
+#
print(%22
-status:%5Cn%22 + str(status.id))%0A print(%22file:%5Cn%22 + str(f%5B0%5D))%0A outfile = open('request.txt','w')%0A outfile.write(str(status.id))%0A outfile.close()%0A%0A #api.update_status(status=str(f%5B0%5D), in_reply_to_status_id = status.id)
+%5Cn%5Cn%5CnTWEEPY STATUS:%5Cn%22 + json.dumps(status._json, indent=4, sort_keys=True))%0A #try:%0A print(%22status.text: %22 + str(status.text))%0A%0A wolf = Wolf(WOLFRAM_KEY, status.text)%0A print(wolf.result())%0A #print(json.dumps(wolf.request()%5B%22queryresult%22%5D%5B%22pods%22%5D%5B1%5D%5B%22subpods%22%5D%5B0%5D%5B%22plaintext%22%5D, indent=2, separators=(',',':'), sort_keys=True))%0A answer = str(wolf.request()%5B%22queryresult%22%5D%5B%22pods%22%5D%5B1%5D%5B%22subpods%22%5D%5B0%5D%5B%22plaintext%22%5D)%0A%0A api.update_status(status=answer, in_reply_to_status_id = status.id)%0A #except TweepError:%0A # print(%22%5Cn%5Cnlel.%5Cn%5Cn%22)%0A%0A '''%0A res = client.query(%2212*12%22)#str(status.text))%0A res = list(res)%0A for n in res:%0A print(%22%5Cnbruh: %22 + str(n.text))%0A #print(%22%5Cn%5Cn%5CnWOLFRAM%7CALPHA RESULT:%5Cn%22 + str(res.results%5B0%5D.text))%0A '''%0A%0A '''%0A outfile = open('request.txt','w')%0A outfile.write(str(status.id))%0A outfile.close()%0A%0A infile = open('response.txt','r')%0A f = infile.readlines()%0A infile.close()%0A%0A #print(%22file:%5Cn%22 + str(f%5B0%5D))%0A '''
%0A
|
1eb70787368fb6b1f825818b5c8d290ba9d73cd2
|
add error handler
|
bot.py
|
bot.py
|
import re
import json
import asyncio
from urllib.parse import urljoin
import telepot
import telepot.async
import aiohttp
class ComposerBot(telepot.async.Bot):
def __init__(self, *args, config=None, **kwargs):
super(ComposerBot, self).__init__(*args, **kwargs)
self.config = config
self.mp3_regex = re.compile('<audio.*?<source src="(.*?)".*?<\/audio>', re.DOTALL)
self.msg_regex = re.compile('<pre>.*?T: (.*?)\n.*?</pre>', re.DOTALL)
async def send_result(self, chat_id, content):
mp3_path = urljoin(self.config['base_url'], self.mp3_regex.findall(content)[0])
pre_text = self.msg_regex.findall(content)[0]
self.loop.create_task(self.sendChatAction(chat_id, 'upload_audio'))
async with aiohttp.ClientSession(loop=self.loop) as client:
async with client.get(mp3_path) as resp:
await self.sendAudio(chat_id, await resp.content.read(), title=pre_text)
async def status_poll(self, chat_id, location):
url = urljoin(self.config['base_url'], location)
print('start polling {}'.format(url))
for _ in range(600):
await asyncio.sleep(2)
print('checking url {}'.format(url))
async with aiohttp.ClientSession(loop=self.loop) as client:
async with client.get(url) as resp:
content = await resp.text()
if content.find('<audio') != -1:
print("sending result {}".format(url))
self.loop.create_task(self.send_result(chat_id, content))
break
else:
print("wait {} {}".format(url, content))
async def compose(self, chat_id):
url = urljoin(self.config['base_url'], 'song/compose/')
async with aiohttp.ClientSession(loop=self.loop) as client:
async with client.get(url) as resp:
if resp.status == 200:
location = resp.history[0].headers.get("Location")
self.loop.create_task(self.status_poll(chat_id, location))
self.loop.create_task(self.sendMessage(chat_id, '\U0001f40c please wait...'))
async def on_chat_message(self, msg):
content_type, chat_type, chat_id = telepot.glance(msg)
print(msg)
if msg['text'].find('/compose') != -1:
self.loop.create_task(self.compose(chat_id=chat_id))
elif msg['text'].find('/start') != -1:
self.loop.create_task(self.sendMessage(chat_id, "Hello! Please use /compose command!"))
else:
self.loop.create_task(self.sendMessage(chat_id, "I don't know what you mean. Try /compose command."))
with open('conf/config.json') as f:
config = json.loads(f.read())
token = config.pop("telegram_token")
loop = asyncio.get_event_loop()
bot = ComposerBot(token=token, config=config, loop=loop)
loop.create_task(bot.message_loop())
print("listening...")
try:
loop.run_forever()
finally:
loop.close()
|
Python
| 0.000001
|
@@ -1630,34 +1630,69 @@
el
-se
+if content.find('Composing...') != -1
:%0A
@@ -1741,16 +1741,256 @@
ontent))
+%0A else:%0A print(%22error with url %7B%7D%22.format(url))%0A self.loop.create_task(self.sendMessage(chat_id, 'something went wrong with url %7B%7D'.format(url)))%0A break
%0A%0A as
|
d2179432d37a10e13d2ba03b160aed8d61d83d0c
|
Decrease chroma instead of hard clipping to fit colors into gamut
|
cam.py
|
cam.py
|
"""Converts between the sRGB and CIECAM02 JCh (lightness/chroma/hue) color spaces.
See https://en.wikipedia.org/wiki/SRGB and https://en.wikipedia.org/wiki/CIECAM02. 'sRGB' here is
defined as an RGB color space with the sRGB primaries and gamma 2.2 - i.e. it does not use the
piecewise sRGB transfer function but goes with the most common actual implementation of sRGB in
display hardware.
"""
import colour
import numpy as np
def sRGB_to_JCh(RGB, RGB_b, surround='average', epsilon=1e-6):
"""Converts an sRGB foreground color to CIECAM02 JCh (lightness/chroma/hue).
Input sRGB values are nonlinear and range from 0 to 1.
Args:
RGB: The foreground color sRGB value.
RGB_b: The background color sRGB value.
surround: The CIECAM02 viewing conditions.
epsilon: A numerical fuzz factor to use in place of true black.
Returns:
The converted foreground color in JCh space.
"""
RGB_linear = np.maximum(epsilon, np.float64(RGB)**2.2)
RGB_b_linear = np.maximum(epsilon, np.float64(RGB_b)**2.2)
XYZ = colour.sRGB_to_XYZ(RGB_linear, apply_decoding_cctf=False) * 100
XYZ_w = colour.sRGB_to_XYZ([1, 1, 1], apply_decoding_cctf=False) * 100
L_A = 20
Y_b = colour.sRGB_to_XYZ(RGB_b_linear, apply_decoding_cctf=False)[1] * 100
if isinstance(surround, str):
surround = colour.appearance.ciecam02.CIECAM02_VIEWING_CONDITIONS[surround]
return np.float64(colour.XYZ_to_CIECAM02(XYZ, XYZ_w, L_A, Y_b, surround, True)[:3])
def JCh_to_sRGB(JCh, RGB_b, surround='average', epsilon=1e-6):
"""Converts a CIECAM02 JCh (lightness/chroma/hue) foreground color to sRGB.
Input and output sRGB values are nonlinear and range from 0 to 1. This routine will clamp
output RGB values to (0, 1] rather than generate an out-of-gamut color.
Args:
JCh: The foreground color JCh value. Can come from sRGB_to_JCh().
RGB_b: The background color sRGB value.
surround: The CIECAM02 viewing conditions.
epsilon: A numerical fuzz factor to use in place of true black.
Returns:
The converted foreground color in sRGB space.
"""
J, C, h = JCh
RGB_b_linear = np.maximum(epsilon, np.float64(RGB_b)**2.2)
XYZ_w = colour.sRGB_to_XYZ([1, 1, 1], apply_decoding_cctf=False) * 100
L_A = 20
Y_b = colour.sRGB_to_XYZ(RGB_b_linear, apply_decoding_cctf=False)[1] * 100
if isinstance(surround, str):
surround = colour.appearance.ciecam02.CIECAM02_VIEWING_CONDITIONS[surround]
XYZ = colour.CIECAM02_to_XYZ(J, C, h, XYZ_w, L_A, Y_b, surround, True) / 100
return np.clip(colour.XYZ_to_sRGB(XYZ, apply_encoding_cctf=False), 0, 1)**(1 / 2.2)
def translate(fg, bg_src, bg_dst, J_factor=1, C_factor=1):
"""Returns a foreground color, intended for use on bg_dst, that appears like the given
foreground color on background bg_src.
Args:
fg: The foreground color sRGB value to translate.
bg_src: The source background sRGB value.
bg_dst: The destination background sRGB value.
J_factor: Scales output lightness by this factor.
C_factor: Scales output chroma by this factor.
"""
JCh = sRGB_to_JCh(fg, bg_src)
JCh[0] *= J_factor
JCh[1] *= C_factor
return JCh_to_sRGB(JCh, bg_dst)
|
Python
| 0
|
@@ -2510,24 +2510,87 @@
S%5Bsurround%5D%0A
+ for _ in range(100): # limit the number of iterations%0A
XYZ = co
@@ -2666,23 +2666,18 @@
-return np.clip(
+ RGB =
colo
@@ -2726,11 +2726,194 @@
lse)
-, 0
+%0A out_of_gamut = ((RGB %3C epsilon) + (RGB %3E 1)).any(axis=1)%0A if out_of_gamut.any():%0A C%5Bout_of_gamut%5D *= 0.95%0A else:%0A return np.clip(RGB, epsilon
, 1)
|
eaf48d7d89f159d2c196985ab93d5c63eb2ae0c9
|
Fix typo in include file path.
|
cct.py
|
cct.py
|
#!/usr/bin/env python
#
# Distributed under GPLv2.1 or any later
#
# Copyright (C) 2014 Tomas Gavenciak <gavento@ucw.cz>
# Copyright (C) 2014 Cyril Hrubis <metan@ucw.cz>
#
import re
import getopt
from sys import argv, exit
from os import path, remove, system
DEFAULT_INDENT = 4
def perror(filename, line, lineno, row, error):
print('%s:%i:%i: error: %s\n' % (filename, lineno, row, error))
print(line)
print(' ' * row + '^\n')
exit(1)
def transform(filename, lines, include_dirs, startindent):
out = []
lastindent = 0
lineno = 0
for l in lines:
lineno += 1
l = l.rstrip('\n')
if re.match('\s*@.*', l):
padd = l[:len(l) - len(l.lstrip())]
l = l.lstrip()
# lines with '@ end' only decrease the indent
if re.match('@\s*end\s*', l):
lastindent -= DEFAULT_INDENT
elif re.match('@\s*include.*', l):
include_filename = re.sub('@\s*include\s*', '', l)
include_path = ''
if not include_filename:
perror(filename, l, lineno, len(l), 'Expected filename')
for dirname in include_dirs:
if path.isfile(dirname + '/' + include_filename):
include_path = dirname + '/' + include_filename
break
if not include_path:
perror(filename, l, lineno, len(l) - len(include_filename),
"Failed to locate '%s' in %s" %
(include_filename, include_dirs))
try:
infile = open(include_filename, 'r')
except Exception as err:
perror(filename, l, lineno, len(l) - len(include_filename), str(err))
out = out + transform(include_filename, infile.readlines(), include_dirs, lastindent)
infile.close()
else:
code = re.sub('\t', ' ', l[2:]).rstrip()
# full-line comments do not change last indent
if code and not re.match('^[ ]*#', code):
lastindent = len(code) - len(code.lstrip())
if code.endswith(':'):
lastindent += DEFAULT_INDENT
if re.match('\s*return\s+.*', code):
lastindent -= DEFAULT_INDENT
if (padd):
out.append(' ' * startindent + 'cct.set_padd("%s")' % padd)
out.append(' ' * startindent + code)
if (padd):
out.append(' ' * startindent + 'cct.set_padd("")')
else:
# parse {{ expression }} blocks
tokens = re.split('({{|}})', l)
code = 'cct.write("'
row = 0
in_code = False
for token in tokens:
if token == '{{':
if in_code:
perror(filename, l, lineno, row, 'Unexpected {{')
else:
in_code = True
code = code + '" + str('
elif token == '}}':
if in_code:
in_code = False
code = code + ') + "'
else:
perror(filename, l, lineno, row, 'Unexpected }}')
else:
# escape \ and " but only in verbatim mode
if not in_code:
token = token.replace("\\", "\\\\").replace('"', '\\"')
code = code + token
row += len(token)
if in_code:
perror(filename, l, lineno, row, 'Unterminated {{')
out.append(' ' * (lastindent + startindent) + code + '")')
return out
header = [
"#!/usr/bin/env python",
"#",
"# Generated file do _not_ edit by hand!",
"#",
"from sys import exit",
"from os import remove, path",
"",
"class cct:",
" def __init__(self, outfile_path, filename):",
" self.first = True",
" self.filename = filename",
" self.outfile_path = outfile_path",
" self.padd = ''",
" try:",
" self.outfile = open(outfile_path, 'w')",
" except Exception as err:",
" self.error('Failed to open file: ' + outfile_path + ' : ' + str(err))",
"",
" def error(self, string):",
" self.outfile.close()",
" remove(self.outfile_path)",
" print('cct: error: ' + string)",
" exit(1)",
"",
" def write(self, line):",
" if self.first:",
" self.first = False",
" if 'cct_header' in globals():",
" cct_header(path.basename(self.outfile_path), self.filename)",
" self.outfile.write(self.padd + line + '\\n')",
"",
" def set_padd(self, padd):",
" self.padd = padd",
"",
" def close(self):",
" if 'cct_footer' in globals():",
" cct_footer(path.basename(self.outfile_path), self.filename)",
"",
" try:",
" self.outfile.close()",
" except Exception as err:",
" self.error('Failed to write ' + self.outfile_path + ' : ' + str(err))",
"",
]
footer = [
"cct.close()",
]
def generate(filename, lines, include_dirs, outfile):
out = header
out.append("cct = cct('%s', '%s')" % (outfile, filename))
out.append("")
out = out + transform(filename, lines, include_dirs, 0)
out = out + footer
return '\n'.join(out)
def error(error):
print(error)
exit(1)
def usage():
print('Usage:\ncct [-Idir] [-v] [-o outfile] file.c.t\n')
print('-I\n\tAdds include path(s)')
print('-o\n\tSets output file')
print('-v\n\tSets verbose mode')
print('-h | --help\n\tPrints this help.')
def main():
try:
opts, args = getopt.getopt(argv[1:], 'c:ho:I:v', ['help'])
except getopt.GetoptError as err:
print(str(err))
usage()
exit(1)
include_dirs = ['.']
verbose = False
outfile = ''
config = ''
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
exit(0)
elif opt == '-I':
include_dirs.append(arg)
elif opt == '-v':
verbose = True
elif opt == '-o':
outfile = arg
if len(args) != 1:
error('No input files.')
if not outfile:
if not args[0].endswith('.t'):
error('No outfile set and template does not end with .t')
outfile = args[0][:-2]
if verbose:
print("Settings\n--------")
print("Include Dirs: %s" % include_dirs)
print("Template File: %s" % args[0])
print("Output File: %s" % outfile)
with open(args[0], 'rt') as f:
t = generate(args[0], f.readlines(), include_dirs, outfile)
script_name = outfile + '.py'
try:
result = open(script_name, 'w')
except Exception as err:
error('Failed to open file: ' + script_name + ' : ' + str(err))
result.write(t)
try:
result.close()
except Exception as err:
error('Failed to close file: ' + script_name + ' : ' + str(err))
system('python ' + script_name)
if __name__ == '__main__':
main()
|
Python
| 0.000001
|
@@ -1663,24 +1663,20 @@
include_
-filename
+path
, 'r')%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.